diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 0ad4c510a55..ce89d8c2b10 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -32,7 +32,7 @@ jobs: fetch-depth: 0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: translations path: translations.tar.gz @@ -94,7 +94,7 @@ jobs: - name: Download nightly wheels of frontend if: needs.init.outputs.channel == 'dev' - uses: dawidd6/action-download-artifact@v8 + uses: dawidd6/action-download-artifact@v9 with: github_token: ${{secrets.GITHUB_TOKEN}} repo: home-assistant/frontend @@ -105,7 +105,7 @@ jobs: - name: Download nightly wheels of intents if: needs.init.outputs.channel == 'dev' - uses: dawidd6/action-download-artifact@v8 + uses: dawidd6/action-download-artifact@v9 with: github_token: ${{secrets.GITHUB_TOKEN}} repo: home-assistant/intents-package @@ -116,7 +116,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} if: needs.init.outputs.channel == 'dev' - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -175,7 +175,7 @@ jobs: sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt - name: Download translations - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: translations @@ -190,14 +190,14 @@ jobs: echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE - name: Login to GitHub Container Registry - uses: docker/login-action@v3.3.0 + uses: docker/login-action@v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build base image - uses: home-assistant/builder@2025.02.0 + uses: home-assistant/builder@2025.03.0 with: args: | $BUILD_ARGS \ @@ -256,14 +256,14 @@ jobs: fi - name: Login to GitHub Container Registry - uses: docker/login-action@v3.3.0 + uses: docker/login-action@v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build base image - uses: home-assistant/builder@2025.02.0 + uses: home-assistant/builder@2025.03.0 with: args: | $BUILD_ARGS \ @@ -330,14 +330,14 @@ jobs: - name: Login to DockerHub if: matrix.registry == 'docker.io/homeassistant' - uses: docker/login-action@v3.3.0 + uses: docker/login-action@v3.4.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GitHub Container Registry if: matrix.registry == 'ghcr.io/home-assistant' - uses: docker/login-action@v3.3.0 + uses: docker/login-action@v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -457,12 +457,12 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} - name: Download translations - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: translations @@ -502,14 +502,14 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Login to GitHub Container 
Registry - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} - name: Build Docker image - uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0 + uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 with: context: . # So action will not pull the repository again file: ./script/hassfest/docker/Dockerfile @@ -522,7 +522,7 @@ jobs: - name: Push Docker image if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' id: push - uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0 + uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0 with: context: . # So action will not pull the repository again file: ./script/hassfest/docker/Dockerfile @@ -531,7 +531,7 @@ jobs: - name: Generate artifact attestation if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' - uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0 + uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2 # v2.2.3 with: subject-name: ${{ env.HASSFEST_IMAGE_NAME }} subject-digest: ${{ steps.push.outputs.digest }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8745ab63470..c46ec3cda54 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -37,10 +37,10 @@ on: type: boolean env: - CACHE_VERSION: 11 + CACHE_VERSION: 12 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 9 - HA_SHORT_VERSION: "2025.3" + HA_SHORT_VERSION: "2025.4" DEFAULT_PYTHON: "3.13" ALL_PYTHON_VERSIONS: "['3.13']" # 10.3 is the oldest supported version @@ -89,6 +89,7 @@ jobs: test_groups: ${{ steps.info.outputs.test_groups }} tests_glob: ${{ steps.info.outputs.tests_glob }} tests: ${{ steps.info.outputs.tests }} + lint_only: ${{ steps.info.outputs.lint_only }} skip_coverage: ${{ steps.info.outputs.skip_coverage }} runs-on: ubuntu-24.04 steps: @@ -142,6 +143,7 @@ jobs: test_group_count=10 tests="[]" tests_glob="" + lint_only="" skip_coverage="" if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]]; @@ -192,6 +194,17 @@ jobs: test_full_suite="true" fi + if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \ + || [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \ + || [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \ + || [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \ + || [[ "${{ github.event_name }}" == "push" \ + && "${{ github.event.repository.full_name }}" != "home-assistant/core" ]]; + then + lint_only="true" + skip_coverage="true" + fi + if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \ || [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]]; then @@ -217,6 +230,8 @@ jobs: echo "tests=${tests}" >> $GITHUB_OUTPUT echo "tests_glob: ${tests_glob}" echo "tests_glob=${tests_glob}" >> $GITHUB_OUTPUT + echo "lint_only": ${lint_only} + echo "lint_only=${lint_only}" >> $GITHUB_OUTPUT echo "skip_coverage: ${skip_coverage}" echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT @@ -234,13 +249,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: 
true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.2.1 + uses: actions/cache@v4.2.3 with: path: venv key: >- @@ -256,7 +271,7 @@ jobs: uv pip install "$(cat requirements_test.txt | grep pre-commit)" - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache@v4.2.1 + uses: actions/cache@v4.2.3 with: path: ${{ env.PRE_COMMIT_CACHE }} lookup-only: true @@ -279,14 +294,14 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -295,7 +310,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -319,14 +334,14 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -335,7 +350,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -359,14 +374,14 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -375,7 +390,7 @@ jobs: needs.info.outputs.pre-commit_cache_key }} - name: Restore pre-commit environment from cache id: cache-precommit - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: ${{ env.PRE_COMMIT_CACHE }} fail-on-cache-miss: true @@ -469,7 +484,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -482,7 +497,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache@v4.2.1 + uses: actions/cache@v4.2.3 with: path: venv key: >- @@ -490,7 +505,7 @@ jobs: needs.info.outputs.python_cache_key }} - name: Restore uv wheel cache if: steps.cache-venv.outputs.cache-hit != 'true' - uses: actions/cache@v4.2.1 + uses: actions/cache@v4.2.3 with: path: ${{ env.UV_CACHE_DIR }} key: >- @@ -537,7 +552,7 @@ jobs: python --version uv pip freeze >> pip_freeze.txt - name: Upload pip_freeze artifact - uses: 
actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pip-freeze-${{ matrix.python-version }} path: pip_freeze.txt @@ -572,13 +587,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -605,13 +620,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -623,6 +638,25 @@ jobs: . venv/bin/activate python -m script.gen_requirements_all validate + dependency-review: + name: Dependency review + runs-on: ubuntu-24.04 + needs: + - info + - base + if: | + github.event.inputs.pylint-only != 'true' + && github.event.inputs.mypy-only != 'true' + && needs.info.outputs.requirements == 'true' + && github.event_name == 'pull_request' + steps: + - name: Check out code from GitHub + uses: actions/checkout@v4.2.2 + - name: Dependency review + uses: actions/dependency-review-action@v4.5.0 + with: + license-check: false # We use our own license audit checks + audit-licenses: name: Audit licenses runs-on: ubuntu-24.04 @@ -643,13 +677,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -661,7 +695,7 @@ jobs: . 
venv/bin/activate python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json - name: Upload licenses - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: licenses-${{ github.run_number }}-${{ matrix.python-version }} path: licenses-${{ matrix.python-version }}.json @@ -686,13 +720,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -733,13 +767,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -778,7 +812,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -791,7 +825,7 @@ jobs: env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -799,7 +833,7 @@ jobs: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{ needs.info.outputs.python_cache_key }} - name: Restore mypy cache - uses: actions/cache@v4.2.1 + uses: actions/cache@v4.2.3 with: path: .mypy_cache key: >- @@ -829,11 +863,7 @@ jobs: prepare-pytest-full: runs-on: ubuntu-24.04 if: | - (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core') - && github.event.inputs.lint-only != 'true' - && github.event.inputs.pylint-only != 'true' - && github.event.inputs.mypy-only != 'true' - && github.event.inputs.audit-licenses-only != 'true' + needs.info.outputs.lint_only != 'true' && needs.info.outputs.test_full_suite == 'true' needs: - info @@ -859,13 +889,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -877,7 +907,7 @@ jobs: . 
venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest_buckets path: pytest_buckets.txt @@ -886,11 +916,7 @@ jobs: pytest-full: runs-on: ubuntu-24.04 if: | - (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core') - && github.event.inputs.lint-only != 'true' - && github.event.inputs.pylint-only != 'true' - && github.event.inputs.mypy-only != 'true' - && github.event.inputs.audit-licenses-only != 'true' + needs.info.outputs.lint_only != 'true' && needs.info.outputs.test_full_suite == 'true' needs: - info @@ -923,13 +949,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -942,7 +968,7 @@ jobs: run: | echo "::add-matcher::.github/workflows/matchers/pytest-slow.json" - name: Download pytest_buckets - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: pytest_buckets - name: Compile English translations @@ -962,6 +988,7 @@ jobs: if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then cov_params+=(--cov="homeassistant") cov_params+=(--cov-report=xml) + cov_params+=(--junitxml=junit.xml -o junit_family=legacy) fi echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)" @@ -980,18 +1007,24 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml overwrite: true + - name: Upload test results artifact + if: needs.info.outputs.skip_coverage != 'true' && !cancelled() + uses: actions/upload-artifact@v4.6.2 + with: + name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }} + path: junit.xml - name: Remove pytest_buckets run: rm pytest_buckets.txt - name: Check dirty @@ -1009,11 +1042,7 @@ jobs: MYSQL_ROOT_PASSWORD: password options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3 if: | - (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core') - && github.event.inputs.lint-only != 'true' - && github.event.inputs.pylint-only != 'true' - && github.event.inputs.mypy-only != 'true' - && github.event.inputs.audit-licenses-only != 'true' + needs.info.outputs.lint_only != 'true' && needs.info.outputs.mariadb_groups != '[]' needs: - info @@ -1045,13 +1074,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: 
${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -1088,6 +1117,7 @@ jobs: cov_params+=(--cov="homeassistant.components.recorder") cov_params+=(--cov-report=xml) cov_params+=(--cov-report=term-missing) + cov_params+=(--junitxml=junit.xml -o junit_family=legacy) fi python3 -b -X dev -m pytest \ @@ -1108,7 +1138,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1116,12 +1146,19 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} path: coverage.xml overwrite: true + - name: Upload test results artifact + if: needs.info.outputs.skip_coverage != 'true' && !cancelled() + uses: actions/upload-artifact@v4.6.2 + with: + name: test-results-mariadb-${{ matrix.python-version }}-${{ + steps.pytest-partial.outputs.mariadb }} + path: junit.xml - name: Check dirty run: | ./script/check_dirty @@ -1137,11 +1174,7 @@ jobs: POSTGRES_PASSWORD: password options: --health-cmd="pg_isready -hlocalhost -Upostgres" --health-interval=5s --health-timeout=2s --health-retries=3 if: | - (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core') - && github.event.inputs.lint-only != 'true' - && github.event.inputs.pylint-only != 'true' - && github.event.inputs.mypy-only != 'true' - && github.event.inputs.audit-licenses-only != 'true' + needs.info.outputs.lint_only != 'true' && needs.info.outputs.postgresql_groups != '[]' needs: - info @@ -1175,13 +1208,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -1218,6 +1251,7 @@ jobs: cov_params+=(--cov="homeassistant.components.recorder") cov_params+=(--cov-report=xml) cov_params+=(--cov-report=term-missing) + cov_params+=(--junitxml=junit.xml -o junit_family=legacy) fi python3 -b -X dev -m pytest \ @@ -1239,7 +1273,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1247,12 +1281,19 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} path: coverage.xml 
overwrite: true + - name: Upload test results artifact + if: needs.info.outputs.skip_coverage != 'true' && !cancelled() + uses: actions/upload-artifact@v4.6.2 + with: + name: test-results-postgres-${{ matrix.python-version }}-${{ + steps.pytest-partial.outputs.postgresql }} + path: junit.xml - name: Check dirty run: | ./script/check_dirty @@ -1271,12 +1312,12 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'true' - uses: codecov/codecov-action@v5.3.1 + uses: codecov/codecov-action@v5.4.0 with: fail_ci_if_error: true flags: full-suite @@ -1285,11 +1326,7 @@ jobs: pytest-partial: runs-on: ubuntu-24.04 if: | - (github.event_name != 'push' || github.event.repository.full_name == 'home-assistant/core') - && github.event.inputs.lint-only != 'true' - && github.event.inputs.pylint-only != 'true' - && github.event.inputs.mypy-only != 'true' - && github.event.inputs.audit-licenses-only != 'true' + needs.info.outputs.lint_only != 'true' && needs.info.outputs.tests_glob && needs.info.outputs.test_full_suite == 'false' needs: @@ -1322,13 +1359,13 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ matrix.python-version }} check-latest: true - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv - uses: actions/cache/restore@v4.2.1 + uses: actions/cache/restore@v4.2.3 with: path: venv fail-on-cache-miss: true @@ -1365,6 +1402,7 @@ jobs: cov_params+=(--cov="homeassistant.components.${{ matrix.group }}") cov_params+=(--cov-report=xml) cov_params+=(--cov-report=term-missing) + cov_params+=(--junitxml=junit.xml -o junit_family=legacy) fi python3 -b -X dev -m pytest \ @@ -1382,18 +1420,24 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml overwrite: true + - name: Upload test results artifact + if: needs.info.outputs.skip_coverage != 'true' && !cancelled() + uses: actions/upload-artifact@v4.6.2 + with: + name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }} + path: junit.xml - name: Check dirty run: | ./script/check_dirty @@ -1410,12 +1454,37 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - name: Download all coverage artifacts - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'false' - uses: codecov/codecov-action@v5.3.1 + uses: codecov/codecov-action@v5.4.0 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} + + upload-test-results: + name: Upload test results to Codecov + # codecov/test-results-action currently 
doesn't support tokenless uploads + # therefore we can't run it on forks + if: ${{ (github.event_name != 'pull_request' || !github.event.pull_request.head.repo.fork) && needs.info.outputs.skip_coverage != 'true' && !cancelled() }} + runs-on: ubuntu-24.04 + needs: + - info + - pytest-partial + - pytest-full + - pytest-postgres + - pytest-mariadb + timeout-minutes: 10 + steps: + - name: Download all coverage artifacts + uses: actions/download-artifact@v4.2.1 + with: + pattern: test-results-* + - name: Upload test results to Codecov + uses: codecov/test-results-action@v1 + with: + fail_ci_if_error: true + verbose: true + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 4bdddf50c25..bd072752d16 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.28.10 + uses: github/codeql-action/init@v3.28.13 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.28.10 + uses: github/codeql-action/analyze@v3.28.13 with: category: "/language:python" diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index 619d83aef51..0b6abe8fe2c 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4.2.2 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 4b1628c57bb..d27a62bab80 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -36,7 +36,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.4.0 + uses: actions/setup-python@v5.5.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -91,7 +91,7 @@ jobs: ) > build_constraints.txt - name: Upload env_file - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: env_file path: ./.env_file @@ -99,14 +99,14 @@ jobs: overwrite: true - name: Upload build_constraints - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: build_constraints path: ./build_constraints.txt overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: requirements_diff path: ./requirements_diff.txt @@ -118,7 +118,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.6.1 + uses: actions/upload-artifact@v4.6.2 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt @@ -138,17 +138,17 @@ jobs: uses: actions/checkout@v4.2.2 - name: Download env_file - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: env_file - name: Download build_constraints - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: build_constraints - name: Download requirements_diff - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: requirements_diff @@ -159,7 +159,7 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2024.11.0 + uses: home-assistant/wheels@2025.03.0 with: abi: ${{ matrix.abi }} tag: 
musllinux_1_2 @@ -187,22 +187,22 @@ jobs: uses: actions/checkout@v4.2.2 - name: Download env_file - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: env_file - name: Download build_constraints - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: build_constraints - name: Download requirements_diff - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: requirements_diff - name: Download requirements_all_wheels - uses: actions/download-artifact@v4.1.9 + uses: actions/download-artifact@v4.2.1 with: name: requirements_all_wheels @@ -219,7 +219,7 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2024.11.0 + uses: home-assistant/wheels@2025.03.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 diff --git a/.gitignore b/.gitignore index 241255253c5..5aa51c9d762 100644 --- a/.gitignore +++ b/.gitignore @@ -69,6 +69,7 @@ test-reports/ test-results.xml test-output.xml pytest-*.txt +junit.xml # Translations *.mo diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5b701b21b9e..42e05a869c3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.7 + rev: v0.11.0 hooks: - id: ruff args: diff --git a/.strict-typing b/.strict-typing index 1df49300b1e..e0c4e569f4b 100644 --- a/.strict-typing +++ b/.strict-typing @@ -119,6 +119,7 @@ homeassistant.components.bluetooth_adapters.* homeassistant.components.bluetooth_tracker.* homeassistant.components.bmw_connected_drive.* homeassistant.components.bond.* +homeassistant.components.bosch_alarm.* homeassistant.components.braviatv.* homeassistant.components.bring.* homeassistant.components.brother.* @@ -136,6 +137,7 @@ homeassistant.components.clicksend.* homeassistant.components.climate.* homeassistant.components.cloud.* homeassistant.components.co2signal.* +homeassistant.components.comelit.* homeassistant.components.command_line.* homeassistant.components.config.* homeassistant.components.configurator.* @@ -396,6 +398,7 @@ homeassistant.components.pure_energie.* homeassistant.components.purpleair.* homeassistant.components.pushbullet.* homeassistant.components.pvoutput.* +homeassistant.components.pyload.* homeassistant.components.python_script.* homeassistant.components.qbus.* homeassistant.components.qnap_qsw.* @@ -410,6 +413,7 @@ homeassistant.components.recollect_waste.* homeassistant.components.recorder.* homeassistant.components.remember_the_milk.* homeassistant.components.remote.* +homeassistant.components.remote_calendar.* homeassistant.components.renault.* homeassistant.components.reolink.* homeassistant.components.repairs.* @@ -528,6 +532,7 @@ homeassistant.components.vallox.* homeassistant.components.valve.* homeassistant.components.velbus.* homeassistant.components.vlc_telnet.* +homeassistant.components.vodafone_station.* homeassistant.components.wake_on_lan.* homeassistant.components.wake_word.* homeassistant.components.wallbox.* diff --git a/.vscode/tasks.json b/.vscode/tasks.json index b699ed44b96..09c1d374299 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -4,7 +4,7 @@ { "label": "Run Home Assistant Core", "type": "shell", - "command": "hass -c ./config", + "command": "${command:python.interpreterPath} -m homeassistant -c ./config", "group": "test", "presentation": { "reveal": "always", diff --git a/CODEOWNERS b/CODEOWNERS index 3366bfb0885..8afd3bab028 
100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -216,6 +216,8 @@ build.json @home-assistant/supervisor /tests/components/bmw_connected_drive/ @gerard33 @rikroe /homeassistant/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto /tests/components/bond/ @bdraco @prystupa @joshs85 @marciogranzotto +/homeassistant/components/bosch_alarm/ @mag1024 @sanjay900 +/tests/components/bosch_alarm/ @mag1024 @sanjay900 /homeassistant/components/bosch_shc/ @tschamm /tests/components/bosch_shc/ @tschamm /homeassistant/components/braviatv/ @bieniu @Drafteed @@ -570,8 +572,8 @@ build.json @home-assistant/supervisor /tests/components/google_cloud/ @lufton @tronikos /homeassistant/components/google_drive/ @tronikos /tests/components/google_drive/ @tronikos -/homeassistant/components/google_generative_ai_conversation/ @tronikos -/tests/components/google_generative_ai_conversation/ @tronikos +/homeassistant/components/google_generative_ai_conversation/ @tronikos @ivanlh +/tests/components/google_generative_ai_conversation/ @tronikos @ivanlh /homeassistant/components/google_mail/ @tkdrob /tests/components/google_mail/ @tkdrob /homeassistant/components/google_photos/ @allenporter @@ -1183,6 +1185,8 @@ build.json @home-assistant/supervisor /tests/components/prusalink/ @balloob /homeassistant/components/ps4/ @ktnrg45 /tests/components/ps4/ @ktnrg45 +/homeassistant/components/pterodactyl/ @elmurato +/tests/components/pterodactyl/ @elmurato /homeassistant/components/pure_energie/ @klaasnicolaas /tests/components/pure_energie/ @klaasnicolaas /homeassistant/components/purpleair/ @bachya @@ -1252,6 +1256,8 @@ build.json @home-assistant/supervisor /tests/components/refoss/ @ashionky /homeassistant/components/remote/ @home-assistant/core /tests/components/remote/ @home-assistant/core +/homeassistant/components/remote_calendar/ @Thomas55555 +/tests/components/remote_calendar/ @Thomas55555 /homeassistant/components/renault/ @epenet /tests/components/renault/ @epenet /homeassistant/components/renson/ @jimmyd-be @@ -1474,8 +1480,6 @@ build.json @home-assistant/supervisor /tests/components/suez_water/ @ooii @jb101010-2 /homeassistant/components/sun/ @Swamp-Ig /tests/components/sun/ @Swamp-Ig -/homeassistant/components/sunweg/ @rokam -/tests/components/sunweg/ @rokam /homeassistant/components/supla/ @mwegrzynek /homeassistant/components/surepetcare/ @benleb @danielhiversen /tests/components/surepetcare/ @benleb @danielhiversen @@ -1529,8 +1533,8 @@ build.json @home-assistant/supervisor /tests/components/tedee/ @patrickhilker @zweckj /homeassistant/components/tellduslive/ @fredrike /tests/components/tellduslive/ @fredrike -/homeassistant/components/template/ @PhracturedBlue @home-assistant/core -/tests/components/template/ @PhracturedBlue @home-assistant/core +/homeassistant/components/template/ @Petro31 @PhracturedBlue @home-assistant/core +/tests/components/template/ @Petro31 @PhracturedBlue @home-assistant/core /homeassistant/components/tesla_fleet/ @Bre77 /tests/components/tesla_fleet/ @Bre77 /homeassistant/components/tesla_wall_connector/ @einarhauks diff --git a/Dockerfile b/Dockerfile index 3ab0bb37b9a..0a74e0a3aac 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,13 +25,13 @@ RUN \ "armv7") go2rtc_suffix='arm' ;; \ *) go2rtc_suffix=${BUILD_ARCH} ;; \ esac \ - && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.8/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ + && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \ && chmod +x 
/bin/go2rtc \ # Verify go2rtc can be executed && go2rtc --version # Install uv -RUN pip3 install uv==0.6.1 +RUN pip3 install uv==0.6.10 WORKDIR /usr/src diff --git a/build.yaml b/build.yaml index cd54e410493..87dad1bf5ef 100644 --- a/build.yaml +++ b/build.yaml @@ -19,4 +19,4 @@ labels: org.opencontainers.image.authors: The Home Assistant Authors org.opencontainers.image.url: https://www.home-assistant.io/ org.opencontainers.image.documentation: https://www.home-assistant.io/docs/ - org.opencontainers.image.licenses: Apache License 2.0 + org.opencontainers.image.licenses: Apache-2.0 diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index d224b0b151d..eb81268434b 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -178,6 +178,15 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = ( strict_core=False, skip_for_tests=True, ), + BlockingCall( + original_func=SSLContext.set_default_verify_paths, + object=SSLContext, + function="set_default_verify_paths", + check_allowed=None, + strict=False, + strict_core=False, + skip_for_tests=True, + ), BlockingCall( original_func=Path.open, object=Path, diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index e25bfbe358c..02a3b8c8fcc 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -81,6 +81,7 @@ from .helpers import ( entity, entity_registry, floor_registry, + frame, issue_registry, label_registry, recorder, @@ -92,6 +93,7 @@ from .helpers.dispatcher import async_dispatcher_send_internal from .helpers.storage import get_internal_store_manager from .helpers.system_info import async_get_system_info from .helpers.typing import ConfigType +from .loader import Integration from .setup import ( # _setup_started is marked as protected to make it clear # that it is not part of the public API and should not be used @@ -298,14 +300,6 @@ async def async_setup_hass( return hass - async def stop_hass(hass: core.HomeAssistant) -> None: - """Stop hass.""" - # Ask integrations to shut down. It's messy but we can't - # do a clean stop without knowing what is broken - with contextlib.suppress(TimeoutError): - async with hass.timeout.async_timeout(10): - await hass.async_stop() - hass = await create_hass() if runtime_config.skip_pip or runtime_config.skip_pip_packages: @@ -344,7 +338,7 @@ async def async_setup_hass( if config_dict is None: recovery_mode = True - await stop_hass(hass) + await hass.async_stop(force=True) hass = await create_hass() elif not basic_setup_success: @@ -352,7 +346,7 @@ async def async_setup_hass( "Unable to set up core integrations. 
Activating recovery mode" ) recovery_mode = True - await stop_hass(hass) + await hass.async_stop(force=True) hass = await create_hass() elif any( @@ -367,7 +361,7 @@ async def async_setup_hass( old_logging = hass.data.get(DATA_LOGGING) recovery_mode = True - await stop_hass(hass) + await hass.async_stop(force=True) hass = await create_hass() if old_logging: @@ -441,9 +435,10 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None: if DATA_REGISTRIES_LOADED in hass.data: return hass.data[DATA_REGISTRIES_LOADED] = None - translation.async_setup(hass) entity.async_setup(hass) + frame.async_setup(hass) template.async_setup(hass) + translation.async_setup(hass) await asyncio.gather( create_eager_task(get_internal_store_manager(hass).async_initialize()), create_eager_task(area_registry.async_load(hass)), @@ -664,11 +659,10 @@ def _create_log_file( err_handler = _RotatingFileHandlerWithoutShouldRollOver( err_log_path, backupCount=1 ) - - try: - err_handler.doRollover() - except OSError as err: - _LOGGER.error("Error rolling over log file: %s", err) + try: + err_handler.doRollover() + except OSError as err: + _LOGGER.error("Error rolling over log file: %s", err) return err_handler @@ -718,20 +712,25 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]: return domains -async def _async_resolve_domains_to_setup( +async def _async_resolve_domains_and_preload( hass: core.HomeAssistant, config: dict[str, Any] -) -> tuple[set[str], dict[str, loader.Integration]]: - """Resolve all dependencies and return list of domains to set up.""" +) -> tuple[dict[str, Integration], dict[str, Integration]]: + """Resolve all dependencies and return integrations to set up. + + The return value is a tuple of two dictionaries: + - The first dictionary contains integrations + specified by the configuration (including config entries). + - The second dictionary contains the same integrations as the first dictionary + together with all their dependencies. + """ domains_to_setup = _get_domains(hass, config) - needed_requirements: set[str] = set() platform_integrations = conf_util.extract_platform_integrations( config, BASE_PLATFORMS ) - # Ensure base platforms that have platform integrations are added to - # to `domains_to_setup so they can be setup first instead of - # discovering them when later when a config entry setup task - # notices its needed and there is already a long line to use - # the import executor. + # Ensure base platforms that have platform integrations are added to `domains`, + # so they can be setup first instead of discovering them later when a config + # entry setup task notices that it's needed and there is already a long line + # to use the import executor. # # For example if we have # sensor: @@ -747,111 +746,78 @@ async def _async_resolve_domains_to_setup( # so this will be less of a problem in the future. domains_to_setup.update(platform_integrations) - # Load manifests for base platforms and platform based integrations - # that are defined under base platforms right away since we do not require - # the manifest to list them as dependencies and we want to avoid the lock - # contention when multiple integrations try to load them at once - additional_manifests_to_load = { + # Additionally process base platforms since we do not require the manifest + # to list them as dependencies. + # We want to later avoid lock contention when multiple integrations try to load + # their manifests at once. 
+ # Also process integrations that are defined under base platforms + # to speed things up. + additional_domains_to_process = { *BASE_PLATFORMS, *chain.from_iterable(platform_integrations.values()), } - translations_to_load = additional_manifests_to_load.copy() - # Resolve all dependencies so we know all integrations # that will have to be loaded and start right-away - integration_cache: dict[str, loader.Integration] = {} - to_resolve: set[str] = domains_to_setup - while to_resolve or additional_manifests_to_load: - old_to_resolve: set[str] = to_resolve - to_resolve = set() + integrations_or_excs = await loader.async_get_integrations( + hass, {*domains_to_setup, *additional_domains_to_process} + ) + # Eliminate those missing or with invalid manifest + integrations_to_process = { + domain: itg + for domain, itg in integrations_or_excs.items() + if isinstance(itg, Integration) + } + integrations_dependencies = await loader.resolve_integrations_dependencies( + hass, integrations_to_process.values() + ) + # Eliminate those without valid dependencies + integrations_to_process = { + domain: integrations_to_process[domain] for domain in integrations_dependencies + } - if additional_manifests_to_load: - to_get = {*old_to_resolve, *additional_manifests_to_load} - additional_manifests_to_load.clear() - else: - to_get = old_to_resolve + integrations_to_setup = { + domain: itg + for domain, itg in integrations_to_process.items() + if domain in domains_to_setup + } + all_integrations_to_setup = integrations_to_setup.copy() + all_integrations_to_setup.update( + (dep, loader.async_get_loaded_integration(hass, dep)) + for domain in integrations_to_setup + for dep in integrations_dependencies[domain].difference( + all_integrations_to_setup + ) + ) - manifest_deps: set[str] = set() - resolve_dependencies_tasks: list[asyncio.Task[bool]] = [] - integrations_to_process: list[loader.Integration] = [] - - for domain, itg in (await loader.async_get_integrations(hass, to_get)).items(): - if not isinstance(itg, loader.Integration): - continue - integration_cache[domain] = itg - needed_requirements.update(itg.requirements) - - # Make sure manifests for dependencies are loaded in the next - # loop to try to group as many as manifest loads in a single - # call to avoid the creating one-off executor jobs later in - # the setup process - additional_manifests_to_load.update( - dep - for dep in chain(itg.dependencies, itg.after_dependencies) - if dep not in integration_cache - ) - - if domain not in old_to_resolve: - continue - - integrations_to_process.append(itg) - manifest_deps.update(itg.dependencies) - manifest_deps.update(itg.after_dependencies) - if not itg.all_dependencies_resolved: - resolve_dependencies_tasks.append( - create_eager_task( - itg.resolve_dependencies(), - name=f"resolve dependencies {domain}", - loop=hass.loop, - ) - ) - - if unseen_deps := manifest_deps - integration_cache.keys(): - # If there are dependencies, try to preload all - # the integrations manifest at once and add them - # to the list of requirements we need to install - # so we can try to check if they are already installed - # in a single call below which avoids each integration - # having to wait for the lock to do it individually - deps = await loader.async_get_integrations(hass, unseen_deps) - for dependant_domain, dependant_itg in deps.items(): - if isinstance(dependant_itg, loader.Integration): - integration_cache[dependant_domain] = dependant_itg - needed_requirements.update(dependant_itg.requirements) - - if 
resolve_dependencies_tasks: - await asyncio.gather(*resolve_dependencies_tasks) - - for itg in integrations_to_process: - try: - all_deps = itg.all_dependencies - except RuntimeError: - # Integration.all_dependencies raises RuntimeError if - # dependencies could not be resolved - continue - for dep in all_deps: - if dep in domains_to_setup: - continue - domains_to_setup.add(dep) - to_resolve.add(dep) - - _LOGGER.info("Domains to be set up: %s", domains_to_setup) + # Gather requirements for all integrations, + # their dependencies and after dependencies. + # To gather all the requirements we must ignore exceptions here. + # The exceptions will be detected and handled later in the bootstrap process. + integrations_after_dependencies = ( + await loader.resolve_integrations_after_dependencies( + hass, integrations_to_process.values(), ignore_exceptions=True + ) + ) + integrations_requirements = { + domain: itg.requirements for domain, itg in integrations_to_process.items() + } + integrations_requirements.update( + (dep, loader.async_get_loaded_integration(hass, dep).requirements) + for deps in integrations_after_dependencies.values() + for dep in deps.difference(integrations_requirements) + ) + all_requirements = set(chain.from_iterable(integrations_requirements.values())) # Optimistically check if requirements are already installed # ahead of setting up the integrations so we can prime the cache - # We do not wait for this since its an optimization only + # We do not wait for this since it's an optimization only hass.async_create_background_task( - requirements.async_load_installed_versions(hass, needed_requirements), + requirements.async_load_installed_versions(hass, all_requirements), "check installed requirements", eager_start=True, ) - # - # Only add the domains_to_setup after we finish resolving - # as new domains are likely to added in the process - # - translations_to_load.update(domains_to_setup) # Start loading translations for all integrations we are going to set up # in the background so they are ready when we need them. This avoids a # lot of waiting for the translation load lock and a thundering herd of @@ -862,6 +828,7 @@ async def _async_resolve_domains_to_setup( # hold the translation load lock and if anything is fast enough to # wait for the translation load lock, loading will be done by the # time it gets to it. + translations_to_load = {*all_integrations_to_setup, *additional_domains_to_process} hass.async_create_background_task( translation.async_load_integrations(hass, translations_to_load), "load translations", @@ -873,13 +840,13 @@ async def _async_resolve_domains_to_setup( # in the setup process. 
hass.async_create_background_task( get_internal_store_manager(hass).async_preload( - [*PRELOAD_STORAGE, *domains_to_setup] + [*PRELOAD_STORAGE, *all_integrations_to_setup] ), "preload storage", eager_start=True, ) - return domains_to_setup, integration_cache + return integrations_to_setup, all_integrations_to_setup async def _async_set_up_integrations( @@ -889,69 +856,90 @@ async def _async_set_up_integrations( watcher = _WatchPendingSetups(hass, _setup_started(hass)) watcher.async_start() - domains_to_setup, integration_cache = await _async_resolve_domains_to_setup( + integrations, all_integrations = await _async_resolve_domains_and_preload( hass, config ) - stage_2_domains = domains_to_setup.copy() + all_domains = set(all_integrations) + domains = set(integrations) + + _LOGGER.info( + "Domains to be set up: %s | %s", + domains, + all_domains - domains, + ) # Initialize recorder - if "recorder" in domains_to_setup: + if "recorder" in all_domains: recorder.async_initialize_recorder(hass) # Initialize backup - if "backup" in domains_to_setup: + if "backup" in all_domains: backup.async_initialize_backup(hass) - stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [ + stages: list[tuple[str, set[str], int | None]] = [ *( - (name, domain_group & domains_to_setup, timeout) + (name, domain_group, timeout) for name, domain_group, timeout in STAGE_0_INTEGRATIONS ), - ("stage 1", STAGE_1_INTEGRATIONS & domains_to_setup, STAGE_1_TIMEOUT), + ("1", STAGE_1_INTEGRATIONS, STAGE_1_TIMEOUT), + ("2", domains, STAGE_2_TIMEOUT), ] - _LOGGER.info("Setting up stage 0 and 1") - for name, domain_group, timeout in stage_0_and_1_domains: - if not domain_group: + _LOGGER.info("Setting up stage 0") + for name, domain_group, timeout in stages: + stage_domains_unfiltered = domain_group & all_domains + if not stage_domains_unfiltered: + _LOGGER.info("Nothing to set up in stage %s: %s", name, domain_group) continue - _LOGGER.info("Setting up %s: %s", name, domain_group) - to_be_loaded = domain_group.copy() - to_be_loaded.update( + stage_domains = stage_domains_unfiltered - hass.config.components + if not stage_domains: + _LOGGER.info("Already set up stage %s: %s", name, stage_domains_unfiltered) + continue + + stage_dep_domains_unfiltered = { dep - for domain in domain_group - if (integration := integration_cache.get(domain)) is not None - for dep in integration.all_dependencies + for domain in stage_domains + for dep in all_integrations[domain].all_dependencies + if dep not in stage_domains + } + stage_dep_domains = stage_dep_domains_unfiltered - hass.config.components + + stage_all_domains = stage_domains | stage_dep_domains + stage_all_integrations = { + domain: all_integrations[domain] for domain in stage_all_domains + } + # Detect all cycles + stage_integrations_after_dependencies = ( + await loader.resolve_integrations_after_dependencies( + hass, stage_all_integrations.values(), stage_all_domains + ) ) - async_set_domains_to_be_loaded(hass, to_be_loaded) - stage_2_domains -= to_be_loaded + stage_all_domains = set(stage_integrations_after_dependencies) + stage_domains &= stage_all_domains + stage_dep_domains &= stage_all_domains + + _LOGGER.info( + "Setting up stage %s: %s | %s\nDependencies: %s | %s", + name, + stage_domains, + stage_domains_unfiltered - stage_domains, + stage_dep_domains, + stage_dep_domains_unfiltered - stage_dep_domains, + ) + + async_set_domains_to_be_loaded(hass, stage_all_domains) if timeout is None: - await _async_setup_multi_components(hass, domain_group, config) - else: - try: 
- async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME): - await _async_setup_multi_components(hass, domain_group, config) - except TimeoutError: - _LOGGER.warning( - "Setup timed out for %s waiting on %s - moving forward", - name, - hass._active_tasks, # noqa: SLF001 - ) - - # Add after dependencies when setting up stage 2 domains - async_set_domains_to_be_loaded(hass, stage_2_domains) - - if stage_2_domains: - _LOGGER.info("Setting up stage 2: %s", stage_2_domains) + await _async_setup_multi_components(hass, stage_all_domains, config) + continue try: - async with hass.timeout.async_timeout( - STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME - ): - await _async_setup_multi_components(hass, stage_2_domains, config) + async with hass.timeout.async_timeout(timeout, cool_down=COOLDOWN_TIME): + await _async_setup_multi_components(hass, stage_all_domains, config) except TimeoutError: _LOGGER.warning( - "Setup timed out for stage 2 waiting on %s - moving forward", + "Setup timed out for stage %s waiting on %s - moving forward", + name, hass._active_tasks, # noqa: SLF001 ) @@ -1053,8 +1041,6 @@ async def _async_setup_multi_components( config: dict[str, Any], ) -> None: """Set up multiple domains. Log on failure.""" - # Avoid creating tasks for domains that were setup in a previous stage - domains_not_yet_setup = domains - hass.config.components # Create setup tasks for base platforms first since everything will have # to wait to be imported, and the sooner we can get the base platforms # loaded the sooner we can start loading the rest of the integrations. @@ -1064,9 +1050,7 @@ async def _async_setup_multi_components( f"setup component {domain}", eager_start=True, ) - for domain in sorted( - domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True - ) + for domain in sorted(domains, key=SETUP_ORDER_SORT_KEY, reverse=True) } results = await asyncio.gather(*futures.values(), return_exceptions=True) for idx, domain in enumerate(futures): diff --git a/homeassistant/brands/bosch.json b/homeassistant/brands/bosch.json new file mode 100644 index 00000000000..090cc2af7c3 --- /dev/null +++ b/homeassistant/brands/bosch.json @@ -0,0 +1,5 @@ +{ + "domain": "bosch", + "name": "Bosch", + "integrations": ["bosch_alarm", "bosch_shc", "home_connect"] +} diff --git a/homeassistant/brands/eve.json b/homeassistant/brands/eve.json new file mode 100644 index 00000000000..f27c8b3d849 --- /dev/null +++ b/homeassistant/brands/eve.json @@ -0,0 +1,5 @@ +{ + "domain": "eve", + "name": "Eve", + "iot_standards": ["matter"] +} diff --git a/homeassistant/brands/motionblinds.json b/homeassistant/brands/motionblinds.json index 67013e75966..5a48b573b4d 100644 --- a/homeassistant/brands/motionblinds.json +++ b/homeassistant/brands/motionblinds.json @@ -1,5 +1,6 @@ { "domain": "motionblinds", "name": "Motionblinds", - "integrations": ["motion_blinds", "motionblinds_ble"] + "integrations": ["motion_blinds", "motionblinds_ble"], + "iot_standards": ["matter"] } diff --git a/homeassistant/components/accuweather/const.py b/homeassistant/components/accuweather/const.py index 1bbf5a36187..7216f5a0b9b 100644 --- a/homeassistant/components/accuweather/const.py +++ b/homeassistant/components/accuweather/const.py @@ -24,7 +24,7 @@ from homeassistant.components.weather import ( API_METRIC: Final = "Metric" ATTRIBUTION: Final = "Data provided by AccuWeather" -ATTR_CATEGORY: Final = "Category" +ATTR_CATEGORY_VALUE = "CategoryValue" ATTR_DIRECTION: Final = "Direction" ATTR_ENGLISH: Final = "English" ATTR_LEVEL: Final = "level" @@ 
-55,5 +55,18 @@ CONDITION_MAP = { for cond_ha, cond_codes in CONDITION_CLASSES.items() for cond_code in cond_codes } +AIR_QUALITY_CATEGORY_MAP = { + 1: "good", + 2: "moderate", + 3: "unhealthy", + 4: "very_unhealthy", + 5: "hazardous", +} +POLLEN_CATEGORY_MAP = { + 1: "low", + 2: "moderate", + 3: "high", + 4: "very_high", +} UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40) UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6) diff --git a/homeassistant/components/accuweather/coordinator.py b/homeassistant/components/accuweather/coordinator.py index 40ff3ad2c87..780c977f930 100644 --- a/homeassistant/components/accuweather/coordinator.py +++ b/homeassistant/components/accuweather/coordinator.py @@ -75,7 +75,11 @@ class AccuWeatherObservationDataUpdateCoordinator( async with timeout(10): result = await self.accuweather.async_get_current_conditions() except EXCEPTIONS as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="current_conditions_update_error", + translation_placeholders={"error": repr(error)}, + ) from error _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining) @@ -117,9 +121,15 @@ class AccuWeatherDailyForecastDataUpdateCoordinator( """Update data via library.""" try: async with timeout(10): - result = await self.accuweather.async_get_daily_forecast() + result = await self.accuweather.async_get_daily_forecast( + language=self.hass.config.language + ) except EXCEPTIONS as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="forecast_update_error", + translation_placeholders={"error": repr(error)}, + ) from error _LOGGER.debug("Requests remaining: %d", self.accuweather.requests_remaining) diff --git a/homeassistant/components/accuweather/manifest.json b/homeassistant/components/accuweather/manifest.json index 5a019ef968e..810557519eb 100644 --- a/homeassistant/components/accuweather/manifest.json +++ b/homeassistant/components/accuweather/manifest.json @@ -7,6 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["accuweather"], - "requirements": ["accuweather==4.1.0"], + "requirements": ["accuweather==4.2.0"], "single_config_entry": true } diff --git a/homeassistant/components/accuweather/sensor.py b/homeassistant/components/accuweather/sensor.py index f14584cf08c..415df402d55 100644 --- a/homeassistant/components/accuweather/sensor.py +++ b/homeassistant/components/accuweather/sensor.py @@ -29,8 +29,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( + AIR_QUALITY_CATEGORY_MAP, API_METRIC, - ATTR_CATEGORY, + ATTR_CATEGORY_VALUE, ATTR_DIRECTION, ATTR_ENGLISH, ATTR_LEVEL, @@ -38,6 +39,7 @@ from .const import ( ATTR_VALUE, ATTRIBUTION, MAX_FORECAST_DAYS, + POLLEN_CATEGORY_MAP, ) from .coordinator import ( AccuWeatherConfigEntry, @@ -59,9 +61,9 @@ class AccuWeatherSensorDescription(SensorEntityDescription): FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] 
= ( AccuWeatherSensorDescription( key="AirQuality", - value_fn=lambda data: cast(str, data[ATTR_CATEGORY]), + value_fn=lambda data: AIR_QUALITY_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]], device_class=SensorDeviceClass.ENUM, - options=["good", "hazardous", "high", "low", "moderate", "unhealthy"], + options=list(AIR_QUALITY_CATEGORY_MAP.values()), translation_key="air_quality", ), AccuWeatherSensorDescription( @@ -83,7 +85,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( entity_registry_enabled_default=False, native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER, value_fn=lambda data: cast(int, data[ATTR_VALUE]), - attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]}, + attr_fn=lambda data: { + ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]] + }, translation_key="grass_pollen", ), AccuWeatherSensorDescription( @@ -107,7 +111,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( entity_registry_enabled_default=False, native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER, value_fn=lambda data: cast(int, data[ATTR_VALUE]), - attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]}, + attr_fn=lambda data: { + ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]] + }, translation_key="mold_pollen", ), AccuWeatherSensorDescription( @@ -115,7 +121,9 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER, entity_registry_enabled_default=False, value_fn=lambda data: cast(int, data[ATTR_VALUE]), - attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]}, + attr_fn=lambda data: { + ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]] + }, translation_key="ragweed_pollen", ), AccuWeatherSensorDescription( @@ -181,14 +189,18 @@ FORECAST_SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] 
= ( native_unit_of_measurement=CONCENTRATION_PARTS_PER_CUBIC_METER, entity_registry_enabled_default=False, value_fn=lambda data: cast(int, data[ATTR_VALUE]), - attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]}, + attr_fn=lambda data: { + ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]] + }, translation_key="tree_pollen", ), AccuWeatherSensorDescription( key="UVIndex", native_unit_of_measurement=UV_INDEX, value_fn=lambda data: cast(int, data[ATTR_VALUE]), - attr_fn=lambda data: {ATTR_LEVEL: data[ATTR_CATEGORY]}, + attr_fn=lambda data: { + ATTR_LEVEL: POLLEN_CATEGORY_MAP[data[ATTR_CATEGORY_VALUE]] + }, translation_key="uv_index_forecast", ), AccuWeatherSensorDescription( diff --git a/homeassistant/components/accuweather/strings.json b/homeassistant/components/accuweather/strings.json index d0250a382e9..e1a71c5e1a5 100644 --- a/homeassistant/components/accuweather/strings.json +++ b/homeassistant/components/accuweather/strings.json @@ -26,10 +26,20 @@ "state": { "good": "Good", "hazardous": "Hazardous", - "high": "High", - "low": "Low", "moderate": "Moderate", - "unhealthy": "Unhealthy" + "unhealthy": "Unhealthy", + "very_unhealthy": "Very unhealthy" + }, + "state_attributes": { + "options": { + "state": { + "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]", + "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]", + "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]", + "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]", + "very_unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::very_unhealthy%]" + } + } } }, "apparent_temperature": { @@ -62,12 +72,10 @@ "level": { "name": "Level", "state": { - "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]", - "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]", - "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]", - "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]", - "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]", - "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]" + "high": "High", + "low": "Low", + "moderate": "Moderate", + "very_high": "Very high" } } } @@ -81,12 +89,10 @@ "level": { "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]", "state": { - "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]", - "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]", - "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]", - "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]", - "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]", - "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]" + "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]", + "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]", + "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]", + "very_high": 
"[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]" } } } @@ -100,6 +106,15 @@ "steady": "Steady", "rising": "Rising", "falling": "Falling" + }, + "state_attributes": { + "options": { + "state": { + "falling": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::falling%]", + "rising": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::rising%]", + "steady": "[%key:component::accuweather::entity::sensor::pressure_tendency::state::steady%]" + } + } } }, "ragweed_pollen": { @@ -108,12 +123,10 @@ "level": { "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]", "state": { - "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]", - "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]", - "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]", - "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]", - "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]", - "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]" + "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]", + "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]", + "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]", + "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]" } } } @@ -154,12 +167,10 @@ "level": { "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]", "state": { - "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]", - "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]", - "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]", - "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]", - "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]", - "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]" + "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]", + "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]", + "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]", + "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]" } } } @@ -170,12 +181,10 @@ "level": { "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]", "state": { - "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]", - "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]", - "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]", - "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]", - "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]", - "unhealthy": 
"[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]" + "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]", + "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]", + "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]", + "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]" } } } @@ -186,12 +195,10 @@ "level": { "name": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::name%]", "state": { - "good": "[%key:component::accuweather::entity::sensor::air_quality::state::good%]", - "hazardous": "[%key:component::accuweather::entity::sensor::air_quality::state::hazardous%]", - "high": "[%key:component::accuweather::entity::sensor::air_quality::state::high%]", - "low": "[%key:component::accuweather::entity::sensor::air_quality::state::low%]", - "moderate": "[%key:component::accuweather::entity::sensor::air_quality::state::moderate%]", - "unhealthy": "[%key:component::accuweather::entity::sensor::air_quality::state::unhealthy%]" + "high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::high%]", + "low": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::low%]", + "moderate": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::moderate%]", + "very_high": "[%key:component::accuweather::entity::sensor::grass_pollen::state_attributes::level::state::very_high%]" } } } @@ -222,6 +229,14 @@ } } }, + "exceptions": { + "current_conditions_update_error": { + "message": "An error occurred while retrieving weather current conditions data from the AccuWeather API: {error}" + }, + "forecast_update_error": { + "message": "An error occurred while retrieving weather forecast data from the AccuWeather API: {error}" + } + }, "system_health": { "info": { "can_reach_server": "Reach AccuWeather server", diff --git a/homeassistant/components/adax/strings.json b/homeassistant/components/adax/strings.json index 6157b7dfc91..9ba497a9aca 100644 --- a/homeassistant/components/adax/strings.json +++ b/homeassistant/components/adax/strings.json @@ -5,14 +5,14 @@ "data": { "connection_type": "Select connection type" }, - "description": "Select connection type. Local requires heaters with bluetooth" + "description": "Select connection type. Local requires heaters with Bluetooth" }, "local": { "data": { "wifi_ssid": "Wi-Fi SSID", - "wifi_pswd": "Wi-Fi Password" + "wifi_pswd": "Wi-Fi password" }, - "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue led starts blinking before pressing Submit. Configuring heater might take some minutes." + "description": "Reset the heater by pressing + and OK until display shows 'Reset'. Then press and hold OK button on the heater until the blue LED starts blinking before pressing Submit. Configuring heater might take some minutes." 
}, "cloud": { "data": { diff --git a/homeassistant/components/advantage_air/climate.py b/homeassistant/components/advantage_air/climate.py index c023d4cf8f3..1d593c5c3c8 100644 --- a/homeassistant/components/advantage_air/climate.py +++ b/homeassistant/components/advantage_air/climate.py @@ -2,6 +2,7 @@ from __future__ import annotations +from decimal import Decimal import logging from typing import Any @@ -14,6 +15,7 @@ from homeassistant.components.climate import ( FAN_MEDIUM, ClimateEntity, ClimateEntityFeature, + HVACAction, HVACMode, ) from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature @@ -49,6 +51,14 @@ ADVANTAGE_AIR_MYTEMP_ENABLED = "climateControlModeEnabled" ADVANTAGE_AIR_HEAT_TARGET = "myAutoHeatTargetTemp" ADVANTAGE_AIR_COOL_TARGET = "myAutoCoolTargetTemp" ADVANTAGE_AIR_MYFAN = "autoAA" +ADVANTAGE_AIR_MYAUTO_MODE_SET = "myAutoModeCurrentSetMode" + +HVAC_ACTIONS = { + "cool": HVACAction.COOLING, + "heat": HVACAction.HEATING, + "vent": HVACAction.FAN, + "dry": HVACAction.DRYING, +} HVAC_MODES = [ HVACMode.OFF, @@ -175,6 +185,17 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity): return ADVANTAGE_AIR_HVAC_MODES.get(self._ac["mode"]) return HVACMode.OFF + @property + def hvac_action(self) -> HVACAction | None: + """Return the current running HVAC action.""" + if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF: + return HVACAction.OFF + if self._ac["mode"] == "myauto": + return HVAC_ACTIONS.get( + self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET, HVACAction.OFF) + ) + return HVAC_ACTIONS.get(self._ac["mode"]) + @property def fan_mode(self) -> str | None: """Return the current fan modes.""" @@ -273,6 +294,22 @@ class AdvantageAirZone(AdvantageAirZoneEntity, ClimateEntity): return HVACMode.HEAT_COOL return HVACMode.OFF + @property + def hvac_action(self) -> HVACAction | None: + """Return the HVAC action, inheriting from master AC if zone is open but idle if air is <= 5%.""" + if self._ac["state"] == ADVANTAGE_AIR_STATE_OFF: + return HVACAction.OFF + master_action = HVAC_ACTIONS.get(self._ac["mode"], HVACAction.OFF) + if self._ac["mode"] == "myauto": + master_action = HVAC_ACTIONS.get( + str(self._ac.get(ADVANTAGE_AIR_MYAUTO_MODE_SET)), HVACAction.OFF + ) + if self._zone["state"] == ADVANTAGE_AIR_STATE_OPEN: + if self._zone["value"] <= Decimal(5): + return HVACAction.IDLE + return master_action + return HVACAction.OFF + @property def current_temperature(self) -> float | None: """Return the current temperature.""" diff --git a/homeassistant/components/advantage_air/const.py b/homeassistant/components/advantage_air/const.py index 6ae0a0e06d5..103ca57f6ef 100644 --- a/homeassistant/components/advantage_air/const.py +++ b/homeassistant/components/advantage_air/const.py @@ -7,3 +7,4 @@ ADVANTAGE_AIR_STATE_CLOSE = "close" ADVANTAGE_AIR_STATE_ON = "on" ADVANTAGE_AIR_STATE_OFF = "off" ADVANTAGE_AIR_AUTOFAN_ENABLED = "aaAutoFanModeEnabled" +ADVANTAGE_AIR_NIGHT_MODE_ENABLED = "quietNightModeEnabled" diff --git a/homeassistant/components/advantage_air/cover.py b/homeassistant/components/advantage_air/cover.py index b5b982597f0..e764d484128 100644 --- a/homeassistant/components/advantage_air/cover.py +++ b/homeassistant/components/advantage_air/cover.py @@ -41,7 +41,7 @@ async def async_setup_entry( entities.append( AdvantageAirThingCover(instance, thing, CoverDeviceClass.BLIND) ) - elif thing["channelDipState"] == 3: # 3 = "Garage door" + elif thing["channelDipState"] in [3, 10]: # 3 & 10 = "Garage door" entities.append( AdvantageAirThingCover(instance, thing, 
CoverDeviceClass.GARAGE) ) diff --git a/homeassistant/components/advantage_air/switch.py b/homeassistant/components/advantage_air/switch.py index 5c4528b44c6..8560c9a9138 100644 --- a/homeassistant/components/advantage_air/switch.py +++ b/homeassistant/components/advantage_air/switch.py @@ -9,6 +9,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import AdvantageAirDataConfigEntry from .const import ( ADVANTAGE_AIR_AUTOFAN_ENABLED, + ADVANTAGE_AIR_NIGHT_MODE_ENABLED, ADVANTAGE_AIR_STATE_OFF, ADVANTAGE_AIR_STATE_ON, ) @@ -32,6 +33,8 @@ async def async_setup_entry( entities.append(AdvantageAirFreshAir(instance, ac_key)) if ADVANTAGE_AIR_AUTOFAN_ENABLED in ac_device["info"]: entities.append(AdvantageAirMyFan(instance, ac_key)) + if ADVANTAGE_AIR_NIGHT_MODE_ENABLED in ac_device["info"]: + entities.append(AdvantageAirNightMode(instance, ac_key)) if things := instance.coordinator.data.get("myThings"): entities.extend( AdvantageAirRelay(instance, thing) @@ -93,6 +96,32 @@ class AdvantageAirMyFan(AdvantageAirAcEntity, SwitchEntity): await self.async_update_ac({ADVANTAGE_AIR_AUTOFAN_ENABLED: False}) +class AdvantageAirNightMode(AdvantageAirAcEntity, SwitchEntity): + """Representation of Advantage 'MySleep$aver' Mode control.""" + + _attr_icon = "mdi:weather-night" + _attr_name = "MySleep$aver" + _attr_device_class = SwitchDeviceClass.SWITCH + + def __init__(self, instance: AdvantageAirData, ac_key: str) -> None: + """Initialize an Advantage Air Night Mode control.""" + super().__init__(instance, ac_key) + self._attr_unique_id += "-nightmode" + + @property + def is_on(self) -> bool: + """Return the Night Mode status.""" + return self._ac[ADVANTAGE_AIR_NIGHT_MODE_ENABLED] + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn Night Mode on.""" + await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: True}) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn Night Mode off.""" + await self.async_update_ac({ADVANTAGE_AIR_NIGHT_MODE_ENABLED: False}) + + class AdvantageAirRelay(AdvantageAirThingEntity, SwitchEntity): """Representation of Advantage Air Thing.""" diff --git a/homeassistant/components/aftership/strings.json b/homeassistant/components/aftership/strings.json index ace8eb6d2d3..c3817a0cd24 100644 --- a/homeassistant/components/aftership/strings.json +++ b/homeassistant/components/aftership/strings.json @@ -51,7 +51,7 @@ "issues": { "deprecated_yaml_import_issue_cannot_connect": { "title": "The {integration_title} YAML configuration import failed", - "description": "Configuring {integration_title} using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + "description": "Configuring {integration_title} using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to {integration_title} works and restart Home Assistant to try again or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
} } } diff --git a/homeassistant/components/airgradient/strings.json b/homeassistant/components/airgradient/strings.json index 4cf3a6a34ea..2d9b6be529d 100644 --- a/homeassistant/components/airgradient/strings.json +++ b/homeassistant/components/airgradient/strings.json @@ -11,7 +11,7 @@ } }, "discovery_confirm": { - "description": "Do you want to setup {model}?" + "description": "Do you want to set up {model}?" } }, "abort": { diff --git a/homeassistant/components/airly/coordinator.py b/homeassistant/components/airly/coordinator.py index b255c5f078f..668cabdae63 100644 --- a/homeassistant/components/airly/coordinator.py +++ b/homeassistant/components/airly/coordinator.py @@ -105,7 +105,14 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i try: await measurements.update() except (AirlyError, ClientConnectorError) as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "entry": self.config_entry.title, + "error": repr(error), + }, + ) from error _LOGGER.debug( "Requests remaining: %s/%s", @@ -126,7 +133,11 @@ class AirlyDataUpdateCoordinator(DataUpdateCoordinator[dict[str, str | float | i standards = measurements.current["standards"] if index["description"] == NO_AIRLY_SENSORS: - raise UpdateFailed("Can't retrieve data: no Airly sensors in this area") + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="no_station", + translation_placeholders={"entry": self.config_entry.title}, + ) for value in values: data[value["name"]] = value["value"] for standard in standards: diff --git a/homeassistant/components/airly/strings.json b/homeassistant/components/airly/strings.json index 33ee8bbe4c9..fe4ccbb4745 100644 --- a/homeassistant/components/airly/strings.json +++ b/homeassistant/components/airly/strings.json @@ -36,5 +36,13 @@ "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]" } } + }, + "exceptions": { + "update_error": { + "message": "An error occurred while retrieving data from the Airly API for {entry}: {error}" + }, + "no_station": { + "message": "An error occurred while retrieving data from the Airly API for {entry}: no measuring stations in this area" + } } } diff --git a/homeassistant/components/airnow/coordinator.py b/homeassistant/components/airnow/coordinator.py index ee5bf4a1dd7..1e73bc7551e 100644 --- a/homeassistant/components/airnow/coordinator.py +++ b/homeassistant/components/airnow/coordinator.py @@ -8,7 +8,7 @@ from aiohttp import ClientSession from aiohttp.client_exceptions import ClientConnectorError from pyairnow import WebServiceAPI from pyairnow.conv import aqi_to_concentration -from pyairnow.errors import AirNowError +from pyairnow.errors import AirNowError, InvalidJsonError from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -79,7 +79,7 @@ class AirNowDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): distance=self.distance, ) - except (AirNowError, ClientConnectorError) as error: + except (AirNowError, ClientConnectorError, InvalidJsonError) as error: raise UpdateFailed(error) from error if not obs: diff --git a/homeassistant/components/airnow/strings.json b/homeassistant/components/airnow/strings.json index d5fb22106f9..a69f67948cb 100644 --- a/homeassistant/components/airnow/strings.json +++ b/homeassistant/components/airnow/strings.json @@ -7,7 +7,7 @@ "api_key": "[%key:common::config_flow::data::api_key%]", "latitude": 
"[%key:common::config_flow::data::latitude%]", "longitude": "[%key:common::config_flow::data::longitude%]", - "radius": "Station Radius (miles; optional)" + "radius": "Station radius (miles; optional)" } } }, @@ -25,7 +25,7 @@ "step": { "init": { "data": { - "radius": "Station Radius (miles)" + "radius": "Station radius (miles)" } } } diff --git a/homeassistant/components/airq/strings.json b/homeassistant/components/airq/strings.json index 26b944467e6..9c16975a3ab 100644 --- a/homeassistant/components/airq/strings.json +++ b/homeassistant/components/airq/strings.json @@ -91,7 +91,7 @@ "name": "Hydrogen fluoride" }, "health_index": { - "name": "Health Index" + "name": "Health index" }, "absolute_humidity": { "name": "Absolute humidity" @@ -112,10 +112,10 @@ "name": "Oxygen" }, "performance_index": { - "name": "Performance Index" + "name": "Performance index" }, "hydrogen_phosphide": { - "name": "Hydrogen Phosphide" + "name": "Hydrogen phosphide" }, "relative_pressure": { "name": "Relative pressure" @@ -127,22 +127,22 @@ "name": "Refrigerant" }, "silicon_hydride": { - "name": "Silicon Hydride" + "name": "Silicon hydride" }, "noise": { "name": "Noise" }, "maximum_noise": { - "name": "Noise (Maximum)" + "name": "Noise (maximum)" }, "radon": { "name": "Radon" }, "industrial_volatile_organic_compounds": { - "name": "VOCs (Industrial)" + "name": "VOCs (industrial)" }, "virus_index": { - "name": "Virus Index" + "name": "Virus index" } } } diff --git a/homeassistant/components/airthings_ble/config_flow.py b/homeassistant/components/airthings_ble/config_flow.py index 3e7b659bff1..2d32fa6e7df 100644 --- a/homeassistant/components/airthings_ble/config_flow.py +++ b/homeassistant/components/airthings_ble/config_flow.py @@ -102,7 +102,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): device = await self._get_device_data(discovery_info) except AirthingsDeviceUpdateError: return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown error occurred") return self.async_abort(reason="unknown") name = get_name(device) @@ -160,7 +161,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN): device = await self._get_device_data(discovery_info) except AirthingsDeviceUpdateError: return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown error occurred") return self.async_abort(reason="unknown") name = get_name(device) self._discovered_devices[address] = Discovery(name, discovery_info, device) diff --git a/homeassistant/components/airtouch5/config_flow.py b/homeassistant/components/airtouch5/config_flow.py index d96aaed96b7..38c85e45fb8 100644 --- a/homeassistant/components/airtouch5/config_flow.py +++ b/homeassistant/components/airtouch5/config_flow.py @@ -32,7 +32,8 @@ class AirTouch5ConfigFlow(ConfigFlow, domain=DOMAIN): client = Airtouch5SimpleClient(user_input[CONF_HOST]) try: await client.test_connection() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors = {"base": "cannot_connect"} else: await self.async_set_unique_id(user_input[CONF_HOST]) diff --git a/homeassistant/components/airvisual/strings.json b/homeassistant/components/airvisual/strings.json index 148b1368a19..7a5f8b1d5c7 100644 --- a/homeassistant/components/airvisual/strings.json +++ b/homeassistant/components/airvisual/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "geography_by_coords": { - "title": "Configure a Geography", + 
"title": "Configure a geography", "description": "Use the AirVisual cloud API to monitor a latitude/longitude.", "data": { "api_key": "[%key:common::config_flow::data::api_key%]", @@ -56,12 +56,12 @@ "sensor": { "pollutant_label": { "state": { - "co": "Carbon Monoxide", - "n2": "Nitrogen Dioxide", + "co": "Carbon monoxide", + "n2": "Nitrogen dioxide", "o3": "Ozone", "p1": "PM10", "p2": "PM2.5", - "s2": "Sulfur Dioxide" + "s2": "Sulfur dioxide" } }, "pollutant_level": { diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index 0e21e57ec52..3b6f94df57c 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.10"] + "requirements": ["aioairzone-cloud==0.6.11"] } diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index e70055c20b1..897037987a7 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -1438,7 +1438,7 @@ class AlexaModeController(AlexaCapability): # Fan preset_mode if self.instance == f"{fan.DOMAIN}.{fan.ATTR_PRESET_MODE}": mode = self.entity.attributes.get(fan.ATTR_PRESET_MODE, None) - if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, None): + if mode in self.entity.attributes.get(fan.ATTR_PRESET_MODES, ()): return f"{fan.ATTR_PRESET_MODE}.{mode}" # Humidifier mode diff --git a/homeassistant/components/ambient_network/sensor.py b/homeassistant/components/ambient_network/sensor.py index 9ec6db6ff45..b96da9863a1 100644 --- a/homeassistant/components/ambient_network/sensor.py +++ b/homeassistant/components/ambient_network/sensor.py @@ -240,6 +240,7 @@ SENSOR_DESCRIPTIONS = ( suggested_display_precision=0, entity_registry_enabled_default=False, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key=TYPE_WINDGUSTMPH, diff --git a/homeassistant/components/ambient_station/sensor.py b/homeassistant/components/ambient_station/sensor.py index 730b798bd15..1b4334774d4 100644 --- a/homeassistant/components/ambient_station/sensor.py +++ b/homeassistant/components/ambient_station/sensor.py @@ -609,6 +609,7 @@ SENSOR_DESCRIPTIONS = ( translation_key="wind_direction", native_unit_of_measurement=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key=TYPE_WINDDIR_AVG10M, diff --git a/homeassistant/components/analytics_insights/config_flow.py b/homeassistant/components/analytics_insights/config_flow.py index da77a35f789..b2648f7c13c 100644 --- a/homeassistant/components/analytics_insights/config_flow.py +++ b/homeassistant/components/analytics_insights/config_flow.py @@ -8,7 +8,7 @@ from python_homeassistant_analytics import ( HomeassistantAnalyticsClient, HomeassistantAnalyticsConnectionError, ) -from python_homeassistant_analytics.models import IntegrationType +from python_homeassistant_analytics.models import Environment, IntegrationType import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow @@ -81,7 +81,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): ) try: addons = await client.get_addons() - integrations = await 
client.get_integrations() + integrations = await client.get_integrations(Environment.NEXT) custom_integrations = await client.get_custom_integrations() except HomeassistantAnalyticsConnectionError: LOGGER.exception("Error connecting to Home Assistant analytics") @@ -165,7 +165,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow): ) try: addons = await client.get_addons() - integrations = await client.get_integrations() + integrations = await client.get_integrations(Environment.NEXT) custom_integrations = await client.get_custom_integrations() except HomeassistantAnalyticsConnectionError: LOGGER.exception("Error connecting to Home Assistant analytics") diff --git a/homeassistant/components/androidtv_remote/manifest.json b/homeassistant/components/androidtv_remote/manifest.json index 1c45e825359..89cc0fc3965 100644 --- a/homeassistant/components/androidtv_remote/manifest.json +++ b/homeassistant/components/androidtv_remote/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["androidtvremote2"], - "requirements": ["androidtvremote2==0.2.0"], + "requirements": ["androidtvremote2==0.2.1"], "zeroconf": ["_androidtvremote2._tcp.local."] } diff --git a/homeassistant/components/anova/config_flow.py b/homeassistant/components/anova/config_flow.py index bc4723b1dba..f382606baba 100644 --- a/homeassistant/components/anova/config_flow.py +++ b/homeassistant/components/anova/config_flow.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + from anova_wifi import AnovaApi, InvalidLogin import voluptuous as vol @@ -11,8 +13,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) -class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): + +class AnovaConfigFlow(ConfigFlow, domain=DOMAIN): """Sets up a config flow for Anova.""" VERSION = 1 @@ -35,7 +39,8 @@ class AnovaConfligFlow(ConfigFlow, domain=DOMAIN): await api.authenticate() except InvalidLogin: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry( diff --git a/homeassistant/components/anthemav/media_player.py b/homeassistant/components/anthemav/media_player.py index cfbd3c29547..317498e96b5 100644 --- a/homeassistant/components/anthemav/media_player.py +++ b/homeassistant/components/anthemav/media_player.py @@ -22,6 +22,7 @@ from . 
import AnthemavConfigEntry from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER _LOGGER = logging.getLogger(__name__) +VOLUME_STEP = 0.01 async def async_setup_entry( @@ -60,6 +61,7 @@ class AnthemAVR(MediaPlayerEntity): | MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.SELECT_SOURCE ) + _attr_volume_step = VOLUME_STEP def __init__( self, diff --git a/homeassistant/components/anthropic/__init__.py b/homeassistant/components/anthropic/__init__.py index 84c9054b476..a9745d1a6a5 100644 --- a/homeassistant/components/anthropic/__init__.py +++ b/homeassistant/components/anthropic/__init__.py @@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv -from .const import DOMAIN, LOGGER +from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL PLATFORMS = (Platform.CONVERSATION,) CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -26,12 +26,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY]) ) try: - await client.messages.create( - model="claude-3-haiku-20240307", - max_tokens=1, - messages=[{"role": "user", "content": "Hi"}], - timeout=10.0, - ) + model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) + model = await client.models.retrieve(model_id=model_id, timeout=10.0) + LOGGER.debug("Anthropic model: %s", model.display_name) except anthropic.AuthenticationError as err: LOGGER.error("Invalid API key: %s", err) return False diff --git a/homeassistant/components/anthropic/config_flow.py b/homeassistant/components/anthropic/config_flow.py index 63a70f31fea..e53a479d7d4 100644 --- a/homeassistant/components/anthropic/config_flow.py +++ b/homeassistant/components/anthropic/config_flow.py @@ -34,10 +34,12 @@ from .const import ( CONF_PROMPT, CONF_RECOMMENDED, CONF_TEMPERATURE, + CONF_THINKING_BUDGET, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, RECOMMENDED_TEMPERATURE, + RECOMMENDED_THINKING_BUDGET, ) _LOGGER = logging.getLogger(__name__) @@ -63,12 +65,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None: client = await hass.async_add_executor_job( partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY]) ) - await client.messages.create( - model="claude-3-haiku-20240307", - max_tokens=1, - messages=[{"role": "user", "content": "Hi"}], - timeout=10.0, - ) + await client.models.list(timeout=10.0) class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN): @@ -133,21 +130,29 @@ class AnthropicOptionsFlow(OptionsFlow): ) -> ConfigFlowResult: """Manage the options.""" options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options + errors: dict[str, str] = {} if user_input is not None: if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended: if user_input[CONF_LLM_HASS_API] == "none": user_input.pop(CONF_LLM_HASS_API) - return self.async_create_entry(title="", data=user_input) - # Re-render the options again, now with the recommended options shown/hidden - self.last_rendered_recommended = user_input[CONF_RECOMMENDED] + if user_input.get( + CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET + ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS): + errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large" - options = { - CONF_RECOMMENDED: user_input[CONF_RECOMMENDED], - CONF_PROMPT: user_input[CONF_PROMPT], - CONF_LLM_HASS_API: 
user_input[CONF_LLM_HASS_API], - } + if not errors: + return self.async_create_entry(title="", data=user_input) + else: + # Re-render the options again, now with the recommended options shown/hidden + self.last_rendered_recommended = user_input[CONF_RECOMMENDED] + + options = { + CONF_RECOMMENDED: user_input[CONF_RECOMMENDED], + CONF_PROMPT: user_input[CONF_PROMPT], + CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API], + } suggested_values = options.copy() if not suggested_values.get(CONF_PROMPT): @@ -161,6 +166,7 @@ class AnthropicOptionsFlow(OptionsFlow): return self.async_show_form( step_id="init", data_schema=schema, + errors=errors or None, ) @@ -210,6 +216,10 @@ def anthropic_config_option_schema( CONF_TEMPERATURE, default=RECOMMENDED_TEMPERATURE, ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)), + vol.Optional( + CONF_THINKING_BUDGET, + default=RECOMMENDED_THINKING_BUDGET, + ): int, } ) return schema diff --git a/homeassistant/components/anthropic/const.py b/homeassistant/components/anthropic/const.py index 0dbf9c51ac1..38e4270e6e1 100644 --- a/homeassistant/components/anthropic/const.py +++ b/homeassistant/components/anthropic/const.py @@ -13,3 +13,8 @@ CONF_MAX_TOKENS = "max_tokens" RECOMMENDED_MAX_TOKENS = 1024 CONF_TEMPERATURE = "temperature" RECOMMENDED_TEMPERATURE = 1.0 +CONF_THINKING_BUDGET = "thinking_budget" +RECOMMENDED_THINKING_BUDGET = 0 +MIN_THINKING_BUDGET = 1024 + +THINKING_MODELS = ["claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest"] diff --git a/homeassistant/components/anthropic/conversation.py b/homeassistant/components/anthropic/conversation.py index 5511119d377..5e5ad464eaa 100644 --- a/homeassistant/components/anthropic/conversation.py +++ b/homeassistant/components/anthropic/conversation.py @@ -1,23 +1,32 @@ """Conversation support for Anthropic.""" -from collections.abc import AsyncGenerator, Callable +from collections.abc import AsyncGenerator, Callable, Iterable import json -from typing import Any, Literal +from typing import Any, Literal, cast import anthropic from anthropic import AsyncStream from anthropic._types import NOT_GIVEN from anthropic.types import ( InputJSONDelta, - Message, MessageParam, MessageStreamEvent, RawContentBlockDeltaEvent, RawContentBlockStartEvent, RawContentBlockStopEvent, + RawMessageStartEvent, + RawMessageStopEvent, + RedactedThinkingBlock, + RedactedThinkingBlockParam, + SignatureDelta, TextBlock, TextBlockParam, TextDelta, + ThinkingBlock, + ThinkingBlockParam, + ThinkingConfigDisabledParam, + ThinkingConfigEnabledParam, + ThinkingDelta, ToolParam, ToolResultBlockParam, ToolUseBlock, @@ -30,7 +39,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import chat_session, device_registry as dr, intent, llm +from homeassistant.helpers import device_registry as dr, intent, llm from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import AnthropicConfigEntry @@ -39,11 +48,15 @@ from .const import ( CONF_MAX_TOKENS, CONF_PROMPT, CONF_TEMPERATURE, + CONF_THINKING_BUDGET, DOMAIN, LOGGER, + MIN_THINKING_BUDGET, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, RECOMMENDED_TEMPERATURE, + RECOMMENDED_THINKING_BUDGET, + THINKING_MODELS, ) # Max number of back and forth with the LLM to generate a response @@ -71,73 +84,101 @@ def _format_tool( ) -def _message_convert( - message: Message, -) -> MessageParam: - """Convert from class to TypedDict.""" - param_content: list[TextBlockParam | ToolUseBlockParam] = [] +def _convert_content( + chat_content: Iterable[conversation.Content], +) -> list[MessageParam]: + """Transform HA chat_log content into Anthropic API format.""" + messages: list[MessageParam] = [] - for message_content in message.content: - if isinstance(message_content, TextBlock): - param_content.append(TextBlockParam(type="text", text=message_content.text)) - elif isinstance(message_content, ToolUseBlock): - param_content.append( - ToolUseBlockParam( - type="tool_use", - id=message_content.id, - name=message_content.name, - input=message_content.input, - ) + for content in chat_content: + if isinstance(content, conversation.ToolResultContent): + tool_result_block = ToolResultBlockParam( + type="tool_result", + tool_use_id=content.tool_call_id, + content=json.dumps(content.tool_result), ) - - return MessageParam(role=message.role, content=param_content) - - -def _convert_content(chat_content: conversation.Content) -> MessageParam: - """Create tool response content.""" - if isinstance(chat_content, conversation.ToolResultContent): - return MessageParam( - role="user", - content=[ - ToolResultBlockParam( - type="tool_result", - tool_use_id=chat_content.tool_call_id, - content=json.dumps(chat_content.tool_result), - ) - ], - ) - if isinstance(chat_content, conversation.AssistantContent): - return MessageParam( - role="assistant", - content=[ - TextBlockParam(type="text", text=chat_content.content or ""), - *[ - ToolUseBlockParam( - type="tool_use", - id=tool_call.id, - name=tool_call.tool_name, - input=tool_call.tool_args, + if not messages or messages[-1]["role"] != "user": + messages.append( + MessageParam( + role="user", + content=[tool_result_block], ) - for tool_call in chat_content.tool_calls or () - ], - ], - ) - if isinstance(chat_content, conversation.UserContent): - return MessageParam( - role="user", - content=chat_content.content, - ) - # Note: We don't pass SystemContent here as its passed to the API as the prompt - raise ValueError(f"Unexpected content type: {type(chat_content)}") + ) + elif isinstance(messages[-1]["content"], str): + messages[-1]["content"] = [ + TextBlockParam(type="text", text=messages[-1]["content"]), + tool_result_block, + ] + else: + messages[-1]["content"].append(tool_result_block) # type: ignore[attr-defined] + elif isinstance(content, conversation.UserContent): + # Combine consequent user messages + if not messages or messages[-1]["role"] != "user": + messages.append( + MessageParam( + role="user", + content=content.content, + ) + ) + elif isinstance(messages[-1]["content"], str): + messages[-1]["content"] = [ + TextBlockParam(type="text", text=messages[-1]["content"]), + TextBlockParam(type="text", text=content.content), + ] + else: + messages[-1]["content"].append( # type: ignore[attr-defined] + TextBlockParam(type="text", text=content.content) + ) + elif isinstance(content, conversation.AssistantContent): + # Combine consequent assistant messages + if not messages or 
messages[-1]["role"] != "assistant": + messages.append( + MessageParam( + role="assistant", + content=[], + ) + ) + + if content.content: + messages[-1]["content"].append( # type: ignore[union-attr] + TextBlockParam(type="text", text=content.content) + ) + if content.tool_calls: + messages[-1]["content"].extend( # type: ignore[union-attr] + [ + ToolUseBlockParam( + type="tool_use", + id=tool_call.id, + name=tool_call.tool_name, + input=tool_call.tool_args, + ) + for tool_call in content.tool_calls + ] + ) + else: + # Note: We don't pass SystemContent here as its passed to the API as the prompt + raise TypeError(f"Unexpected content type: {type(content)}") + + return messages async def _transform_stream( result: AsyncStream[MessageStreamEvent], + messages: list[MessageParam], ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]: """Transform the response stream into HA format. A typical stream of responses might look something like the following: - RawMessageStartEvent with no content + - RawContentBlockStartEvent with an empty ThinkingBlock (if extended thinking is enabled) + - RawContentBlockDeltaEvent with a ThinkingDelta + - RawContentBlockDeltaEvent with a ThinkingDelta + - RawContentBlockDeltaEvent with a ThinkingDelta + - ... + - RawContentBlockDeltaEvent with a SignatureDelta + - RawContentBlockStopEvent + - RawContentBlockStartEvent with a RedactedThinkingBlock (occasionally) + - RawContentBlockStopEvent (RedactedThinkingBlock does not have a delta) - RawContentBlockStartEvent with an empty TextBlock - RawContentBlockDeltaEvent with a TextDelta - RawContentBlockDeltaEvent with a TextDelta @@ -151,44 +192,103 @@ async def _transform_stream( - RawContentBlockStopEvent - RawMessageDeltaEvent with a stop_reason='tool_use' - RawMessageStopEvent(type='message_stop') + + Each message could contain multiple blocks of the same type. 
""" if result is None: raise TypeError("Expected a stream of messages") - current_tool_call: dict | None = None + current_message: MessageParam | None = None + current_block: ( + TextBlockParam + | ToolUseBlockParam + | ThinkingBlockParam + | RedactedThinkingBlockParam + | None + ) = None + current_tool_args: str async for response in result: LOGGER.debug("Received response: %s", response) - if isinstance(response, RawContentBlockStartEvent): + if isinstance(response, RawMessageStartEvent): + if response.message.role != "assistant": + raise ValueError("Unexpected message role") + current_message = MessageParam(role=response.message.role, content=[]) + elif isinstance(response, RawContentBlockStartEvent): if isinstance(response.content_block, ToolUseBlock): - current_tool_call = { - "id": response.content_block.id, - "name": response.content_block.name, - "input": "", - } + current_block = ToolUseBlockParam( + type="tool_use", + id=response.content_block.id, + name=response.content_block.name, + input="", + ) + current_tool_args = "" elif isinstance(response.content_block, TextBlock): + current_block = TextBlockParam( + type="text", text=response.content_block.text + ) yield {"role": "assistant"} + if response.content_block.text: + yield {"content": response.content_block.text} + elif isinstance(response.content_block, ThinkingBlock): + current_block = ThinkingBlockParam( + type="thinking", + thinking=response.content_block.thinking, + signature=response.content_block.signature, + ) + elif isinstance(response.content_block, RedactedThinkingBlock): + current_block = RedactedThinkingBlockParam( + type="redacted_thinking", data=response.content_block.data + ) + LOGGER.debug( + "Some of Claude’s internal reasoning has been automatically " + "encrypted for safety reasons. 
This doesn’t affect the quality of " + "responses" + ) elif isinstance(response, RawContentBlockDeltaEvent): + if current_block is None: + raise ValueError("Unexpected delta without a block") if isinstance(response.delta, InputJSONDelta): - if current_tool_call is None: - raise ValueError("Unexpected delta without a tool call") - current_tool_call["input"] += response.delta.partial_json + current_tool_args += response.delta.partial_json elif isinstance(response.delta, TextDelta): - LOGGER.debug("yielding delta: %s", response.delta.text) + text_block = cast(TextBlockParam, current_block) + text_block["text"] += response.delta.text yield {"content": response.delta.text} + elif isinstance(response.delta, ThinkingDelta): + thinking_block = cast(ThinkingBlockParam, current_block) + thinking_block["thinking"] += response.delta.thinking + elif isinstance(response.delta, SignatureDelta): + thinking_block = cast(ThinkingBlockParam, current_block) + thinking_block["signature"] += response.delta.signature elif isinstance(response, RawContentBlockStopEvent): - if current_tool_call: + if current_block is None: + raise ValueError("Unexpected stop event without a current block") + if current_block["type"] == "tool_use": + tool_block = cast(ToolUseBlockParam, current_block) + tool_args = json.loads(current_tool_args) + tool_block["input"] = tool_args yield { "tool_calls": [ llm.ToolInput( - id=current_tool_call["id"], - tool_name=current_tool_call["name"], - tool_args=json.loads(current_tool_call["input"]), + id=tool_block["id"], + tool_name=tool_block["name"], + tool_args=tool_args, ) ] } - current_tool_call = None + elif current_block["type"] == "thinking": + thinking_block = cast(ThinkingBlockParam, current_block) + LOGGER.debug("Thinking: %s", thinking_block["thinking"]) + + if current_message is None: + raise ValueError("Unexpected stop event without a current message") + current_message["content"].append(current_block) # type: ignore[union-attr] + current_block = None + elif isinstance(response, RawMessageStopEvent): + if current_message is not None: + messages.append(current_message) + current_message = None class AnthropicConversationEntity( @@ -226,18 +326,6 @@ class AnthropicConversationEntity( self.entry.add_update_listener(self._async_entry_update_listener) ) - async def async_process( - self, user_input: conversation.ConversationInput - ) -> conversation.ConversationResult: - """Process a sentence.""" - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - conversation.async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - return await self._async_handle_message(user_input, chat_log) - async def _async_handle_message( self, user_input: conversation.ConversationInput, @@ -266,34 +354,50 @@ class AnthropicConversationEntity( system = chat_log.content[0] if not isinstance(system, conversation.SystemContent): raise TypeError("First message must be a system message") - messages = [_convert_content(content) for content in chat_log.content[1:]] + messages = _convert_content(chat_log.content[1:]) client = self.entry.runtime_data + thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET) + model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) + # To prevent infinite loops, we limit the number of iterations for _iteration in range(MAX_TOOL_ITERATIONS): - try: - stream = await client.messages.create( - model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL), - messages=messages, - tools=tools or 
NOT_GIVEN, - max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS), - system=system.content, - temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE), - stream=True, + model_args = { + "model": model, + "messages": messages, + "tools": tools or NOT_GIVEN, + "max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS), + "system": system.content, + "stream": True, + } + if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET: + model_args["thinking"] = ThinkingConfigEnabledParam( + type="enabled", budget_tokens=thinking_budget ) + else: + model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled") + model_args["temperature"] = options.get( + CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE + ) + + try: + stream = await client.messages.create(**model_args) except anthropic.AnthropicError as err: raise HomeAssistantError( f"Sorry, I had a problem talking to Anthropic: {err}" ) from err messages.extend( - [ - _convert_content(content) - async for content in chat_log.async_add_delta_content_stream( - user_input.agent_id, _transform_stream(stream) - ) - ] + _convert_content( + [ + content + async for content in chat_log.async_add_delta_content_stream( + user_input.agent_id, _transform_stream(stream, messages) + ) + if not isinstance(content, conversation.AssistantContent) + ] + ) ) if not chat_log.unresponded_tool_results: @@ -305,7 +409,9 @@ class AnthropicConversationEntity( intent_response = intent.IntentResponse(language=user_input.language) intent_response.async_set_speech(response_content.content or "") return conversation.ConversationResult( - response=intent_response, conversation_id=chat_log.conversation_id + response=intent_response, + conversation_id=chat_log.conversation_id, + continue_conversation=chat_log.continue_conversation, ) async def _async_entry_update_listener( diff --git a/homeassistant/components/anthropic/strings.json b/homeassistant/components/anthropic/strings.json index 9550a1a6672..c2caf3a6666 100644 --- a/homeassistant/components/anthropic/strings.json +++ b/homeassistant/components/anthropic/strings.json @@ -23,12 +23,17 @@ "max_tokens": "Maximum tokens to return in response", "temperature": "Temperature", "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", - "recommended": "Recommended model settings" + "recommended": "Recommended model settings", + "thinking_budget_tokens": "Thinking budget" }, "data_description": { - "prompt": "Instruct how the LLM should respond. This can be a template." + "prompt": "Instruct how the LLM should respond. This can be a template.", + "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking." } } + }, + "error": { + "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget." 
} } } diff --git a/homeassistant/components/apcupsd/strings.json b/homeassistant/components/apcupsd/strings.json index 93102ac1393..fb5df9ec390 100644 --- a/homeassistant/components/apcupsd/strings.json +++ b/homeassistant/components/apcupsd/strings.json @@ -57,7 +57,7 @@ "name": "Status date" }, "dip_switch_settings": { - "name": "Dip switch settings" + "name": "DIP switch settings" }, "low_battery_signal": { "name": "Low battery signal" diff --git a/homeassistant/components/aprilaire/manifest.json b/homeassistant/components/aprilaire/manifest.json index 577de8ae88d..b40460dd61b 100644 --- a/homeassistant/components/aprilaire/manifest.json +++ b/homeassistant/components/aprilaire/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["pyaprilaire"], - "requirements": ["pyaprilaire==0.7.7"] + "requirements": ["pyaprilaire==0.8.1"] } diff --git a/homeassistant/components/aquacell/config_flow.py b/homeassistant/components/aquacell/config_flow.py index 1ee89035d93..277cb742486 100644 --- a/homeassistant/components/aquacell/config_flow.py +++ b/homeassistant/components/aquacell/config_flow.py @@ -60,7 +60,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AuthenticationFailed: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/arwn/sensor.py b/homeassistant/components/arwn/sensor.py index a31156bbba6..4cc4feed2d4 100644 --- a/homeassistant/components/arwn/sensor.py +++ b/homeassistant/components/arwn/sensor.py @@ -6,7 +6,11 @@ import logging from typing import Any from homeassistant.components import mqtt -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorStateClass, +) from homeassistant.const import DEGREE, UnitOfPrecipitationDepth, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -98,6 +102,7 @@ def discover_sensors(topic: str, payload: dict[str, Any]) -> list[ArwnSensor] | DEGREE, "mdi:compass", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), ] return None @@ -178,6 +183,7 @@ class ArwnSensor(SensorEntity): units: str, icon: str | None = None, device_class: SensorDeviceClass | None = None, + state_class: SensorStateClass | None = None, ) -> None: """Initialize the sensor.""" self.entity_id = _slug(name) @@ -188,6 +194,7 @@ class ArwnSensor(SensorEntity): self._attr_native_unit_of_measurement = units self._attr_icon = icon self._attr_device_class = device_class + self._attr_state_class = state_class def set_event(self, event: dict[str, Any]) -> None: """Update the sensor with the most recent event.""" diff --git a/homeassistant/components/assist_pipeline/__init__.py b/homeassistant/components/assist_pipeline/__init__.py index 9a32821e3a0..59bd987d90e 100644 --- a/homeassistant/components/assist_pipeline/__init__.py +++ b/homeassistant/components/assist_pipeline/__init__.py @@ -117,7 +117,7 @@ async def async_pipeline_from_audio_stream( """ with chat_session.async_get_chat_session(hass, conversation_id) as session: pipeline_input = PipelineInput( - conversation_id=session.conversation_id, + session=session, device_id=device_id, stt_metadata=stt_metadata, stt_stream=stt_stream, 
diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 75811a0ec36..a205db4e615 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -19,14 +19,7 @@ import wave import hass_nabucasa import voluptuous as vol -from homeassistant.components import ( - conversation, - media_source, - stt, - tts, - wake_word, - websocket_api, -) +from homeassistant.components import conversation, stt, tts, wake_word, websocket_api from homeassistant.components.tts import ( generate_media_source_id as tts_generate_media_source_id, ) @@ -96,6 +89,9 @@ ENGINE_LANGUAGE_PAIRS = ( ) KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN) +KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey( + "pipeline_conversation_data" +) def validate_language(data: dict[str, Any]) -> Any: @@ -129,7 +125,7 @@ SAVE_DELAY = 10 @callback def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool: """Filter out intents that are not local fallback.""" - return result.intent.name in (intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND) + return result.intent.name in (intent.INTENT_GET_STATE) @callback @@ -566,8 +562,7 @@ class PipelineRun: id: str = field(default_factory=ulid_util.ulid_now) stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False) - tts_engine: str = field(init=False, repr=False) - tts_options: dict | None = field(init=False, default=None) + tts_stream: tts.ResultStream | None = field(init=False, default=None) wake_word_entity_id: str | None = field(init=False, default=None, repr=False) wake_word_entity: wake_word.WakeWordDetectionEntity = field(init=False, repr=False) @@ -590,6 +585,12 @@ class PipelineRun: _device_id: str | None = None """Optional device id set during run start.""" + _conversation_data: PipelineConversationData | None = None + """Data tied to the conversation ID.""" + + _intent_agent_only = False + """If request should only be handled by agent, ignoring sentence triggers and local processing.""" + def __post_init__(self) -> None: """Set language for pipeline.""" self.language = self.pipeline.language or self.hass.config.language @@ -639,13 +640,19 @@ class PipelineRun: self._device_id = device_id self._start_debug_recording_thread() - data = { + data: dict[str, Any] = { "pipeline": self.pipeline.id, "language": self.language, "conversation_id": conversation_id, } if self.runner_data is not None: data["runner_data"] = self.runner_data + if self.tts_stream: + data["tts_output"] = { + "token": self.tts_stream.token, + "url": self.tts_stream.url, + "mime_type": self.tts_stream.content_type, + } self.process_event(PipelineEvent(PipelineEventType.RUN_START, data)) @@ -1007,19 +1014,36 @@ class PipelineRun: yield chunk.audio - async def prepare_recognize_intent(self) -> None: + async def prepare_recognize_intent(self, session: chat_session.ChatSession) -> None: """Prepare recognizing an intent.""" - agent_info = conversation.async_get_agent_info( - self.hass, - self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT, + self._conversation_data = async_get_pipeline_conversation_data( + self.hass, session ) - if agent_info is None: - engine = self.pipeline.conversation_engine or "default" - raise IntentRecognitionError( - code="intent-not-supported", - message=f"Intent recognition engine {engine} is not found", + if self._conversation_data.continue_conversation_agent is not 
None: + agent_info = conversation.async_get_agent_info( + self.hass, self._conversation_data.continue_conversation_agent ) + self._conversation_data.continue_conversation_agent = None + if agent_info is None: + raise IntentRecognitionError( + code="intent-agent-not-found", + message=f"Intent recognition engine {self._conversation_data.continue_conversation_agent} asked for follow-up but is no longer found", + ) + self._intent_agent_only = True + + else: + agent_info = conversation.async_get_agent_info( + self.hass, + self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT, + ) + + if agent_info is None: + engine = self.pipeline.conversation_engine or "default" + raise IntentRecognitionError( + code="intent-not-supported", + message=f"Intent recognition engine {engine} is not found", + ) self.intent_agent = agent_info.id @@ -1031,7 +1055,7 @@ class PipelineRun: conversation_extra_system_prompt: str | None, ) -> str: """Run intent recognition portion of pipeline. Returns text to speak.""" - if self.intent_agent is None: + if self.intent_agent is None or self._conversation_data is None: raise RuntimeError("Recognize intent was not prepared") if self.pipeline.conversation_language == MATCH_ALL: @@ -1078,7 +1102,7 @@ class PipelineRun: agent_id = self.intent_agent processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT intent_response: intent.IntentResponse | None = None - if not processed_locally: + if not processed_locally and not self._intent_agent_only: # Sentence triggers override conversation agent if ( trigger_response_text @@ -1195,6 +1219,9 @@ class PipelineRun: ) ) + if conversation_result.continue_conversation: + self._conversation_data.continue_conversation_agent = agent_id + return speech async def prepare_text_to_speech(self) -> None: @@ -1217,36 +1244,31 @@ class PipelineRun: tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH try: - options_supported = await tts.async_support_options( - self.hass, - engine, - self.pipeline.tts_language, - tts_options, + self.tts_stream = tts.async_create_stream( + hass=self.hass, + engine=engine, + language=self.pipeline.tts_language, + options=tts_options, ) except HomeAssistantError as err: - raise TextToSpeechError( - code="tts-not-supported", - message=f"Text-to-speech engine '{engine}' not found", - ) from err - if not options_supported: raise TextToSpeechError( code="tts-not-supported", message=( f"Text-to-speech engine {engine} " - f"does not support language {self.pipeline.tts_language} or options {tts_options}" + f"does not support language {self.pipeline.tts_language} or options {tts_options}:" + f" {err}" ), - ) - - self.tts_engine = engine - self.tts_options = tts_options + ) from err async def text_to_speech(self, tts_input: str) -> None: """Run text-to-speech portion of pipeline.""" + assert self.tts_stream is not None + self.process_event( PipelineEvent( PipelineEventType.TTS_START, { - "engine": self.tts_engine, + "engine": self.tts_stream.engine, "language": self.pipeline.tts_language, "voice": self.pipeline.tts_voice, "tts_input": tts_input, @@ -1259,14 +1281,9 @@ class PipelineRun: tts_media_id = tts_generate_media_source_id( self.hass, tts_input, - engine=self.tts_engine, - language=self.pipeline.tts_language, - options=self.tts_options, - ) - tts_media = await media_source.async_resolve_media( - self.hass, - tts_media_id, - None, + engine=self.tts_stream.engine, + language=self.tts_stream.language, + options=self.tts_stream.options, ) except Exception as src_error: 
_LOGGER.exception("Unexpected error during text-to-speech") @@ -1275,10 +1292,13 @@ class PipelineRun: message="Unexpected error during text-to-speech", ) from src_error - _LOGGER.debug("TTS result %s", tts_media) + self.tts_stream.async_set_message(tts_input) + tts_output = { "media_id": tts_media_id, - **asdict(tts_media), + "token": self.tts_stream.token, + "url": self.tts_stream.url, + "mime_type": self.tts_stream.content_type, } self.process_event( @@ -1458,8 +1478,8 @@ class PipelineInput: run: PipelineRun - conversation_id: str - """Identifier for the conversation.""" + session: chat_session.ChatSession + """Session for the conversation.""" stt_metadata: stt.SpeechMetadata | None = None """Metadata of stt input audio. Required when start_stage = stt.""" @@ -1484,7 +1504,9 @@ class PipelineInput: async def execute(self) -> None: """Run pipeline.""" - self.run.start(conversation_id=self.conversation_id, device_id=self.device_id) + self.run.start( + conversation_id=self.session.conversation_id, device_id=self.device_id + ) current_stage: PipelineStage | None = self.run.start_stage stt_audio_buffer: list[EnhancedAudioChunk] = [] stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None @@ -1568,7 +1590,7 @@ class PipelineInput: assert intent_input is not None tts_input = await self.run.recognize_intent( intent_input, - self.conversation_id, + self.session.conversation_id, self.device_id, self.conversation_extra_system_prompt, ) @@ -1652,7 +1674,7 @@ class PipelineInput: <= PIPELINE_STAGE_ORDER.index(PipelineStage.INTENT) <= end_stage_index ): - prepare_tasks.append(self.run.prepare_recognize_intent()) + prepare_tasks.append(self.run.prepare_recognize_intent(self.session)) if ( start_stage_index @@ -1931,7 +1953,7 @@ class PipelineRunDebug: class PipelineStore(Store[SerializedPipelineStorageCollection]): - """Store entity registry data.""" + """Store pipeline data.""" async def _async_migrate_func( self, @@ -2013,3 +2035,37 @@ async def async_run_migrations(hass: HomeAssistant) -> None: for pipeline, attr_updates in updates: await async_update_pipeline(hass, pipeline, **attr_updates) + + +@dataclass +class PipelineConversationData: + """Hold data for the duration of a conversation.""" + + continue_conversation_agent: str | None = None + """The agent that requested the conversation to be continued.""" + + +@callback +def async_get_pipeline_conversation_data( + hass: HomeAssistant, session: chat_session.ChatSession +) -> PipelineConversationData: + """Get the pipeline data for a specific conversation.""" + all_conversation_data = hass.data.get(KEY_PIPELINE_CONVERSATION_DATA) + if all_conversation_data is None: + all_conversation_data = {} + hass.data[KEY_PIPELINE_CONVERSATION_DATA] = all_conversation_data + + data = all_conversation_data.get(session.conversation_id) + + if data is not None: + return data + + @callback + def do_cleanup() -> None: + """Handle cleanup.""" + all_conversation_data.pop(session.conversation_id) + + session.async_on_cleanup(do_cleanup) + + data = all_conversation_data[session.conversation_id] = PipelineConversationData() + return data diff --git a/homeassistant/components/assist_pipeline/websocket_api.py b/homeassistant/components/assist_pipeline/websocket_api.py index d2d54a1b7c3..937b3a0ea45 100644 --- a/homeassistant/components/assist_pipeline/websocket_api.py +++ b/homeassistant/components/assist_pipeline/websocket_api.py @@ -239,7 +239,7 @@ async def websocket_run( with chat_session.async_get_chat_session( hass, msg.get("conversation_id") ) as 
session: - input_args["conversation_id"] = session.conversation_id + input_args["session"] = session pipeline_input = PipelineInput(**input_args) try: diff --git a/homeassistant/components/assist_satellite/__init__.py b/homeassistant/components/assist_satellite/__init__.py index 038ff517264..3338f223bc9 100644 --- a/homeassistant/components/assist_satellite/__init__.py +++ b/homeassistant/components/assist_satellite/__init__.py @@ -1,9 +1,11 @@ """Base class for assist satellite entities.""" import logging +from pathlib import Path import voluptuous as vol +from homeassistant.components.http import StaticPathConfig from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv @@ -15,6 +17,8 @@ from .const import ( CONNECTION_TEST_DATA, DATA_COMPONENT, DOMAIN, + PREANNOUNCE_FILENAME, + PREANNOUNCE_URL, AssistSatelliteEntityFeature, ) from .entity import ( @@ -56,6 +60,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: { vol.Optional("message"): str, vol.Optional("media_id"): str, + vol.Optional("preannounce"): bool, + vol.Optional("preannounce_media_id"): str, } ), cv.has_at_least_one_key("message", "media_id"), @@ -70,6 +76,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: { vol.Optional("start_message"): str, vol.Optional("start_media_id"): str, + vol.Optional("preannounce"): bool, + vol.Optional("preannounce_media_id"): str, vol.Optional("extra_system_prompt"): str, } ), @@ -82,6 +90,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async_register_websocket_api(hass) hass.http.register_view(ConnectionTestView()) + # Default preannounce sound + await hass.http.async_register_static_paths( + [ + StaticPathConfig( + PREANNOUNCE_URL, str(Path(__file__).parent / PREANNOUNCE_FILENAME) + ) + ] + ) + return True diff --git a/homeassistant/components/assist_satellite/const.py b/homeassistant/components/assist_satellite/const.py index f7ac7e524b4..7fca88f3b12 100644 --- a/homeassistant/components/assist_satellite/const.py +++ b/homeassistant/components/assist_satellite/const.py @@ -20,6 +20,9 @@ CONNECTION_TEST_DATA: HassKey[dict[str, asyncio.Event]] = HassKey( f"{DOMAIN}_connection_tests" ) +PREANNOUNCE_FILENAME = "preannounce.mp3" +PREANNOUNCE_URL = f"/api/assist_satellite/static/{PREANNOUNCE_FILENAME}" + class AssistSatelliteEntityFeature(IntFlag): """Supported features of Assist satellite entity.""" diff --git a/homeassistant/components/assist_satellite/entity.py b/homeassistant/components/assist_satellite/entity.py index 8c63525294c..dc20c7650d7 100644 --- a/homeassistant/components/assist_satellite/entity.py +++ b/homeassistant/components/assist_satellite/entity.py @@ -23,15 +23,12 @@ from homeassistant.components.assist_pipeline import ( vad, ) from homeassistant.components.media_player import async_process_play_media_url -from homeassistant.components.tts import ( - generate_media_source_id as tts_generate_media_source_id, -) from homeassistant.core import Context, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import chat_session, entity from homeassistant.helpers.entity import EntityDescription -from .const import AssistSatelliteEntityFeature +from .const import PREANNOUNCE_URL, AssistSatelliteEntityFeature from .errors import AssistSatelliteError, SatelliteBusyError _LOGGER = logging.getLogger(__name__) @@ -98,9 +95,15 @@ class AssistSatelliteAnnouncement: 
original_media_id: str """The raw media ID before processing.""" + tts_token: str | None + """The TTS token of the media.""" + media_id_source: Literal["url", "media_id", "tts"] """Source of the media ID.""" + preannounce_media_id: str | None = None + """Media ID to be played before announcement.""" + class AssistSatelliteEntity(entity.Entity): """Entity encapsulating the state and functionality of an Assist satellite.""" @@ -177,6 +180,8 @@ class AssistSatelliteEntity(entity.Entity): self, message: str | None = None, media_id: str | None = None, + preannounce: bool = True, + preannounce_media_id: str = PREANNOUNCE_URL, ) -> None: """Play and show an announcement on the satellite. @@ -186,6 +191,9 @@ class AssistSatelliteEntity(entity.Entity): If media_id is provided, it is played directly. It is possible to omit the message and the satellite will not show any text. + If preannounce is True, a sound is played before the announcement. + If preannounce_media_id is provided, it overrides the default sound. + Calls async_announce with message and media id. """ await self._cancel_running_pipeline() @@ -193,7 +201,11 @@ class AssistSatelliteEntity(entity.Entity): if message is None: message = "" - announcement = await self._resolve_announcement_media_id(message, media_id) + announcement = await self._resolve_announcement_media_id( + message, + media_id, + preannounce_media_id=preannounce_media_id if preannounce else None, + ) if self._is_announcing: raise SatelliteBusyError @@ -220,6 +232,8 @@ class AssistSatelliteEntity(entity.Entity): start_message: str | None = None, start_media_id: str | None = None, extra_system_prompt: str | None = None, + preannounce: bool = True, + preannounce_media_id: str = PREANNOUNCE_URL, ) -> None: """Start a conversation from the satellite. @@ -229,6 +243,9 @@ class AssistSatelliteEntity(entity.Entity): If start_media_id is provided, it is played directly. It is possible to omit the message and the satellite will not show any text. + If preannounce is True, a sound is played before the start message or media. + If preannounce_media_id is provided, it overrides the default sound. + Calls async_start_conversation. 
""" await self._cancel_running_pipeline() @@ -244,13 +261,17 @@ class AssistSatelliteEntity(entity.Entity): start_message = "" announcement = await self._resolve_announcement_media_id( - start_message, start_media_id + start_message, + start_media_id, + preannounce_media_id=preannounce_media_id if preannounce else None, ) if self._is_announcing: raise SatelliteBusyError self._is_announcing = True + self._set_state(AssistSatelliteState.RESPONDING) + # Provide our start info to the LLM so it understands context of incoming message if extra_system_prompt is not None: self._extra_system_prompt = extra_system_prompt @@ -280,6 +301,7 @@ class AssistSatelliteEntity(entity.Entity): raise finally: self._is_announcing = False + self._set_state(AssistSatelliteState.IDLE) async def async_start_conversation( self, start_announcement: AssistSatelliteAnnouncement @@ -470,20 +492,27 @@ class AssistSatelliteEntity(entity.Entity): return vad.VadSensitivity.to_seconds(vad_sensitivity) async def _resolve_announcement_media_id( - self, message: str, media_id: str | None + self, + message: str, + media_id: str | None, + preannounce_media_id: str | None = None, ) -> AssistSatelliteAnnouncement: """Resolve the media ID.""" media_id_source: Literal["url", "media_id", "tts"] | None = None + tts_token: str | None = None if media_id: original_media_id = media_id - else: media_id_source = "tts" # Synthesize audio and get URL pipeline_id = self._resolve_pipeline() pipeline = async_get_pipeline(self.hass, pipeline_id) + engine = tts.async_resolve_engine(self.hass, pipeline.tts_engine) + if engine is None: + raise HomeAssistantError(f"TTS engine {pipeline.tts_engine} not found") + tts_options: dict[str, Any] = {} if pipeline.tts_voice is not None: tts_options[tts.ATTR_VOICE] = pipeline.tts_voice @@ -491,14 +520,23 @@ class AssistSatelliteEntity(entity.Entity): if self.tts_options is not None: tts_options.update(self.tts_options) - media_id = tts_generate_media_source_id( + stream = tts.async_create_stream( self.hass, - message, - engine=pipeline.tts_engine, + engine=engine, + language=pipeline.tts_language, + options=tts_options, + ) + stream.async_set_message(message) + + tts_token = stream.token + media_id = stream.url + original_media_id = tts.generate_media_source_id( + self.hass, + message, + engine=engine, language=pipeline.tts_language, options=tts_options, ) - original_media_id = media_id if media_source.is_media_source_id(media_id): if not media_id_source: @@ -516,6 +554,26 @@ class AssistSatelliteEntity(entity.Entity): # Resolve to full URL media_id = async_process_play_media_url(self.hass, media_id) + # Resolve preannounce media id + if preannounce_media_id: + if media_source.is_media_source_id(preannounce_media_id): + preannounce_media = await media_source.async_resolve_media( + self.hass, + preannounce_media_id, + None, + ) + preannounce_media_id = preannounce_media.url + + # Resolve to full URL + preannounce_media_id = async_process_play_media_url( + self.hass, preannounce_media_id + ) + return AssistSatelliteAnnouncement( - message, media_id, original_media_id, media_id_source + message=message, + media_id=media_id, + original_media_id=original_media_id, + tts_token=tts_token, + media_id_source=media_id_source, + preannounce_media_id=preannounce_media_id, ) diff --git a/homeassistant/components/assist_satellite/preannounce.mp3 b/homeassistant/components/assist_satellite/preannounce.mp3 new file mode 100644 index 00000000000..6e2fa0aba3e Binary files /dev/null and 
b/homeassistant/components/assist_satellite/preannounce.mp3 differ diff --git a/homeassistant/components/assist_satellite/services.yaml b/homeassistant/components/assist_satellite/services.yaml index 89a20ada6f3..d88710c4c4e 100644 --- a/homeassistant/components/assist_satellite/services.yaml +++ b/homeassistant/components/assist_satellite/services.yaml @@ -8,12 +8,22 @@ announce: message: required: false example: "Time to wake up!" + default: "" selector: text: media_id: required: false selector: text: + preannounce: + required: false + default: true + selector: + boolean: + preannounce_media_id: + required: false + selector: + text: start_conversation: target: entity: @@ -24,6 +34,7 @@ start_conversation: start_message: required: false example: "You left the lights on in the living room. Turn them off?" + default: "" selector: text: start_media_id: @@ -34,3 +45,12 @@ start_conversation: required: false selector: text: + preannounce: + required: false + default: true + selector: + boolean: + preannounce_media_id: + required: false + selector: + text: diff --git a/homeassistant/components/assist_satellite/strings.json b/homeassistant/components/assist_satellite/strings.json index fa2dc984ab7..b69711c7106 100644 --- a/homeassistant/components/assist_satellite/strings.json +++ b/homeassistant/components/assist_satellite/strings.json @@ -23,6 +23,14 @@ "media_id": { "name": "Media ID", "description": "The media ID to announce instead of using text-to-speech." + }, + "preannounce": { + "name": "Preannounce", + "description": "Play a sound before the announcement." + }, + "preannounce_media_id": { + "name": "Preannounce media ID", + "description": "Custom media ID to play before the announcement." } } }, @@ -41,6 +49,14 @@ "extra_system_prompt": { "name": "Extra system prompt", "description": "Provide background information to the AI about the request." + }, + "preannounce": { + "name": "Preannounce", + "description": "Play a sound before the start message or media." + }, + "preannounce_media_id": { + "name": "Preannounce media ID", + "description": "Custom media ID to play before the start message or media." 
} } } diff --git a/homeassistant/components/assist_satellite/websocket_api.py b/homeassistant/components/assist_satellite/websocket_api.py index 4fc1708b866..6f8b3d723ad 100644 --- a/homeassistant/components/assist_satellite/websocket_api.py +++ b/homeassistant/components/assist_satellite/websocket_api.py @@ -198,7 +198,8 @@ async def websocket_test_connection( hass.async_create_background_task( satellite.async_internal_announce( - media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}" + media_id=f"{CONNECTION_TEST_URL_BASE}/{connection_id}", + preannounce=False, ), f"assist_satellite_connection_test_{msg['entity_id']}", ) diff --git a/homeassistant/components/asuswrt/strings.json b/homeassistant/components/asuswrt/strings.json index 9d50f50c7e9..cac37c0cfd0 100644 --- a/homeassistant/components/asuswrt/strings.json +++ b/homeassistant/components/asuswrt/strings.json @@ -66,28 +66,28 @@ "name": "Upload" }, "load_avg_1m": { - "name": "Average load (1m)" + "name": "Average load (1 min)" }, "load_avg_5m": { - "name": "Average load (5m)" + "name": "Average load (5 min)" }, "load_avg_15m": { - "name": "Average load (15m)" + "name": "Average load (15 min)" }, "24ghz_temperature": { - "name": "2.4GHz Temperature" + "name": "2.4GHz temperature" }, "5ghz_temperature": { - "name": "5GHz Temperature" + "name": "5GHz temperature" }, "cpu_temperature": { - "name": "CPU Temperature" + "name": "CPU temperature" }, "5ghz_2_temperature": { - "name": "5GHz Temperature (Radio 2)" + "name": "5GHz temperature (Radio 2)" }, "6ghz_temperature": { - "name": "6GHz Temperature" + "name": "6GHz temperature" }, "cpu_usage": { "name": "CPU usage" diff --git a/homeassistant/components/azure_devops/strings.json b/homeassistant/components/azure_devops/strings.json index f5fe5cd06a7..611a8b9a758 100644 --- a/homeassistant/components/azure_devops/strings.json +++ b/homeassistant/components/azure_devops/strings.json @@ -14,7 +14,7 @@ "personal_access_token": "Personal Access Token (PAT)" }, "description": "Set up an Azure DevOps instance to access your project. A Personal Access Token is only required for a private project.", - "title": "Add Azure DevOps Project" + "title": "Add Azure DevOps project" }, "reauth_confirm": { "data": { @@ -32,7 +32,7 @@ "entity": { "sensor": { "build_id": { - "name": "{definition_name} latest build id" + "name": "{definition_name} latest build ID" }, "finish_time": { "name": "{definition_name} latest build finish time" @@ -59,7 +59,7 @@ "name": "{definition_name} latest build start time" }, "url": { - "name": "{definition_name} latest build url" + "name": "{definition_name} latest build URL" }, "work_item_count": { "name": "{item_type} {item_state} work items" @@ -68,7 +68,7 @@ }, "exceptions": { "authentication_failed": { - "message": "Could not authorize with Azure DevOps for {title}. You will need to update your personal access token." + "message": "Could not authorize with Azure DevOps for {title}. You will need to update your Personal Access Token." 
} } } diff --git a/homeassistant/components/azure_storage/__init__.py b/homeassistant/components/azure_storage/__init__.py index 873a9ab90ca..f22e7b70c12 100644 --- a/homeassistant/components/azure_storage/__init__.py +++ b/homeassistant/components/azure_storage/__init__.py @@ -13,7 +13,11 @@ from azure.storage.blob.aio import ContainerClient from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) from homeassistant.helpers.aiohttp_client import async_create_clientsession from .const import ( @@ -52,7 +56,7 @@ async def async_setup_entry( translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]}, ) from err except ClientAuthenticationError as err: - raise ConfigEntryError( + raise ConfigEntryAuthFailed( translation_domain=DOMAIN, translation_key="invalid_auth", translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]}, diff --git a/homeassistant/components/azure_storage/backup.py b/homeassistant/components/azure_storage/backup.py index 6f39295761d..4a9254213dc 100644 --- a/homeassistant/components/azure_storage/backup.py +++ b/homeassistant/components/azure_storage/backup.py @@ -141,7 +141,7 @@ class AzureStorageBackupAgent(BackupAgent): """Delete a backup file.""" blob = await self._find_blob_by_backup_id(backup_id) if blob is None: - return + raise BackupNotFound(f"Backup {backup_id} not found") await self._client.delete_blob(blob.name) @handle_backup_errors @@ -163,11 +163,11 @@ class AzureStorageBackupAgent(BackupAgent): self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: + ) -> AgentBackup: """Return a backup.""" blob = await self._find_blob_by_backup_id(backup_id) if blob is None: - return None + raise BackupNotFound(f"Backup {backup_id} not found") return AgentBackup.from_dict(json.loads(blob.metadata["backup_metadata"])) @@ -175,7 +175,8 @@ class AzureStorageBackupAgent(BackupAgent): """Find a blob by backup id.""" async for blob in self._client.list_blobs(include="metadata"): if ( - backup_id == blob.metadata.get("backup_id", "") + blob.metadata is not None + and backup_id == blob.metadata.get("backup_id", "") and blob.metadata.get("metadata_version") == METADATA_VERSION ): return blob diff --git a/homeassistant/components/azure_storage/config_flow.py b/homeassistant/components/azure_storage/config_flow.py index e5b1214fa5b..2862d290f95 100644 --- a/homeassistant/components/azure_storage/config_flow.py +++ b/homeassistant/components/azure_storage/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Azure Storage integration.""" +from collections.abc import Mapping import logging from typing import Any @@ -26,6 +27,26 @@ _LOGGER = logging.getLogger(__name__) class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for azure storage.""" + def get_account_url(self, account_name: str) -> str: + """Get the account URL.""" + return f"https://{account_name}.blob.core.windows.net/" + + async def validate_config( + self, container_client: ContainerClient + ) -> dict[str, str]: + """Validate the configuration.""" + errors: dict[str, str] = {} + try: + await container_client.exists() + except ResourceNotFoundError: + errors["base"] = "cannot_connect" + except ClientAuthenticationError: + errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth" + except Exception: + _LOGGER.exception("Unknown exception 
occurred") + errors["base"] = "unknown" + return errors + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -38,20 +59,13 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN): {CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]} ) container_client = ContainerClient( - account_url=f"https://{user_input[CONF_ACCOUNT_NAME]}.blob.core.windows.net/", + account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]), container_name=user_input[CONF_CONTAINER_NAME], credential=user_input[CONF_STORAGE_ACCOUNT_KEY], transport=AioHttpTransport(session=async_get_clientsession(self.hass)), ) - try: - await container_client.exists() - except ResourceNotFoundError: - errors["base"] = "cannot_connect" - except ClientAuthenticationError: - errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth" - except Exception: - _LOGGER.exception("Unknown exception occurred") - errors["base"] = "unknown" + errors = await self.validate_config(container_client) + if not errors: return self.async_create_entry( title=f"{user_input[CONF_ACCOUNT_NAME]}/{user_input[CONF_CONTAINER_NAME]}", @@ -70,3 +84,77 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauth dialog.""" + errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() + + if user_input is not None: + container_client = ContainerClient( + account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]), + container_name=reauth_entry.data[CONF_CONTAINER_NAME], + credential=user_input[CONF_STORAGE_ACCOUNT_KEY], + transport=AioHttpTransport(session=async_get_clientsession(self.hass)), + ) + errors = await self.validate_config(container_client) + if not errors: + return self.async_update_reload_and_abort( + reauth_entry, + data={**reauth_entry.data, **user_input}, + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=vol.Schema( + { + vol.Required(CONF_STORAGE_ACCOUNT_KEY): str, + } + ), + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Reconfigure the entry.""" + errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() + + if user_input is not None: + container_client = ContainerClient( + account_url=self.get_account_url( + reconfigure_entry.data[CONF_ACCOUNT_NAME] + ), + container_name=user_input[CONF_CONTAINER_NAME], + credential=user_input[CONF_STORAGE_ACCOUNT_KEY], + transport=AioHttpTransport(session=async_get_clientsession(self.hass)), + ) + errors = await self.validate_config(container_client) + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, + data={**reconfigure_entry.data, **user_input}, + ) + return self.async_show_form( + data_schema=vol.Schema( + { + vol.Required( + CONF_CONTAINER_NAME, + default=reconfigure_entry.data[CONF_CONTAINER_NAME], + ): str, + vol.Required( + CONF_STORAGE_ACCOUNT_KEY, + default=reconfigure_entry.data[CONF_STORAGE_ACCOUNT_KEY], + ): str, + } + ), + errors=errors, + ) diff --git a/homeassistant/components/azure_storage/manifest.json b/homeassistant/components/azure_storage/manifest.json index 8f2d8aeaca7..729334f851d 100644 --- 
a/homeassistant/components/azure_storage/manifest.json +++ b/homeassistant/components/azure_storage/manifest.json @@ -7,6 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["azure-storage-blob"], - "quality_scale": "bronze", + "quality_scale": "platinum", "requirements": ["azure-storage-blob==12.24.0"] } diff --git a/homeassistant/components/azure_storage/quality_scale.yaml b/homeassistant/components/azure_storage/quality_scale.yaml index 6b6f90de494..6199ba514a3 100644 --- a/homeassistant/components/azure_storage/quality_scale.yaml +++ b/homeassistant/components/azure_storage/quality_scale.yaml @@ -57,7 +57,7 @@ rules: status: exempt comment: | This integration does not have platforms. - reauthentication-flow: todo + reauthentication-flow: done test-coverage: done # Gold @@ -121,7 +121,7 @@ rules: status: exempt comment: | This integration does not have entities. - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: done stale-devices: status: exempt diff --git a/homeassistant/components/azure_storage/strings.json b/homeassistant/components/azure_storage/strings.json index 4bd4cb0dfba..e9053f113cc 100644 --- a/homeassistant/components/azure_storage/strings.json +++ b/homeassistant/components/azure_storage/strings.json @@ -19,10 +19,34 @@ }, "description": "Set up an Azure (Blob) storage account to be used for backups.", "title": "Add Azure storage account" + }, + "reauth_confirm": { + "data": { + "storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]" + }, + "data_description": { + "storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]" + }, + "description": "Provide a new storage account key.", + "title": "Reauthenticate Azure storage account" + }, + "reconfigure": { + "data": { + "container_name": "[%key:component::azure_storage::config::step::user::data::container_name%]", + "storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]" + }, + "data_description": { + "container_name": "[%key:component::azure_storage::config::step::user::data_description::container_name%]", + "storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]" + }, + "description": "Change the settings of the Azure storage integration.", + "title": "Reconfigure Azure storage account" } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "issues": { diff --git a/homeassistant/components/backup/__init__.py b/homeassistant/components/backup/__init__.py index d9d1c3cc2fe..124ce8b872c 100644 --- a/homeassistant/components/backup/__init__.py +++ b/homeassistant/components/backup/__init__.py @@ -1,7 +1,9 @@ """The Backup integration.""" +from homeassistant.config_entries import SOURCE_SYSTEM +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, discovery_flow from homeassistant.helpers.backup import DATA_BACKUP from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.typing 
import ConfigType @@ -18,10 +20,12 @@ from .agent import ( ) from .config import BackupConfig, CreateBackupParametersDict from .const import DATA_MANAGER, DOMAIN +from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator from .http import async_register_http_views from .manager import ( BackupManager, BackupManagerError, + BackupPlatformEvent, BackupPlatformProtocol, BackupReaderWriter, BackupReaderWriterError, @@ -52,6 +56,7 @@ __all__ = [ "BackupConfig", "BackupManagerError", "BackupNotFound", + "BackupPlatformEvent", "BackupPlatformProtocol", "BackupReaderWriter", "BackupReaderWriterError", @@ -74,6 +79,8 @@ __all__ = [ "suggested_filename_from_name_date", ] +PLATFORMS = [Platform.SENSOR] + CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @@ -128,4 +135,28 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async_register_http_views(hass) + discovery_flow.async_create_flow( + hass, DOMAIN, context={"source": SOURCE_SYSTEM}, data={} + ) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool: + """Set up a config entry.""" + backup_manager: BackupManager = hass.data[DATA_MANAGER] + coordinator = BackupDataUpdateCoordinator(hass, entry, backup_manager) + await coordinator.async_config_entry_first_refresh() + + entry.async_on_unload(coordinator.async_unsubscribe) + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: BackupConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/backup/agent.py b/homeassistant/components/backup/agent.py index 9530f386c7b..8093ac88338 100644 --- a/homeassistant/components/backup/agent.py +++ b/homeassistant/components/backup/agent.py @@ -41,6 +41,8 @@ class BackupAgent(abc.ABC): ) -> AsyncIterator[bytes]: """Download a backup file. + Raises BackupNotFound if the backup does not exist. + :param backup_id: The ID of the backup that was returned in async_list_backups. :return: An async iterator that yields bytes. """ @@ -67,6 +69,8 @@ class BackupAgent(abc.ABC): ) -> None: """Delete a backup file. + Raises BackupNotFound if the backup does not exist. + :param backup_id: The ID of the backup that was returned in async_list_backups. """ @@ -79,8 +83,11 @@ class BackupAgent(abc.ABC): self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: - """Return a backup.""" + ) -> AgentBackup: + """Return a backup. + + Raises BackupNotFound if the backup does not exist. 
+ """ class LocalBackupAgent(BackupAgent): diff --git a/homeassistant/components/backup/backup.py b/homeassistant/components/backup/backup.py index c3a46a6ab1f..de2cfecb1a5 100644 --- a/homeassistant/components/backup/backup.py +++ b/homeassistant/components/backup/backup.py @@ -88,13 +88,13 @@ class CoreLocalBackupAgent(LocalBackupAgent): self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: + ) -> AgentBackup: """Return a backup.""" if not self._loaded_backups: await self._load_backups() if backup_id not in self._backups: - return None + raise BackupNotFound(f"Backup {backup_id} not found") backup, backup_path = self._backups[backup_id] if not await self._hass.async_add_executor_job(backup_path.exists): @@ -107,7 +107,7 @@ class CoreLocalBackupAgent(LocalBackupAgent): backup_path, ) self._backups.pop(backup_id) - return None + raise BackupNotFound(f"Backup {backup_id} not found") return backup @@ -130,10 +130,7 @@ class CoreLocalBackupAgent(LocalBackupAgent): if not self._loaded_backups: await self._load_backups() - try: - backup_path = self.get_backup_path(backup_id) - except BackupNotFound: - return + backup_path = self.get_backup_path(backup_id) await self._hass.async_add_executor_job(backup_path.unlink, True) LOGGER.debug("Deleted backup located at %s", backup_path) self._backups.pop(backup_id) diff --git a/homeassistant/components/backup/config_flow.py b/homeassistant/components/backup/config_flow.py new file mode 100644 index 00000000000..ab1f884ea86 --- /dev/null +++ b/homeassistant/components/backup/config_flow.py @@ -0,0 +1,21 @@ +"""Config flow for Home Assistant Backup integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult + +from .const import DOMAIN + + +class BackupConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Home Assistant Backup.""" + + VERSION = 1 + + async def async_step_system( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + return self.async_create_entry(title="Backup", data={}) diff --git a/homeassistant/components/backup/const.py b/homeassistant/components/backup/const.py index c2070a37b2d..773deaef174 100644 --- a/homeassistant/components/backup/const.py +++ b/homeassistant/components/backup/const.py @@ -16,8 +16,8 @@ DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN) LOGGER = getLogger(__package__) EXCLUDE_FROM_BACKUP = [ - "__pycache__/*", - ".DS_Store", + "**/__pycache__/*", + "**/.DS_Store", ".HA_RESTORE", "*.db-shm", "*.log.*", diff --git a/homeassistant/components/backup/coordinator.py b/homeassistant/components/backup/coordinator.py new file mode 100644 index 00000000000..377f23567e0 --- /dev/null +++ b/homeassistant/components/backup/coordinator.py @@ -0,0 +1,81 @@ +"""Coordinator for Home Assistant Backup integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.backup import ( + async_subscribe_events, + async_subscribe_platform_events, +) +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN, LOGGER +from .manager import ( + BackupManager, + BackupManagerState, + BackupPlatformEvent, + ManagerStateEvent, +) + +type BackupConfigEntry = 
ConfigEntry[BackupDataUpdateCoordinator] + + +@dataclass +class BackupCoordinatorData: + """Class to hold backup data.""" + + backup_manager_state: BackupManagerState + last_successful_automatic_backup: datetime | None + next_scheduled_automatic_backup: datetime | None + + +class BackupDataUpdateCoordinator(DataUpdateCoordinator[BackupCoordinatorData]): + """Class to retrieve backup status.""" + + config_entry: ConfigEntry + + def __init__( + self, + hass: HomeAssistant, + config_entry: ConfigEntry, + backup_manager: BackupManager, + ) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + LOGGER, + config_entry=config_entry, + name=DOMAIN, + update_interval=None, + ) + self.unsubscribe: list[Callable[[], None]] = [ + async_subscribe_events(hass, self._on_event), + async_subscribe_platform_events(hass, self._on_event), + ] + + self.backup_manager = backup_manager + + @callback + def _on_event(self, event: ManagerStateEvent | BackupPlatformEvent) -> None: + """Handle new event.""" + LOGGER.debug("Received backup event: %s", event) + self.config_entry.async_create_task(self.hass, self.async_refresh()) + + async def _async_update_data(self) -> BackupCoordinatorData: + """Update backup manager data.""" + return BackupCoordinatorData( + self.backup_manager.state, + self.backup_manager.config.data.last_completed_automatic_backup, + self.backup_manager.config.data.schedule.next_automatic_backup, + ) + + @callback + def async_unsubscribe(self) -> None: + """Unsubscribe from events.""" + for unsub in self.unsubscribe: + unsub() diff --git a/homeassistant/components/backup/diagnostics.py b/homeassistant/components/backup/diagnostics.py new file mode 100644 index 00000000000..9c3e28bde5b --- /dev/null +++ b/homeassistant/components/backup/diagnostics.py @@ -0,0 +1,27 @@ +"""Diagnostics support for Home Assistant Backup integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from .coordinator import BackupConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: BackupConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + return { + "backup_agents": [ + {"name": agent.name, "agent_id": agent.agent_id} + for agent in coordinator.backup_manager.backup_agents.values() + ], + "backup_config": async_redact_data( + coordinator.backup_manager.config.data.to_dict(), [CONF_PASSWORD] + ), + } diff --git a/homeassistant/components/backup/entity.py b/homeassistant/components/backup/entity.py new file mode 100644 index 00000000000..ff7c7889dc5 --- /dev/null +++ b/homeassistant/components/backup/entity.py @@ -0,0 +1,36 @@ +"""Base for backup entities.""" + +from __future__ import annotations + +from homeassistant.const import __version__ as HA_VERSION +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import BackupDataUpdateCoordinator + + +class BackupManagerEntity(CoordinatorEntity[BackupDataUpdateCoordinator]): + """Base entity for backup manager.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: BackupDataUpdateCoordinator, + entity_description: EntityDescription, + ) -> None: + 
"""Initialize base entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = entity_description.key + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, "backup_manager")}, + manufacturer="Home Assistant", + model="Home Assistant Backup", + sw_version=HA_VERSION, + name="Backup", + entry_type=DeviceEntryType.SERVICE, + configuration_url="homeassistant://config/backup", + ) diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py index 58f44d4a449..8f241e6363d 100644 --- a/homeassistant/components/backup/http.py +++ b/homeassistant/components/backup/http.py @@ -15,6 +15,7 @@ from multidict import istr from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import frame from homeassistant.util import slugify from . import util @@ -59,11 +60,19 @@ class DownloadBackupView(HomeAssistantView): if agent_id not in manager.backup_agents: return Response(status=HTTPStatus.BAD_REQUEST) agent = manager.backup_agents[agent_id] - backup = await agent.async_get_backup(backup_id) + try: + backup = await agent.async_get_backup(backup_id) + except BackupNotFound: + return Response(status=HTTPStatus.NOT_FOUND) - # We don't need to check if the path exists, aiohttp.FileResponse will handle - # that - if backup is None: + # Check for None to be backwards compatible with the old BackupAgent API, + # this can be removed in HA Core 2025.10 + if not backup: + frame.report_usage( + "returns None from BackupAgent.async_get_backup", + breaks_in_ha_version="2025.10", + integration_domain=agent_id.partition(".")[0], + ) return Response(status=HTTPStatus.NOT_FOUND) headers = { @@ -92,6 +101,8 @@ class DownloadBackupView(HomeAssistantView): ) -> StreamResponse | FileResponse | Response: if agent_id in manager.local_backup_agents: local_agent = manager.local_backup_agents[agent_id] + # We don't need to check if the path exists, aiohttp.FileResponse will + # handle that path = local_agent.get_backup_path(backup_id) return FileResponse(path=path.as_posix(), headers=headers) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 872ea0d0e02..43a7be6db8d 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -30,6 +30,7 @@ from homeassistant.backup_restore import ( from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import ( + frame, instance_id, integration_platform, issue_registry as ir, @@ -64,6 +65,7 @@ from .models import ( AgentBackup, BackupError, BackupManagerError, + BackupNotFound, BackupReaderWriterError, BaseBackup, Folder, @@ -227,6 +229,13 @@ class RestoreBackupEvent(ManagerStateEvent): state: RestoreBackupState +@dataclass(frozen=True, kw_only=True, slots=True) +class BackupPlatformEvent: + """Backup platform class.""" + + domain: str + + @dataclass(frozen=True, kw_only=True, slots=True) class BlockedEvent(ManagerStateEvent): """Backup manager blocked, Home Assistant is starting.""" @@ -349,10 +358,13 @@ class BackupManager: # Latest backup event and backup event subscribers self.last_event: ManagerStateEvent = BlockedEvent() - self.last_non_idle_event: ManagerStateEvent | None = None + self.last_action_event: ManagerStateEvent | None = None 
self._backup_event_subscriptions = hass.data[ DATA_BACKUP ].backup_event_subscriptions + self._backup_platform_event_subscriptions = hass.data[ + DATA_BACKUP + ].backup_platform_event_subscriptions async def async_setup(self) -> None: """Set up the backup manager.""" @@ -463,6 +475,9 @@ class BackupManager: LOGGER.debug("%s platforms loaded in total", len(self.platforms)) LOGGER.debug("%s agents loaded in total", len(self.backup_agents)) LOGGER.debug("%s local agents loaded in total", len(self.local_backup_agents)) + event = BackupPlatformEvent(domain=integration_domain) + for subscription in self._backup_platform_event_subscriptions: + subscription(event) async def async_pre_backup_actions(self) -> None: """Perform pre backup actions.""" @@ -665,6 +680,8 @@ class BackupManager: ) for idx, result in enumerate(get_backup_results): agent_id = agent_ids[idx] + if isinstance(result, BackupNotFound): + continue if isinstance(result, BackupAgentError): agent_errors[agent_id] = result continue @@ -676,7 +693,14 @@ class BackupManager: continue if isinstance(result, BaseException): raise result # unexpected error + # Check for None to be backwards compatible with the old BackupAgent API, + # this can be removed in HA Core 2025.10 if not result: + frame.report_usage( + "returns None from BackupAgent.async_get_backup", + breaks_in_ha_version="2025.10", + integration_domain=agent_id.partition(".")[0], + ) continue if backup is None: if known_backup := self.known_backups.get(backup_id): @@ -740,6 +764,8 @@ class BackupManager: ) for idx, result in enumerate(delete_backup_results): agent_id = agent_ids[idx] + if isinstance(result, BackupNotFound): + continue if isinstance(result, BackupAgentError): agent_errors[agent_id] = result continue @@ -849,7 +875,7 @@ class BackupManager: agent_errors = { backup_id: error for backup_id, error in zip(backup_ids, delete_results, strict=True) - if error + if error and not isinstance(error, BackupNotFound) } if agent_errors: LOGGER.error( @@ -1281,7 +1307,20 @@ class BackupManager: ) -> None: """Initiate restoring a backup.""" agent = self.backup_agents[agent_id] - if not await agent.async_get_backup(backup_id): + try: + backup = await agent.async_get_backup(backup_id) + except BackupNotFound as err: + raise BackupManagerError( + f"Backup {backup_id} not found in agent {agent_id}" + ) from err + # Check for None to be backwards compatible with the old BackupAgent API, + # this can be removed in HA Core 2025.10 + if not backup: + frame.report_usage( + "returns None from BackupAgent.async_get_backup", + breaks_in_ha_version="2025.10", + integration_domain=agent_id.partition(".")[0], + ) raise BackupManagerError( f"Backup {backup_id} not found in agent {agent_id}" ) @@ -1311,7 +1350,7 @@ class BackupManager: LOGGER.debug("Backup state: %s -> %s", current_state, new_state) self.last_event = event if not isinstance(event, (BlockedEvent, IdleEvent)): - self.last_non_idle_event = event + self.last_action_event = event for subscription in self._backup_event_subscriptions: subscription(event) @@ -1369,7 +1408,20 @@ class BackupManager: agent = self.backup_agents[agent_id] except KeyError as err: raise BackupManagerError(f"Invalid agent selected: {agent_id}") from err - if not await agent.async_get_backup(backup_id): + try: + backup = await agent.async_get_backup(backup_id) + except BackupNotFound as err: + raise BackupManagerError( + f"Backup {backup_id} not found in agent {agent_id}" + ) from err + # Check for None to be backwards compatible with the old BackupAgent API, + 
# this can be removed in HA Core 2025.10 + if not backup: + frame.report_usage( + "returns None from BackupAgent.async_get_backup", + breaks_in_ha_version="2025.10", + integration_domain=agent_id.partition(".")[0], + ) raise BackupManagerError( f"Backup {backup_id} not found in agent {agent_id}" ) @@ -1674,7 +1726,9 @@ class CoreBackupReaderWriter(BackupReaderWriter): """Filter to filter excludes.""" for exclude in excludes: - if not path.match(exclude): + # The home assistant core configuration directory is added as "data" + # in the tar file, so we need to prefix that path to the filters. + if not path.full_match(f"data/{exclude}"): continue LOGGER.debug("Ignoring %s because of %s", path, exclude) return True diff --git a/homeassistant/components/backup/manifest.json b/homeassistant/components/backup/manifest.json index db0719983b1..3c7b1e5e014 100644 --- a/homeassistant/components/backup/manifest.json +++ b/homeassistant/components/backup/manifest.json @@ -5,8 +5,9 @@ "codeowners": ["@home-assistant/core"], "dependencies": ["http", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/backup", - "integration_type": "system", + "integration_type": "service", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["cronsim==2.6", "securetar==2025.2.1"] + "requirements": ["cronsim==2.6", "securetar==2025.2.1"], + "single_config_entry": true } diff --git a/homeassistant/components/backup/sensor.py b/homeassistant/components/backup/sensor.py new file mode 100644 index 00000000000..59e98ae7c2d --- /dev/null +++ b/homeassistant/components/backup/sensor.py @@ -0,0 +1,75 @@ +"""Sensor platform for Home Assistant Backup integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import BackupConfigEntry, BackupCoordinatorData +from .entity import BackupManagerEntity +from .manager import BackupManagerState + + +@dataclass(kw_only=True, frozen=True) +class BackupSensorEntityDescription(SensorEntityDescription): + """Description for Home Assistant Backup sensor entities.""" + + value_fn: Callable[[BackupCoordinatorData], str | datetime | None] + + +BACKUP_MANAGER_DESCRIPTIONS = ( + BackupSensorEntityDescription( + key="backup_manager_state", + translation_key="backup_manager_state", + device_class=SensorDeviceClass.ENUM, + options=[state.value for state in BackupManagerState], + value_fn=lambda data: data.backup_manager_state, + ), + BackupSensorEntityDescription( + key="next_scheduled_automatic_backup", + translation_key="next_scheduled_automatic_backup", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.next_scheduled_automatic_backup, + ), + BackupSensorEntityDescription( + key="last_successful_automatic_backup", + translation_key="last_successful_automatic_backup", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.last_successful_automatic_backup, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: BackupConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Sensor set up for backup config entry.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + 
BackupManagerSensor(coordinator, description) + for description in BACKUP_MANAGER_DESCRIPTIONS + ) + + +class BackupManagerSensor(BackupManagerEntity, SensorEntity): + """Sensor to track backup manager state.""" + + entity_description: BackupSensorEntityDescription + + @property + def native_value(self) -> str | datetime | None: + """Return native value of entity.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/backup/strings.json b/homeassistant/components/backup/strings.json index c3047d3a4ac..487fdd89a7c 100644 --- a/homeassistant/components/backup/strings.json +++ b/homeassistant/components/backup/strings.json @@ -22,5 +22,24 @@ "name": "Create automatic backup", "description": "Creates a new backup with automatic backup settings." } + }, + "entity": { + "sensor": { + "backup_manager_state": { + "name": "Backup Manager State", + "state": { + "idle": "Idle", + "create_backup": "Creating a backup", + "receive_backup": "Receiving a backup", + "restore_backup": "Restoring a backup" + } + }, + "next_scheduled_automatic_backup": { + "name": "Next scheduled automatic backup" + }, + "last_successful_automatic_backup": { + "name": "Last successful automatic backup" + } + } } } diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 8b5f35287dd..4c370a4224d 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -55,7 +55,7 @@ async def handle_info( "backups": list(backups.values()), "last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup, "last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup, - "last_non_idle_event": manager.last_non_idle_event, + "last_action_event": manager.last_action_event, "next_automatic_backup": manager.config.data.schedule.next_automatic_backup, "next_automatic_backup_additional": manager.config.data.schedule.next_automatic_backup_additional, "state": manager.state, diff --git a/homeassistant/components/baf/strings.json b/homeassistant/components/baf/strings.json index e2f02a6095e..64956984bb8 100644 --- a/homeassistant/components/baf/strings.json +++ b/homeassistant/components/baf/strings.json @@ -23,7 +23,7 @@ "entity": { "climate": { "auto_comfort": { - "name": "Auto comfort" + "name": "Auto Comfort" } }, "fan": { @@ -39,25 +39,25 @@ }, "number": { "comfort_min_speed": { - "name": "Auto Comfort Minimum Speed" + "name": "Auto Comfort minimum speed" }, "comfort_max_speed": { - "name": "Auto Comfort Maximum Speed" + "name": "Auto Comfort maximum speed" }, "comfort_heat_assist_speed": { - "name": "Auto Comfort Heat Assist Speed" + "name": "Auto Comfort Heat Assist speed" }, "return_to_auto_timeout": { - "name": "Return to Auto Timeout" + "name": "Return to Auto timeout" }, "motion_sense_timeout": { - "name": "Motion Sense Timeout" + "name": "Motion sense timeout" }, "light_return_to_auto_timeout": { - "name": "Light Return to Auto Timeout" + "name": "Light return to Auto timeout" }, "light_auto_motion_timeout": { - "name": "Light Motion Sense Timeout" + "name": "Light motion sense timeout" } }, "sensor": { @@ -76,10 +76,10 @@ }, "switch": { "legacy_ir_remote_enable": { - "name": "Legacy IR Remote" + "name": "Legacy IR remote" }, "led_indicators_enable": { - "name": "Led Indicators" + "name": "LED indicators" }, "comfort_heat_assist_enable": { "name": "Auto Comfort Heat Assist" @@ -88,10 +88,10 @@ "name": "Beep" }, "eco_enable": { - "name": 
"Eco Mode" + "name": "Eco mode" }, "motion_sense_enable": { - "name": "Motion Sense" + "name": "Motion sense" }, "return_to_auto_enable": { "name": "Return to Auto" @@ -103,7 +103,7 @@ "name": "Dim to Warm" }, "light_return_to_auto_enable": { - "name": "Light Return to Auto" + "name": "Light return to Auto" } } } diff --git a/homeassistant/components/balboa/__init__.py b/homeassistant/components/balboa/__init__.py index 207826d136e..54ae569bb78 100644 --- a/homeassistant/components/balboa/__init__.py +++ b/homeassistant/components/balboa/__init__.py @@ -21,6 +21,7 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [ Platform.BINARY_SENSOR, Platform.CLIMATE, + Platform.EVENT, Platform.FAN, Platform.LIGHT, Platform.SELECT, @@ -28,7 +29,6 @@ PLATFORMS = [ Platform.TIME, ] - KEEP_ALIVE_INTERVAL = timedelta(minutes=1) SYNC_TIME_INTERVAL = timedelta(hours=1) diff --git a/homeassistant/components/balboa/event.py b/homeassistant/components/balboa/event.py new file mode 100644 index 00000000000..57263c34783 --- /dev/null +++ b/homeassistant/components/balboa/event.py @@ -0,0 +1,91 @@ +"""Support for Balboa events.""" + +from __future__ import annotations + +from datetime import datetime, timedelta + +from pybalboa import EVENT_UPDATE, SpaClient + +from homeassistant.components.event import EventEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.event import async_track_time_interval + +from . import BalboaConfigEntry +from .entity import BalboaEntity + +FAULT = "fault" +FAULT_DATE = "fault_date" +REQUEST_FAULT_LOG_INTERVAL = timedelta(minutes=5) + +FAULT_MESSAGE_CODE_MAP: dict[int, str] = { + 15: "sensor_out_of_sync", + 16: "low_flow", + 17: "flow_failed", + 18: "settings_reset", + 19: "priming_mode", + 20: "clock_failed", + 21: "settings_reset", + 22: "memory_failure", + 26: "service_sensor_sync", + 27: "heater_dry", + 28: "heater_may_be_dry", + 29: "water_too_hot", + 30: "heater_too_hot", + 31: "sensor_a_fault", + 32: "sensor_b_fault", + 34: "pump_stuck", + 35: "hot_fault", + 36: "gfci_test_failed", + 37: "standby_mode", +} +FAULT_EVENT_TYPES = sorted(set(FAULT_MESSAGE_CODE_MAP.values())) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: BalboaConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the spa's events.""" + async_add_entities([BalboaEventEntity(entry.runtime_data)]) + + +class BalboaEventEntity(BalboaEntity, EventEntity): + """Representation of a Balboa event entity.""" + + _attr_event_types = FAULT_EVENT_TYPES + _attr_translation_key = FAULT + + def __init__(self, spa: SpaClient) -> None: + """Initialize a Balboa event entity.""" + super().__init__(spa, FAULT) + + @callback + def _async_handle_event(self) -> None: + """Handle the fault event.""" + if not (fault := self._client.fault): + return + fault_date = fault.fault_datetime.isoformat() + if self.state_attributes.get(FAULT_DATE) != fault_date: + self._trigger_event( + FAULT_MESSAGE_CODE_MAP.get(fault.message_code, fault.message), + {FAULT_DATE: fault_date, "code": fault.message_code}, + ) + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + self.async_on_remove(self._client.on(EVENT_UPDATE, self._async_handle_event)) + + async def request_fault_log(now: datetime | None = None) -> None: + """Request the most recent fault log.""" + await 
self._client.request_fault_log() + + await request_fault_log() + self.async_on_remove( + async_track_time_interval( + self.hass, request_fault_log, REQUEST_FAULT_LOG_INTERVAL + ) + ) diff --git a/homeassistant/components/balboa/strings.json b/homeassistant/components/balboa/strings.json index 9779984b182..784ce8533a8 100644 --- a/homeassistant/components/balboa/strings.json +++ b/homeassistant/components/balboa/strings.json @@ -57,6 +57,35 @@ } } }, + "event": { + "fault": { + "name": "Fault", + "state_attributes": { + "event_type": { + "state": { + "sensor_out_of_sync": "Sensors are out of sync", + "low_flow": "The water flow is low", + "flow_failed": "The water flow has failed", + "settings_reset": "The settings have been reset", + "priming_mode": "Priming mode", + "clock_failed": "The clock has failed", + "memory_failure": "Program memory failure", + "service_sensor_sync": "Sensors are out of sync -- call for service", + "heater_dry": "The heater is dry", + "heater_may_be_dry": "The heater may be dry", + "water_too_hot": "The water is too hot", + "heater_too_hot": "The heater is too hot", + "sensor_a_fault": "Sensor A fault", + "sensor_b_fault": "Sensor B fault", + "pump_stuck": "A pump may be stuck on", + "hot_fault": "Hot fault", + "gfci_test_failed": "The GFCI test failed", + "standby_mode": "Standby mode (hold mode)" + } + } + } + } + }, "fan": { "pump": { "name": "Pump {index}" diff --git a/homeassistant/components/bang_olufsen/strings.json b/homeassistant/components/bang_olufsen/strings.json index 57ab828f9fb..422dc4be567 100644 --- a/homeassistant/components/bang_olufsen/strings.json +++ b/homeassistant/components/bang_olufsen/strings.json @@ -29,7 +29,7 @@ "description": "Manually configure your Bang & Olufsen device." }, "zeroconf_confirm": { - "title": "Setup Bang & Olufsen device", + "title": "Set up Bang & Olufsen device", "description": "Confirm the configuration of the {model}-{serial_number} @ {host}." } } @@ -197,11 +197,11 @@ "services": { "beolink_allstandby": { "name": "Beolink all standby", - "description": "Set all Connected Beolink devices to standby." + "description": "Sets all connected Beolink devices to standby." }, "beolink_expand": { "name": "Beolink expand", - "description": "Expand current Beolink experience.", + "description": "Adds devices to the current Beolink experience.", "fields": { "all_discovered": { "name": "All discovered", @@ -221,7 +221,7 @@ }, "beolink_join": { "name": "Beolink join", - "description": "Join a Beolink experience.", + "description": "Joins a Beolink experience.", "fields": { "beolink_jid": { "name": "Beolink JID", @@ -241,11 +241,11 @@ }, "beolink_leave": { "name": "Beolink leave", - "description": "Leave a Beolink experience." + "description": "Leaves a Beolink experience." }, "beolink_unexpand": { "name": "Beolink unexpand", - "description": "Unexpand from current Beolink experience.", + "description": "Removes devices from the current Beolink experience.", "fields": { "beolink_jids": { "name": "Beolink JIDs", @@ -274,7 +274,7 @@ "message": "An error occurred while attempting to play {media_type}: {error_message}." }, "invalid_grouping_entity": { - "message": "Entity with id: {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?" + "message": "Entity with ID {entity_id} can't be added to the Beolink session. Is the entity a Bang & Olufsen media_player?" }, "invalid_sound_mode": { "message": "{invalid_sound_mode} is an invalid sound mode. Valid values are: {valid_sound_modes}." 
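The fault event entity added in homeassistant/components/balboa/event.py above only fires when the latest fault's timestamp changes, so repeated EVENT_UPDATE callbacks that report the same fault are deduplicated before _trigger_event is called. Below is a minimal standalone sketch of that deduplication pattern in plain Python; it is not Home Assistant or pybalboa code, and the Fault class, its field names, and the abbreviated code map are illustrative assumptions only.

from dataclasses import dataclass
from datetime import datetime

@dataclass
class Fault:
    # Illustrative stand-in for the library's fault object (assumed fields).
    message_code: int
    message: str
    fault_datetime: datetime

# Abbreviated, assumed mapping from numeric codes to event types.
FAULT_MESSAGE_CODE_MAP: dict[int, str] = {16: "low_flow", 17: "flow_failed"}

class FaultTracker:
    """Report a fault only when its timestamp differs from the last one seen."""

    def __init__(self) -> None:
        self._last_fault_date: str | None = None

    def handle_update(self, fault: Fault | None) -> tuple[str, dict] | None:
        if fault is None:
            return None
        fault_date = fault.fault_datetime.isoformat()
        if fault_date == self._last_fault_date:
            return None  # Same fault already reported; nothing to fire.
        self._last_fault_date = fault_date
        event_type = FAULT_MESSAGE_CODE_MAP.get(fault.message_code, fault.message)
        return event_type, {"fault_date": fault_date, "code": fault.message_code}

tracker = FaultTracker()
fault = Fault(16, "Low flow", datetime(2025, 3, 1, 12, 0))
assert tracker.handle_update(fault) == (
    "low_flow",
    {"fault_date": "2025-03-01T12:00:00", "code": 16},
)
assert tracker.handle_update(fault) is None  # Duplicate update is ignored.

The entity in the diff performs the equivalent comparison against its stored FAULT_DATE state attribute before triggering the event and writing state.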
diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 135d1b5d27e..0addcc1daac 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -501,18 +501,16 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity return # presets and inputs might have the same name; presets have priority - url: str | None = None for input_ in self._inputs: if input_.text == source: - url = input_.url + await self._player.play_url(input_.url) + return for preset in self._presets: if preset.name == source: - url = preset.url + await self._player.load_preset(preset.id) + return - if url is None: - raise ServiceValidationError(f"Source {source} not found") - - await self._player.play_url(url) + raise ServiceValidationError(f"Source {source} not found") async def async_clear_playlist(self) -> None: """Clear players playlist.""" diff --git a/homeassistant/components/bluetooth/__init__.py b/homeassistant/components/bluetooth/__init__.py index c46ef22803e..7abc929fde5 100644 --- a/homeassistant/components/bluetooth/__init__.py +++ b/homeassistant/components/bluetooth/__init__.py @@ -311,11 +311,24 @@ async def async_update_device( update the device with the new location so they can figure out where the adapter is. """ + address = details[ADAPTER_ADDRESS] + connections = {(dr.CONNECTION_BLUETOOTH, address)} device_registry = dr.async_get(hass) + # We only have one device for the config entry + # so if the address has been corrected, make + # sure the device entry reflects the correct + # address + for device in dr.async_entries_for_config_entry(device_registry, entry.entry_id): + for conn_type, conn_value in device.connections: + if conn_type == dr.CONNECTION_BLUETOOTH and conn_value != address: + device_registry.async_update_device( + device.id, new_connections=connections + ) + break device_entry = device_registry.async_get_or_create( config_entry_id=entry.entry_id, - name=adapter_human_name(adapter, details[ADAPTER_ADDRESS]), - connections={(dr.CONNECTION_BLUETOOTH, details[ADAPTER_ADDRESS])}, + name=adapter_human_name(adapter, address), + connections=connections, manufacturer=details[ADAPTER_MANUFACTURER], model=adapter_model(details), sw_version=details.get(ADAPTER_SW_VERSION), @@ -342,9 +355,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) ) ) + return True address = entry.unique_id assert address is not None - assert source_entry is not None source_domain = entry.data[CONF_SOURCE_DOMAIN] if mac_manufacturer := await get_manufacturer_from_mac(address): manufacturer = f"{mac_manufacturer} ({source_domain})" diff --git a/homeassistant/components/bluetooth/config_flow.py b/homeassistant/components/bluetooth/config_flow.py index e76277306f5..328707bd722 100644 --- a/homeassistant/components/bluetooth/config_flow.py +++ b/homeassistant/components/bluetooth/config_flow.py @@ -186,16 +186,28 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by an external scanner.""" source = user_input[CONF_SOURCE] await self.async_set_unique_id(source) + source_config_entry_id = user_input[CONF_SOURCE_CONFIG_ENTRY_ID] data = { CONF_SOURCE: source, CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL], CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN], - CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID], + CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id, CONF_SOURCE_DEVICE_ID: 
user_input[CONF_SOURCE_DEVICE_ID], } self._abort_if_unique_id_configured(updates=data) - manager = get_manager() - scanner = manager.async_scanner_by_source(source) + for entry in self._async_current_entries(include_ignore=False): + # If the mac address needs to be corrected, migrate + # the config entry to the new mac address + if ( + entry.data.get(CONF_SOURCE_CONFIG_ENTRY_ID) == source_config_entry_id + and entry.unique_id != source + ): + self.hass.config_entries.async_update_entry( + entry, unique_id=source, data={**entry.data, **data} + ) + self.hass.config_entries.async_schedule_reload(entry.entry_id) + return self.async_abort(reason="already_configured") + scanner = get_manager().async_scanner_by_source(source) assert scanner is not None return self.async_create_entry(title=scanner.name, data=data) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 6c851e603d9..1b2b0e7267b 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -18,9 +18,9 @@ "bleak==0.22.3", "bleak-retry-connector==3.9.0", "bluetooth-adapters==0.21.4", - "bluetooth-auto-recovery==1.4.4", - "bluetooth-data-tools==1.23.4", - "dbus-fast==2.33.0", - "habluetooth==3.24.1" + "bluetooth-auto-recovery==1.4.5", + "bluetooth-data-tools==1.26.5", + "dbus-fast==2.43.0", + "habluetooth==3.37.0" ] } diff --git a/homeassistant/components/bosch_alarm/__init__.py b/homeassistant/components/bosch_alarm/__init__.py new file mode 100644 index 00000000000..bc7fee46f60 --- /dev/null +++ b/homeassistant/components/bosch_alarm/__init__.py @@ -0,0 +1,62 @@ +"""The Bosch Alarm integration.""" + +from __future__ import annotations + +from ssl import SSLError + +from bosch_alarm_mode2 import Panel + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr + +from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN + +PLATFORMS: list[Platform] = [Platform.ALARM_CONTROL_PANEL] + +type BoschAlarmConfigEntry = ConfigEntry[Panel] + + +async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool: + """Set up Bosch Alarm from a config entry.""" + + panel = Panel( + host=entry.data[CONF_HOST], + port=entry.data[CONF_PORT], + automation_code=entry.data.get(CONF_PASSWORD), + installer_or_user_code=entry.data.get( + CONF_INSTALLER_CODE, entry.data.get(CONF_USER_CODE) + ), + ) + try: + await panel.connect() + except (PermissionError, ValueError) as err: + await panel.disconnect() + raise ConfigEntryNotReady from err + except (TimeoutError, OSError, ConnectionRefusedError, SSLError) as err: + await panel.disconnect() + raise ConfigEntryNotReady("Connection failed") from err + + entry.runtime_data = panel + + device_registry = dr.async_get(hass) + + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, entry.unique_id or entry.entry_id)}, + name=f"Bosch {panel.model}", + manufacturer="Bosch Security Systems", + model=panel.model, + sw_version=panel.firmware_version, + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await 
hass.config_entries.async_unload_platforms(entry, PLATFORMS): + await entry.runtime_data.disconnect() + return unload_ok diff --git a/homeassistant/components/bosch_alarm/alarm_control_panel.py b/homeassistant/components/bosch_alarm/alarm_control_panel.py new file mode 100644 index 00000000000..a1d8a7b90f4 --- /dev/null +++ b/homeassistant/components/bosch_alarm/alarm_control_panel.py @@ -0,0 +1,109 @@ +"""Support for Bosch Alarm Panel.""" + +from __future__ import annotations + +from bosch_alarm_mode2 import Panel + +from homeassistant.components.alarm_control_panel import ( + AlarmControlPanelEntity, + AlarmControlPanelEntityFeature, + AlarmControlPanelState, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import BoschAlarmConfigEntry +from .const import DOMAIN + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: BoschAlarmConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up control panels for each area.""" + panel = config_entry.runtime_data + + async_add_entities( + AreaAlarmControlPanel( + panel, + area_id, + config_entry.unique_id or config_entry.entry_id, + ) + for area_id in panel.areas + ) + + +class AreaAlarmControlPanel(AlarmControlPanelEntity): + """An alarm control panel entity for a bosch alarm panel.""" + + _attr_has_entity_name = True + _attr_supported_features = ( + AlarmControlPanelEntityFeature.ARM_HOME + | AlarmControlPanelEntityFeature.ARM_AWAY + ) + _attr_code_arm_required = False + _attr_name = None + + def __init__(self, panel: Panel, area_id: int, unique_id: str) -> None: + """Initialise a Bosch Alarm control panel entity.""" + self.panel = panel + self._area = panel.areas[area_id] + self._area_id = area_id + self._attr_unique_id = f"{unique_id}_area_{area_id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._attr_unique_id)}, + name=self._area.name, + manufacturer="Bosch Security Systems", + via_device=( + DOMAIN, + unique_id, + ), + ) + + @property + def alarm_state(self) -> AlarmControlPanelState | None: + """Return the state of the alarm.""" + if self._area.is_triggered(): + return AlarmControlPanelState.TRIGGERED + if self._area.is_disarmed(): + return AlarmControlPanelState.DISARMED + if self._area.is_arming(): + return AlarmControlPanelState.ARMING + if self._area.is_pending(): + return AlarmControlPanelState.PENDING + if self._area.is_part_armed(): + return AlarmControlPanelState.ARMED_HOME + if self._area.is_all_armed(): + return AlarmControlPanelState.ARMED_AWAY + return None + + async def async_alarm_disarm(self, code: str | None = None) -> None: + """Disarm this panel.""" + await self.panel.area_disarm(self._area_id) + + async def async_alarm_arm_home(self, code: str | None = None) -> None: + """Send arm home command.""" + await self.panel.area_arm_part(self._area_id) + + async def async_alarm_arm_away(self, code: str | None = None) -> None: + """Send arm away command.""" + await self.panel.area_arm_all(self._area_id) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return self.panel.connection_status() + + async def async_added_to_hass(self) -> None: + """Run when entity attached to hass.""" + await super().async_added_to_hass() + self._area.status_observer.attach(self.schedule_update_ha_state) + self.panel.connection_status_observer.attach(self.schedule_update_ha_state) + + async 
def async_will_remove_from_hass(self) -> None: + """Run when entity removed from hass.""" + await super().async_will_remove_from_hass() + self._area.status_observer.detach(self.schedule_update_ha_state) + self.panel.connection_status_observer.detach(self.schedule_update_ha_state) diff --git a/homeassistant/components/bosch_alarm/config_flow.py b/homeassistant/components/bosch_alarm/config_flow.py new file mode 100644 index 00000000000..e48f2a11944 --- /dev/null +++ b/homeassistant/components/bosch_alarm/config_flow.py @@ -0,0 +1,165 @@ +"""Config flow for Bosch Alarm integration.""" + +from __future__ import annotations + +import asyncio +import logging +import ssl +from typing import Any + +from bosch_alarm_mode2 import Panel +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + CONF_CODE, + CONF_HOST, + CONF_MODEL, + CONF_PASSWORD, + CONF_PORT, +) +import homeassistant.helpers.config_validation as cv + +from .const import CONF_INSTALLER_CODE, CONF_USER_CODE, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_PORT, default=7700): cv.positive_int, + } +) + +STEP_AUTH_DATA_SCHEMA_SOLUTION = vol.Schema( + { + vol.Required(CONF_USER_CODE): str, + } +) + +STEP_AUTH_DATA_SCHEMA_AMAX = vol.Schema( + { + vol.Required(CONF_INSTALLER_CODE): str, + vol.Required(CONF_PASSWORD): str, + } +) + +STEP_AUTH_DATA_SCHEMA_BG = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) + +STEP_INIT_DATA_SCHEMA = vol.Schema({vol.Optional(CONF_CODE): str}) + + +async def try_connect( + data: dict[str, Any], load_selector: int = 0 +) -> tuple[str, int | None]: + """Validate the user input allows us to connect. + + Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. + """ + panel = Panel( + host=data[CONF_HOST], + port=data[CONF_PORT], + automation_code=data.get(CONF_PASSWORD), + installer_or_user_code=data.get(CONF_INSTALLER_CODE, data.get(CONF_USER_CODE)), + ) + + try: + await panel.connect(load_selector) + finally: + await panel.disconnect() + + return (panel.model, panel.serial_number) + + +class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Bosch Alarm.""" + + def __init__(self) -> None: + """Init config flow.""" + + self._data: dict[str, Any] = {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + # Use load_selector = 0 to fetch the panel model without authentication. 
+ (model, serial) = await try_connect(user_input, 0) + except ( + OSError, + ConnectionRefusedError, + ssl.SSLError, + asyncio.exceptions.TimeoutError, + ) as e: + _LOGGER.error("Connection Error: %s", e) + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + self._data = user_input + self._data[CONF_MODEL] = model + return await self.async_step_auth() + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, + ) + + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the auth step.""" + errors: dict[str, str] = {} + + # Each model variant requires a different authentication flow + if "Solution" in self._data[CONF_MODEL]: + schema = STEP_AUTH_DATA_SCHEMA_SOLUTION + elif "AMAX" in self._data[CONF_MODEL]: + schema = STEP_AUTH_DATA_SCHEMA_AMAX + else: + schema = STEP_AUTH_DATA_SCHEMA_BG + + if user_input is not None: + self._data.update(user_input) + try: + (model, serial_number) = await try_connect( + self._data, Panel.LOAD_EXTENDED_INFO + ) + except (PermissionError, ValueError) as e: + errors["base"] = "invalid_auth" + _LOGGER.error("Authentication Error: %s", e) + except ( + OSError, + ConnectionRefusedError, + ssl.SSLError, + TimeoutError, + ) as e: + _LOGGER.error("Connection Error: %s", e) + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + if serial_number: + await self.async_set_unique_id(str(serial_number)) + self._abort_if_unique_id_configured() + else: + self._async_abort_entries_match({CONF_HOST: self._data[CONF_HOST]}) + return self.async_create_entry(title=f"Bosch {model}", data=self._data) + + return self.async_show_form( + step_id="auth", + data_schema=self.add_suggested_values_to_schema(schema, user_input), + errors=errors, + ) diff --git a/homeassistant/components/bosch_alarm/const.py b/homeassistant/components/bosch_alarm/const.py new file mode 100644 index 00000000000..7205831391c --- /dev/null +++ b/homeassistant/components/bosch_alarm/const.py @@ -0,0 +1,6 @@ +"""Constants for the Bosch Alarm integration.""" + +DOMAIN = "bosch_alarm" +HISTORY_ATTR = "history" +CONF_INSTALLER_CODE = "installer_code" +CONF_USER_CODE = "user_code" diff --git a/homeassistant/components/bosch_alarm/manifest.json b/homeassistant/components/bosch_alarm/manifest.json new file mode 100644 index 00000000000..a54ace71782 --- /dev/null +++ b/homeassistant/components/bosch_alarm/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "bosch_alarm", + "name": "Bosch Alarm", + "codeowners": ["@mag1024", "@sanjay900"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/bosch_alarm", + "integration_type": "device", + "iot_class": "local_push", + "quality_scale": "bronze", + "requirements": ["bosch-alarm-mode2==0.4.3"] +} diff --git a/homeassistant/components/bosch_alarm/quality_scale.yaml b/homeassistant/components/bosch_alarm/quality_scale.yaml new file mode 100644 index 00000000000..467760fb863 --- /dev/null +++ b/homeassistant/components/bosch_alarm/quality_scale.yaml @@ -0,0 +1,84 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions defined + appropriate-polling: + status: exempt + comment: | + No polling + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + 
dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: todo + test-coverage: done + + # Gold + devices: done + diagnostics: todo + discovery-update-info: todo + discovery: todo + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + Device type integration + entity-category: todo + entity-device-class: todo + entity-disabled-by-default: todo + entity-translations: todo + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + No repairs + stale-devices: + status: exempt + comment: | + Device type integration + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + Integration does not make any HTTP requests. + strict-typing: done diff --git a/homeassistant/components/bosch_alarm/strings.json b/homeassistant/components/bosch_alarm/strings.json new file mode 100644 index 00000000000..f4846021b55 --- /dev/null +++ b/homeassistant/components/bosch_alarm/strings.json @@ -0,0 +1,36 @@ +{ + "config": { + "step": { + "user": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your Bosch alarm panel", + "port": "The port used to connect to your Bosch alarm panel. 
This is usually 7700" + } + }, + "auth": { + "data": { + "password": "[%key:common::config_flow::data::password%]", + "installer_code": "Installer code", + "user_code": "User code" + }, + "data_description": { + "password": "The Mode 2 automation code from your panel", + "installer_code": "The installer code from your panel", + "user_code": "The user code from your panel" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/components/bring/diagnostics.py b/homeassistant/components/bring/diagnostics.py index 6c2f779ef05..e5cafd30ab5 100644 --- a/homeassistant/components/bring/diagnostics.py +++ b/homeassistant/components/bring/diagnostics.py @@ -4,10 +4,14 @@ from __future__ import annotations from typing import Any +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_EMAIL, CONF_NAME from homeassistant.core import HomeAssistant from .coordinator import BringConfigEntry +TO_REDACT = {CONF_NAME, CONF_EMAIL} + async def async_get_config_entry_diagnostics( hass: HomeAssistant, config_entry: BringConfigEntry @@ -15,7 +19,10 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" return { - "data": {k: v.to_dict() for k, v in config_entry.runtime_data.data.items()}, + "data": { + k: async_redact_data(v.to_dict(), TO_REDACT) + for k, v in config_entry.runtime_data.data.items() + }, "lists": [lst.to_dict() for lst in config_entry.runtime_data.lists], "user_settings": config_entry.runtime_data.user_settings.to_dict(), } diff --git a/homeassistant/components/bring/manifest.json b/homeassistant/components/bring/manifest.json index f292b10f7dc..b2d42835cce 100644 --- a/homeassistant/components/bring/manifest.json +++ b/homeassistant/components/bring/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["bring_api"], "quality_scale": "platinum", - "requirements": ["bring-api==1.0.2"] + "requirements": ["bring-api==1.1.0"] } diff --git a/homeassistant/components/brother/__init__.py b/homeassistant/components/brother/__init__.py index 464e6629224..1c1768b58fd 100644 --- a/homeassistant/components/brother/__init__.py +++ b/homeassistant/components/brother/__init__.py @@ -9,6 +9,7 @@ from homeassistant.const import CONF_HOST, CONF_TYPE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from .const import DOMAIN from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] @@ -25,7 +26,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b host, printer_type=printer_type, snmp_engine=snmp_engine ) except (ConnectionError, SnmpError, TimeoutError) as error: - raise ConfigEntryNotReady from error + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "device": entry.title, + "error": repr(error), + }, + ) from error coordinator = BrotherDataUpdateCoordinator(hass, entry, brother) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/brother/coordinator.py b/homeassistant/components/brother/coordinator.py index 4f518ba8a25..a3c337f27f7 100644 --- 
a/homeassistant/components/brother/coordinator.py +++ b/homeassistant/components/brother/coordinator.py @@ -26,6 +26,7 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator[BrotherSensors]): ) -> None: """Initialize.""" self.brother = brother + self.device_name = config_entry.title super().__init__( hass, @@ -41,5 +42,12 @@ class BrotherDataUpdateCoordinator(DataUpdateCoordinator[BrotherSensors]): async with timeout(20): data = await self.brother.async_update() except (ConnectionError, SnmpError, UnsupportedModelError) as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "device": self.device_name, + "error": repr(error), + }, + ) from error return data diff --git a/homeassistant/components/brother/strings.json b/homeassistant/components/brother/strings.json index b502ed7e3b9..d0714a199c4 100644 --- a/homeassistant/components/brother/strings.json +++ b/homeassistant/components/brother/strings.json @@ -159,5 +159,13 @@ "name": "Last restart" } } + }, + "exceptions": { + "cannot_connect": { + "message": "An error occurred while connecting to the {device} printer: {error}" + }, + "update_error": { + "message": "An error occurred while retrieving data from the {device} printer: {error}" + } } } diff --git a/homeassistant/components/buienradar/sensor.py b/homeassistant/components/buienradar/sensor.py index a4d39ea07cc..586543de129 100644 --- a/homeassistant/components/buienradar/sensor.py +++ b/homeassistant/components/buienradar/sensor.py @@ -170,6 +170,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( native_unit_of_measurement=DEGREE, icon="mdi:compass-outline", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key="pressure", diff --git a/homeassistant/components/caldav/todo.py b/homeassistant/components/caldav/todo.py index fada4693cf0..73f172dabec 100644 --- a/homeassistant/components/caldav/todo.py +++ b/homeassistant/components/caldav/todo.py @@ -138,6 +138,8 @@ class WebDavTodoListEntity(TodoListEntity): await self.hass.async_add_executor_job( partial(self._calendar.save_todo, **item_data), ) + # refreshing async otherwise it would take too much time + self.hass.async_create_task(self.async_update_ha_state(force_refresh=True)) except (requests.ConnectionError, DAVError) as err: raise HomeAssistantError(f"CalDAV save error: {err}") from err @@ -172,6 +174,8 @@ class WebDavTodoListEntity(TodoListEntity): obj_type="todo", ), ) + # refreshing async otherwise it would take too much time + self.hass.async_create_task(self.async_update_ha_state(force_refresh=True)) except (requests.ConnectionError, DAVError) as err: raise HomeAssistantError(f"CalDAV save error: {err}") from err @@ -195,3 +199,5 @@ class WebDavTodoListEntity(TodoListEntity): await self.hass.async_add_executor_job(item.delete) except (requests.ConnectionError, DAVError) as err: raise HomeAssistantError(f"CalDAV delete error: {err}") from err + # refreshing async otherwise it would take too much time + self.hass.async_create_task(self.async_update_ha_state(force_refresh=True)) diff --git a/homeassistant/components/canary/strings.json b/homeassistant/components/canary/strings.json index 699e8b25e11..8be11a48b5e 100644 --- a/homeassistant/components/canary/strings.json +++ b/homeassistant/components/canary/strings.json @@ -21,8 +21,8 @@ "step": { "init": { "data": { - "ffmpeg_arguments": "Arguments passed to ffmpeg for cameras", - 
"timeout": "Request Timeout (seconds)" + "ffmpeg_arguments": "Arguments passed to FFmpeg for cameras", + "timeout": "Request timeout (seconds)" } } } diff --git a/homeassistant/components/cast/config_flow.py b/homeassistant/components/cast/config_flow.py index 034cf856023..6c33eac230f 100644 --- a/homeassistant/components/cast/config_flow.py +++ b/homeassistant/components/cast/config_flow.py @@ -16,12 +16,21 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_UUID from homeassistant.core import callback from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from .const import CONF_IGNORE_CEC, CONF_KNOWN_HOSTS, DOMAIN IGNORE_CEC_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string])) -KNOWN_HOSTS_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string])) +KNOWN_HOSTS_SCHEMA = vol.Schema( + { + vol.Optional( + CONF_KNOWN_HOSTS, + ): SelectSelector( + SelectSelectorConfig(custom_value=True, options=[], multiple=True), + ) + } +) WANTED_UUID_SCHEMA = vol.Schema(vol.All(cv.ensure_list, [cv.string])) @@ -30,12 +39,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize flow.""" - self._ignore_cec = set[str]() - self._known_hosts = set[str]() - self._wanted_uuid = set[str]() - @staticmethod @callback def async_get_options_flow( @@ -62,48 +65,31 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm the setup.""" - errors = {} - data = {CONF_KNOWN_HOSTS: self._known_hosts} - if user_input is not None: - bad_hosts = False - known_hosts = user_input[CONF_KNOWN_HOSTS] - known_hosts = [x.strip() for x in known_hosts.split(",") if x.strip()] - try: - known_hosts = KNOWN_HOSTS_SCHEMA(known_hosts) - except vol.Invalid: - errors["base"] = "invalid_known_hosts" - bad_hosts = True - else: - self._known_hosts = known_hosts - data = self._get_data() - if not bad_hosts: - return self.async_create_entry(title="Google Cast", data=data) + known_hosts = _trim_items(user_input.get(CONF_KNOWN_HOSTS, [])) + return self.async_create_entry( + title="Google Cast", + data=self._get_data(known_hosts=known_hosts), + ) - fields = {} - fields[vol.Optional(CONF_KNOWN_HOSTS, default="")] = str - - return self.async_show_form( - step_id="config", data_schema=vol.Schema(fields), errors=errors - ) + return self.async_show_form(step_id="config", data_schema=KNOWN_HOSTS_SCHEMA) async def async_step_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm the setup.""" - - data = self._get_data() - if user_input is not None or not onboarding.async_is_onboarded(self.hass): - return self.async_create_entry(title="Google Cast", data=data) + return self.async_create_entry(title="Google Cast", data=self._get_data()) return self.async_show_form(step_id="confirm") - def _get_data(self): + def _get_data( + self, *, known_hosts: list[str] | None = None + ) -> dict[str, list[str]]: return { - CONF_IGNORE_CEC: list(self._ignore_cec), - CONF_KNOWN_HOSTS: list(self._known_hosts), - CONF_UUID: list(self._wanted_uuid), + CONF_IGNORE_CEC: [], + CONF_KNOWN_HOSTS: known_hosts or [], + CONF_UUID: [], } @@ -123,31 +109,24 @@ class CastOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage the Google Cast options.""" errors: dict[str, str] = {} - current_config = self.config_entry.data if 
user_input is not None: - bad_hosts, known_hosts = _string_to_list( - user_input.get(CONF_KNOWN_HOSTS, ""), KNOWN_HOSTS_SCHEMA + known_hosts = _trim_items(user_input.get(CONF_KNOWN_HOSTS, [])) + self.updated_config = dict(self.config_entry.data) + self.updated_config[CONF_KNOWN_HOSTS] = known_hosts + + if self.show_advanced_options: + return await self.async_step_advanced_options() + + self.hass.config_entries.async_update_entry( + self.config_entry, data=self.updated_config ) - - if not bad_hosts: - self.updated_config = dict(current_config) - self.updated_config[CONF_KNOWN_HOSTS] = known_hosts - - if self.show_advanced_options: - return await self.async_step_advanced_options() - - self.hass.config_entries.async_update_entry( - self.config_entry, data=self.updated_config - ) - return self.async_create_entry(title="", data={}) - - fields: dict[vol.Marker, type[str]] = {} - suggested_value = _list_to_string(current_config.get(CONF_KNOWN_HOSTS)) - _add_with_suggestion(fields, CONF_KNOWN_HOSTS, suggested_value) + return self.async_create_entry(title="", data={}) return self.async_show_form( step_id="basic_options", - data_schema=vol.Schema(fields), + data_schema=self.add_suggested_values_to_schema( + KNOWN_HOSTS_SCHEMA, self.config_entry.data + ), errors=errors, last_step=not self.show_advanced_options, ) @@ -206,6 +185,10 @@ def _string_to_list(string, schema): return invalid, items +def _trim_items(items: list[str]) -> list[str]: + return [x.strip() for x in items if x.strip()] + + def _add_with_suggestion( fields: dict[vol.Marker, type[str]], key: str, suggested_value: str ) -> None: diff --git a/homeassistant/components/cast/const.py b/homeassistant/components/cast/const.py index 056ee054d1d..0a85a0007b3 100644 --- a/homeassistant/components/cast/const.py +++ b/homeassistant/components/cast/const.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, TypedDict +from typing import TYPE_CHECKING, NotRequired, TypedDict from homeassistant.util.signal_type import SignalType @@ -46,3 +46,4 @@ class HomeAssistantControllerData(TypedDict): hass_uuid: str client_id: str | None refresh_token: str + app_id: NotRequired[str] diff --git a/homeassistant/components/cast/helpers.py b/homeassistant/components/cast/helpers.py index 8f4af197b8e..c45bbb4fbbc 100644 --- a/homeassistant/components/cast/helpers.py +++ b/homeassistant/components/cast/helpers.py @@ -7,6 +7,7 @@ from dataclasses import dataclass import logging from typing import TYPE_CHECKING, ClassVar from urllib.parse import urlparse +from uuid import UUID import aiohttp import attr @@ -40,7 +41,7 @@ class ChromecastInfo: is_dynamic_group = attr.ib(type=bool | None, default=None) @property - def friendly_name(self) -> str: + def friendly_name(self) -> str | None: """Return the Friendly Name.""" return self.cast_info.friendly_name @@ -50,7 +51,7 @@ class ChromecastInfo: return self.cast_info.cast_type == CAST_TYPE_GROUP @property - def uuid(self) -> bool: + def uuid(self) -> UUID: """Return the UUID.""" return self.cast_info.uuid @@ -80,7 +81,7 @@ class ChromecastInfo: "+label%3A%22integration%3A+cast%22" ) - _LOGGER.debug( + _LOGGER.info( ( "Fetched cast details for unknown model '%s' manufacturer:" " '%s', type: '%s'. 
Please %s" @@ -111,7 +112,10 @@ class ChromecastInfo: is_dynamic_group = False http_group_status = None http_group_status = dial.get_multizone_status( - None, + # We pass services which will be used for the HTTP request, and we + # don't care about the host in http_group_status.dynamic_groups so + # we pass an empty string to simplify the code. + "", services=self.cast_info.services, zconf=ChromeCastZeroconf.get_zeroconf(), ) diff --git a/homeassistant/components/cast/manifest.json b/homeassistant/components/cast/manifest.json index 0650f267544..6c8b0536e2f 100644 --- a/homeassistant/components/cast/manifest.json +++ b/homeassistant/components/cast/manifest.json @@ -14,7 +14,7 @@ "documentation": "https://www.home-assistant.io/integrations/cast", "iot_class": "local_polling", "loggers": ["casttube", "pychromecast"], - "requirements": ["PyChromecast==14.0.5"], + "requirements": ["PyChromecast==14.0.7"], "single_config_entry": true, "zeroconf": ["_googlecast._tcp.local."] } diff --git a/homeassistant/components/cast/services.yaml b/homeassistant/components/cast/services.yaml index e2e23ad40a2..45b36f6d983 100644 --- a/homeassistant/components/cast/services.yaml +++ b/homeassistant/components/cast/services.yaml @@ -7,11 +7,11 @@ show_lovelace_view: integration: cast domain: media_player dashboard_path: - required: true example: lovelace-cast selector: text: view_path: + required: true example: downstairs selector: text: diff --git a/homeassistant/components/cast/strings.json b/homeassistant/components/cast/strings.json index 9c49813bd83..8c7c7c0cff0 100644 --- a/homeassistant/components/cast/strings.json +++ b/homeassistant/components/cast/strings.json @@ -6,9 +6,11 @@ }, "config": { "title": "Google Cast configuration", - "description": "Known Hosts - A comma-separated list of hostnames or IP-addresses of cast devices, use if mDNS discovery is not working.", "data": { - "known_hosts": "Known hosts" + "known_hosts": "Add known host" + }, + "data_description": { + "known_hosts": "Hostnames or IP-addresses of cast devices, use if mDNS discovery is not working" } } }, @@ -20,9 +22,11 @@ "step": { "basic_options": { "title": "[%key:component::cast::config::step::config::title%]", - "description": "[%key:component::cast::config::step::config::description%]", "data": { "known_hosts": "[%key:component::cast::config::step::config::data::known_hosts%]" + }, + "data_description": { + "known_hosts": "[%key:component::cast::config::step::config::data_description::known_hosts%]" } }, "advanced_options": { @@ -49,7 +53,7 @@ }, "dashboard_path": { "name": "Dashboard path", - "description": "The URL path of the dashboard to show." + "description": "The URL path of the dashboard to show, defaults to lovelace if not specified." 
}, "view_path": { "name": "View path", diff --git a/homeassistant/components/chacon_dio/config_flow.py b/homeassistant/components/chacon_dio/config_flow.py index 54604b81153..daaf38e0edc 100644 --- a/homeassistant/components/chacon_dio/config_flow.py +++ b/homeassistant/components/chacon_dio/config_flow.py @@ -44,7 +44,7 @@ class ChaconDioConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except DIOChaconInvalidAuthError: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" diff --git a/homeassistant/components/cloud/account_link.py b/homeassistant/components/cloud/account_link.py index 851d658f8e0..3c3d944d479 100644 --- a/homeassistant/components/cloud/account_link.py +++ b/homeassistant/components/cloud/account_link.py @@ -127,7 +127,11 @@ class CloudOAuth2Implementation(config_entry_oauth2_flow.AbstractOAuth2Implement flow_id=flow_id, user_input=tokens ) - self.hass.async_create_task(await_tokens()) + # It's a background task because it should be cancelled on shutdown and there's nothing else + # we can do in such case. There's also no need to wait for this during setup. + self.hass.async_create_background_task( + await_tokens(), name="Awaiting OAuth tokens" + ) return authorize_url diff --git a/homeassistant/components/cloud/backup.py b/homeassistant/components/cloud/backup.py index b31fe16fbe9..f4426eabeed 100644 --- a/homeassistant/components/cloud/backup.py +++ b/homeassistant/components/cloud/backup.py @@ -4,13 +4,14 @@ from __future__ import annotations import asyncio from collections.abc import AsyncIterator, Callable, Coroutine, Mapping +from http import HTTPStatus import logging import random from typing import Any -from aiohttp import ClientError +from aiohttp import ClientError, ClientResponseError from hass_nabucasa import Cloud, CloudError -from hass_nabucasa.api import CloudApiNonRetryableError +from hass_nabucasa.api import CloudApiError, CloudApiNonRetryableError from hass_nabucasa.cloud_api import ( FilesHandlerListEntry, async_files_delete_file, @@ -18,7 +19,12 @@ from hass_nabucasa.cloud_api import ( ) from hass_nabucasa.files import FilesError, StorageType, calculate_b64md5 -from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError +from homeassistant.components.backup import ( + AgentBackup, + BackupAgent, + BackupAgentError, + BackupNotFound, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -90,9 +96,7 @@ class CloudBackupAgent(BackupAgent): :param backup_id: The ID of the backup that was returned in async_list_backups. :return: An async iterator that yields bytes. 
""" - if not (backup := await self._async_get_backup(backup_id)): - raise BackupAgentError("Backup not found") - + backup = await self._async_get_backup(backup_id) try: content = await self._cloud.files.download( storage_type=StorageType.BACKUP, @@ -117,6 +121,8 @@ class CloudBackupAgent(BackupAgent): """ if not backup.protected: raise BackupAgentError("Cloud backups must be protected") + if self._cloud.subscription_expired: + raise BackupAgentError("Cloud subscription has expired") size = backup.size try: @@ -149,6 +155,13 @@ class CloudBackupAgent(BackupAgent): ) from err raise BackupAgentError(f"Failed to upload backup {err}") from err except CloudError as err: + if ( + isinstance(err, CloudApiError) + and isinstance(err.orig_exc, ClientResponseError) + and err.orig_exc.status == HTTPStatus.FORBIDDEN + and self._cloud.subscription_expired + ): + raise BackupAgentError("Cloud subscription has expired") from err if tries == _RETRY_LIMIT: raise BackupAgentError(f"Failed to upload backup {err}") from err tries += 1 @@ -171,9 +184,7 @@ class CloudBackupAgent(BackupAgent): :param backup_id: The ID of the backup that was returned in async_list_backups. """ - if not (backup := await self._async_get_backup(backup_id)): - return - + backup = await self._async_get_backup(backup_id) try: await async_files_delete_file( self._cloud, @@ -204,16 +215,12 @@ class CloudBackupAgent(BackupAgent): self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: + ) -> AgentBackup: """Return a backup.""" - if not (backup := await self._async_get_backup(backup_id)): - return None + backup = await self._async_get_backup(backup_id) return AgentBackup.from_dict(backup["Metadata"]) - async def _async_get_backup( - self, - backup_id: str, - ) -> FilesHandlerListEntry | None: + async def _async_get_backup(self, backup_id: str) -> FilesHandlerListEntry: """Return a backup.""" backups = await self._async_list_backups() @@ -221,4 +228,4 @@ class CloudBackupAgent(BackupAgent): if backup["Metadata"]["backup_id"] == backup_id: return backup - return None + raise BackupNotFound(f"Backup {backup_id} not found") diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index 73952d80f6c..6f18cc424cd 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -245,6 +245,10 @@ class CloudLoginView(HomeAssistantView): name = "api:cloud:login" @require_admin + async def post(self, request: web.Request) -> web.Response: + """Handle login request.""" + return await self._post(request) + @_handle_cloud_errors @RequestDataValidator( vol.Schema( @@ -259,7 +263,7 @@ class CloudLoginView(HomeAssistantView): ) ) ) - async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: + async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Handle login request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] @@ -316,8 +320,12 @@ class CloudLogoutView(HomeAssistantView): name = "api:cloud:logout" @require_admin - @_handle_cloud_errors async def post(self, request: web.Request) -> web.Response: + """Handle logout request.""" + return await self._post(request) + + @_handle_cloud_errors + async def _post(self, request: web.Request) -> web.Response: """Handle logout request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] @@ -400,9 +408,13 @@ class CloudForgotPasswordView(HomeAssistantView): name = "api:cloud:forgot_password" @require_admin + async def post(self, request: 
web.Request) -> web.Response: + """Handle forgot password request.""" + return await self._post(request) + @_handle_cloud_errors @RequestDataValidator(vol.Schema({vol.Required("email"): str})) - async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response: + async def _post(self, request: web.Request, data: dict[str, Any]) -> web.Response: """Handle forgot password request.""" hass = request.app[KEY_HASS] cloud = hass.data[DATA_CLOUD] diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 4e99d08afb5..7f448f2f614 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -13,6 +13,6 @@ "integration_type": "system", "iot_class": "cloud_push", "loggers": ["acme", "hass_nabucasa", "snitun"], - "requirements": ["hass-nabucasa==0.92.0"], + "requirements": ["hass-nabucasa==0.94.0"], "single_config_entry": true } diff --git a/homeassistant/components/cloud/tts.py b/homeassistant/components/cloud/tts.py index 3ac3f3d1c2d..f901adfa99e 100644 --- a/homeassistant/components/cloud/tts.py +++ b/homeassistant/components/cloud/tts.py @@ -286,7 +286,7 @@ class CloudTTSEntity(TextToSpeechEntity): return self._language @property - def default_options(self) -> dict[str, Any]: + def default_options(self) -> dict[str, str]: """Return a dict include default options.""" return { ATTR_AUDIO_OUTPUT: AudioOutput.MP3, @@ -363,7 +363,7 @@ class CloudTTSEntity(TextToSpeechEntity): _LOGGER.error("Voice error: %s", err) return (None, None) - return (str(options[ATTR_AUDIO_OUTPUT].value), data) + return (options[ATTR_AUDIO_OUTPUT], data) class CloudProvider(Provider): @@ -404,7 +404,7 @@ class CloudProvider(Provider): return [Voice(voice, voice) for voice in voices] @property - def default_options(self) -> dict[str, Any]: + def default_options(self) -> dict[str, str]: """Return a dict include default options.""" return { ATTR_AUDIO_OUTPUT: AudioOutput.MP3, @@ -444,7 +444,7 @@ class CloudProvider(Provider): _LOGGER.error("Voice error: %s", err) return (None, None) - return (str(options[ATTR_AUDIO_OUTPUT].value), data) + return options[ATTR_AUDIO_OUTPUT], data @callback diff --git a/homeassistant/components/cloudflare/strings.json b/homeassistant/components/cloudflare/strings.json index 8c8ec57b074..453135f47a0 100644 --- a/homeassistant/components/cloudflare/strings.json +++ b/homeassistant/components/cloudflare/strings.json @@ -4,19 +4,19 @@ "step": { "user": { "title": "Connect to Cloudflare", - "description": "This integration requires an API Token created with Zone:Zone:Read and Zone:DNS:Edit permissions for all zones in your account.", + "description": "This integration requires an API token created with Zone:Zone:Read and Zone:DNS:Edit permissions for all zones in your account.", "data": { "api_token": "[%key:common::config_flow::data::api_token%]" } }, "zone": { - "title": "Choose the Zone to Update", + "title": "Choose the zone to update", "data": { "zone": "Zone" } }, "records": { - "title": "Choose the Records to Update", + "title": "Choose the records to update", "data": { "records": "Records" } @@ -40,7 +40,7 @@ "services": { "update_records": { "name": "Update records", - "description": "Manually trigger update to Cloudflare records." + "description": "Manually triggers an update of Cloudflare records." 
} } } diff --git a/homeassistant/components/comelit/alarm_control_panel.py b/homeassistant/components/comelit/alarm_control_panel.py index 6ea4e97f12e..1ad26905dd1 100644 --- a/homeassistant/components/comelit/alarm_control_panel.py +++ b/homeassistant/components/comelit/alarm_control_panel.py @@ -6,7 +6,7 @@ import logging from typing import cast from aiocomelit.api import ComelitVedoAreaObject -from aiocomelit.const import ALARM_AREAS, AlarmAreaState +from aiocomelit.const import AlarmAreaState from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, @@ -20,6 +20,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitVedoSystem +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + _LOGGER = logging.getLogger(__name__) AWAY = "away" @@ -38,6 +41,7 @@ ALARM_ACTIONS: dict[str, str] = { ALARM_AREA_ARMED_STATUS: dict[str, int] = { + DISABLE: 0, HOME_P1: 1, HOME_P2: 2, NIGHT: 3, @@ -56,7 +60,7 @@ async def async_setup_entry( async_add_entities( ComelitAlarmEntity(coordinator, device, config_entry.entry_id) - for device in coordinator.data[ALARM_AREAS].values() + for device in coordinator.data["alarm_areas"].values() ) @@ -92,7 +96,7 @@ class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanel @property def _area(self) -> ComelitVedoAreaObject: """Return area object.""" - return self.coordinator.data[ALARM_AREAS][self._area_index] + return self.coordinator.data["alarm_areas"][self._area_index] @property def available(self) -> bool: @@ -125,20 +129,38 @@ class ComelitAlarmEntity(CoordinatorEntity[ComelitVedoSystem], AlarmControlPanel AlarmAreaState.TRIGGERED: AlarmControlPanelState.TRIGGERED, }.get(self._area.human_status) + async def _async_update_state(self, area_state: AlarmAreaState, armed: int) -> None: + """Update state after action.""" + self._area.human_status = area_state + self._area.armed = armed + await self.async_update_ha_state() + async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" if code != str(self._api.device_pin): return await self._api.set_zone_status(self._area.index, ALARM_ACTIONS[DISABLE]) + await self._async_update_state( + AlarmAreaState.DISARMED, ALARM_AREA_ARMED_STATUS[DISABLE] + ) async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" await self._api.set_zone_status(self._area.index, ALARM_ACTIONS[AWAY]) + await self._async_update_state( + AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[AWAY] + ) async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" await self._api.set_zone_status(self._area.index, ALARM_ACTIONS[HOME]) + await self._async_update_state( + AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[HOME_P1] + ) async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" await self._api.set_zone_status(self._area.index, ALARM_ACTIONS[NIGHT]) + await self._async_update_state( + AlarmAreaState.ARMED, ALARM_AREA_ARMED_STATUS[NIGHT] + ) diff --git a/homeassistant/components/comelit/binary_sensor.py b/homeassistant/components/comelit/binary_sensor.py index a895f8dc511..dfa6d3e97f3 100644 --- a/homeassistant/components/comelit/binary_sensor.py +++ b/homeassistant/components/comelit/binary_sensor.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import cast from aiocomelit import ComelitVedoZoneObject -from aiocomelit.const import ALARM_ZONES 
from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, @@ -17,6 +16,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitVedoSystem +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -29,7 +31,7 @@ async def async_setup_entry( async_add_entities( ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id) - for device in coordinator.data[ALARM_ZONES].values() + for device in coordinator.data["alarm_zones"].values() ) @@ -49,7 +51,7 @@ class ComelitVedoBinarySensorEntity( ) -> None: """Init sensor entity.""" self._api = coordinator.api - self._zone = zone + self._zone_index = zone.index super().__init__(coordinator) # Use config_entry.entry_id as base for unique_id # because no serial number or mac is available @@ -59,4 +61,6 @@ class ComelitVedoBinarySensorEntity( @property def is_on(self) -> bool: """Presence detected.""" - return self.coordinator.data[ALARM_ZONES][self._zone.index].status_api == "0001" + return ( + self.coordinator.data["alarm_zones"][self._zone_index].status_api == "0001" + ) diff --git a/homeassistant/components/comelit/climate.py b/homeassistant/components/comelit/climate.py index 6906c9bf735..3ec79001d55 100644 --- a/homeassistant/components/comelit/climate.py +++ b/homeassistant/components/comelit/climate.py @@ -3,7 +3,7 @@ from __future__ import annotations from enum import StrEnum -from typing import Any, cast +from typing import Any, TypedDict, cast from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import CLIMATE @@ -16,12 +16,17 @@ from homeassistant.components.climate import ( UnitOfTemperature, ) from homeassistant.const import ATTR_TEMPERATURE, PRECISION_TENTHS -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from .const import DOMAIN from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + class ClimaComelitMode(StrEnum): """Serial Bridge clima modes.""" @@ -42,22 +47,23 @@ class ClimaComelitCommand(StrEnum): AUTO = "auto" -API_STATUS: dict[str, dict[str, Any]] = { - ClimaComelitMode.OFF: { - "action": "off", - "hvac_mode": HVACMode.OFF, - "hvac_action": HVACAction.OFF, - }, - ClimaComelitMode.LOWER: { - "action": "lower", - "hvac_mode": HVACMode.COOL, - "hvac_action": HVACAction.COOLING, - }, - ClimaComelitMode.UPPER: { - "action": "upper", - "hvac_mode": HVACMode.HEAT, - "hvac_action": HVACAction.HEATING, - }, +class ClimaComelitApiStatus(TypedDict): + """Comelit Clima API status.""" + + hvac_mode: HVACMode + hvac_action: HVACAction + + +API_STATUS: dict[str, ClimaComelitApiStatus] = { + ClimaComelitMode.OFF: ClimaComelitApiStatus( + hvac_mode=HVACMode.OFF, hvac_action=HVACAction.OFF + ), + ClimaComelitMode.LOWER: ClimaComelitApiStatus( + hvac_mode=HVACMode.COOL, hvac_action=HVACAction.COOLING + ), + ClimaComelitMode.UPPER: ClimaComelitApiStatus( + hvac_mode=HVACMode.HEAT, hvac_action=HVACAction.HEATING + ), } MODE_TO_ACTION: dict[HVACMode, ClimaComelitCommand] = { @@ -113,70 +119,50 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity # because no serial number or mac is 
available self._attr_unique_id = f"{config_entry_entry_id}-{device.index}" self._attr_device_info = coordinator.platform_device_info(device, device.type) + self._update_attributes() + + def _update_attributes(self) -> None: + """Update class attributes.""" + device = self.coordinator.data[CLIMATE][self._device.index] + if not isinstance(device.val, list): + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="invalid_clima_data" + ) - @property - def _clima(self) -> list[Any]: - """Return clima device data.""" # CLIMATE has a 2 item tuple: # - first for Clima # - second for Humidifier - return self.coordinator.data[CLIMATE][self._device.index].val[0] + values = device.val[0] - @property - def _api_mode(self) -> str: - """Return device mode.""" - # Values from API: "O", "L", "U" - return self._clima[2] + _active = values[1] + _mode = values[2] # Values from API: "O", "L", "U" + _automatic = values[3] == ClimaComelitMode.AUTO - @property - def _api_active(self) -> bool: - "Return device active/idle." - return self._clima[1] + self._attr_current_temperature = values[0] / 10 - @property - def _api_automatic(self) -> bool: - """Return device in automatic/manual mode.""" - return self._clima[3] == ClimaComelitMode.AUTO + self._attr_hvac_action = None + if _mode == ClimaComelitMode.OFF: + self._attr_hvac_action = HVACAction.OFF + if not _active: + self._attr_hvac_action = HVACAction.IDLE + if _mode in API_STATUS: + self._attr_hvac_action = API_STATUS[_mode]["hvac_action"] - @property - def target_temperature(self) -> float: - """Set target temperature.""" - return self._clima[4] / 10 + self._attr_hvac_mode = None + if _mode == ClimaComelitMode.OFF: + self._attr_hvac_mode = HVACMode.OFF + if _automatic: + self._attr_hvac_mode = HVACMode.AUTO + if _mode in API_STATUS: + self._attr_hvac_mode = API_STATUS[_mode]["hvac_mode"] - @property - def current_temperature(self) -> float: - """Return current temperature.""" - return self._clima[0] / 10 + self._attr_target_temperature = values[4] / 10 - @property - def hvac_mode(self) -> HVACMode | None: - """HVAC current mode.""" - - if self._api_mode == ClimaComelitMode.OFF: - return HVACMode.OFF - - if self._api_automatic: - return HVACMode.AUTO - - if self._api_mode in API_STATUS: - return API_STATUS[self._api_mode]["hvac_mode"] - - return None - - @property - def hvac_action(self) -> HVACAction | None: - """HVAC current action.""" - - if self._api_mode == ClimaComelitMode.OFF: - return HVACAction.OFF - - if not self._api_active: - return HVACAction.IDLE - - if self._api_mode in API_STATUS: - return API_STATUS[self._api_mode]["hvac_action"] - - return None + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_attributes() + super()._handle_coordinator_update() async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" @@ -191,6 +177,8 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity await self.coordinator.api.set_clima_status( self._device.index, ClimaComelitCommand.SET, target_temp ) + self._attr_target_temperature = target_temp + self.async_write_ha_state() async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set hvac mode.""" @@ -202,3 +190,5 @@ class ComelitClimateEntity(CoordinatorEntity[ComelitSerialBridge], ClimateEntity await self.coordinator.api.set_clima_status( self._device.index, MODE_TO_ACTION[hvac_mode] ) + self._attr_hvac_mode = hvac_mode + self.async_write_ha_state() 
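The Comelit climate, cover, and alarm changes above share one idea: after a command is sent to the bridge, the entity writes the expected state locally (an optimistic update) instead of waiting up to SCAN_INTERVAL seconds for the next coordinator poll. A minimal standalone sketch of that pattern follows; FakeApi and Thermostat are made-up stand-ins, not the real aiocomelit API or Home Assistant entity classes.

import asyncio

class FakeApi:
    """Stand-in for the bridge API; records the last command sent."""

    def __init__(self) -> None:
        self.sent: list[tuple[int, str, float]] = []

    async def set_clima_status(self, index: int, command: str, value: float = 0) -> None:
        self.sent.append((index, command, value))

class Thermostat:
    """Entity-like object that applies the new value right after the call."""

    def __init__(self, api: FakeApi, index: int) -> None:
        self._api = api
        self._index = index
        self.target_temperature: float | None = None

    async def async_set_temperature(self, temperature: float) -> None:
        await self._api.set_clima_status(self._index, "set", temperature)
        # Optimistic update: reflect the new target immediately instead of
        # waiting for the next coordinator refresh to report it back.
        self.target_temperature = temperature

async def main() -> None:
    api = FakeApi()
    thermostat = Thermostat(api, index=0)
    await thermostat.async_set_temperature(21.5)
    assert api.sent == [(0, "set", 21.5)]
    assert thermostat.target_temperature == 21.5

asyncio.run(main())

The trade-off is responsiveness: the UI reflects the command at once, and the next coordinator poll still overwrites the optimistic value with whatever state the device actually reports.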
diff --git a/homeassistant/components/comelit/const.py b/homeassistant/components/comelit/const.py index 84d8fbd6315..f52f33fd6da 100644 --- a/homeassistant/components/comelit/const.py +++ b/homeassistant/components/comelit/const.py @@ -9,3 +9,5 @@ _LOGGER = logging.getLogger(__package__) DOMAIN = "comelit" DEFAULT_PORT = 80 DEVICE_TYPE_LIST = [BRIDGE, VEDO] + +SCAN_INTERVAL = 5 diff --git a/homeassistant/components/comelit/coordinator.py b/homeassistant/components/comelit/coordinator.py index fcb149b21d6..df4965d9945 100644 --- a/homeassistant/components/comelit/coordinator.py +++ b/homeassistant/components/comelit/coordinator.py @@ -2,18 +2,19 @@ from abc import abstractmethod from datetime import timedelta -from typing import Any +from typing import TypeVar -from aiocomelit import ( +from aiocomelit.api import ( + AlarmDataObject, + ComelitCommonApi, ComeliteSerialBridgeApi, ComelitSerialBridgeObject, ComelitVedoApi, ComelitVedoAreaObject, ComelitVedoZoneObject, - exceptions, ) -from aiocomelit.api import ComelitCommonApi from aiocomelit.const import BRIDGE, VEDO +from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -21,12 +22,18 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import _LOGGER, DOMAIN +from .const import _LOGGER, DOMAIN, SCAN_INTERVAL type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator] -class ComelitBaseCoordinator(DataUpdateCoordinator[dict[str, Any]]): +T = TypeVar( + "T", + bound=dict[str, dict[int, ComelitSerialBridgeObject]] | AlarmDataObject, +) + + +class ComelitBaseCoordinator(DataUpdateCoordinator[T]): """Base coordinator for Comelit Devices.""" _hw_version: str @@ -46,7 +53,7 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[dict[str, Any]]): logger=_LOGGER, config_entry=entry, name=f"{DOMAIN}-{host}-coordinator", - update_interval=timedelta(seconds=5), + update_interval=timedelta(seconds=SCAN_INTERVAL), ) device_registry = dr.async_get(self.hass) device_registry.async_get_or_create( @@ -81,23 +88,25 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[dict[str, Any]]): hw_version=self._hw_version, ) - async def _async_update_data(self) -> dict[str, Any]: + async def _async_update_data(self) -> T: """Update device data.""" _LOGGER.debug("Polling Comelit %s host: %s", self._device, self._host) try: await self.api.login() return await self._async_update_system_data() - except (exceptions.CannotConnect, exceptions.CannotRetrieveData) as err: + except (CannotConnect, CannotRetrieveData) as err: raise UpdateFailed(repr(err)) from err - except exceptions.CannotAuthenticate as err: + except CannotAuthenticate as err: raise ConfigEntryAuthFailed from err @abstractmethod - async def _async_update_system_data(self) -> dict[str, Any]: + async def _async_update_system_data(self) -> T: """Class method for updating data.""" -class ComelitSerialBridge(ComelitBaseCoordinator): +class ComelitSerialBridge( + ComelitBaseCoordinator[dict[str, dict[int, ComelitSerialBridgeObject]]] +): """Queries Comelit Serial Bridge.""" _hw_version = "20003101" @@ -115,12 +124,14 @@ class ComelitSerialBridge(ComelitBaseCoordinator): self.api = ComeliteSerialBridgeApi(host, port, pin) super().__init__(hass, entry, BRIDGE, host) - async def _async_update_system_data(self) -> 
dict[str, Any]: + async def _async_update_system_data( + self, + ) -> dict[str, dict[int, ComelitSerialBridgeObject]]: """Specific method for updating data.""" return await self.api.get_all_devices() -class ComelitVedoSystem(ComelitBaseCoordinator): +class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]): """Queries Comelit VEDO system.""" _hw_version = "VEDO IP" @@ -138,6 +149,8 @@ class ComelitVedoSystem(ComelitBaseCoordinator): self.api = ComelitVedoApi(host, port, pin) super().__init__(hass, entry, VEDO, host) - async def _async_update_system_data(self) -> dict[str, Any]: + async def _async_update_system_data( + self, + ) -> AlarmDataObject: """Specific method for updating data.""" return await self.api.get_all_areas_and_zones() diff --git a/homeassistant/components/comelit/cover.py b/homeassistant/components/comelit/cover.py index 64412569f95..befcb0c35d4 100644 --- a/homeassistant/components/comelit/cover.py +++ b/homeassistant/components/comelit/cover.py @@ -8,13 +8,16 @@ from aiocomelit import ComelitSerialBridgeObject from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON from homeassistant.components.cover import CoverDeviceClass, CoverEntity, CoverState -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -95,13 +98,20 @@ class ComelitCoverEntity( """Return if the cover is opening.""" return self._current_action("opening") + async def _cover_set_state(self, action: int, state: int) -> None: + """Set desired cover state.""" + self._last_state = self.state + await self._api.set_device_status(COVER, self._device.index, action) + self.coordinator.data[COVER][self._device.index].status = state + self.async_write_ha_state() + async def async_close_cover(self, **kwargs: Any) -> None: """Close cover.""" - await self._api.set_device_status(COVER, self._device.index, STATE_OFF) + await self._cover_set_state(STATE_OFF, 2) async def async_open_cover(self, **kwargs: Any) -> None: """Open cover.""" - await self._api.set_device_status(COVER, self._device.index, STATE_ON) + await self._cover_set_state(STATE_ON, 1) async def async_stop_cover(self, **_kwargs: Any) -> None: """Stop the cover.""" @@ -109,13 +119,7 @@ class ComelitCoverEntity( return action = STATE_ON if self.is_closing else STATE_OFF - await self._api.set_device_status(COVER, self._device.index, action) - - @callback - def _handle_coordinator_update(self) -> None: - """Handle device update.""" - self._last_state = self.state - self.async_write_ha_state() + await self._cover_set_state(action, 0) async def async_added_to_hass(self) -> None: """Handle entity which will be added.""" diff --git a/homeassistant/components/comelit/humidifier.py b/homeassistant/components/comelit/humidifier.py index 5daf2297782..ad8f49ed5e2 100644 --- a/homeassistant/components/comelit/humidifier.py +++ b/homeassistant/components/comelit/humidifier.py @@ -16,14 +16,17 @@ from homeassistant.components.humidifier import ( HumidifierEntity, HumidifierEntityFeature, ) -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from 
homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + class HumidifierComelitMode(StrEnum): """Serial Bridge humidifier modes.""" @@ -121,62 +124,41 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier self._active_mode = active_mode self._active_action = active_action self._set_command = set_command + self._update_attributes() + + def _update_attributes(self) -> None: + """Update class attributes.""" + device = self.coordinator.data[CLIMATE][self._device.index] + if not isinstance(device.val, list): + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="invalid_clima_data" + ) - @property - def _humidifier(self) -> list[Any]: - """Return humidifier device data.""" # CLIMATE has a 2 item tuple: # - first for Clima # - second for Humidifier - return self.coordinator.data[CLIMATE][self._device.index].val[1] + values = device.val[1] - @property - def _api_mode(self) -> str: - """Return device mode.""" - # Values from API: "O", "L", "U" - return self._humidifier[2] + _active = values[1] + _mode = values[2] # Values from API: "O", "L", "U" + _automatic = values[3] == HumidifierComelitMode.AUTO - @property - def _api_active(self) -> bool: - "Return device active/idle." - return self._humidifier[1] + self._attr_action = HumidifierAction.IDLE + if _mode == HumidifierComelitMode.OFF: + self._attr_action = HumidifierAction.OFF + if _active and _mode == self._active_mode: + self._attr_action = self._active_action - @property - def _api_automatic(self) -> bool: - """Return device in automatic/manual mode.""" - return self._humidifier[3] == HumidifierComelitMode.AUTO + self._attr_current_humidity = values[0] / 10 + self._attr_is_on = _mode == self._active_mode + self._attr_mode = MODE_AUTO if _automatic else MODE_NORMAL + self._attr_target_humidity = values[4] / 10 - @property - def target_humidity(self) -> float: - """Set target humidity.""" - return self._humidifier[4] / 10 - - @property - def current_humidity(self) -> float: - """Return current humidity.""" - return self._humidifier[0] / 10 - - @property - def is_on(self) -> bool | None: - """Return true is humidifier is on.""" - return self._api_mode == self._active_mode - - @property - def mode(self) -> str | None: - """Return current mode.""" - return MODE_AUTO if self._api_automatic else MODE_NORMAL - - @property - def action(self) -> HumidifierAction | None: - """Return current action.""" - - if self._api_mode == HumidifierComelitMode.OFF: - return HumidifierAction.OFF - - if self._api_active and self._api_mode == self._active_mode: - return self._active_action - - return HumidifierAction.IDLE + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_attributes() + super()._handle_coordinator_update() async def async_set_humidity(self, humidity: int) -> None: """Set new target humidity.""" @@ -192,12 +174,16 @@ class ComelitHumidifierEntity(CoordinatorEntity[ComelitSerialBridge], Humidifier await self.coordinator.api.set_humidity_status( self._device.index, HumidifierComelitCommand.SET, humidity ) + self._attr_target_humidity = humidity + 
self.async_write_ha_state() async def async_set_mode(self, mode: str) -> None: """Set humidifier mode.""" await self.coordinator.api.set_humidity_status( self._device.index, MODE_TO_ACTION[mode] ) + self._attr_mode = mode + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn on.""" diff --git a/homeassistant/components/comelit/light.py b/homeassistant/components/comelit/light.py index 45f4146ece6..53cf6bdcb46 100644 --- a/homeassistant/components/comelit/light.py +++ b/homeassistant/components/comelit/light.py @@ -14,6 +14,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -56,7 +59,8 @@ class ComelitLightEntity(CoordinatorEntity[ComelitSerialBridge], LightEntity): async def _light_set_state(self, state: int) -> None: """Set desired light state.""" await self.coordinator.api.set_device_status(LIGHT, self._device.index, state) - await self.coordinator.async_request_refresh() + self.coordinator.data[LIGHT][self._device.index].status = state + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index 238dede8546..3abfc222e7d 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiocomelit"], - "requirements": ["aiocomelit==0.10.1"] + "requirements": ["aiocomelit==0.11.3"] } diff --git a/homeassistant/components/comelit/sensor.py b/homeassistant/components/comelit/sensor.py index 9200d99262f..c93ccd30eb6 100644 --- a/homeassistant/components/comelit/sensor.py +++ b/homeassistant/components/comelit/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from typing import Final, cast from aiocomelit import ComelitSerialBridgeObject, ComelitVedoZoneObject -from aiocomelit.const import ALARM_ZONES, BRIDGE, OTHER, AlarmZoneState +from aiocomelit.const import BRIDGE, OTHER, AlarmZoneState from homeassistant.components.sensor import ( SensorDeviceClass, @@ -20,6 +20,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + SENSOR_BRIDGE_TYPES: Final = ( SensorEntityDescription( key="power", @@ -82,7 +85,7 @@ async def async_setup_vedo_entry( coordinator = cast(ComelitVedoSystem, config_entry.runtime_data) entities: list[ComelitVedoSensorEntity] = [] - for device in coordinator.data[ALARM_ZONES].values(): + for device in coordinator.data["alarm_zones"].values(): entities.extend( ComelitVedoSensorEntity( coordinator, device, config_entry.entry_id, sensor_desc @@ -119,9 +122,12 @@ class ComelitBridgeSensorEntity(CoordinatorEntity[ComelitSerialBridge], SensorEn @property def native_value(self) -> StateType: """Sensor value.""" - return getattr( - self.coordinator.data[OTHER][self._device.index], - self.entity_description.key, + return cast( + StateType, + getattr( + self.coordinator.data[OTHER][self._device.index], + self.entity_description.key, + ), ) @@ -139,7 +145,7 @@ class ComelitVedoSensorEntity(CoordinatorEntity[ComelitVedoSystem], SensorEntity ) -> None: 
"""Init sensor entity.""" self._api = coordinator.api - self._zone = zone + self._zone_index = zone.index super().__init__(coordinator) # Use config_entry.entry_id as base for unique_id # because no serial number or mac is available @@ -151,7 +157,7 @@ class ComelitVedoSensorEntity(CoordinatorEntity[ComelitVedoSystem], SensorEntity @property def _zone_object(self) -> ComelitVedoZoneObject: """Zone object.""" - return self.coordinator.data[ALARM_ZONES][self._zone.index] + return self.coordinator.data["alarm_zones"][self._zone_index] @property def available(self) -> bool: @@ -164,4 +170,4 @@ class ComelitVedoSensorEntity(CoordinatorEntity[ComelitVedoSystem], SensorEntity if (status := self._zone_object.human_status) == AlarmZoneState.UNKNOWN: return None - return status.value + return cast(str, status.value) diff --git a/homeassistant/components/comelit/strings.json b/homeassistant/components/comelit/strings.json index 14d947c7323..496d62655a9 100644 --- a/homeassistant/components/comelit/strings.json +++ b/homeassistant/components/comelit/strings.json @@ -3,19 +3,25 @@ "flow_title": "{host}", "step": { "reauth_confirm": { - "description": "Please enter the correct PIN for {host}", "data": { "pin": "[%key:common::config_flow::data::pin%]" + }, + "data_description": { + "pin": "The PIN of your Comelit device." } }, "user": { "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", - "pin": "[%key:common::config_flow::data::pin%]" + "pin": "[%key:common::config_flow::data::pin%]", + "type": "Device type" }, "data_description": { - "host": "The hostname or IP address of your Comelit device." + "host": "The hostname or IP address of your Comelit device.", + "port": "The port of your Comelit device.", + "pin": "[%key:component::comelit::config::step::reauth_confirm::data_description::pin%]", + "type": "The type of your Comelit device." 
} } }, @@ -58,6 +64,9 @@ "exceptions": { "humidity_while_off": { "message": "Cannot change humidity while off" + }, + "invalid_clima_data": { + "message": "Invalid 'clima' data" } } } diff --git a/homeassistant/components/comelit/switch.py b/homeassistant/components/comelit/switch.py index e89ee74c1be..2c751cbe2cb 100644 --- a/homeassistant/components/comelit/switch.py +++ b/homeassistant/components/comelit/switch.py @@ -14,6 +14,9 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .coordinator import ComelitConfigEntry, ComelitSerialBridge +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -64,7 +67,8 @@ class ComelitSwitchEntity(CoordinatorEntity[ComelitSerialBridge], SwitchEntity): await self.coordinator.api.set_device_status( self._device.type, self._device.index, state ) - await self.coordinator.async_request_refresh() + self.coordinator.data[self._device.type][self._device.index].status = state + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" @@ -77,7 +81,4 @@ class ComelitSwitchEntity(CoordinatorEntity[ComelitSerialBridge], SwitchEntity): @property def is_on(self) -> bool: """Return True if switch is on.""" - return ( - self.coordinator.data[self._device.type][self._device.index].status - == STATE_ON - ) + return self.coordinator.data[OTHER][self._device.index].status == STATE_ON diff --git a/homeassistant/components/conversation/__init__.py b/homeassistant/components/conversation/__init__.py index 14c5244c18b..25aaf6df290 100644 --- a/homeassistant/components/conversation/__init__.py +++ b/homeassistant/components/conversation/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable import logging -import re from typing import Literal from hassil.recognize import RecognizeResult @@ -91,8 +90,6 @@ __all__ = [ _LOGGER = logging.getLogger(__name__) -REGEX_TYPE = type(re.compile("")) - SERVICE_PROCESS_SCHEMA = vol.Schema( { vol.Required(ATTR_TEXT): cv.string, diff --git a/homeassistant/components/conversation/chat_log.py b/homeassistant/components/conversation/chat_log.py index 19482af1983..cb7b8dd22f7 100644 --- a/homeassistant/components/conversation/chat_log.py +++ b/homeassistant/components/conversation/chat_log.py @@ -8,7 +8,7 @@ from contextlib import contextmanager from contextvars import ContextVar from dataclasses import asdict, dataclass, field, replace import logging -from typing import Literal, TypedDict +from typing import Any, Literal, TypedDict import voluptuous as vol @@ -51,8 +51,7 @@ def async_get_chat_log( ) if user_input is not None and ( (content := chat_log.content[-1]).role != "user" - # MyPy doesn't understand that content is a UserContent here - or content.content != user_input.text # type: ignore[union-attr] + or content.content != user_input.text ): chat_log.async_add_user_content(UserContent(content=user_input.text)) @@ -128,7 +127,7 @@ class ConverseError(HomeAssistantError): class SystemContent: """Base class for chat messages.""" - role: str = field(init=False, default="system") + role: Literal["system"] = field(init=False, default="system") content: str @@ -136,7 +135,7 @@ class SystemContent: class UserContent: """Assistant content.""" - role: str = field(init=False, default="user") + role: Literal["user"] = field(init=False, default="user") content: str @@ -144,7 +143,7 @@ class UserContent: class AssistantContent: """Assistant content.""" - 
role: str = field(init=False, default="assistant") + role: Literal["assistant"] = field(init=False, default="assistant") agent_id: str content: str | None = None tool_calls: list[llm.ToolInput] | None = None @@ -154,7 +153,7 @@ class AssistantContent: class ToolResultContent: """Tool result content.""" - role: str = field(init=False, default="tool_result") + role: Literal["tool_result"] = field(init=False, default="tool_result") agent_id: str tool_call_id: str tool_name: str @@ -183,6 +182,25 @@ class ChatLog: llm_api: llm.APIInstance | None = None delta_listener: Callable[[ChatLog, dict], None] | None = None + @property + def continue_conversation(self) -> bool: + """Return whether the conversation should continue.""" + if not self.content: + return False + + last_msg = self.content[-1] + + return ( + last_msg.role == "assistant" + and last_msg.content is not None + and last_msg.content.strip().endswith( + ( + "?", + ";", # Greek question mark + ) + ) + ) + @property def unresponded_tool_results(self) -> bool: """Return if there are unresponded tool results.""" @@ -438,10 +456,16 @@ class ChatLog: LOGGER.debug("Prompt: %s", self.content) LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None) - trace.async_conversation_trace_append( - trace.ConversationTraceEventType.AGENT_DETAIL, + self.async_trace( { "messages": self.content, "tools": self.llm_api.tools if self.llm_api else None, - }, + } + ) + + def async_trace(self, agent_details: dict[str, Any]) -> None: + """Append agent specific details to the conversation trace.""" + trace.async_conversation_trace_append( + trace.ConversationTraceEventType.AGENT_DETAIL, + agent_details, ) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 3a7aa0c26e8..bed4b4c0dd6 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -42,7 +42,6 @@ from homeassistant.components.homeassistant.exposed_entities import ( from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL from homeassistant.helpers import ( area_registry as ar, - chat_session, device_registry as dr, entity_registry as er, floor_registry as fr, @@ -56,7 +55,7 @@ from homeassistant.helpers.event import async_track_state_added_domain from homeassistant.util import language as language_util from homeassistant.util.json import JsonObjectType, json_loads_object -from .chat_log import AssistantContent, async_get_chat_log +from .chat_log import AssistantContent, ChatLog from .const import ( DATA_DEFAULT_ENTITY, DEFAULT_EXPOSED_ATTRIBUTES, @@ -332,49 +331,46 @@ class DefaultAgent(ConversationEntity): return result - async def async_process(self, user_input: ConversationInput) -> ConversationResult: - """Process a sentence.""" + async def _async_handle_message( + self, + user_input: ConversationInput, + chat_log: ChatLog, + ) -> ConversationResult: + """Handle a message.""" response: intent.IntentResponse | None = None - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - # Check if a trigger matched - if trigger_result := await self.async_recognize_sentence_trigger( - user_input - ): - # Process callbacks and get response - response_text = await self._handle_trigger_result( - trigger_result, user_input - ) - # Convert to conversation result - response = intent.IntentResponse( - language=user_input.language or 
self.hass.config.language - ) - response.response_type = intent.IntentResponseType.ACTION_DONE - response.async_set_speech(response_text) - - if response is None: - # Match intents - intent_result = await self.async_recognize_intent(user_input) - response = await self._async_process_intent_result( - intent_result, user_input - ) - - speech: str = response.speech.get("plain", {}).get("speech", "") - chat_log.async_add_assistant_content_without_tools( - AssistantContent( - agent_id=user_input.agent_id, - content=speech, - ) + # Check if a trigger matched + if trigger_result := await self.async_recognize_sentence_trigger(user_input): + # Process callbacks and get response + response_text = await self._handle_trigger_result( + trigger_result, user_input ) - return ConversationResult( - response=response, conversation_id=session.conversation_id + # Convert to conversation result + response = intent.IntentResponse( + language=user_input.language or self.hass.config.language ) + response.response_type = intent.IntentResponseType.ACTION_DONE + response.async_set_speech(response_text) + + if response is None: + # Match intents + intent_result = await self.async_recognize_intent(user_input) + response = await self._async_process_intent_result( + intent_result, user_input + ) + + speech: str = response.speech.get("plain", {}).get("speech", "") + chat_log.async_add_assistant_content_without_tools( + AssistantContent( + agent_id=user_input.agent_id, + content=speech, + ) + ) + + return ConversationResult( + response=response, conversation_id=chat_log.conversation_id + ) async def _async_process_intent_result( self, @@ -654,7 +650,14 @@ class DefaultAgent(ConversationEntity): if ( (maybe_result is None) # first result - or (num_matched_entities > best_num_matched_entities) + or ( + # More literal text matched + result.text_chunks_matched > maybe_result.text_chunks_matched + ) + or ( + # More entities matched + num_matched_entities > best_num_matched_entities + ) or ( # Fewer unmatched entities (num_matched_entities == best_num_matched_entities) @@ -666,16 +669,6 @@ class DefaultAgent(ConversationEntity): and (num_unmatched_entities == best_num_unmatched_entities) and (num_unmatched_ranges > best_num_unmatched_ranges) ) - or ( - # More literal text matched - (num_matched_entities == best_num_matched_entities) - and (num_unmatched_entities == best_num_unmatched_entities) - and (num_unmatched_ranges == best_num_unmatched_ranges) - and ( - result.text_chunks_matched - > maybe_result.text_chunks_matched - ) - ) or ( # Prefer match failures with entities (result.text_chunks_matched == maybe_result.text_chunks_matched) diff --git a/homeassistant/components/conversation/entity.py b/homeassistant/components/conversation/entity.py index d9598dee7eb..ca4d18ab9f5 100644 --- a/homeassistant/components/conversation/entity.py +++ b/homeassistant/components/conversation/entity.py @@ -4,9 +4,11 @@ from abc import abstractmethod from typing import Literal, final from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.helpers.chat_session import async_get_chat_session from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.util import dt as dt_util +from .chat_log import ChatLog, async_get_chat_log from .const import ConversationEntityFeature from .models import ConversationInput, ConversationResult @@ -51,9 +53,21 @@ class ConversationEntity(RestoreEntity): def supported_languages(self) -> list[str] | Literal["*"]: """Return a list of supported languages.""" - 
@abstractmethod async def async_process(self, user_input: ConversationInput) -> ConversationResult: """Process a sentence.""" + with ( + async_get_chat_session(self.hass, user_input.conversation_id) as session, + async_get_chat_log(self.hass, session, user_input) as chat_log, + ): + return await self._async_handle_message(user_input, chat_log) + + async def _async_handle_message( + self, + user_input: ConversationInput, + chat_log: ChatLog, + ) -> ConversationResult: + """Call the API.""" + raise NotImplementedError async def async_prepare(self, language: str | None = None) -> None: """Load intents for a language.""" diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index 4d8526a4fd4..efcdcb8d69b 100644 --- a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -3,11 +3,13 @@ from __future__ import annotations from collections.abc import Iterable +from dataclasses import asdict from typing import Any from aiohttp import web from hassil.recognize import MISSING_ENTITY, RecognizeResult from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity +from home_assistant_intents import get_language_scores import voluptuous as vol from homeassistant.components import http, websocket_api @@ -38,6 +40,7 @@ def async_setup(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_list_agents) websocket_api.async_register_command(hass, websocket_list_sentences) websocket_api.async_register_command(hass, websocket_hass_agent_debug) + websocket_api.async_register_command(hass, websocket_hass_agent_language_scores) @websocket_api.websocket_command( @@ -336,6 +339,36 @@ def _get_unmatched_slots( return unmatched_slots +@websocket_api.websocket_command( + { + vol.Required("type"): "conversation/agent/homeassistant/language_scores", + vol.Optional("language"): str, + vol.Optional("country"): str, + } +) +@websocket_api.async_response +async def websocket_hass_agent_language_scores( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get support scores per language.""" + language = msg.get("language", hass.config.language) + country = msg.get("country", hass.config.country) + + scores = await hass.async_add_executor_job(get_language_scores) + matching_langs = language_util.matches(language, scores.keys(), country=country) + preferred_lang = matching_langs[0] if matching_langs else language + result = { + "languages": { + lang_key: asdict(lang_scores) for lang_key, lang_scores in scores.items() + }, + "preferred_language": preferred_lang, + } + + connection.send_result(msg["id"], result) + + class ConversationProcessView(http.HomeAssistantView): """View to process text.""" diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index ea950ace323..a1281764bd5 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.5"] + "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.3.28"] } diff --git a/homeassistant/components/conversation/models.py b/homeassistant/components/conversation/models.py index 08a68fa0164..7bdd13afc01 100644 --- 
a/homeassistant/components/conversation/models.py +++ b/homeassistant/components/conversation/models.py @@ -62,12 +62,14 @@ class ConversationResult: response: intent.IntentResponse conversation_id: str | None = None + continue_conversation: bool = False def as_dict(self) -> dict[str, Any]: """Return result as a dict.""" return { "response": self.response.as_dict(), "conversation_id": self.conversation_id, + "continue_conversation": self.continue_conversation, } diff --git a/homeassistant/components/conversation/util.py b/homeassistant/components/conversation/util.py deleted file mode 100644 index 4326c95cb66..00000000000 --- a/homeassistant/components/conversation/util.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Util for Conversation.""" - -from __future__ import annotations - -import re - - -def create_matcher(utterance: str) -> re.Pattern[str]: - """Create a regex that matches the utterance.""" - # Split utterance into parts that are type: NORMAL, GROUP or OPTIONAL - # Pattern matches (GROUP|OPTIONAL): Change light to [the color] {name} - parts = re.split(r"({\w+}|\[[\w\s]+\] *)", utterance) - # Pattern to extract name from GROUP part. Matches {name} - group_matcher = re.compile(r"{(\w+)}") - # Pattern to extract text from OPTIONAL part. Matches [the color] - optional_matcher = re.compile(r"\[([\w ]+)\] *") - - pattern = ["^"] - for part in parts: - group_match = group_matcher.match(part) - optional_match = optional_matcher.match(part) - - # Normal part - if group_match is None and optional_match is None: - pattern.append(part) - continue - - # Group part - if group_match is not None: - pattern.append(rf"(?P<{group_match.groups()[0]}>[\w ]+?)\s*") - - # Optional part - elif optional_match is not None: - pattern.append(rf"(?:{optional_match.groups()[0]} *)?") - - pattern.append("$") - return re.compile("".join(pattern), re.IGNORECASE) diff --git a/homeassistant/components/cover/reproduce_state.py b/homeassistant/components/cover/reproduce_state.py index 307fe5f11bd..de3e0cebfb7 100644 --- a/homeassistant/components/cover/reproduce_state.py +++ b/homeassistant/components/cover/reproduce_state.py @@ -3,12 +3,14 @@ from __future__ import annotations import asyncio -from collections.abc import Iterable +from collections.abc import Coroutine, Iterable +from functools import partial import logging -from typing import Any +from typing import Any, Final from homeassistant.const import ( ATTR_ENTITY_ID, + ATTR_SUPPORTED_FEATURES, SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -16,7 +18,8 @@ from homeassistant.const import ( SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, ) -from homeassistant.core import Context, HomeAssistant, State +from homeassistant.core import Context, HomeAssistant, ServiceResponse, State +from homeassistant.util.enum import try_parse_enum from . import ( ATTR_CURRENT_POSITION, @@ -24,17 +27,140 @@ from . 
import ( ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN, + CoverEntityFeature, CoverState, ) _LOGGER = logging.getLogger(__name__) -VALID_STATES = { - CoverState.CLOSED, - CoverState.CLOSING, - CoverState.OPEN, - CoverState.OPENING, -} + +OPENING_STATES = {CoverState.OPENING, CoverState.OPEN} +CLOSING_STATES = {CoverState.CLOSING, CoverState.CLOSED} +VALID_STATES: set[CoverState] = OPENING_STATES | CLOSING_STATES + +FULL_OPEN: Final = 100 +FULL_CLOSE: Final = 0 + + +def _determine_features(current_attrs: dict[str, Any]) -> CoverEntityFeature: + """Determine supported features based on current attributes.""" + features = CoverEntityFeature(0) + if ATTR_CURRENT_POSITION in current_attrs: + features |= ( + CoverEntityFeature.SET_POSITION + | CoverEntityFeature.OPEN + | CoverEntityFeature.CLOSE + ) + if ATTR_CURRENT_TILT_POSITION in current_attrs: + features |= ( + CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.CLOSE_TILT + ) + if features == CoverEntityFeature(0): + features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + return features + + +async def _async_set_position( + service_call: partial[Coroutine[Any, Any, ServiceResponse]], + service_data: dict[str, Any], + features: CoverEntityFeature, + target_position: int, +) -> bool: + """Set the position of the cover. + + Returns True if the position was set, False if there is no + supported method for setting the position. + """ + if target_position == FULL_CLOSE and CoverEntityFeature.CLOSE in features: + await service_call(SERVICE_CLOSE_COVER, service_data) + elif target_position == FULL_OPEN and CoverEntityFeature.OPEN in features: + await service_call(SERVICE_OPEN_COVER, service_data) + elif CoverEntityFeature.SET_POSITION in features: + await service_call( + SERVICE_SET_COVER_POSITION, service_data | {ATTR_POSITION: target_position} + ) + else: + # Requested a position but the cover doesn't support it + return False + return True + + +async def _async_set_tilt_position( + service_call: partial[Coroutine[Any, Any, ServiceResponse]], + service_data: dict[str, Any], + features: CoverEntityFeature, + target_tilt_position: int, +) -> bool: + """Set the tilt position of the cover. + + Returns True if the tilt position was set, False if there is no + supported method for setting the tilt position. 
+ """ + if target_tilt_position == FULL_CLOSE and CoverEntityFeature.CLOSE_TILT in features: + await service_call(SERVICE_CLOSE_COVER_TILT, service_data) + elif target_tilt_position == FULL_OPEN and CoverEntityFeature.OPEN_TILT in features: + await service_call(SERVICE_OPEN_COVER_TILT, service_data) + elif CoverEntityFeature.SET_TILT_POSITION in features: + await service_call( + SERVICE_SET_COVER_TILT_POSITION, + service_data | {ATTR_TILT_POSITION: target_tilt_position}, + ) + else: + # Requested a tilt position but the cover doesn't support it + return False + return True + + +async def _async_close_cover( + service_call: partial[Coroutine[Any, Any, ServiceResponse]], + service_data: dict[str, Any], + features: CoverEntityFeature, + set_position: bool, + set_tilt: bool, +) -> None: + """Close the cover if it was not closed by setting the position.""" + if not set_position: + if CoverEntityFeature.CLOSE in features: + await service_call(SERVICE_CLOSE_COVER, service_data) + elif CoverEntityFeature.SET_POSITION in features: + await service_call( + SERVICE_SET_COVER_POSITION, service_data | {ATTR_POSITION: FULL_CLOSE} + ) + if not set_tilt: + if CoverEntityFeature.CLOSE_TILT in features: + await service_call(SERVICE_CLOSE_COVER_TILT, service_data) + elif CoverEntityFeature.SET_TILT_POSITION in features: + await service_call( + SERVICE_SET_COVER_TILT_POSITION, + service_data | {ATTR_TILT_POSITION: FULL_CLOSE}, + ) + + +async def _async_open_cover( + service_call: partial[Coroutine[Any, Any, ServiceResponse]], + service_data: dict[str, Any], + features: CoverEntityFeature, + set_position: bool, + set_tilt: bool, +) -> None: + """Open the cover if it was not opened by setting the position.""" + if not set_position: + if CoverEntityFeature.OPEN in features: + await service_call(SERVICE_OPEN_COVER, service_data) + elif CoverEntityFeature.SET_POSITION in features: + await service_call( + SERVICE_SET_COVER_POSITION, service_data | {ATTR_POSITION: FULL_OPEN} + ) + if not set_tilt: + if CoverEntityFeature.OPEN_TILT in features: + await service_call(SERVICE_OPEN_COVER_TILT, service_data) + elif CoverEntityFeature.SET_TILT_POSITION in features: + await service_call( + SERVICE_SET_COVER_TILT_POSITION, + service_data | {ATTR_TILT_POSITION: FULL_OPEN}, + ) async def _async_reproduce_state( @@ -45,74 +171,72 @@ async def _async_reproduce_state( reproduce_options: dict[str, Any] | None = None, ) -> None: """Reproduce a single state.""" - if (cur_state := hass.states.get(state.entity_id)) is None: - _LOGGER.warning("Unable to find entity %s", state.entity_id) + entity_id = state.entity_id + if (cur_state := hass.states.get(entity_id)) is None: + _LOGGER.warning("Unable to find entity %s", entity_id) return - if state.state not in VALID_STATES: - _LOGGER.warning( - "Invalid state specified for %s: %s", state.entity_id, state.state - ) + if (target_state := state.state) not in VALID_STATES: + _LOGGER.warning("Invalid state specified for %s: %s", entity_id, target_state) return + current_attrs = cur_state.attributes + target_attrs = state.attributes + + current_position = current_attrs.get(ATTR_CURRENT_POSITION) + target_position = target_attrs.get(ATTR_CURRENT_POSITION) + position_matches = current_position == target_position + + current_tilt_position = current_attrs.get(ATTR_CURRENT_TILT_POSITION) + target_tilt_position = target_attrs.get(ATTR_CURRENT_TILT_POSITION) + tilt_position_matches = current_tilt_position == target_tilt_position + + state_matches = cur_state.state == target_state # Return if we are 
already at the right state. - if ( - cur_state.state == state.state - and cur_state.attributes.get(ATTR_CURRENT_POSITION) - == state.attributes.get(ATTR_CURRENT_POSITION) - and cur_state.attributes.get(ATTR_CURRENT_TILT_POSITION) - == state.attributes.get(ATTR_CURRENT_TILT_POSITION) - ): + if state_matches and position_matches and tilt_position_matches: return - service_data = {ATTR_ENTITY_ID: state.entity_id} - service_data_tilting = {ATTR_ENTITY_ID: state.entity_id} + features = try_parse_enum( + CoverEntityFeature, current_attrs.get(ATTR_SUPPORTED_FEATURES) + ) + if features is None: + # Backwards compatibility for integrations that + # don't set supported features since it previously + # worked without it. + _LOGGER.warning("Supported features is not set for %s", entity_id) + features = _determine_features(current_attrs) - if not ( - cur_state.state == state.state - and cur_state.attributes.get(ATTR_CURRENT_POSITION) - == state.attributes.get(ATTR_CURRENT_POSITION) - ): - # Open/Close - if state.state in [CoverState.CLOSED, CoverState.CLOSING]: - service = SERVICE_CLOSE_COVER - elif state.state in [CoverState.OPEN, CoverState.OPENING]: - if ( - ATTR_CURRENT_POSITION in cur_state.attributes - and ATTR_CURRENT_POSITION in state.attributes - ): - service = SERVICE_SET_COVER_POSITION - service_data[ATTR_POSITION] = state.attributes[ATTR_CURRENT_POSITION] - else: - service = SERVICE_OPEN_COVER + service_call = partial( + hass.services.async_call, + DOMAIN, + context=context, + blocking=True, + ) + service_data = {ATTR_ENTITY_ID: entity_id} - await hass.services.async_call( - DOMAIN, service, service_data, context=context, blocking=True + set_position = ( + not position_matches + and target_position is not None + and await _async_set_position( + service_call, service_data, features, target_position + ) + ) + set_tilt = ( + not tilt_position_matches + and target_tilt_position is not None + and await _async_set_tilt_position( + service_call, service_data, features, target_tilt_position + ) + ) + + if target_state in CLOSING_STATES: + await _async_close_cover( + service_call, service_data, features, set_position, set_tilt ) - if ( - ATTR_CURRENT_TILT_POSITION in state.attributes - and ATTR_CURRENT_TILT_POSITION in cur_state.attributes - and cur_state.attributes.get(ATTR_CURRENT_TILT_POSITION) - != state.attributes.get(ATTR_CURRENT_TILT_POSITION) - ): - # Tilt position - if state.attributes.get(ATTR_CURRENT_TILT_POSITION) == 100: - service_tilting = SERVICE_OPEN_COVER_TILT - elif state.attributes.get(ATTR_CURRENT_TILT_POSITION) == 0: - service_tilting = SERVICE_CLOSE_COVER_TILT - else: - service_tilting = SERVICE_SET_COVER_TILT_POSITION - service_data_tilting[ATTR_TILT_POSITION] = state.attributes[ - ATTR_CURRENT_TILT_POSITION - ] - - await hass.services.async_call( - DOMAIN, - service_tilting, - service_data_tilting, - context=context, - blocking=True, + elif target_state in OPENING_STATES: + await _async_open_cover( + service_call, service_data, features, set_position, set_tilt ) diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index f794d97a9ba..86fc804ec92 100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.8"], + "requirements": ["pydaikin==2.14.1"], "zeroconf": ["_dkapi._tcp.local."] } diff --git 
a/homeassistant/components/debugpy/manifest.json b/homeassistant/components/debugpy/manifest.json index 078af8c67a5..21211d334df 100644 --- a/homeassistant/components/debugpy/manifest.json +++ b/homeassistant/components/debugpy/manifest.json @@ -6,5 +6,5 @@ "integration_type": "service", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["debugpy==1.8.11"] + "requirements": ["debugpy==1.8.13"] } diff --git a/homeassistant/components/deconz/manifest.json b/homeassistant/components/deconz/manifest.json index 93ae8e392c8..5664e6abc8a 100644 --- a/homeassistant/components/deconz/manifest.json +++ b/homeassistant/components/deconz/manifest.json @@ -7,7 +7,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pydeconz"], - "requirements": ["pydeconz==118"], + "requirements": ["pydeconz==120"], "ssdp": [ { "manufacturer": "Royal Philips Electronics", diff --git a/homeassistant/components/deluge/config_flow.py b/homeassistant/components/deluge/config_flow.py index 19afe26e8f9..78eced64c7c 100644 --- a/homeassistant/components/deluge/config_flow.py +++ b/homeassistant/components/deluge/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from ssl import SSLError from typing import Any @@ -21,6 +22,8 @@ from .const import ( DOMAIN, ) +_LOGGER = logging.getLogger(__name__) + class DelugeFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Deluge.""" @@ -86,7 +89,8 @@ class DelugeFlowHandler(ConfigFlow, domain=DOMAIN): await self.hass.async_add_executor_job(api.connect) except (ConnectionRefusedError, TimeoutError, SSLError): return "cannot_connect" - except Exception as ex: # noqa: BLE001 + except Exception as ex: + _LOGGER.exception("Unexpected error") if type(ex).__name__ == "BadLoginError": return "invalid_auth" return "unknown" diff --git a/homeassistant/components/demo/__init__.py b/homeassistant/components/demo/__init__.py index 9314fc211de..dbc65119bfa 100644 --- a/homeassistant/components/demo/__init__.py +++ b/homeassistant/components/demo/__init__.py @@ -48,6 +48,7 @@ COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [ Platform.TIME, Platform.UPDATE, Platform.VACUUM, + Platform.VALVE, Platform.WATER_HEATER, Platform.WEATHER, ] diff --git a/homeassistant/components/demo/valve.py b/homeassistant/components/demo/valve.py new file mode 100644 index 00000000000..03f0123dd96 --- /dev/null +++ b/homeassistant/components/demo/valve.py @@ -0,0 +1,89 @@ +"""Demo valve platform that implements valves.""" + +from __future__ import annotations + +import asyncio +from typing import Any + +from homeassistant.components.valve import ValveEntity, ValveEntityFeature, ValveState +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +OPEN_CLOSE_DELAY = 2 # Used to give a realistic open/close experience in frontend + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Demo config entry.""" + async_add_entities( + [ + DemoValve("Front Garden", ValveState.OPEN), + DemoValve("Orchard", ValveState.CLOSED), + ] + ) + + +class DemoValve(ValveEntity): + """Representation of a Demo valve.""" + + _attr_should_poll = False + + def __init__( + self, + name: str, + state: str, + moveable: bool = True, + ) -> None: + """Initialize the valve.""" + self._attr_name = name 
+ if moveable: + self._attr_supported_features = ( + ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE + ) + self._state = state + self._moveable = moveable + + @property + def is_open(self) -> bool: + """Return true if valve is open.""" + return self._state == ValveState.OPEN + + @property + def is_opening(self) -> bool: + """Return true if valve is opening.""" + return self._state == ValveState.OPENING + + @property + def is_closing(self) -> bool: + """Return true if valve is closing.""" + return self._state == ValveState.CLOSING + + @property + def is_closed(self) -> bool: + """Return true if valve is closed.""" + return self._state == ValveState.CLOSED + + @property + def reports_position(self) -> bool: + """Return True if entity reports position, False otherwise.""" + return False + + async def async_open_valve(self, **kwargs: Any) -> None: + """Open the valve.""" + self._state = ValveState.OPENING + self.async_write_ha_state() + await asyncio.sleep(OPEN_CLOSE_DELAY) + self._state = ValveState.OPEN + self.async_write_ha_state() + + async def async_close_valve(self, **kwargs: Any) -> None: + """Close the valve.""" + self._state = ValveState.CLOSING + self.async_write_ha_state() + await asyncio.sleep(OPEN_CLOSE_DELAY) + self._state = ValveState.CLOSED + self.async_write_ha_state() diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index 90917e0ce2c..ed6dc94e764 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from pydexcom import AccountError, Dexcom, SessionError @@ -12,6 +13,8 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_SERVER, DOMAIN, SERVER_OUS, SERVER_US +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): str, @@ -43,7 +46,8 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AccountError: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "unknown" if "base" not in errors: diff --git a/homeassistant/components/dhcp/manifest.json b/homeassistant/components/dhcp/manifest.json index 5b3a5abd26f..64fd2ff38c6 100644 --- a/homeassistant/components/dhcp/manifest.json +++ b/homeassistant/components/dhcp/manifest.json @@ -16,6 +16,6 @@ "requirements": [ "aiodhcpwatcher==1.1.1", "aiodiscover==2.6.1", - "cached-ipaddress==0.9.2" + "cached-ipaddress==0.10.0" ] } diff --git a/homeassistant/components/discovergy/__init__.py b/homeassistant/components/discovergy/__init__.py index 9cf63176de6..0a8b7422f84 100644 --- a/homeassistant/components/discovergy/__init__.py +++ b/homeassistant/components/discovergy/__init__.py @@ -9,7 +9,7 @@ import pydiscovergy.error as discovergyError from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.httpx_client import create_async_httpx_client from .coordinator import DiscovergyConfigEntry, DiscovergyUpdateCoordinator @@ -21,7 +21,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DiscovergyConfigEntry) - client = Discovergy( email=entry.data[CONF_EMAIL], password=entry.data[CONF_PASSWORD], - 
httpx_client=get_async_client(hass), + httpx_client=create_async_httpx_client(hass), authentication=BasicAuth(), ) diff --git a/homeassistant/components/dlna_dmr/manifest.json b/homeassistant/components/dlna_dmr/manifest.json index 82541476a02..119d1d31d52 100644 --- a/homeassistant/components/dlna_dmr/manifest.json +++ b/homeassistant/components/dlna_dmr/manifest.json @@ -8,7 +8,7 @@ "documentation": "https://www.home-assistant.io/integrations/dlna_dmr", "iot_class": "local_push", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.43.0", "getmac==0.9.5"], + "requirements": ["async-upnp-client==0.44.0", "getmac==0.9.5"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1", diff --git a/homeassistant/components/dlna_dms/manifest.json b/homeassistant/components/dlna_dms/manifest.json index 17fc3dc27e8..0289d5100d6 100644 --- a/homeassistant/components/dlna_dms/manifest.json +++ b/homeassistant/components/dlna_dms/manifest.json @@ -7,7 +7,7 @@ "dependencies": ["ssdp"], "documentation": "https://www.home-assistant.io/integrations/dlna_dms", "iot_class": "local_polling", - "requirements": ["async-upnp-client==0.43.0"], + "requirements": ["async-upnp-client==0.44.0"], "ssdp": [ { "deviceType": "urn:schemas-upnp-org:device:MediaServer:1", diff --git a/homeassistant/components/doorbird/strings.json b/homeassistant/components/doorbird/strings.json index 090ba4f161f..ad43e8c1c1c 100644 --- a/homeassistant/components/doorbird/strings.json +++ b/homeassistant/components/doorbird/strings.json @@ -6,7 +6,7 @@ "events": "Comma separated list of events." }, "data_description": { - "events": "Add an comma separated event name for each event you wish to track. After entering them here, use the DoorBird app to assign them to a specific event.\n\nExample: somebody_pressed_the_button, motion" + "events": "Add a comma separated event name for each event you wish to track. 
After entering them here, use the DoorBird app to assign them to a specific event.\n\nExample: somebody_pressed_the_button, motion" } } } diff --git a/homeassistant/components/dormakaba_dkey/config_flow.py b/homeassistant/components/dormakaba_dkey/config_flow.py index 0d23b822231..369accb83d8 100644 --- a/homeassistant/components/dormakaba_dkey/config_flow.py +++ b/homeassistant/components/dormakaba_dkey/config_flow.py @@ -57,7 +57,7 @@ class DormkabaConfigFlow(ConfigFlow, domain=DOMAIN): self._discovery_info = self._discovered_devices[address] return await self.async_step_associate() - current_addresses = self._async_current_ids() + current_addresses = self._async_current_ids(include_ignore=False) for discovery in async_discovered_service_info(self.hass): if ( discovery.address in current_addresses diff --git a/homeassistant/components/duke_energy/config_flow.py b/homeassistant/components/duke_energy/config_flow.py index e06940b0fba..2ec92ff4c12 100644 --- a/homeassistant/components/duke_energy/config_flow.py +++ b/homeassistant/components/duke_energy/config_flow.py @@ -50,10 +50,10 @@ class DukeEnergyConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - username = auth["cdp_internal_user_id"].lower() + username = auth["internalUserID"].lower() await self.async_set_unique_id(username) self._abort_if_unique_id_configured() - email = auth["email"].lower() + email = auth["loginEmailAddress"].lower() data = { CONF_EMAIL: email, CONF_USERNAME: username, diff --git a/homeassistant/components/duke_energy/coordinator.py b/homeassistant/components/duke_energy/coordinator.py index 12a2f5fd6ae..a76168475c0 100644 --- a/homeassistant/components/duke_energy/coordinator.py +++ b/homeassistant/components/duke_energy/coordinator.py @@ -8,7 +8,11 @@ from aiodukeenergy import DukeEnergy from aiohttp import ClientError from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -137,7 +141,7 @@ class DukeEnergyCoordinator(DataUpdateCoordinator[None]): f"Duke Energy {meter['serviceType'].capitalize()} {serial_number}" ) consumption_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} Consumption", source=DOMAIN, diff --git a/homeassistant/components/duke_energy/manifest.json b/homeassistant/components/duke_energy/manifest.json index ece18d7ad2a..ad64fdd5cc4 100644 --- a/homeassistant/components/duke_energy/manifest.json +++ b/homeassistant/components/duke_energy/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["recorder"], "documentation": "https://www.home-assistant.io/integrations/duke_energy", "iot_class": "cloud_polling", - "requirements": ["aiodukeenergy==0.2.2"] + "requirements": ["aiodukeenergy==0.3.0"] } diff --git a/homeassistant/components/dynalite/strings.json b/homeassistant/components/dynalite/strings.json index 468cdebf0b1..4f73f91113b 100644 --- a/homeassistant/components/dynalite/strings.json +++ b/homeassistant/components/dynalite/strings.json @@ -36,7 +36,7 @@ }, "request_channel_level": { "name": "Request channel level", - "description": "Requests Dynalite to report the level of a specific channel.", + "description": "Requests Dynalite to report the 
brightness level of a specific channel.", "fields": { "host": { "name": "[%key:common::config_flow::data::host%]", @@ -48,7 +48,7 @@ }, "channel": { "name": "Channel", - "description": "Channel to request the level for." + "description": "Channel to request the brightness level for." } } } diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index 2b44c45edef..078643ee789 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -25,7 +25,7 @@ "state_attributes": { "preset_mode": { "state": { - "away_indefinitely": "Away Indefinitely" + "away_indefinitely": "Away indefinitely" } } } @@ -91,7 +91,7 @@ }, "fan_min_on_time": { "name": "Fan minimum on time", - "description": "Minimum number of minutes to run the fan each hour (0 to 60) during the vacation." + "description": "Minimum number of minutes to run the fan each hour during the vacation." } } }, @@ -125,7 +125,7 @@ }, "set_fan_min_on_time": { "name": "Set fan minimum on time", - "description": "Sets the minimum fan on time.", + "description": "Sets the minimum amount of time that the fan will run per hour.", "fields": { "entity_id": { "name": "Entity", @@ -133,7 +133,7 @@ }, "fan_min_on_time": { "name": "[%key:component::ecobee::services::create_vacation::fields::fan_min_on_time::name%]", - "description": "New value of fan min on time." + "description": "Minimum number of minutes to run the fan each hour." } } }, diff --git a/homeassistant/components/ecoforest/sensor.py b/homeassistant/components/ecoforest/sensor.py index c1d4aca6f0c..d0e4c17abe1 100644 --- a/homeassistant/components/ecoforest/sensor.py +++ b/homeassistant/components/ecoforest/sensor.py @@ -132,7 +132,7 @@ SENSOR_TYPES: tuple[EcoforestSensorEntityDescription, ...] 
= ( ), EcoforestSensorEntityDescription( key="convecto_air_flow", - translation_key="convecto_air_flow", + translation_key="convector_air_flow", native_unit_of_measurement=PERCENTAGE, entity_registry_enabled_default=False, value_fn=lambda data: data.convecto_air_flow, diff --git a/homeassistant/components/ecoforest/strings.json b/homeassistant/components/ecoforest/strings.json index 1094e10ada3..d0e807b5f2a 100644 --- a/homeassistant/components/ecoforest/strings.json +++ b/homeassistant/components/ecoforest/strings.json @@ -78,8 +78,8 @@ "extractor": { "name": "Extractor" }, - "convecto_air_flow": { - "name": "Convecto air flow" + "convector_air_flow": { + "name": "Convector air flow" } }, "number": { diff --git a/homeassistant/components/econet/water_heater.py b/homeassistant/components/econet/water_heater.py index fb74ae8b4a5..f93ad7f8872 100644 --- a/homeassistant/components/econet/water_heater.py +++ b/homeassistant/components/econet/water_heater.py @@ -91,15 +91,15 @@ class EcoNetWaterHeater(EcoNetEntity[WaterHeater], WaterHeaterEntity): def operation_list(self) -> list[str]: """List of available operation modes.""" econet_modes = self.water_heater.modes - op_list = [] + operation_modes = set() for mode in econet_modes: if ( mode is not WaterHeaterOperationMode.UNKNOWN and mode is not WaterHeaterOperationMode.VACATION ): ha_mode = ECONET_STATE_TO_HA[mode] - op_list.append(ha_mode) - return op_list + operation_modes.add(ha_mode) + return list(operation_modes) @property def supported_features(self) -> WaterHeaterEntityFeature: diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 6d3dc5c9be6..ad8b3ea70a5 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==12.3.1"] + "requirements": ["py-sucks==0.9.10", "deebot-client==12.5.0"] } diff --git a/homeassistant/components/ecowitt/binary_sensor.py b/homeassistant/components/ecowitt/binary_sensor.py index a2ed279f601..1d36f5232db 100644 --- a/homeassistant/components/ecowitt/binary_sensor.py +++ b/homeassistant/components/ecowitt/binary_sensor.py @@ -26,6 +26,9 @@ ECOWITT_BINARYSENSORS_MAPPING: Final = { device_class=BinarySensorDeviceClass.BATTERY, entity_category=EntityCategory.DIAGNOSTIC, ), + EcoWittSensorTypes.RAIN_STATE: BinarySensorEntityDescription( + key="RAIN_STATE", device_class=BinarySensorDeviceClass.MOISTURE + ), } diff --git a/homeassistant/components/ecowitt/sensor.py b/homeassistant/components/ecowitt/sensor.py index 6968acdfa4f..7d37aa40b86 100644 --- a/homeassistant/components/ecowitt/sensor.py +++ b/homeassistant/components/ecowitt/sensor.py @@ -68,6 +68,7 @@ ECOWITT_SENSORS_MAPPING: Final = { key="DEGREE", native_unit_of_measurement=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), EcoWittSensorTypes.WATT_METERS_SQUARED: SensorEntityDescription( key="WATT_METERS_SQUARED", diff --git a/homeassistant/components/eheimdigital/config_flow.py b/homeassistant/components/eheimdigital/config_flow.py index c6535608b0c..b0432267c8e 100644 --- a/homeassistant/components/eheimdigital/config_flow.py +++ b/homeassistant/components/eheimdigital/config_flow.py @@ -62,6 +62,7 @@ class EheimDigitalConfigFlow(ConfigFlow, domain=DOMAIN): except 
(ClientError, TimeoutError): return self.async_abort(reason="cannot_connect") except Exception: # noqa: BLE001 + LOGGER.exception("Unknown exception occurred") return self.async_abort(reason="unknown") await self.async_set_unique_id(hub.main.mac_address) self._abort_if_unique_id_configured(updates={CONF_HOST: host}) diff --git a/homeassistant/components/elevenlabs/strings.json b/homeassistant/components/elevenlabs/strings.json index b346f94a963..8b0205a9e9a 100644 --- a/homeassistant/components/elevenlabs/strings.json +++ b/homeassistant/components/elevenlabs/strings.json @@ -6,7 +6,7 @@ "api_key": "[%key:common::config_flow::data::api_key%]" }, "data_description": { - "api_key": "Your Elevenlabs API key." + "api_key": "Your ElevenLabs API key." } } }, diff --git a/homeassistant/components/elkm1/entity.py b/homeassistant/components/elkm1/entity.py index d9967d93967..ce717578eae 100644 --- a/homeassistant/components/elkm1/entity.py +++ b/homeassistant/components/elkm1/entity.py @@ -100,7 +100,11 @@ class ElkEntity(Entity): return {"index": self._element.index + 1} def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None: - pass + """Handle changes to the element. + + This method is called when the element changes. It should be + overridden by subclasses to handle the changes. + """ @callback def _element_callback(self, element: Element, changeset: dict[str, Any]) -> None: @@ -111,7 +115,7 @@ class ElkEntity(Entity): async def async_added_to_hass(self) -> None: """Register callback for ElkM1 changes and update entity state.""" self._element.add_callback(self._element_callback) - self._element_callback(self._element, {}) + self._element_changed(self._element, {}) @property def device_info(self) -> DeviceInfo: diff --git a/homeassistant/components/elkm1/strings.json b/homeassistant/components/elkm1/strings.json index f184483646d..b50c1817838 100644 --- a/homeassistant/components/elkm1/strings.json +++ b/homeassistant/components/elkm1/strings.json @@ -53,7 +53,7 @@ "fields": { "code": { "name": "Code", - "description": "An code to authorize the bypass of the alarm control panel." + "description": "Alarm code to authorize the bypass of the alarm control panel." } } }, @@ -63,7 +63,7 @@ "fields": { "code": { "name": "Code", - "description": "An code to authorize the bypass clear of the alarm control panel." + "description": "Alarm code to authorize the bypass clear of the alarm control panel." } } }, @@ -73,7 +73,7 @@ "fields": { "code": { "name": "Code", - "description": "An code to arm the alarm control panel." + "description": "Alarm code to arm the alarm control panel." } } }, @@ -181,7 +181,7 @@ "fields": { "code": { "name": "Code", - "description": "An code to authorize the bypass of the zone." + "description": "Alarm code to authorize the bypass of the zone." 
} } }, diff --git a/homeassistant/components/elvia/importer.py b/homeassistant/components/elvia/importer.py index 4e8b7f716ef..caca787237c 100644 --- a/homeassistant/components/elvia/importer.py +++ b/homeassistant/components/elvia/importer.py @@ -7,7 +7,11 @@ from typing import TYPE_CHECKING, cast from elvia import Elvia, error as ElviaError -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -144,7 +148,7 @@ class ElviaImporter: async_add_external_statistics( hass=self.hass, metadata=StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{self.metering_point_id} Consumption", source=DOMAIN, diff --git a/homeassistant/components/energenie_power_sockets/strings.json b/homeassistant/components/energenie_power_sockets/strings.json index 4e4e49c68fb..bd536568d2c 100644 --- a/homeassistant/components/energenie_power_sockets/strings.json +++ b/homeassistant/components/energenie_power_sockets/strings.json @@ -1,5 +1,5 @@ { - "title": "Energenie Power Sockets Integration.", + "title": "Energenie Power Sockets", "config": { "step": { "user": { diff --git a/homeassistant/components/energy/data.py b/homeassistant/components/energy/data.py index ff86177cf41..442aedf23b0 100644 --- a/homeassistant/components/energy/data.py +++ b/homeassistant/components/energy/data.py @@ -139,6 +139,10 @@ class DeviceConsumption(TypedDict): # An optional custom name for display in energy graphs name: str | None + # An optional statistic_id identifying a device + # that includes this device's consumption in its total + included_in_stat: str | None + class EnergyPreferences(TypedDict): """Dictionary holding the energy data.""" @@ -291,6 +295,7 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema( { vol.Required("stat_consumption"): str, vol.Optional("name"): str, + vol.Optional("included_in_stat"): str, } ) diff --git a/homeassistant/components/energy/strings.json b/homeassistant/components/energy/strings.json index e9d72247319..5eb2c93161e 100644 --- a/homeassistant/components/energy/strings.json +++ b/homeassistant/components/energy/strings.json @@ -7,7 +7,7 @@ }, "recorder_untracked": { "title": "Entity not tracked", - "description": "The recorder has been configured to exclude these configured entities:" + "description": "Home Assistant Recorder has been configured to exclude these configured entities:" }, "entity_unavailable": { "title": "Entity unavailable", diff --git a/homeassistant/components/energyzero/strings.json b/homeassistant/components/energyzero/strings.json index 7788f4d4d8e..48682ab31ee 100644 --- a/homeassistant/components/energyzero/strings.json +++ b/homeassistant/components/energyzero/strings.json @@ -54,10 +54,10 @@ "services": { "get_gas_prices": { "name": "Get gas prices", - "description": "Request gas prices from EnergyZero.", + "description": "Requests gas prices from EnergyZero.", "fields": { "config_entry": { - "name": "Config Entry", + "name": "Config entry", "description": "The config entry to use for this action." 
}, "incl_vat": { @@ -76,7 +76,7 @@ }, "get_energy_prices": { "name": "Get energy prices", - "description": "Request energy prices from EnergyZero.", + "description": "Requests energy prices from EnergyZero.", "fields": { "config_entry": { "name": "[%key:component::energyzero::services::get_gas_prices::fields::config_entry::name%]", diff --git a/homeassistant/components/enigma2/config_flow.py b/homeassistant/components/enigma2/config_flow.py index b0649a8368d..876d55128cf 100644 --- a/homeassistant/components/enigma2/config_flow.py +++ b/homeassistant/components/enigma2/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Enigma2.""" +import logging from typing import Any, cast from aiohttp.client_exceptions import ClientError @@ -63,6 +64,8 @@ CONFIG_SCHEMA = vol.Schema( } ) +_LOGGER = logging.getLogger(__name__) + async def get_options_schema(handler: SchemaCommonFlowHandler) -> vol.Schema: """Get the options schema.""" @@ -130,7 +133,8 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): errors = {"base": "invalid_auth"} except ClientError: errors = {"base": "cannot_connect"} - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors = {"base": "unknown"} else: unique_id = about["info"]["ifaces"][0]["mac"] or self.unique_id diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index b498c59e0d3..ce3a8593226 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -187,13 +187,13 @@ "name": "Lifetime energy consumption {phase_name}" }, "balanced_net_consumption": { - "name": "balanced net power consumption" + "name": "Balanced net power consumption" }, "lifetime_balanced_net_consumption": { "name": "Lifetime balanced net energy consumption" }, "balanced_net_consumption_phase": { - "name": "balanced net power consumption {phase_name}" + "name": "Balanced net power consumption {phase_name}" }, "lifetime_balanced_net_consumption_phase": { "name": "Lifetime balanced net energy consumption {phase_name}" @@ -217,7 +217,7 @@ "name": "Net consumption CT current" }, "net_ct_powerfactor": { - "name": "Powerfactor net consumption CT" + "name": "Power factor net consumption CT" }, "net_ct_metering_status": { "name": "Metering status net consumption CT" @@ -235,7 +235,7 @@ "name": "Production CT current" }, "production_ct_powerfactor": { - "name": "powerfactor production CT" + "name": "Power factor production CT" }, "production_ct_metering_status": { "name": "Metering status production CT" @@ -262,7 +262,7 @@ "name": "Storage CT current" }, "storage_ct_powerfactor": { - "name": "Powerfactor storage CT" + "name": "Power factor storage CT" }, "storage_ct_metering_status": { "name": "Metering status storage CT" @@ -289,7 +289,7 @@ "name": "Net consumption CT current {phase_name}" }, "net_ct_powerfactor_phase": { - "name": "Powerfactor net consumption CT {phase_name}" + "name": "Power factor net consumption CT {phase_name}" }, "net_ct_metering_status_phase": { "name": "Metering status net consumption CT {phase_name}" @@ -307,7 +307,7 @@ "name": "Production CT current {phase_name}" }, "production_ct_powerfactor_phase": { - "name": "Powerfactor production CT {phase_name}" + "name": "Power factor production CT {phase_name}" }, "production_ct_metering_status_phase": { "name": "Metering status production CT {phase_name}" @@ -334,7 +334,7 @@ "name": "Storage CT current {phase_name}" }, "storage_ct_powerfactor_phase": { - 
"name": "Powerfactor storage CT {phase_name}" + "name": "Power factor storage CT {phase_name}" }, "storage_ct_metering_status_phase": { "name": "Metering status storage CT {phase_name}" diff --git a/homeassistant/components/environment_canada/sensor.py b/homeassistant/components/environment_canada/sensor.py index 3a789289c74..1685888d2bc 100644 --- a/homeassistant/components/environment_canada/sensor.py +++ b/homeassistant/components/environment_canada/sensor.py @@ -168,6 +168,7 @@ SENSOR_TYPES: tuple[ECSensorEntityDescription, ...] = ( native_unit_of_measurement=DEGREE, value_fn=lambda data: data.conditions.get("wind_bearing", {}).get("value"), device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), ECSensorEntityDescription( key="wind_chill", diff --git a/homeassistant/components/epson/config_flow.py b/homeassistant/components/epson/config_flow.py index c54bff2eea9..077b9cc31f7 100644 --- a/homeassistant/components/epson/config_flow.py +++ b/homeassistant/components/epson/config_flow.py @@ -72,5 +72,7 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): if projector: projector.close() return self.async_show_form( - step_id="user", data_schema=DATA_SCHEMA, errors=errors + step_id="user", + data_schema=self.add_suggested_values_to_schema(DATA_SCHEMA, user_input), + errors=errors, ) diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json index 18dcbb5cb65..ab62c962982 100644 --- a/homeassistant/components/eq3btsmart/manifest.json +++ b/homeassistant/components/eq3btsmart/manifest.json @@ -22,5 +22,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["eq3btsmart"], - "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.8.0"] + "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.12.0"] } diff --git a/homeassistant/components/esphome/__init__.py b/homeassistant/components/esphome/__init__.py index fee2531fa20..1e1a2763b59 100644 --- a/homeassistant/components/esphome/__init__.py +++ b/homeassistant/components/esphome/__init__.py @@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType -from .const import CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN +from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN from .dashboard import async_setup as async_setup_dashboard from .domain_data import DomainData @@ -87,6 +87,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None: """Remove an esphome config entry.""" - if mac_address := entry.unique_id: - async_remove_scanner(hass, mac_address.upper()) + if bluetooth_mac_address := entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS): + async_remove_scanner(hass, bluetooth_mac_address.upper()) await DomainData.get(hass).get_or_create_store(hass, entry).async_remove() diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py index 016b1c3494d..a129a7723dd 100644 --- a/homeassistant/components/esphome/assist_satellite.py +++ b/homeassistant/components/esphome/assist_satellite.py @@ -253,6 +253,11 @@ class EsphomeAssistSatellite( # Will use media player for TTS/announcements self._update_tts_format() + if feature_flags & VoiceAssistantFeature.START_CONVERSATION: + self._attr_supported_features |= ( + 
assist_satellite.AssistSatelliteEntityFeature.START_CONVERSATION + ) + # Update wake word select when config is updated self.async_on_remove( self.entry_data.async_register_assist_satellite_set_wake_word_callback( @@ -284,7 +289,10 @@ class EsphomeAssistSatellite( elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: assert event.data is not None data_to_send = { - "conversation_id": event.data["intent_output"]["conversation_id"] or "", + "conversation_id": event.data["intent_output"]["conversation_id"], + "continue_conversation": str( + int(event.data["intent_output"]["continue_conversation"]) + ), } elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: assert event.data is not None @@ -339,14 +347,33 @@ class EsphomeAssistSatellite( Should block until the announcement is done playing. """ + await self._do_announce(announcement, run_pipeline_after=False) + + async def async_start_conversation( + self, start_announcement: assist_satellite.AssistSatelliteAnnouncement + ) -> None: + """Start a conversation from the satellite.""" + await self._do_announce(start_announcement, run_pipeline_after=True) + + async def _do_announce( + self, + announcement: assist_satellite.AssistSatelliteAnnouncement, + run_pipeline_after: bool, + ) -> None: + """Announce media on the satellite. + + Optionally run a voice pipeline after the announcement has finished. + """ _LOGGER.debug( "Waiting for announcement to finished (message=%s, media_id=%s)", announcement.message, announcement.media_id, ) media_id = announcement.media_id - if announcement.media_id_source != "tts": - # Route non-TTS media through the proxy + is_media_tts = announcement.media_id_source == "tts" + preannounce_media_id = announcement.preannounce_media_id + if (not is_media_tts) or preannounce_media_id: + # Route media through the proxy format_to_use: MediaPlayerSupportedFormat | None = None for supported_format in chain( *self.entry_data.media_player_formats.values() @@ -359,19 +386,33 @@ class EsphomeAssistSatellite( assert (self.registry_entry is not None) and ( self.registry_entry.device_id is not None ) - proxy_url = async_create_proxy_url( - self.hass, - self.registry_entry.device_id, - media_id, + + make_proxy_url = partial( + async_create_proxy_url, + hass=self.hass, + device_id=self.registry_entry.device_id, media_format=format_to_use.format, rate=format_to_use.sample_rate or None, channels=format_to_use.num_channels or None, width=format_to_use.sample_bytes or None, ) - media_id = async_process_play_media_url(self.hass, proxy_url) + + if not is_media_tts: + media_id = async_process_play_media_url( + self.hass, make_proxy_url(media_url=media_id) + ) + + if preannounce_media_id: + preannounce_media_id = async_process_play_media_url( + self.hass, make_proxy_url(media_url=preannounce_media_id) + ) await self.cli.send_voice_assistant_announcement_await_response( - media_id, _ANNOUNCEMENT_TIMEOUT_SEC, announcement.message + media_id, + _ANNOUNCEMENT_TIMEOUT_SEC, + announcement.message, + start_conversation=run_pipeline_after, + preannounce_media_id=preannounce_media_id or "", ) async def handle_pipeline_start( diff --git a/homeassistant/components/esphome/config_flow.py b/homeassistant/components/esphome/config_flow.py index 955a93cd2b7..686d77d9b34 100644 --- a/homeassistant/components/esphome/config_flow.py +++ b/homeassistant/components/esphome/config_flow.py @@ -128,8 +128,23 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): self._password = "" return await self._async_authenticate_or_add() + if 
error is None and entry_data.get(CONF_NOISE_PSK): + return await self.async_step_reauth_encryption_removed_confirm() return await self.async_step_reauth_confirm() + async def async_step_reauth_encryption_removed_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reauthorization flow when encryption was removed.""" + if user_input is not None: + self._noise_psk = None + return self._async_get_entry() + + return self.async_show_form( + step_id="reauth_encryption_removed_confirm", + description_placeholders={"name": self._name}, + ) + async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/esphome/const.py b/homeassistant/components/esphome/const.py index 1a3be4c34ae..c7cd7fdcdf0 100644 --- a/homeassistant/components/esphome/const.py +++ b/homeassistant/components/esphome/const.py @@ -8,6 +8,7 @@ CONF_ALLOW_SERVICE_CALLS = "allow_service_calls" CONF_SUBSCRIBE_LOGS = "subscribe_logs" CONF_DEVICE_NAME = "device_name" CONF_NOISE_PSK = "noise_psk" +CONF_BLUETOOTH_MAC_ADDRESS = "bluetooth_mac_address" DEFAULT_ALLOW_SERVICE_CALLS = True DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False diff --git a/homeassistant/components/esphome/diagnostics.py b/homeassistant/components/esphome/diagnostics.py index 58c9a8fe666..0903e874a15 100644 --- a/homeassistant/components/esphome/diagnostics.py +++ b/homeassistant/components/esphome/diagnostics.py @@ -13,9 +13,7 @@ from . import CONF_NOISE_PSK from .dashboard import async_get_dashboard from .entry_data import ESPHomeConfigEntry -CONF_MAC_ADDRESS = "mac_address" - -REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, CONF_MAC_ADDRESS} +REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, "mac_address", "bluetooth_mac_address"} async def async_get_config_entry_diagnostics( @@ -27,13 +25,17 @@ async def async_get_config_entry_diagnostics( diag["config"] = config_entry.as_dict() entry_data = config_entry.runtime_data + device_info = entry_data.device_info if (storage_data := await entry_data.store.async_load()) is not None: diag["storage_data"] = storage_data if ( - config_entry.unique_id - and (scanner := async_scanner_by_source(hass, config_entry.unique_id.upper())) + device_info + and ( + scanner_mac := device_info.bluetooth_mac_address or device_info.mac_address + ) + and (scanner := async_scanner_by_source(hass, scanner_mac.upper())) and (bluetooth_device := entry_data.bluetooth_device) ): diag["bluetooth"] = { diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index fc41ee99a00..023c6f70da4 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -282,15 +282,18 @@ class RuntimeEntryData: ) -> None: """Distribute an update of static infos to all platforms.""" # First, load all platforms - needed_platforms = set() - if async_get_dashboard(hass): - needed_platforms.add(Platform.UPDATE) + needed_platforms: set[Platform] = set() - if self.device_info and self.device_info.voice_assistant_feature_flags_compat( - self.api_version - ): - needed_platforms.add(Platform.BINARY_SENSOR) - needed_platforms.add(Platform.SELECT) + if self.device_info: + if async_get_dashboard(hass): + # Only load the update platform if the device_info is set + # When we restore the entry, the device_info may not be set yet + # and we don't want to load the update platform since it needs + # a complete device_info. 
+ needed_platforms.add(Platform.UPDATE) + if self.device_info.voice_assistant_feature_flags_compat(self.api_version): + needed_platforms.add(Platform.BINARY_SENSOR) + needed_platforms.add(Platform.SELECT) ent_reg = er.async_get(hass) registry_get_entity = ent_reg.async_get_entity_id @@ -312,18 +315,19 @@ class RuntimeEntryData: # Make a dict of the EntityInfo by type and send # them to the listeners for each specific EntityInfo type - infos_by_type: dict[type[EntityInfo], list[EntityInfo]] = {} + infos_by_type: defaultdict[type[EntityInfo], list[EntityInfo]] = defaultdict( + list + ) for info in infos: - info_type = type(info) - if info_type not in infos_by_type: - infos_by_type[info_type] = [] - infos_by_type[info_type].append(info) + infos_by_type[type(info)].append(info) - callbacks_by_type = self.entity_info_callbacks - for type_, entity_infos in infos_by_type.items(): - if callbacks_ := callbacks_by_type.get(type_): - for callback_ in callbacks_: - callback_(entity_infos) + for type_, callbacks in self.entity_info_callbacks.items(): + # If all entities for a type are removed, we + # still need to call the callbacks with an empty list + # to make sure the entities are removed. + entity_infos = infos_by_type.get(type_, []) + for callback_ in callbacks: + callback_(entity_infos) # Finally update static info subscriptions for callback_ in self.static_info_update_subscriptions: diff --git a/homeassistant/components/esphome/event.py b/homeassistant/components/esphome/event.py index 11a5d0cfb33..f4db3844e3d 100644 --- a/homeassistant/components/esphome/event.py +++ b/homeassistant/components/esphome/event.py @@ -33,6 +33,16 @@ class EsphomeEvent(EsphomeEntity[EventInfo, Event], EventEntity): self._trigger_event(self._state.event_type) self.async_write_ha_state() + @callback + def _on_device_update(self) -> None: + """Call when device updates or entry data changes.""" + super()._on_device_update() + if self._entry_data.available: + # Event entities should go available directly + # when the device comes online and not wait + # for the next data push. + self.async_write_ha_state() + async_setup_entry = partial( platform_async_setup_entry, diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index e32bb7d6ded..7ce96a0f510 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -13,6 +13,7 @@ from aioesphomeapi import ( APIConnectionError, APIVersion, DeviceInfo as EsphomeDeviceInfo, + EncryptionHelloAPIError, EntityInfo, HomeassistantServiceCall, InvalidAuthAPIError, @@ -63,6 +64,7 @@ from homeassistant.util.async_ import create_eager_task from .bluetooth import async_connect_scanner from .const import ( CONF_ALLOW_SERVICE_CALLS, + CONF_BLUETOOTH_MAC_ADDRESS, CONF_DEVICE_NAME, CONF_SUBSCRIBE_LOGS, DEFAULT_ALLOW_SERVICE_CALLS, @@ -431,6 +433,13 @@ class ESPHomeManager: device_mac = format_mac(device_info.mac_address) mac_address_matches = unique_id == device_mac + if ( + bluetooth_mac_address := device_info.bluetooth_mac_address + ) and entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) != bluetooth_mac_address: + hass.config_entries.async_update_entry( + entry, + data={**entry.data, CONF_BLUETOOTH_MAC_ADDRESS: bluetooth_mac_address}, + ) # # Migrate config entry to new unique ID if the current # unique id is not a mac address. 
@@ -498,7 +507,9 @@ class ESPHomeManager: ) ) else: - bluetooth.async_remove_scanner(hass, device_info.mac_address) + bluetooth.async_remove_scanner( + hass, device_info.bluetooth_mac_address or device_info.mac_address + ) if device_info.voice_assistant_feature_flags_compat(api_version) and ( Platform.ASSIST_SATELLITE not in entry_data.loaded_platforms @@ -560,6 +571,7 @@ class ESPHomeManager: if isinstance( err, ( + EncryptionHelloAPIError, RequiresEncryptionAPIError, InvalidEncryptionKeyAPIError, InvalidAuthAPIError, @@ -617,11 +629,22 @@ class ESPHomeManager: ) _setup_services(hass, entry_data, services) - if entry_data.device_info is not None and entry_data.device_info.name: - reconnect_logic.name = entry_data.device_info.name + if (device_info := entry_data.device_info) is not None: + if device_info.name: + reconnect_logic.name = device_info.name + if ( + bluetooth_mac_address := device_info.bluetooth_mac_address + ) and entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) != bluetooth_mac_address: + hass.config_entries.async_update_entry( + entry, + data={ + **entry.data, + CONF_BLUETOOTH_MAC_ADDRESS: bluetooth_mac_address, + }, + ) if entry.unique_id is None: hass.config_entries.async_update_entry( - entry, unique_id=format_mac(entry_data.device_info.mac_address) + entry, unique_id=format_mac(device_info.mac_address) ) await reconnect_logic.start() diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index d07754d68a0..954968f5e2c 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -16,9 +16,9 @@ "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "mqtt": ["esphome/discover/#"], "requirements": [ - "aioesphomeapi==29.2.0", + "aioesphomeapi==29.8.0", "esphome-dashboard-api==1.2.3", - "bleak-esphome==2.8.0" + "bleak-esphome==2.12.0" ], "zeroconf": ["_esphomelib._tcp.local."] } diff --git a/homeassistant/components/esphome/strings.json b/homeassistant/components/esphome/strings.json index 1534a49e365..437b9ac2098 100644 --- a/homeassistant/components/esphome/strings.json +++ b/homeassistant/components/esphome/strings.json @@ -4,7 +4,7 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "mdns_missing_mac": "Missing MAC address in MDNS properties.", + "mdns_missing_mac": "Missing MAC address in mDNS properties.", "service_received": "Action received", "mqtt_missing_mac": "Missing MAC address in MQTT properties.", "mqtt_missing_api": "Missing API port in MQTT properties.", @@ -43,6 +43,9 @@ }, "description": "The ESPHome device {name} enabled transport encryption or changed the encryption key. Please enter the updated key. You can find it in the ESPHome Dashboard or in your device configuration." }, + "reauth_encryption_removed_confirm": { + "description": "The ESPHome device {name} disabled transport encryption. Please confirm that you want to remove the encryption key and allow unencrypted connections." 
+ }, "discovery_confirm": { "description": "Do you want to add the ESPHome node `{name}` to Home Assistant?", "title": "Discovered ESPHome node" diff --git a/homeassistant/components/everlights/light.py b/homeassistant/components/everlights/light.py index ae159d77240..c153f01e83c 100644 --- a/homeassistant/components/everlights/light.py +++ b/homeassistant/components/everlights/light.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import timedelta import logging -from typing import Any +from typing import Any, cast import pyeverlights import voluptuous as vol @@ -84,7 +84,7 @@ class EverLightsLight(LightEntity): api: pyeverlights.EverLights, channel: int, status: dict[str, Any], - effects, + effects: list[str], ) -> None: """Initialize the light.""" self._api = api @@ -106,8 +106,10 @@ class EverLightsLight(LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) - brightness = kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness) + hs_color = cast( + tuple[float, float], kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + ) + brightness = cast(int, kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness)) effect = kwargs.get(ATTR_EFFECT) if effect is not None: @@ -116,7 +118,7 @@ class EverLightsLight(LightEntity): rgb = color_int_to_rgb(colors[0]) hsv = color_util.color_RGB_to_hsv(*rgb) hs_color = hsv[:2] - brightness = hsv[2] / 100 * 255 + brightness = round(hsv[2] / 100 * 255) else: rgb = color_util.color_hsv_to_RGB( diff --git a/homeassistant/components/ezviz/strings.json b/homeassistant/components/ezviz/strings.json index f1653661cdd..cd8bbc9d199 100644 --- a/homeassistant/components/ezviz/strings.json +++ b/homeassistant/components/ezviz/strings.json @@ -54,7 +54,7 @@ "init": { "data": { "timeout": "Request timeout (seconds)", - "ffmpeg_arguments": "Arguments passed to ffmpeg for cameras" + "ffmpeg_arguments": "Arguments passed to FFmpeg for cameras" } } } diff --git a/homeassistant/components/feedreader/strings.json b/homeassistant/components/feedreader/strings.json index 3132aadbda8..35022e82bb1 100644 --- a/homeassistant/components/feedreader/strings.json +++ b/homeassistant/components/feedreader/strings.json @@ -36,7 +36,7 @@ "issues": { "import_yaml_error_url_error": { "title": "The Feedreader YAML configuration import failed", - "description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that url is reachable and accessible for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually." + "description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that the URL is reachable and accessible for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually." 
} } } diff --git a/homeassistant/components/ffmpeg/strings.json b/homeassistant/components/ffmpeg/strings.json index 66c1f19de5b..cac7fcfc48c 100644 --- a/homeassistant/components/ffmpeg/strings.json +++ b/homeassistant/components/ffmpeg/strings.json @@ -2,7 +2,7 @@ "services": { "restart": { "name": "[%key:common::action::restart%]", - "description": "Sends a restart command to a ffmpeg based sensor.", + "description": "Sends a restart command to an FFmpeg-based sensor.", "fields": { "entity_id": { "name": "Entity", @@ -12,7 +12,7 @@ }, "start": { "name": "[%key:common::action::start%]", - "description": "Sends a start command to a ffmpeg based sensor.", + "description": "Sends a start command to an FFmpeg-based sensor.", "fields": { "entity_id": { "name": "Entity", @@ -22,7 +22,7 @@ }, "stop": { "name": "[%key:common::action::stop%]", - "description": "Sends a stop command to a ffmpeg based sensor.", + "description": "Sends a stop command to an FFmpeg-based sensor.", "fields": { "entity_id": { "name": "Entity", diff --git a/homeassistant/components/fibaro/__init__.py b/homeassistant/components/fibaro/__init__.py index 8ede0169482..33b2598a636 100644 --- a/homeassistant/components/fibaro/__init__.py +++ b/homeassistant/components/fibaro/__init__.py @@ -7,21 +7,21 @@ from collections.abc import Callable, Mapping import logging from typing import Any -from pyfibaro.fibaro_client import FibaroClient +from pyfibaro.fibaro_client import ( + FibaroAuthenticationFailed, + FibaroClient, + FibaroConnectFailed, +) +from pyfibaro.fibaro_data_helper import read_rooms from pyfibaro.fibaro_device import DeviceModel -from pyfibaro.fibaro_room import RoomModel +from pyfibaro.fibaro_info import InfoModel from pyfibaro.fibaro_scene import SceneModel from pyfibaro.fibaro_state_resolver import FibaroEvent, FibaroStateResolver -from requests.exceptions import HTTPError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ( - ConfigEntryAuthFailed, - ConfigEntryNotReady, - HomeAssistantError, -) +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceEntry, DeviceInfo from homeassistant.util import slugify @@ -74,63 +74,31 @@ FIBARO_TYPEMAP = { class FibaroController: """Initiate Fibaro Controller Class.""" - def __init__(self, config: Mapping[str, Any]) -> None: + def __init__( + self, fibaro_client: FibaroClient, info: InfoModel, import_plugins: bool + ) -> None: """Initialize the Fibaro controller.""" - - # The FibaroClient uses the correct API version automatically - self._client = FibaroClient(config[CONF_URL]) - self._client.set_authentication(config[CONF_USERNAME], config[CONF_PASSWORD]) + self._client = fibaro_client + self._fibaro_info = info # Whether to import devices from plugins - self._import_plugins = config[CONF_IMPORT_PLUGINS] - self._room_map: dict[int, RoomModel] # Mapping roomId to room object + self._import_plugins = import_plugins + # Mapping roomId to room object + self._room_map = read_rooms(fibaro_client) self._device_map: dict[int, DeviceModel] # Mapping deviceId to device object self.fibaro_devices: dict[Platform, list[DeviceModel]] = defaultdict( list ) # List of devices by entity platform # All scenes - self._scenes: list[SceneModel] = [] + self._scenes = self._client.read_scenes() 
self._callbacks: dict[int, list[Any]] = {} # Update value callbacks by deviceId # Event callbacks by device id self._event_callbacks: dict[int, list[Callable[[FibaroEvent], None]]] = {} - self.hub_serial: str # Unique serial number of the hub - self.hub_name: str # The friendly name of the hub - self.hub_model: str - self.hub_software_version: str - self.hub_api_url: str = config[CONF_URL] + # Unique serial number of the hub + self.hub_serial = info.serial_number # Device infos by fibaro device id self._device_infos: dict[int, DeviceInfo] = {} - - def connect(self) -> None: - """Start the communication with the Fibaro controller.""" - - # Return value doesn't need to be checked, - # it is only relevant when connecting without credentials - self._client.connect() - info = self._client.read_info() - self.hub_serial = info.serial_number - self.hub_name = info.hc_name - self.hub_model = info.platform - self.hub_software_version = info.current_version - - self._room_map = {room.fibaro_id: room for room in self._client.read_rooms()} self._read_devices() - self._scenes = self._client.read_scenes() - - def connect_with_error_handling(self) -> None: - """Translate connect errors to easily differentiate auth and connect failures. - - When there is a better error handling in the used library this can be improved. - """ - try: - self.connect() - except HTTPError as http_ex: - if http_ex.response.status_code == 403: - raise FibaroAuthFailed from http_ex - - raise FibaroConnectFailed from http_ex - except Exception as ex: - raise FibaroConnectFailed from ex def enable_state_handler(self) -> None: """Start StateHandler thread for monitoring updates.""" @@ -302,14 +270,20 @@ class FibaroController: def get_room_name(self, room_id: int) -> str | None: """Get the room name by room id.""" - assert self._room_map - room = self._room_map.get(room_id) - return room.name if room else None + return self._room_map.get(room_id) def read_scenes(self) -> list[SceneModel]: """Return list of scenes.""" return self._scenes + def read_fibaro_info(self) -> InfoModel: + """Return the general info about the hub.""" + return self._fibaro_info + + def get_frontend_url(self) -> str: + """Return the url to the Fibaro hub web UI.""" + return self._client.frontend_url() + def _read_devices(self) -> None: """Read and process the device list.""" devices = self._client.read_devices() @@ -319,20 +293,17 @@ class FibaroController: for device in devices: try: device.fibaro_controller = self - if device.room_id == 0: + room_name = self.get_room_name(device.room_id) + if not room_name: room_name = "Unknown" - else: - room_name = self._room_map[device.room_id].name device.room_name = room_name device.friendly_name = f"{room_name} {device.name}" device.ha_id = ( f"{slugify(room_name)}_{slugify(device.name)}_{device.fibaro_id}" ) if device.enabled and (not device.is_plugin or self._import_plugins): - device.mapped_platform = self._map_device_to_platform(device) - else: - device.mapped_platform = None - if (platform := device.mapped_platform) is None: + platform = self._map_device_to_platform(device) + if platform is None: continue device.unique_id_str = f"{slugify(self.hub_serial)}.{device.fibaro_id}" self._create_device_info(device, devices) @@ -375,11 +346,17 @@ class FibaroController: pass +def connect_fibaro_client(data: Mapping[str, Any]) -> tuple[InfoModel, FibaroClient]: + """Connect to the fibaro hub and read some basic data.""" + client = FibaroClient(data[CONF_URL]) + info = client.connect_with_credentials(data[CONF_USERNAME], 
data[CONF_PASSWORD]) + return (info, client) + + def init_controller(data: Mapping[str, Any]) -> FibaroController: - """Validate the user input allows us to connect to fibaro.""" - controller = FibaroController(data) - controller.connect_with_error_handling() - return controller + """Connect to the fibaro hub and init the controller.""" + info, client = connect_fibaro_client(data) + return FibaroController(client, info, data[CONF_IMPORT_PLUGINS]) async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool: @@ -393,22 +370,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bo raise ConfigEntryNotReady( f"Could not connect to controller at {entry.data[CONF_URL]}" ) from connect_ex - except FibaroAuthFailed as auth_ex: + except FibaroAuthenticationFailed as auth_ex: raise ConfigEntryAuthFailed from auth_ex entry.runtime_data = controller # register the hub device info separately as the hub has sometimes no entities + fibaro_info = controller.read_fibaro_info() device_registry = dr.async_get(hass) device_registry.async_get_or_create( config_entry_id=entry.entry_id, identifiers={(DOMAIN, controller.hub_serial)}, serial_number=controller.hub_serial, - manufacturer="Fibaro", - name=controller.hub_name, - model=controller.hub_model, - sw_version=controller.hub_software_version, - configuration_url=controller.hub_api_url.removesuffix("/api/"), + manufacturer=fibaro_info.manufacturer_name, + name=fibaro_info.hc_name, + model=fibaro_info.model_name, + sw_version=fibaro_info.current_version, + configuration_url=controller.get_frontend_url(), + connections={(dr.CONNECTION_NETWORK_MAC, fibaro_info.mac_address)}, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -443,11 +422,3 @@ async def async_remove_config_entry_device( return False return True - - -class FibaroConnectFailed(HomeAssistantError): - """Error to indicate we cannot connect to fibaro home center.""" - - -class FibaroAuthFailed(HomeAssistantError): - """Error to indicate that authentication failed on fibaro home center.""" diff --git a/homeassistant/components/fibaro/climate.py b/homeassistant/components/fibaro/climate.py index d601450a70f..7a8cc3fd2a9 100644 --- a/homeassistant/components/fibaro/climate.py +++ b/homeassistant/components/fibaro/climate.py @@ -129,13 +129,13 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): def __init__(self, fibaro_device: DeviceModel) -> None: """Initialize the Fibaro device.""" super().__init__(fibaro_device) - self._temp_sensor_device: FibaroEntity | None = None - self._target_temp_device: FibaroEntity | None = None - self._op_mode_device: FibaroEntity | None = None - self._fan_mode_device: FibaroEntity | None = None + self._temp_sensor_device: DeviceModel | None = None + self._target_temp_device: DeviceModel | None = None + self._op_mode_device: DeviceModel | None = None + self._fan_mode_device: DeviceModel | None = None self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id) - siblings = fibaro_device.fibaro_controller.get_siblings(fibaro_device) + siblings = self.controller.get_siblings(fibaro_device) _LOGGER.debug("%s siblings: %s", fibaro_device.ha_id, siblings) tempunit = "C" for device in siblings: @@ -147,23 +147,23 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): and (device.value.has_value or device.has_heating_thermostat_setpoint) and device.unit in ("C", "F") ): - self._temp_sensor_device = FibaroEntity(device) + self._temp_sensor_device = device tempunit = device.unit if any( action for action in 
TARGET_TEMP_ACTIONS if action in device.actions ): - self._target_temp_device = FibaroEntity(device) + self._target_temp_device = device self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE if device.has_unit: tempunit = device.unit if any(action for action in OP_MODE_ACTIONS if action in device.actions): - self._op_mode_device = FibaroEntity(device) + self._op_mode_device = device self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE if "setFanMode" in device.actions: - self._fan_mode_device = FibaroEntity(device) + self._fan_mode_device = device self._attr_supported_features |= ClimateEntityFeature.FAN_MODE if tempunit == "F": @@ -172,7 +172,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): self._attr_temperature_unit = UnitOfTemperature.CELSIUS if self._fan_mode_device: - fan_modes = self._fan_mode_device.fibaro_device.supported_modes + fan_modes = self._fan_mode_device.supported_modes self._attr_fan_modes = [] for mode in fan_modes: if mode not in FANMODES: @@ -184,7 +184,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if self._op_mode_device: self._attr_preset_modes = [] self._attr_hvac_modes: list[HVACMode] = [] - device = self._op_mode_device.fibaro_device + device = self._op_mode_device if device.has_supported_thermostat_modes: for mode in device.supported_thermostat_modes: try: @@ -222,15 +222,15 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): "- _fan_mode_device %s" ), self.ha_id, - self._temp_sensor_device.ha_id if self._temp_sensor_device else "None", - self._target_temp_device.ha_id if self._target_temp_device else "None", - self._op_mode_device.ha_id if self._op_mode_device else "None", - self._fan_mode_device.ha_id if self._fan_mode_device else "None", + self._temp_sensor_device.fibaro_id if self._temp_sensor_device else "None", + self._target_temp_device.fibaro_id if self._target_temp_device else "None", + self._op_mode_device.fibaro_id if self._op_mode_device else "None", + self._fan_mode_device.fibaro_id if self._fan_mode_device else "None", ) await super().async_added_to_hass() # Register update callback for child devices - siblings = self.fibaro_device.fibaro_controller.get_siblings(self.fibaro_device) + siblings = self.controller.get_siblings(self.fibaro_device) for device in siblings: if device != self.fibaro_device: self.controller.register(device.fibaro_id, self._update_callback) @@ -240,14 +240,14 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): """Return the fan setting.""" if not self._fan_mode_device: return None - mode = self._fan_mode_device.fibaro_device.mode + mode = self._fan_mode_device.mode return FANMODES[mode] def set_fan_mode(self, fan_mode: str) -> None: """Set new target fan mode.""" if not self._fan_mode_device: return - self._fan_mode_device.action("setFanMode", HA_FANMODES[fan_mode]) + self._fan_mode_device.execute_action("setFanMode", [HA_FANMODES[fan_mode]]) @property def fibaro_op_mode(self) -> str | int: @@ -255,7 +255,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if not self._op_mode_device: return HA_OPMODES_HVAC[HVACMode.AUTO] - device = self._op_mode_device.fibaro_device + device = self._op_mode_device if device.has_operating_mode: return device.operating_mode @@ -281,17 +281,17 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if not self._op_mode_device: return - if "setOperatingMode" in self._op_mode_device.fibaro_device.actions: - self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode]) - elif "setThermostatMode" in 
self._op_mode_device.fibaro_device.actions: - device = self._op_mode_device.fibaro_device + device = self._op_mode_device + if "setOperatingMode" in device.actions: + device.execute_action("setOperatingMode", [HA_OPMODES_HVAC[hvac_mode]]) + elif "setThermostatMode" in device.actions: if device.has_supported_thermostat_modes: for mode in device.supported_thermostat_modes: if mode.lower() == hvac_mode: - self._op_mode_device.action("setThermostatMode", mode) + device.execute_action("setThermostatMode", [mode]) break - elif "setMode" in self._op_mode_device.fibaro_device.actions: - self._op_mode_device.action("setMode", HA_OPMODES_HVAC[hvac_mode]) + elif "setMode" in device.actions: + device.execute_action("setMode", [HA_OPMODES_HVAC[hvac_mode]]) @property def hvac_action(self) -> HVACAction | None: @@ -299,7 +299,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if not self._op_mode_device: return None - device = self._op_mode_device.fibaro_device + device = self._op_mode_device if device.has_thermostat_operating_state: with suppress(ValueError): return HVACAction(device.thermostat_operating_state.lower()) @@ -315,15 +315,15 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if not self._op_mode_device: return None - if self._op_mode_device.fibaro_device.has_thermostat_mode: - mode = self._op_mode_device.fibaro_device.thermostat_mode + if self._op_mode_device.has_thermostat_mode: + mode = self._op_mode_device.thermostat_mode if self.preset_modes is not None and mode in self.preset_modes: return mode return None - if self._op_mode_device.fibaro_device.has_operating_mode: - mode = self._op_mode_device.fibaro_device.operating_mode + if self._op_mode_device.has_operating_mode: + mode = self._op_mode_device.operating_mode else: - mode = self._op_mode_device.fibaro_device.mode + mode = self._op_mode_device.mode if mode not in OPMODES_PRESET: return None @@ -334,20 +334,22 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): if self._op_mode_device is None: return - if "setThermostatMode" in self._op_mode_device.fibaro_device.actions: - self._op_mode_device.action("setThermostatMode", preset_mode) - elif "setOperatingMode" in self._op_mode_device.fibaro_device.actions: - self._op_mode_device.action( - "setOperatingMode", HA_OPMODES_PRESET[preset_mode] + if "setThermostatMode" in self._op_mode_device.actions: + self._op_mode_device.execute_action("setThermostatMode", [preset_mode]) + elif "setOperatingMode" in self._op_mode_device.actions: + self._op_mode_device.execute_action( + "setOperatingMode", [HA_OPMODES_PRESET[preset_mode]] + ) + elif "setMode" in self._op_mode_device.actions: + self._op_mode_device.execute_action( + "setMode", [HA_OPMODES_PRESET[preset_mode]] ) - elif "setMode" in self._op_mode_device.fibaro_device.actions: - self._op_mode_device.action("setMode", HA_OPMODES_PRESET[preset_mode]) @property def current_temperature(self) -> float | None: """Return the current temperature.""" if self._temp_sensor_device: - device = self._temp_sensor_device.fibaro_device + device = self._temp_sensor_device if device.has_heating_thermostat_setpoint: return device.heating_thermostat_setpoint return device.value.float_value() @@ -357,7 +359,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" if self._target_temp_device: - device = self._target_temp_device.fibaro_device + device = self._target_temp_device if device.has_heating_thermostat_setpoint_future: return 
device.heating_thermostat_setpoint_future return device.target_level @@ -368,9 +370,11 @@ class FibaroThermostat(FibaroEntity, ClimateEntity): temperature = kwargs.get(ATTR_TEMPERATURE) target = self._target_temp_device if target is not None and temperature is not None: - if "setThermostatSetpoint" in target.fibaro_device.actions: - target.action("setThermostatSetpoint", self.fibaro_op_mode, temperature) - elif "setHeatingThermostatSetpoint" in target.fibaro_device.actions: - target.action("setHeatingThermostatSetpoint", temperature) + if "setThermostatSetpoint" in target.actions: + target.execute_action( + "setThermostatSetpoint", [self.fibaro_op_mode, temperature] + ) + elif "setHeatingThermostatSetpoint" in target.actions: + target.execute_action("setHeatingThermostatSetpoint", [temperature]) else: - target.action("setTargetLevel", temperature) + target.execute_action("setTargetLevel", [temperature]) diff --git a/homeassistant/components/fibaro/config_flow.py b/homeassistant/components/fibaro/config_flow.py index 0ffd9aaa48f..d941ceab37f 100644 --- a/homeassistant/components/fibaro/config_flow.py +++ b/homeassistant/components/fibaro/config_flow.py @@ -6,6 +6,7 @@ from collections.abc import Mapping import logging from typing import Any +from pyfibaro.fibaro_client import FibaroAuthenticationFailed, FibaroConnectFailed from slugify import slugify import voluptuous as vol @@ -13,7 +14,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import HomeAssistant -from . import FibaroAuthFailed, FibaroConnectFailed, init_controller +from . import connect_fibaro_client from .const import CONF_IMPORT_PLUGINS, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -33,16 +34,16 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ - controller = await hass.async_add_executor_job(init_controller, data) + info, _ = await hass.async_add_executor_job(connect_fibaro_client, data) _LOGGER.debug( "Successfully connected to fibaro home center %s with name %s", - controller.hub_serial, - controller.hub_name, + info.serial_number, + info.hc_name, ) return { - "serial_number": slugify(controller.hub_serial), - "name": controller.hub_name, + "serial_number": slugify(info.serial_number), + "name": info.hc_name, } @@ -75,7 +76,7 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN): info = await _validate_input(self.hass, user_input) except FibaroConnectFailed: errors["base"] = "cannot_connect" - except FibaroAuthFailed: + except FibaroAuthenticationFailed: errors["base"] = "invalid_auth" else: await self.async_set_unique_id(info["serial_number"]) @@ -106,7 +107,7 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN): await _validate_input(self.hass, new_data) except FibaroConnectFailed: errors["base"] = "cannot_connect" - except FibaroAuthFailed: + except FibaroAuthenticationFailed: errors["base"] = "invalid_auth" else: return self.async_update_reload_and_abort( diff --git a/homeassistant/components/fibaro/entity.py b/homeassistant/components/fibaro/entity.py index 6a8e12136c8..5375b058315 100644 --- a/homeassistant/components/fibaro/entity.py +++ b/homeassistant/components/fibaro/entity.py @@ -11,6 +11,8 @@ from pyfibaro.fibaro_device import DeviceModel from homeassistant.const import ATTR_ARMED, ATTR_BATTERY_LEVEL from homeassistant.helpers.entity import Entity +from . 
import FibaroController + _LOGGER = logging.getLogger(__name__) @@ -22,7 +24,7 @@ class FibaroEntity(Entity): def __init__(self, fibaro_device: DeviceModel) -> None: """Initialize the device.""" self.fibaro_device = fibaro_device - self.controller = fibaro_device.fibaro_controller + self.controller: FibaroController = fibaro_device.fibaro_controller self.ha_id = fibaro_device.ha_id self._attr_name = fibaro_device.friendly_name self._attr_unique_id = fibaro_device.unique_id_str @@ -54,15 +56,6 @@ class FibaroEntity(Entity): return self.fibaro_device.value_2.int_value() return None - def dont_know_message(self, cmd: str) -> None: - """Make a warning in case we don't know how to perform an action.""" - _LOGGER.warning( - "Not sure how to %s: %s (available actions: %s)", - cmd, - str(self.ha_id), - str(self.fibaro_device.actions), - ) - def set_level(self, level: int) -> None: """Set the level of Fibaro device.""" self.action("setValue", level) @@ -97,11 +90,7 @@ class FibaroEntity(Entity): def action(self, cmd: str, *args: Any) -> None: """Perform an action on the Fibaro HC.""" - if cmd in self.fibaro_device.actions: - self.fibaro_device.execute_action(cmd, args) - _LOGGER.debug("-> %s.%s%s called", str(self.ha_id), str(cmd), str(args)) - else: - self.dont_know_message(cmd) + self.fibaro_device.execute_action(cmd, args) @property def current_binary_state(self) -> bool: diff --git a/homeassistant/components/fibaro/manifest.json b/homeassistant/components/fibaro/manifest.json index d2a1186b05b..cd4d1de838c 100644 --- a/homeassistant/components/fibaro/manifest.json +++ b/homeassistant/components/fibaro/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pyfibaro"], - "requirements": ["pyfibaro==0.8.0"] + "requirements": ["pyfibaro==0.8.2"] } diff --git a/homeassistant/components/fints/sensor.py b/homeassistant/components/fints/sensor.py index 318325dbb09..f5188d5bf21 100644 --- a/homeassistant/components/fints/sensor.py +++ b/homeassistant/components/fints/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections import namedtuple from datetime import timedelta import logging -from typing import Any +from typing import Any, cast from fints.client import FinTS3PinTanClient from fints.models import SEPAAccount @@ -73,7 +73,7 @@ def setup_platform( credentials = BankCredentials( config[CONF_BIN], config[CONF_USERNAME], config[CONF_PIN], config[CONF_URL] ) - fints_name = config.get(CONF_NAME, config[CONF_BIN]) + fints_name = cast(str, config.get(CONF_NAME, config[CONF_BIN])) account_config = { acc[CONF_ACCOUNT]: acc[CONF_NAME] for acc in config[CONF_ACCOUNTS] diff --git a/homeassistant/components/flo/strings.json b/homeassistant/components/flo/strings.json index 3444911fbd4..64e22bedec3 100644 --- a/homeassistant/components/flo/strings.json +++ b/homeassistant/components/flo/strings.json @@ -60,11 +60,11 @@ "fields": { "sleep_minutes": { "name": "Sleep minutes", - "description": "The time to sleep in minutes." + "description": "The duration to sleep in minutes." }, "revert_to_mode": { "name": "Revert to mode", - "description": "The mode to revert to after sleep_minutes has elapsed." + "description": "The mode to revert to after the 'Sleep minutes' duration has elapsed." } } }, @@ -78,7 +78,7 @@ }, "run_health_test": { "name": "Run health test", - "description": "Have the Flo device run a health test." + "description": "Requests the Flo device to run a health test." 
} } } diff --git a/homeassistant/components/forked_daapd/__init__.py b/homeassistant/components/forked_daapd/__init__.py index 2172e60ba38..844a6a3eff9 100644 --- a/homeassistant/components/forked_daapd/__init__.py +++ b/homeassistant/components/forked_daapd/__init__.py @@ -1,29 +1,36 @@ """The forked_daapd component.""" -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from pyforked_daapd import ForkedDaapdAPI -from .const import DOMAIN, HASS_DATA_UPDATER_KEY +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .coordinator import ForkedDaapdConfigEntry, ForkedDaapdUpdater PLATFORMS = [Platform.MEDIA_PLAYER] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: ForkedDaapdConfigEntry) -> bool: """Set up forked-daapd from a config entry by forwarding to platform.""" + host: str = entry.data[CONF_HOST] + port: int = entry.data[CONF_PORT] + password: str = entry.data[CONF_PASSWORD] + forked_daapd_api = ForkedDaapdAPI( + async_get_clientsession(hass), host, port, password + ) + forked_daapd_updater = ForkedDaapdUpdater(hass, forked_daapd_api, entry.entry_id) + entry.runtime_data = forked_daapd_updater await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: ForkedDaapdConfigEntry +) -> bool: """Remove forked-daapd component.""" status = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if status and hass.data.get(DOMAIN) and hass.data[DOMAIN].get(entry.entry_id): - if websocket_handler := hass.data[DOMAIN][entry.entry_id][ - HASS_DATA_UPDATER_KEY - ].websocket_handler: + if status: + if websocket_handler := entry.runtime_data.websocket_handler: websocket_handler.cancel() - del hass.data[DOMAIN][entry.entry_id] - if not hass.data[DOMAIN]: - del hass.data[DOMAIN] return status diff --git a/homeassistant/components/forked_daapd/config_flow.py b/homeassistant/components/forked_daapd/config_flow.py index b2b2d498f60..890976c7503 100644 --- a/homeassistant/components/forked_daapd/config_flow.py +++ b/homeassistant/components/forked_daapd/config_flow.py @@ -7,12 +7,7 @@ from typing import Any from pyforked_daapd import ForkedDaapdAPI import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_PORT from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -28,6 +23,7 @@ from .const import ( DEFAULT_TTS_VOLUME, DOMAIN, ) +from .coordinator import ForkedDaapdConfigEntry _LOGGER = logging.getLogger(__name__) @@ -115,7 +111,7 @@ class ForkedDaapdFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: ForkedDaapdConfigEntry, ) -> ForkedDaapdOptionsFlowHandler: """Return options flow handler.""" return ForkedDaapdOptionsFlowHandler() diff --git a/homeassistant/components/forked_daapd/const.py 
b/homeassistant/components/forked_daapd/const.py index dd7ed1bdf16..effd4c9454c 100644 --- a/homeassistant/components/forked_daapd/const.py +++ b/homeassistant/components/forked_daapd/const.py @@ -30,9 +30,8 @@ DEFAULT_SERVER_NAME = "My Server" DEFAULT_TTS_PAUSE_TIME = 1.2 DEFAULT_TTS_VOLUME = 0.8 DEFAULT_UNMUTE_VOLUME = 0.6 -DOMAIN = "forked_daapd" # key for hass.data +DOMAIN = "forked_daapd" FD_NAME = "OwnTone" -HASS_DATA_UPDATER_KEY = "UPDATER" KNOWN_PIPES = {"librespot-java"} PIPE_FUNCTION_MAP = { "librespot-java": { diff --git a/homeassistant/components/forked_daapd/coordinator.py b/homeassistant/components/forked_daapd/coordinator.py index 2db0a75c429..0ba339be505 100644 --- a/homeassistant/components/forked_daapd/coordinator.py +++ b/homeassistant/components/forked_daapd/coordinator.py @@ -9,6 +9,7 @@ from typing import Any from pyforked_daapd import ForkedDaapdAPI +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.dispatcher import async_dispatcher_send @@ -22,6 +23,8 @@ from .const import ( SIGNAL_UPDATE_QUEUE, ) +type ForkedDaapdConfigEntry = ConfigEntry[ForkedDaapdUpdater] + _LOGGER = logging.getLogger(__name__) WS_NOTIFY_EVENT_TYPES = ["player", "outputs", "volume", "options", "queue", "database"] @@ -39,6 +42,11 @@ class ForkedDaapdUpdater: self._all_output_ids: set[str] = set() self._entry_id = entry_id + @property + def api(self) -> ForkedDaapdAPI: + """Return the API object.""" + return self._api + async def async_init(self) -> None: """Perform async portion of class initialization.""" if not (server_config := await self._api.get_request("config")): diff --git a/homeassistant/components/forked_daapd/media_player.py b/homeassistant/components/forked_daapd/media_player.py index 8cbf33460aa..fd5390195a6 100644 --- a/homeassistant/components/forked_daapd/media_player.py +++ b/homeassistant/components/forked_daapd/media_player.py @@ -7,7 +7,6 @@ from collections import defaultdict import logging from typing import Any -from pyforked_daapd import ForkedDaapdAPI from pylibrespot_java import LibrespotJavaAPI from homeassistant.components import media_source @@ -28,8 +27,7 @@ from homeassistant.components.spotify import ( resolve_spotify_media_type, spotify_uri_from_media_browser_url, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.dispatcher import ( @@ -55,9 +53,7 @@ from .const import ( DEFAULT_TTS_PAUSE_TIME, DEFAULT_TTS_VOLUME, DEFAULT_UNMUTE_VOLUME, - DOMAIN, FD_NAME, - HASS_DATA_UPDATER_KEY, KNOWN_PIPES, PIPE_FUNCTION_MAP, SIGNAL_ADD_ZONES, @@ -74,23 +70,21 @@ from .const import ( SUPPORTED_FEATURES_ZONE, TTS_TIMEOUT, ) -from .coordinator import ForkedDaapdUpdater +from .coordinator import ForkedDaapdConfigEntry _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: ForkedDaapdConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up forked-daapd from a config entry.""" + forked_daapd_updater = config_entry.runtime_data + host: str = config_entry.data[CONF_HOST] - port: int = config_entry.data[CONF_PORT] - password: str = config_entry.data[CONF_PASSWORD] - forked_daapd_api = 
ForkedDaapdAPI( - async_get_clientsession(hass), host, port, password - ) + forked_daapd_api = forked_daapd_updater.api forked_daapd_master = ForkedDaapdMaster( clientsession=async_get_clientsession(hass), api=forked_daapd_api, @@ -111,20 +105,12 @@ async def async_setup_entry( ) config_entry.async_on_unload(config_entry.add_update_listener(update_listener)) - if not hass.data.get(DOMAIN): - hass.data[DOMAIN] = {config_entry.entry_id: {}} - async_add_entities([forked_daapd_master], False) - forked_daapd_updater = ForkedDaapdUpdater( - hass, forked_daapd_api, config_entry.entry_id - ) - hass.data[DOMAIN][config_entry.entry_id][HASS_DATA_UPDATER_KEY] = ( - forked_daapd_updater - ) + await forked_daapd_updater.async_init() -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: ForkedDaapdConfigEntry) -> None: """Handle options update.""" async_dispatcher_send( hass, SIGNAL_CONFIG_OPTIONS_UPDATE.format(entry.entry_id), entry.options diff --git a/homeassistant/components/frankever/__init__.py b/homeassistant/components/frankever/__init__.py new file mode 100644 index 00000000000..66eeecb1e59 --- /dev/null +++ b/homeassistant/components/frankever/__init__.py @@ -0,0 +1 @@ +"""FrankEver virtual integration.""" diff --git a/homeassistant/components/frankever/manifest.json b/homeassistant/components/frankever/manifest.json new file mode 100644 index 00000000000..37d7be765ef --- /dev/null +++ b/homeassistant/components/frankever/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "frankever", + "name": "FrankEver", + "integration_type": "virtual", + "supported_by": "shelly" +} diff --git a/homeassistant/components/fritz/sensor.py b/homeassistant/components/fritz/sensor.py index bcee590460f..243b3b5eb4c 100644 --- a/homeassistant/components/fritz/sensor.py +++ b/homeassistant/components/fritz/sensor.py @@ -193,7 +193,6 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( translation_key="max_kb_s_sent", native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, device_class=SensorDeviceClass.DATA_RATE, - entity_category=EntityCategory.DIAGNOSTIC, value_fn=_retrieve_max_kb_s_sent_state, ), FritzSensorEntityDescription( @@ -201,7 +200,6 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( translation_key="max_kb_s_received", native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, device_class=SensorDeviceClass.DATA_RATE, - entity_category=EntityCategory.DIAGNOSTIC, value_fn=_retrieve_max_kb_s_received_state, ), FritzSensorEntityDescription( @@ -225,6 +223,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( translation_key="link_kb_s_sent", native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, device_class=SensorDeviceClass.DATA_RATE, + entity_category=EntityCategory.DIAGNOSTIC, value_fn=_retrieve_link_kb_s_sent_state, ), FritzSensorEntityDescription( @@ -232,12 +231,15 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] 
= ( translation_key="link_kb_s_received", native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, device_class=SensorDeviceClass.DATA_RATE, + entity_category=EntityCategory.DIAGNOSTIC, value_fn=_retrieve_link_kb_s_received_state, ), FritzSensorEntityDescription( key="link_noise_margin_sent", translation_key="link_noise_margin_sent", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, value_fn=_retrieve_link_noise_margin_sent_state, is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION, ), @@ -245,6 +247,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( key="link_noise_margin_received", translation_key="link_noise_margin_received", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, value_fn=_retrieve_link_noise_margin_received_state, is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION, ), @@ -252,6 +256,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( key="link_attenuation_sent", translation_key="link_attenuation_sent", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, value_fn=_retrieve_link_attenuation_sent_state, is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION, ), @@ -259,6 +265,8 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = ( key="link_attenuation_received", translation_key="link_attenuation_received", native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, value_fn=_retrieve_link_attenuation_received_state, is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION, ), diff --git a/homeassistant/components/fritzbox/climate.py b/homeassistant/components/fritzbox/climate.py index 118e03c391f..57c7e2a696f 100644 --- a/homeassistant/components/fritzbox/climate.py +++ b/homeassistant/components/fritzbox/climate.py @@ -6,6 +6,7 @@ from typing import Any from homeassistant.components.climate import ( ATTR_HVAC_MODE, + PRESET_BOOST, PRESET_COMFORT, PRESET_ECO, ClimateEntity, @@ -38,7 +39,7 @@ from .sensor import value_scheduled_preset HVAC_MODES = [HVACMode.HEAT, HVACMode.OFF] PRESET_HOLIDAY = "holiday" PRESET_SUMMER = "summer" -PRESET_MODES = [PRESET_ECO, PRESET_COMFORT] +PRESET_MODES = [PRESET_ECO, PRESET_COMFORT, PRESET_BOOST] SUPPORTED_FEATURES = ( ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE @@ -194,6 +195,8 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): return PRESET_HOLIDAY if self.data.summer_active: return PRESET_SUMMER + if self.data.target_temperature == ON_API_TEMPERATURE: + return PRESET_BOOST if self.data.target_temperature == self.data.comfort_temperature: return PRESET_COMFORT if self.data.target_temperature == self.data.eco_temperature: @@ -211,6 +214,8 @@ class FritzboxThermostat(FritzBoxDeviceEntity, ClimateEntity): await self.async_set_temperature(temperature=self.data.comfort_temperature) elif preset_mode == PRESET_ECO: await self.async_set_temperature(temperature=self.data.eco_temperature) + elif preset_mode == PRESET_BOOST: + await self.async_set_temperature(temperature=ON_REPORT_SET_TEMPERATURE) @property def extra_state_attributes(self) -> ClimateExtraAttributes: diff --git a/homeassistant/components/fritzbox/sensor.py 
b/homeassistant/components/fritzbox/sensor.py index bed7004bd6a..801a3a67a6e 100644 --- a/homeassistant/components/fritzbox/sensor.py +++ b/homeassistant/components/fritzbox/sensor.py @@ -137,6 +137,7 @@ SENSOR_TYPES: Final[tuple[FritzSensorEntityDescription, ...]] = ( key="battery", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.BATTERY, + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, suitable=lambda device: device.battery_level is not None, native_value=lambda device: device.battery_level, diff --git a/homeassistant/components/fritzbox/strings.json b/homeassistant/components/fritzbox/strings.json index c7c2439b566..e0df30875bc 100644 --- a/homeassistant/components/fritzbox/strings.json +++ b/homeassistant/components/fritzbox/strings.json @@ -89,7 +89,7 @@ "message": "Can't change preset while holiday or summer mode is active on the device." }, "change_hvac_while_active_mode": { - "message": "Can't change hvac mode while holiday or summer mode is active on the device." + "message": "Can't change HVAC mode while holiday or summer mode is active on the device." } } } diff --git a/homeassistant/components/fronius/config_flow.py b/homeassistant/components/fronius/config_flow.py index f35c9ce5bc1..b8aa2da81c6 100644 --- a/homeassistant/components/fronius/config_flow.py +++ b/homeassistant/components/fronius/config_flow.py @@ -149,7 +149,7 @@ class FroniusConfigFlow(ConfigFlow, domain=DOMAIN): unique_id, info = await validate_host(self.hass, user_input[CONF_HOST]) except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/fronius/manifest.json b/homeassistant/components/fronius/manifest.json index 94d0f90b0bd..661d808ad23 100644 --- a/homeassistant/components/fronius/manifest.json +++ b/homeassistant/components/fronius/manifest.json @@ -11,6 +11,6 @@ "documentation": "https://www.home-assistant.io/integrations/fronius", "iot_class": "local_polling", "loggers": ["pyfronius"], - "quality_scale": "gold", - "requirements": ["PyFronius==0.7.3"] + "quality_scale": "platinum", + "requirements": ["PyFronius==0.7.7"] } diff --git a/homeassistant/components/fronius/quality_scale.yaml b/homeassistant/components/fronius/quality_scale.yaml index 2c4b892475b..522b8ab571f 100644 --- a/homeassistant/components/fronius/quality_scale.yaml +++ b/homeassistant/components/fronius/quality_scale.yaml @@ -83,7 +83,4 @@ rules: # Platinum async-dependency: done inject-websession: done - strict-typing: - status: todo - comment: | - The pyfronius library isn't strictly typed and doesn't export type information. 
+ strict-typing: done diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py index 6184d888004..9a0627f9f42 100644 --- a/homeassistant/components/frontend/__init__.py +++ b/homeassistant/components/frontend/__init__.py @@ -52,10 +52,9 @@ CONF_JS_VERSION = "javascript_version" DEFAULT_THEME_COLOR = "#03A9F4" -DATA_PANELS = "frontend_panels" -DATA_JS_VERSION = "frontend_js_version" -DATA_EXTRA_MODULE_URL = "frontend_extra_module_url" -DATA_EXTRA_JS_URL_ES5 = "frontend_extra_js_url_es5" +DATA_PANELS: HassKey[dict[str, Panel]] = HassKey("frontend_panels") +DATA_EXTRA_MODULE_URL: HassKey[UrlManager] = HassKey("frontend_extra_module_url") +DATA_EXTRA_JS_URL_ES5: HassKey[UrlManager] = HassKey("frontend_extra_js_url_es5") DATA_WS_SUBSCRIBERS: HassKey[set[tuple[websocket_api.ActiveConnection, int]]] = HassKey( "frontend_ws_subscribers" @@ -64,8 +63,8 @@ DATA_WS_SUBSCRIBERS: HassKey[set[tuple[websocket_api.ActiveConnection, int]]] = THEMES_STORAGE_KEY = f"{DOMAIN}_theme" THEMES_STORAGE_VERSION = 1 THEMES_SAVE_DELAY = 60 -DATA_THEMES_STORE = "frontend_themes_store" -DATA_THEMES = "frontend_themes" +DATA_THEMES_STORE: HassKey[Store] = HassKey("frontend_themes_store") +DATA_THEMES: HassKey[dict[str, Any]] = HassKey("frontend_themes") DATA_DEFAULT_THEME = "frontend_default_theme" DATA_DEFAULT_DARK_THEME = "frontend_default_dark_theme" DEFAULT_THEME = "default" @@ -242,7 +241,7 @@ class Panel: sidebar_title: str | None = None # Url to show the panel in the frontend - frontend_url_path: str | None = None + frontend_url_path: str # Config to pass to the webcomponent config: dict[str, Any] | None = None @@ -273,7 +272,7 @@ class Panel: self.config_panel_domain = config_panel_domain @callback - def to_response(self) -> PanelRespons: + def to_response(self) -> PanelResponse: """Panel as dictionary.""" return { "component_name": self.component_name, @@ -631,7 +630,8 @@ class IndexView(web_urldispatcher.AbstractResource): def get_info(self) -> dict[str, list[str]]: # type: ignore[override] """Return a dict with additional info useful for introspection.""" - return {"panels": list(self.hass.data[DATA_PANELS])} + panels = self.hass.data[DATA_PANELS] + return {"panels": list(panels)} def raw_match(self, path: str) -> bool: """Perform a raw match against path.""" @@ -841,13 +841,13 @@ def websocket_subscribe_extra_js( connection.send_message(websocket_api.result_message(msg["id"])) -class PanelRespons(TypedDict): +class PanelResponse(TypedDict): """Represent the panel response type.""" component_name: str icon: str | None title: str | None config: dict[str, Any] | None - url_path: str | None + url_path: str require_admin: bool config_panel_domain: str | None diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index b210fdb6661..4cab8375d1b 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20250306.0"] + "requirements": ["home-assistant-frontend==20250401.0"] } diff --git a/homeassistant/components/frontend/storage.py b/homeassistant/components/frontend/storage.py index cbcc3024aa7..a33a9de7ac5 100644 --- a/homeassistant/components/frontend/storage.py +++ b/homeassistant/components/frontend/storage.py @@ -12,8 +12,11 @@ from homeassistant.components 
import websocket_api from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.storage import Store +from homeassistant.util.hass_dict import HassKey -DATA_STORAGE = "frontend_storage" +DATA_STORAGE: HassKey[tuple[dict[str, Store], dict[str, dict]]] = HassKey( + "frontend_storage" +) STORAGE_VERSION_USER_DATA = 1 diff --git a/homeassistant/components/frontier_silicon/config_flow.py b/homeassistant/components/frontier_silicon/config_flow.py index f6514da28ff..dc4f6bea989 100644 --- a/homeassistant/components/frontier_silicon/config_flow.py +++ b/homeassistant/components/frontier_silicon/config_flow.py @@ -108,8 +108,8 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): self._webfsapi_url = await AFSAPI.get_webfsapi_endpoint(device_url) except FSConnectionError: return self.async_abort(reason="cannot_connect") - except Exception as exception: # noqa: BLE001 - _LOGGER.debug(exception) + except Exception: + _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") # try to login with default pin diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py index c4b097ff0de..9369fd7b7cd 100644 --- a/homeassistant/components/fujitsu_fglair/config_flow.py +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -62,7 +62,7 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except AylaAuthError: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" diff --git a/homeassistant/components/fujitsu_fglair/sensor.py b/homeassistant/components/fujitsu_fglair/sensor.py index 0ad5bec3117..3bb693e1068 100644 --- a/homeassistant/components/fujitsu_fglair/sensor.py +++ b/homeassistant/components/fujitsu_fglair/sensor.py @@ -24,6 +24,7 @@ async def async_setup_entry( async_add_entities( FGLairOutsideTemperature(entry.runtime_data, device) for device in entry.runtime_data.data.values() + if device.outdoor_temperature is not None ) diff --git a/homeassistant/components/fully_kiosk/strings.json b/homeassistant/components/fully_kiosk/strings.json index a4b466926f0..5841456c034 100644 --- a/homeassistant/components/fully_kiosk/strings.json +++ b/homeassistant/components/fully_kiosk/strings.json @@ -1,6 +1,6 @@ { "common": { - "data_description_password": "The Remote Admin Password from the Fully Kiosk Browser app settings.", + "data_description_password": "The Remote Admin password from the Fully Kiosk Browser app settings.", "data_description_ssl": "Is the Fully Kiosk app configured to require SSL for the connection?", "data_description_verify_ssl": "Should SSL certificartes be verified? This should be off for self-signed certificates." 
}, @@ -151,7 +151,7 @@ } }, "set_config": { - "name": "Set Configuration", + "name": "Set configuration", "description": "Sets a configuration parameter on Fully Kiosk Browser.", "fields": { "key": { @@ -165,7 +165,7 @@ } }, "start_application": { - "name": "Start Application", + "name": "Start application", "description": "Starts an application on the device running Fully Kiosk Browser.", "fields": { "application": { diff --git a/homeassistant/components/fyta/config_flow.py b/homeassistant/components/fyta/config_flow.py index 78cb7647785..9c5ab1de405 100644 --- a/homeassistant/components/fyta/config_flow.py +++ b/homeassistant/components/fyta/config_flow.py @@ -65,8 +65,8 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): return {"base": "invalid_auth"} except FytaPasswordError: return {"base": "invalid_auth", CONF_PASSWORD: "password_error"} - except Exception as e: # noqa: BLE001 - _LOGGER.error(e) + except Exception: + _LOGGER.exception("Unexpected exception") return {"base": "unknown"} finally: await fyta.client.close() diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index ea628f55c6c..615197203a8 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_polling", "loggers": ["fyta_cli"], "quality_scale": "platinum", - "requirements": ["fyta_cli==0.7.0"] + "requirements": ["fyta_cli==0.7.2"] } diff --git a/homeassistant/components/gardena_bluetooth/config_flow.py b/homeassistant/components/gardena_bluetooth/config_flow.py index c7631b62f47..613d0cf21db 100644 --- a/homeassistant/components/gardena_bluetooth/config_flow.py +++ b/homeassistant/components/gardena_bluetooth/config_flow.py @@ -41,6 +41,8 @@ def _is_supported(discovery_info: BluetoothServiceInfo): ProductType.PUMP, ProductType.VALVE, ProductType.WATER_COMPUTER, + ProductType.AUTOMATS, + ProductType.PRESSURE_TANKS, ): _LOGGER.debug("Unsupported device: %s", manufacturer_data) return False diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index 28bba1015f5..8c9cda7d998 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.5.0"] + "requirements": ["gardena-bluetooth==1.6.0"] } diff --git a/homeassistant/components/geniushub/strings.json b/homeassistant/components/geniushub/strings.json index 42d53c7fa00..79eee2c9a1b 100644 --- a/homeassistant/components/geniushub/strings.json +++ b/homeassistant/components/geniushub/strings.json @@ -45,7 +45,7 @@ }, "mode": { "name": "[%key:common::config_flow::data::mode%]", - "description": "One of: off, timer or footprint." + "description": "The zone's operating mode." 
} } }, diff --git a/homeassistant/components/geocaching/manifest.json b/homeassistant/components/geocaching/manifest.json index 127519ca5d0..4617bd1c57b 100644 --- a/homeassistant/components/geocaching/manifest.json +++ b/homeassistant/components/geocaching/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/geocaching", "iot_class": "cloud_polling", - "requirements": ["geocachingapi==0.2.1"] + "requirements": ["geocachingapi==0.3.0"] } diff --git a/homeassistant/components/gios/__init__.py b/homeassistant/components/gios/__init__.py index c76efbcf361..31f704fcacc 100644 --- a/homeassistant/components/gios/__init__.py +++ b/homeassistant/components/gios/__init__.py @@ -4,9 +4,14 @@ from __future__ import annotations import logging +from aiohttp.client_exceptions import ClientConnectorError +from gios import Gios +from gios.exceptions import GiosError + from homeassistant.components.air_quality import DOMAIN as AIR_QUALITY_PLATFORM from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -36,8 +41,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: GiosConfigEntry) -> bool device_registry.async_update_device(device_entry.id, new_identifiers={new_ids}) websession = async_get_clientsession(hass) + try: + gios = await Gios.create(websession, station_id) + except (GiosError, ConnectionError, ClientConnectorError) as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "entry": entry.title, + "error": repr(err), + }, + ) from err - coordinator = GiosDataUpdateCoordinator(hass, entry, websession, station_id) + coordinator = GiosDataUpdateCoordinator(hass, entry, gios) await coordinator.async_config_entry_first_refresh() entry.runtime_data = GiosData(coordinator) diff --git a/homeassistant/components/gios/config_flow.py b/homeassistant/components/gios/config_flow.py index a089aeab820..9b242a8cc99 100644 --- a/homeassistant/components/gios/config_flow.py +++ b/homeassistant/components/gios/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp.client_exceptions import ClientConnectorError from gios import ApiError, Gios, InvalidSensorsDataError, NoStationError @@ -12,6 +12,12 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) from .const import API_TIMEOUT, CONF_STATION_ID, DOMAIN @@ -27,40 +33,59 @@ class GiosFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors = {} + websession = async_get_clientsession(self.hass) + if user_input is not None: + station_id = user_input[CONF_STATION_ID] + try: - await self.async_set_unique_id( - str(user_input[CONF_STATION_ID]), raise_on_progress=False - ) + await self.async_set_unique_id(station_id, raise_on_progress=False) self._abort_if_unique_id_configured() - websession = async_get_clientsession(self.hass) - async with 
asyncio.timeout(API_TIMEOUT): - gios = Gios(user_input[CONF_STATION_ID], websession) + gios = await Gios.create(websession, int(station_id)) await gios.async_update() - assert gios.station_name is not None + # GIOS treats station ID as int + user_input[CONF_STATION_ID] = int(station_id) + + if TYPE_CHECKING: + assert gios.station_name is not None + return self.async_create_entry( title=gios.station_name, data=user_input, ) except (ApiError, ClientConnectorError, TimeoutError): errors["base"] = "cannot_connect" - except NoStationError: - errors[CONF_STATION_ID] = "wrong_station_id" except InvalidSensorsDataError: errors[CONF_STATION_ID] = "invalid_sensors_data" + try: + gios = await Gios.create(websession) + except (ApiError, ClientConnectorError, NoStationError): + return self.async_abort(reason="cannot_connect") + + options: list[SelectOptionDict] = [ + SelectOptionDict(value=str(station.id), label=station.name) + for station in gios.measurement_stations.values() + ] + + schema: vol.Schema = vol.Schema( + { + vol.Required(CONF_STATION_ID): SelectSelector( + SelectSelectorConfig( + options=options, + sort=True, + mode=SelectSelectorMode.DROPDOWN, + ), + ), + vol.Optional(CONF_NAME, default=self.hass.config.location_name): str, + } + ) + return self.async_show_form( step_id="user", - data_schema=vol.Schema( - { - vol.Required(CONF_STATION_ID): int, - vol.Optional( - CONF_NAME, default=self.hass.config.location_name - ): str, - } - ), + data_schema=schema, errors=errors, ) diff --git a/homeassistant/components/gios/const.py b/homeassistant/components/gios/const.py index a8490511ab8..2294e89c961 100644 --- a/homeassistant/components/gios/const.py +++ b/homeassistant/components/gios/const.py @@ -13,7 +13,7 @@ SCAN_INTERVAL: Final = timedelta(minutes=30) DOMAIN: Final = "gios" MANUFACTURER: Final = "Główny Inspektorat Ochrony Środowiska" -URL = "http://powietrze.gios.gov.pl/pjp/current/station_details/info/{station_id}" +URL = "https://powietrze.gios.gov.pl/pjp/current/station_details/info/{station_id}" API_TIMEOUT: Final = 30 diff --git a/homeassistant/components/gios/coordinator.py b/homeassistant/components/gios/coordinator.py index be4b41ca6ee..eb0dd82eb67 100644 --- a/homeassistant/components/gios/coordinator.py +++ b/homeassistant/components/gios/coordinator.py @@ -6,7 +6,6 @@ import asyncio from dataclasses import dataclass import logging -from aiohttp import ClientSession from aiohttp.client_exceptions import ClientConnectorError from gios import Gios from gios.exceptions import GiosError @@ -39,11 +38,10 @@ class GiosDataUpdateCoordinator(DataUpdateCoordinator[GiosSensors]): self, hass: HomeAssistant, config_entry: GiosConfigEntry, - session: ClientSession, - station_id: int, + gios: Gios, ) -> None: """Class to manage fetching GIOS data API.""" - self.gios = Gios(station_id, session) + self.gios = gios super().__init__( hass, @@ -59,4 +57,11 @@ class GiosDataUpdateCoordinator(DataUpdateCoordinator[GiosSensors]): async with asyncio.timeout(API_TIMEOUT): return await self.gios.async_update() except (GiosError, ClientConnectorError) as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "entry": self.config_entry.title, + "error": repr(error), + }, + ) from error diff --git a/homeassistant/components/gios/manifest.json b/homeassistant/components/gios/manifest.json index 3d2e719fab6..8deb2eee414 100644 --- a/homeassistant/components/gios/manifest.json +++ 
b/homeassistant/components/gios/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["dacite", "gios"], - "requirements": ["gios==5.0.0"] + "requirements": ["gios==6.0.0"] } diff --git a/homeassistant/components/gios/strings.json b/homeassistant/components/gios/strings.json index fc82f1c843d..eca23159a13 100644 --- a/homeassistant/components/gios/strings.json +++ b/homeassistant/components/gios/strings.json @@ -5,17 +5,17 @@ "title": "GIO\u015a (Polish Chief Inspectorate Of Environmental Protection)", "data": { "name": "[%key:common::config_flow::data::name%]", - "station_id": "ID of the measuring station" + "station_id": "Measuring station" } } }, "error": { - "wrong_station_id": "ID of the measuring station is not correct.", "invalid_sensors_data": "Invalid sensors' data for this measuring station.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_location%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_location%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "system_health": { @@ -170,5 +170,13 @@ } } } + }, + "exceptions": { + "cannot_connect": { + "message": "An error occurred while connecting to the GIOS API for {entry}: {error}" + }, + "update_error": { + "message": "An error occurred while retrieving data from the GIOS API for {entry}: {error}" + } } } diff --git a/homeassistant/components/go2rtc/const.py b/homeassistant/components/go2rtc/const.py index 234411936cb..491b2269043 100644 --- a/homeassistant/components/go2rtc/const.py +++ b/homeassistant/components/go2rtc/const.py @@ -6,4 +6,4 @@ CONF_DEBUG_UI = "debug_ui" DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time." 
HA_MANAGED_API_PORT = 11984 HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/" -RECOMMENDED_VERSION = "1.9.8" +RECOMMENDED_VERSION = "1.9.9" diff --git a/homeassistant/components/gogogate2/config_flow.py b/homeassistant/components/gogogate2/config_flow.py index 0348d0b428c..cebff656d5d 100644 --- a/homeassistant/components/gogogate2/config_flow.py +++ b/homeassistant/components/gogogate2/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import dataclasses +import logging import re from typing import Any, Self @@ -27,6 +28,8 @@ from homeassistant.helpers.service_info.zeroconf import ( from .common import get_api from .const import DEVICE_TYPE_GOGOGATE2, DEVICE_TYPE_ISMARTGATE, DOMAIN +_LOGGER = logging.getLogger(__name__) + DEVICE_NAMES = { DEVICE_TYPE_GOGOGATE2: "Gogogate2", DEVICE_TYPE_ISMARTGATE: "ismartgate", } @@ -115,7 +118,8 @@ class Gogogate2FlowHandler(ConfigFlow, domain=DOMAIN): else: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" if self._ip_address and self._device_type: diff --git a/homeassistant/components/google/calendar.py b/homeassistant/components/google/calendar.py index 4f8ffba1d19..a62d2bf1d6b 100644 --- a/homeassistant/components/google/calendar.py +++ b/homeassistant/components/google/calendar.py @@ -89,6 +89,7 @@ OPAQUE = "opaque" RRULE_PREFIX = "RRULE:" SERVICE_CREATE_EVENT = "create_event" +FILTERED_EVENT_TYPES = [EventTypeEnum.BIRTHDAY, EventTypeEnum.WORKING_LOCATION] @dataclasses.dataclass(frozen=True, kw_only=True) @@ -103,7 +104,7 @@ class GoogleCalendarEntityDescription(CalendarEntityDescription): search: str | None local_sync: bool device_id: str - working_location: bool = False + event_type: EventTypeEnum | None = None def _get_entity_descriptions( @@ -173,14 +174,24 @@ def _get_entity_descriptions( local_sync, ) if calendar_item.primary and local_sync: - _LOGGER.debug("work location entity") + # Create a separate calendar for birthdays + entity_descriptions.append( + dataclasses.replace( + entity_description, + key=f"{key}-birthdays", + translation_key="birthdays", + event_type=EventTypeEnum.BIRTHDAY, + name=None, + entity_id=None, + ) + ) # Create an optional disabled by default entity for Work Location entity_descriptions.append( dataclasses.replace( entity_description, key=f"{key}-work-location", translation_key="working_location", - working_location=True, + event_type=EventTypeEnum.WORKING_LOCATION, name=None, entity_id=None, entity_registry_enabled_default=False, @@ -383,9 +394,18 @@ class GoogleCalendarEntity( for attendee in event.attendees ): return False - - if event.event_type == EventTypeEnum.WORKING_LOCATION: - return self.entity_description.working_location + # Calendar entity may be limited to a specific event type + if ( + self.entity_description.event_type is not None + and self.entity_description.event_type != event.event_type + ): + return False + # Default calendar entity omits the special types but includes all the others + if ( + self.entity_description.event_type is None + and event.event_type in FILTERED_EVENT_TYPES + ): + return False if self._ignore_availability: return True return event.transparency == OPAQUE diff --git a/homeassistant/components/google/manifest.json b/homeassistant/components/google/manifest.json index bd04597e513..efce97a0d6f 100644 --- a/homeassistant/components/google/manifest.json +++ b/homeassistant/components/google/manifest.json @@ -7,5 +7,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/google", "iot_class": "cloud_polling", "loggers": ["googleapiclient"], - "requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==8.3.0"] + "requirements": ["gcal-sync==7.0.0", "oauth2client==4.1.3", "ical==9.0.3"] } diff --git a/homeassistant/components/google/strings.json b/homeassistant/components/google/strings.json index 5ee0cdd9c14..5776fd0480b 100644 --- a/homeassistant/components/google/strings.json +++ b/homeassistant/components/google/strings.json @@ -131,6 +131,9 @@ "calendar": { "working_location": { "name": "Working location" + }, + "birthdays": { + "name": "Birthdays" } } } diff --git a/homeassistant/components/google_assistant/const.py b/homeassistant/components/google_assistant/const.py index 8132ecaae2c..71738c9d13e 100644 --- a/homeassistant/components/google_assistant/const.py +++ b/homeassistant/components/google_assistant/const.py @@ -14,6 +14,7 @@ from homeassistant.components import ( input_boolean, input_button, input_select, + lawn_mower, light, lock, media_player, @@ -58,6 +59,7 @@ DEFAULT_EXPOSED_DOMAINS = [ "humidifier", "input_boolean", "input_select", + "lawn_mower", "light", "lock", "media_player", @@ -88,6 +90,7 @@ TYPE_GATE = f"{PREFIX_TYPES}GATE" TYPE_HUMIDIFIER = f"{PREFIX_TYPES}HUMIDIFIER" TYPE_LIGHT = f"{PREFIX_TYPES}LIGHT" TYPE_LOCK = f"{PREFIX_TYPES}LOCK" +TYPE_MOWER = f"{PREFIX_TYPES}MOWER" TYPE_OUTLET = f"{PREFIX_TYPES}OUTLET" TYPE_RECEIVER = f"{PREFIX_TYPES}AUDIO_VIDEO_RECEIVER" TYPE_SCENE = f"{PREFIX_TYPES}SCENE" @@ -149,6 +152,7 @@ DOMAIN_TO_GOOGLE_TYPES = { input_boolean.DOMAIN: TYPE_SWITCH, input_button.DOMAIN: TYPE_SCENE, input_select.DOMAIN: TYPE_SENSOR, + lawn_mower.DOMAIN: TYPE_MOWER, light.DOMAIN: TYPE_LIGHT, lock.DOMAIN: TYPE_LOCK, media_player.DOMAIN: TYPE_SETTOP, diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index 44251a3be04..9edd340d7d9 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -21,6 +21,7 @@ from homeassistant.components import ( input_boolean, input_button, input_select, + lawn_mower, light, lock, media_player, @@ -42,6 +43,7 @@ from homeassistant.components.climate import ClimateEntityFeature from homeassistant.components.cover import CoverEntityFeature from homeassistant.components.fan import FanEntityFeature from homeassistant.components.humidifier import HumidifierEntityFeature +from homeassistant.components.lawn_mower import LawnMowerEntityFeature from homeassistant.components.light import LightEntityFeature from homeassistant.components.lock import LockState from homeassistant.components.media_player import MediaPlayerEntityFeature, MediaType @@ -714,7 +716,7 @@ class DockTrait(_Trait): @staticmethod def supported(domain, features, device_class, _): """Test if state is supported.""" - return domain == vacuum.DOMAIN + return domain in (vacuum.DOMAIN, lawn_mower.DOMAIN) def sync_attributes(self) -> dict[str, Any]: """Return dock attributes for a sync request.""" @@ -722,17 +724,32 @@ class DockTrait(_Trait): def query_attributes(self) -> dict[str, Any]: """Return dock query attributes.""" - return {"isDocked": self.state.state == vacuum.VacuumActivity.DOCKED} + domain = self.state.domain + state = self.state.state + if domain == vacuum.DOMAIN: + return {"isDocked": state == vacuum.VacuumActivity.DOCKED} + if domain == lawn_mower.DOMAIN: + return {"isDocked": state == lawn_mower.LawnMowerActivity.DOCKED} + raise 
NotImplementedError(f"Unsupported domain {domain}") async def execute(self, command, data, params, challenge): """Execute a dock command.""" - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_RETURN_TO_BASE, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) + domain = self.state.domain + service: str | None = None + + if domain == vacuum.DOMAIN: + service = vacuum.SERVICE_RETURN_TO_BASE + elif domain == lawn_mower.DOMAIN: + service = lawn_mower.SERVICE_DOCK + + if service: + await self.hass.services.async_call( + self.state.domain, + service, + {ATTR_ENTITY_ID: self.state.entity_id}, + blocking=not self.config.should_report_state, + context=data.context, + ) @register_trait @@ -843,7 +860,7 @@ class StartStopTrait(_Trait): @staticmethod def supported(domain, features, device_class, _): """Test if state is supported.""" - if domain == vacuum.DOMAIN: + if domain in (vacuum.DOMAIN, lawn_mower.DOMAIN): return True if ( @@ -863,6 +880,12 @@ class StartStopTrait(_Trait): & VacuumEntityFeature.PAUSE != 0 } + if domain == lawn_mower.DOMAIN: + return { + "pausable": self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) + & LawnMowerEntityFeature.PAUSE + != 0 + } if domain in COVER_VALVE_DOMAINS: return {} @@ -878,6 +901,11 @@ class StartStopTrait(_Trait): "isRunning": state == vacuum.VacuumActivity.CLEANING, "isPaused": state == vacuum.VacuumActivity.PAUSED, } + if domain == lawn_mower.DOMAIN: + return { + "isRunning": state == lawn_mower.LawnMowerActivity.MOWING, + "isPaused": state == lawn_mower.LawnMowerActivity.PAUSED, + } if domain in COVER_VALVE_DOMAINS: return { @@ -896,46 +924,52 @@ class StartStopTrait(_Trait): if domain == vacuum.DOMAIN: await self._execute_vacuum(command, data, params, challenge) return + if domain == lawn_mower.DOMAIN: + await self._execute_lawn_mower(command, data, params, challenge) + return if domain in COVER_VALVE_DOMAINS: await self._execute_cover_or_valve(command, data, params, challenge) return async def _execute_vacuum(self, command, data, params, challenge): """Execute a StartStop command.""" + service: str | None = None if command == COMMAND_START_STOP: - if params["start"]: - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_START, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) - else: - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_STOP, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) + service = vacuum.SERVICE_START if params["start"] else vacuum.SERVICE_STOP elif command == COMMAND_PAUSE_UNPAUSE: - if params["pause"]: - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_PAUSE, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) - else: - await self.hass.services.async_call( - self.state.domain, - vacuum.SERVICE_START, - {ATTR_ENTITY_ID: self.state.entity_id}, - blocking=not self.config.should_report_state, - context=data.context, - ) + service = vacuum.SERVICE_PAUSE if params["pause"] else vacuum.SERVICE_START + if service: + await self.hass.services.async_call( + self.state.domain, + service, + {ATTR_ENTITY_ID: self.state.entity_id}, + blocking=not self.config.should_report_state, + context=data.context, + ) + + async def _execute_lawn_mower(self, command, data, params, challenge): + 
"""Execute a StartStop command.""" + service: str | None = None + if command == COMMAND_START_STOP: + service = ( + lawn_mower.SERVICE_START_MOWING + if params["start"] + else lawn_mower.SERVICE_DOCK + ) + elif command == COMMAND_PAUSE_UNPAUSE: + service = ( + lawn_mower.SERVICE_PAUSE + if params["pause"] + else lawn_mower.SERVICE_START_MOWING + ) + if service: + await self.hass.services.async_call( + self.state.domain, + service, + {ATTR_ENTITY_ID: self.state.entity_id}, + blocking=not self.config.should_report_state, + context=data.context, + ) async def _execute_cover_or_valve(self, command, data, params, challenge): """Execute a StartStop command.""" diff --git a/homeassistant/components/google_assistant_sdk/manifest.json b/homeassistant/components/google_assistant_sdk/manifest.json index 85469a464b3..70e93f39f42 100644 --- a/homeassistant/components/google_assistant_sdk/manifest.json +++ b/homeassistant/components/google_assistant_sdk/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["gassist-text==0.0.11"], + "requirements": ["gassist-text==0.0.12"], "single_config_entry": true } diff --git a/homeassistant/components/google_cloud/stt.py b/homeassistant/components/google_cloud/stt.py index 41c5a6710b7..cd5055383ea 100644 --- a/homeassistant/components/google_cloud/stt.py +++ b/homeassistant/components/google_cloud/stt.py @@ -6,6 +6,7 @@ from collections.abc import AsyncGenerator, AsyncIterable import logging from google.api_core.exceptions import GoogleAPIError, Unauthenticated +from google.api_core.retry import AsyncRetry from google.cloud import speech_v1 from homeassistant.components.stt import ( @@ -127,6 +128,7 @@ class GoogleCloudSpeechToTextEntity(SpeechToTextEntity): responses = await self._client.streaming_recognize( requests=request_generator(), timeout=10, + retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0), ) transcript = "" diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index 1f5f838b593..16519645dee 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -7,6 +7,7 @@ from pathlib import Path from typing import Any, cast from google.api_core.exceptions import GoogleAPIError, Unauthenticated +from google.api_core.retry import AsyncRetry from google.cloud import texttospeech import voluptuous as vol @@ -215,7 +216,11 @@ class BaseGoogleCloudProvider: ), ) - response = await self._client.synthesize_speech(request, timeout=10) + response = await self._client.synthesize_speech( + request, + timeout=10, + retry=AsyncRetry(initial=0.1, maximum=2.0, multiplier=2.0), + ) if encoding == texttospeech.AudioEncoding.MP3: extension = "mp3" diff --git a/homeassistant/components/google_drive/backup.py b/homeassistant/components/google_drive/backup.py index 73e5902f8f5..a4b7fc956ce 100644 --- a/homeassistant/components/google_drive/backup.py +++ b/homeassistant/components/google_drive/backup.py @@ -8,7 +8,12 @@ from typing import Any from google_drive_api.exceptions import GoogleDriveApiError -from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError +from homeassistant.components.backup import ( + AgentBackup, + BackupAgent, + BackupAgentError, + BackupNotFound, +) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from 
homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator @@ -93,13 +98,13 @@ class GoogleDriveBackupAgent(BackupAgent): self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: + ) -> AgentBackup: """Return a backup.""" backups = await self.async_list_backups() for backup in backups: if backup.backup_id == backup_id: return backup - return None + raise BackupNotFound(f"Backup {backup_id} not found") async def async_download_backup( self, @@ -120,7 +125,7 @@ class GoogleDriveBackupAgent(BackupAgent): return ChunkAsyncStreamIterator(stream) except (GoogleDriveApiError, HomeAssistantError, TimeoutError) as err: raise BackupAgentError(f"Failed to download backup: {err}") from err - raise BackupAgentError("Backup not found") + raise BackupNotFound(f"Backup {backup_id} not found") async def async_delete_backup( self, @@ -138,5 +143,7 @@ class GoogleDriveBackupAgent(BackupAgent): _LOGGER.debug("Deleting file_id: %s", file_id) await self._client.async_delete(file_id) _LOGGER.debug("Deleted backup_id: %s", backup_id) + return except (GoogleDriveApiError, HomeAssistantError, TimeoutError) as err: raise BackupAgentError(f"Failed to delete backup: {err}") from err + raise BackupNotFound(f"Backup {backup_id} not found") diff --git a/homeassistant/components/google_generative_ai_conversation/__init__.py b/homeassistant/components/google_generative_ai_conversation/__init__.py index c32d7b5ddea..88a51446cda 100644 --- a/homeassistant/components/google_generative_ai_conversation/__init__.py +++ b/homeassistant/components/google_generative_ai_conversation/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations import mimetypes from pathlib import Path -from google import genai # type: ignore[attr-defined] +from google.genai import Client from google.genai.errors import APIError, ClientError from requests.exceptions import Timeout import voluptuous as vol @@ -43,7 +43,7 @@ CONF_FILENAMES = "filenames" CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = (Platform.CONVERSATION,) -type GoogleGenerativeAIConfigEntry = ConfigEntry[genai.Client] +type GoogleGenerativeAIConfigEntry = ConfigEntry[Client] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -139,7 +139,11 @@ async def async_setup_entry( """Set up Google Generative AI Conversation from a config entry.""" try: - client = genai.Client(api_key=entry.data[CONF_API_KEY]) + + def _init_client() -> Client: + return Client(api_key=entry.data[CONF_API_KEY]) + + client = await hass.async_add_executor_job(_init_client) await client.aio.models.get( model=entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL), config={"http_options": {"timeout": TIMEOUT_MILLIS}}, diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py index 00a016143f4..b7753c21bf9 100644 --- a/homeassistant/components/google_generative_ai_conversation/config_flow.py +++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py @@ -7,7 +7,7 @@ import logging from types import MappingProxyType from typing import Any -from google import genai # type: ignore[attr-defined] +from google import genai from google.genai.errors import APIError, ClientError from requests.exceptions import Timeout import voluptuous as vol @@ -44,6 +44,7 @@ from .const import ( CONF_TEMPERATURE, CONF_TOP_K, CONF_TOP_P, + CONF_USE_GOOGLE_SEARCH_TOOL, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_HARM_BLOCK_THRESHOLD, @@ -51,6 +52,7 @@ 
from .const import ( RECOMMENDED_TEMPERATURE, RECOMMENDED_TOP_K, RECOMMENDED_TOP_P, + RECOMMENDED_USE_GOOGLE_SEARCH_TOOL, TIMEOUT_MILLIS, ) @@ -341,6 +343,13 @@ async def google_generative_ai_config_option_schema( }, default=RECOMMENDED_HARM_BLOCK_THRESHOLD, ): harm_block_thresholds_selector, + vol.Optional( + CONF_USE_GOOGLE_SEARCH_TOOL, + description={ + "suggested_value": options.get(CONF_USE_GOOGLE_SEARCH_TOOL), + }, + default=RECOMMENDED_USE_GOOGLE_SEARCH_TOOL, + ): bool, } ) return schema diff --git a/homeassistant/components/google_generative_ai_conversation/const.py b/homeassistant/components/google_generative_ai_conversation/const.py index 35834f6e7f9..108ffe1891d 100644 --- a/homeassistant/components/google_generative_ai_conversation/const.py +++ b/homeassistant/components/google_generative_ai_conversation/const.py @@ -22,5 +22,7 @@ CONF_HATE_BLOCK_THRESHOLD = "hate_block_threshold" CONF_SEXUAL_BLOCK_THRESHOLD = "sexual_block_threshold" CONF_DANGEROUS_BLOCK_THRESHOLD = "dangerous_block_threshold" RECOMMENDED_HARM_BLOCK_THRESHOLD = "BLOCK_MEDIUM_AND_ABOVE" +CONF_USE_GOOGLE_SEARCH_TOOL = "enable_google_search_tool" +RECOMMENDED_USE_GOOGLE_SEARCH_TOOL = False TIMEOUT_MILLIS = 10000 diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index 5fd373acf72..7c19c5445a7 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -4,6 +4,7 @@ from __future__ import annotations import codecs from collections.abc import Callable +from dataclasses import replace from typing import Any, Literal, cast from google.genai.errors import APIError @@ -12,6 +13,7 @@ from google.genai.types import ( Content, FunctionDeclaration, GenerateContentConfig, + GoogleSearch, HarmCategory, Part, SafetySetting, @@ -25,7 +27,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import chat_session, device_registry as dr, intent, llm +from homeassistant.helpers import device_registry as dr, intent, llm from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .const import ( @@ -39,6 +41,7 @@ from .const import ( CONF_TEMPERATURE, CONF_TOP_K, CONF_TOP_P, + CONF_USE_GOOGLE_SEARCH_TOOL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL, @@ -168,17 +171,25 @@ def _escape_decode(value: Any) -> Any: return value +def _create_google_tool_response_parts( + parts: list[conversation.ToolResultContent], +) -> list[Part]: + """Create Google tool response parts.""" + return [ + Part.from_function_response( + name=tool_result.tool_name, response=tool_result.tool_result + ) + for tool_result in parts + ] + + def _create_google_tool_response_content( content: list[conversation.ToolResultContent], ) -> Content: """Create a Google tool response content.""" return Content( - parts=[ - Part.from_function_response( - name=tool_result.tool_name, response=tool_result.tool_result - ) - for tool_result in content - ] + role="user", + parts=_create_google_tool_response_parts(content), ) @@ -188,7 +199,7 @@ def _convert_content( | conversation.SystemContent, ) -> Content: """Convert HA content to Google content.""" - if content.role != "assistant" or not content.tool_calls: # type: ignore[union-attr] + if 
content.role != "assistant" or not content.tool_calls: role = "model" if content.role == "assistant" else content.role return Content( role=role, @@ -264,18 +275,6 @@ class GoogleGenerativeAIConversationEntity( conversation.async_unset_agent(self.hass, self.entry) await super().async_will_remove_from_hass() - async def async_process( - self, user_input: conversation.ConversationInput - ) -> conversation.ConversationResult: - """Process a sentence.""" - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - conversation.async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - return await self._async_handle_message(user_input, chat_log) - def _fix_tool_name(self, tool_name: str) -> str: """Fix tool name if needed.""" # The Gemini 2.0+ tokenizer seemingly has a issue with the HassListAddItem tool @@ -308,6 +307,13 @@ class GoogleGenerativeAIConversationEntity( for tool in chat_log.llm_api.tools ] + # Using search grounding allows the model to retrieve information from the web, + # however, it may interfere with how the model decides to use some tools or entities; + # for example, a weather entity may be disregarded if the model chooses to Google it. + if options.get(CONF_USE_GOOGLE_SEARCH_TOOL) is True: + tools = tools or [] + tools.append(Tool(google_search=GoogleSearch())) + model_name = self.entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) # Gemini 1.0 doesn't support system_instruction while 1.5 does. # Assume future versions will support it (if not, the request fails with a @@ -333,23 +339,30 @@ class GoogleGenerativeAIConversationEntity( for chat_content in chat_log.content[1:-1]: if chat_content.role == "tool_result": - # mypy doesn't like picking a type based on checking shared property 'role' - tool_results.append(cast(conversation.ToolResultContent, chat_content)) + tool_results.append(chat_content) continue + if ( + not isinstance(chat_content, conversation.ToolResultContent) + and chat_content.content == "" + ): + # Skipping is not possible since the number of function calls needs to match the number of function responses + # and skipping one would mean removing the other and hence this would prevent a proper chat log + chat_content = replace(chat_content, content=" ") + if tool_results: messages.append(_create_google_tool_response_content(tool_results)) tool_results.clear() - messages.append( - _convert_content( - cast( - conversation.UserContent - | conversation.SystemContent - | conversation.AssistantContent, - chat_content, - ) - ) + messages.append(_convert_content(chat_content)) + + # The SDK requires the first message to be a user message + # This is not the case if the user used `start_conversation` + # Workaround from https://github.com/googleapis/python-genai/issues/529#issuecomment-2740964537 + if messages and messages[0].role != "user": + messages.insert( + 0, + Content(role="user", parts=[Part.from_text(text=" ")]), ) if tool_results: @@ -406,7 +419,7 @@ class GoogleGenerativeAIConversationEntity( chat = self._genai_client.aio.chats.create( model=model_name, history=messages, config=generateContentConfig ) - chat_request: str | Content = user_input.text + chat_request: str | list[Part] = user_input.text # To prevent infinite loops, we limit the number of iterations for _iteration in range(MAX_TOOL_ITERATIONS): try: @@ -425,6 +438,18 @@ class GoogleGenerativeAIConversationEntity( error = f"Sorry, I had a problem talking to Google Generative AI: {err}" raise HomeAssistantError(error) from err + if 
(usage_metadata := chat_response.usage_metadata) is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": usage_metadata.prompt_token_count, + "cached_input_tokens": usage_metadata.cached_content_token_count + or 0, + "output_tokens": usage_metadata.candidates_token_count, + } + } + ) + response_parts = chat_response.candidates[0].content.parts if not response_parts: raise HomeAssistantError( @@ -448,7 +473,7 @@ class GoogleGenerativeAIConversationEntity( ) ) - chat_request = _create_google_tool_response_content( + chat_request = _create_google_tool_response_parts( [ tool_response async for tool_response in chat_log.async_add_assistant_content( @@ -469,7 +494,9 @@ class GoogleGenerativeAIConversationEntity( " ".join([part.text.strip() for part in response_parts if part.text]) ) return conversation.ConversationResult( - response=response, conversation_id=chat_log.conversation_id + response=response, + conversation_id=chat_log.conversation_id, + continue_conversation=chat_log.continue_conversation, ) async def _async_entry_update_listener( diff --git a/homeassistant/components/google_generative_ai_conversation/manifest.json b/homeassistant/components/google_generative_ai_conversation/manifest.json index cc381532c6f..25e44964a6d 100644 --- a/homeassistant/components/google_generative_ai_conversation/manifest.json +++ b/homeassistant/components/google_generative_ai_conversation/manifest.json @@ -2,11 +2,11 @@ "domain": "google_generative_ai_conversation", "name": "Google Generative AI", "after_dependencies": ["assist_pipeline", "intent"], - "codeowners": ["@tronikos"], + "codeowners": ["@tronikos", "@ivanlh"], "config_flow": true, "dependencies": ["conversation"], "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["google-genai==1.1.0"] + "requirements": ["google-genai==1.7.0"] } diff --git a/homeassistant/components/google_generative_ai_conversation/strings.json b/homeassistant/components/google_generative_ai_conversation/strings.json index 772fadb089c..b814f89469a 100644 --- a/homeassistant/components/google_generative_ai_conversation/strings.json +++ b/homeassistant/components/google_generative_ai_conversation/strings.json @@ -36,7 +36,8 @@ "harassment_block_threshold": "Negative or harmful comments targeting identity and/or protected attributes", "hate_block_threshold": "Content that is rude, disrespectful, or profane", "sexual_block_threshold": "Contains references to sexual acts or other lewd content", - "dangerous_block_threshold": "Promotes, facilitates, or encourages harmful acts" + "dangerous_block_threshold": "Promotes, facilitates, or encourages harmful acts", + "enable_google_search_tool": "Enable Google Search tool" }, "data_description": { "prompt": "Instruct how the LLM should respond. This can be a template." @@ -70,7 +71,7 @@ "issues": { "deprecated_image_filename_parameter": { "title": "Deprecated 'image_filename' parameter", - "description": "The 'image_filename' parameter in Google Generative AI actions is deprecated. Please edit scripts and automations to use 'filenames' intead." + "description": "The 'image_filename' parameter in Google Generative AI actions is deprecated. Please edit scripts and automations to use 'filenames' instead." 
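For reference, the new `enable_google_search_tool` option does nothing more than append a Google Search grounding tool to the request configuration, as the conversation.py hunk above shows. Below is a minimal standalone sketch of the same call against the google-genai 1.7.0 SDK; the API key, model name, and prompt are placeholders, not values taken from this change.

# Hedged sketch only: Tool(google_search=GoogleSearch()) outside Home Assistant.
from google import genai
from google.genai.types import GenerateContentConfig, GoogleSearch, Tool

client = genai.Client(api_key="YOUR_API_KEY")  # placeholder key

tools: list[Tool] = []  # the Assist LLM API tools would normally be built here
use_google_search_tool = True  # mirrors the new CONF_USE_GOOGLE_SEARCH_TOOL option

if use_google_search_tool:
    # Search grounding is just one more tool on the request; the model decides
    # per turn whether to ground its answer in web results.
    tools.append(Tool(google_search=GoogleSearch()))

response = client.models.generate_content(
    model="gemini-2.0-flash",  # placeholder model name
    contents="What is the weather like in Berlin right now?",
    config=GenerateContentConfig(tools=tools),
)
print(response.text)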
} } } diff --git a/homeassistant/components/google_pubsub/manifest.json b/homeassistant/components/google_pubsub/manifest.json index d3e57c26e39..b96f4e9ebc0 100644 --- a/homeassistant/components/google_pubsub/manifest.json +++ b/homeassistant/components/google_pubsub/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/google_pubsub", "iot_class": "cloud_push", "quality_scale": "legacy", - "requirements": ["google-cloud-pubsub==2.28.0"] + "requirements": ["google-cloud-pubsub==2.29.0"] } diff --git a/homeassistant/components/govee_light_local/manifest.json b/homeassistant/components/govee_light_local/manifest.json index cba341cd482..55a6b9e8578 100644 --- a/homeassistant/components/govee_light_local/manifest.json +++ b/homeassistant/components/govee_light_local/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["network"], "documentation": "https://www.home-assistant.io/integrations/govee_light_local", "iot_class": "local_push", - "requirements": ["govee-local-api==2.0.1"] + "requirements": ["govee-local-api==2.1.0"] } diff --git a/homeassistant/components/gree/strings.json b/homeassistant/components/gree/strings.json index 45911433b92..403cf7d45fc 100644 --- a/homeassistant/components/gree/strings.json +++ b/homeassistant/components/gree/strings.json @@ -16,13 +16,13 @@ "name": "Panel light" }, "quiet": { - "name": "Quiet" + "name": "Quiet mode" }, "fresh_air": { "name": "Fresh air" }, "xfan": { - "name": "XFan" + "name": "Xtra fan" }, "health_mode": { "name": "Health mode" diff --git a/homeassistant/components/growatt_server/strings.json b/homeassistant/components/growatt_server/strings.json index 9a985d98034..758428d7a55 100644 --- a/homeassistant/components/growatt_server/strings.json +++ b/homeassistant/components/growatt_server/strings.json @@ -38,28 +38,28 @@ "name": "Input 1 voltage" }, "inverter_amperage_input_1": { - "name": "Input 1 Amperage" + "name": "Input 1 amperage" }, "inverter_wattage_input_1": { - "name": "Input 1 Wattage" + "name": "Input 1 wattage" }, "inverter_voltage_input_2": { "name": "Input 2 voltage" }, "inverter_amperage_input_2": { - "name": "Input 2 Amperage" + "name": "Input 2 amperage" }, "inverter_wattage_input_2": { - "name": "Input 2 Wattage" + "name": "Input 2 wattage" }, "inverter_voltage_input_3": { "name": "Input 3 voltage" }, "inverter_amperage_input_3": { - "name": "Input 3 Amperage" + "name": "Input 3 amperage" }, "inverter_wattage_input_3": { - "name": "Input 3 Wattage" + "name": "Input 3 wattage" }, "inverter_internal_wattage": { "name": "Internal wattage" @@ -137,13 +137,13 @@ "name": "Load consumption" }, "mix_wattage_pv_1": { - "name": "PV1 Wattage" + "name": "PV1 wattage" }, "mix_wattage_pv_2": { - "name": "PV2 Wattage" + "name": "PV2 wattage" }, "mix_wattage_pv_all": { - "name": "All PV Wattage" + "name": "All PV wattage" }, "mix_export_to_grid": { "name": "Export to grid" @@ -182,7 +182,7 @@ "name": "Storage production today" }, "storage_storage_production_lifetime": { - "name": "Lifetime Storage production" + "name": "Lifetime storage production" }, "storage_grid_discharge_today": { "name": "Grid discharged today" @@ -224,7 +224,7 @@ "name": "Storage charging/ discharging(-ve)" }, "storage_load_consumption_solar_storage": { - "name": "Load consumption (Solar + Storage)" + "name": "Load consumption (solar + storage)" }, "storage_charge_today": { "name": "Charge today" @@ -257,7 +257,7 @@ "name": "Output voltage" }, "storage_ac_output_frequency": { - "name": "Ac output frequency" + "name": "AC output 
frequency" }, "storage_current_pv": { "name": "Solar charge current" @@ -290,7 +290,7 @@ "name": "Lifetime total energy input 1" }, "tlx_energy_today_input_1": { - "name": "Energy Today Input 1" + "name": "Energy today input 1" }, "tlx_voltage_input_1": { "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_1::name%]" @@ -305,7 +305,7 @@ "name": "Lifetime total energy input 2" }, "tlx_energy_today_input_2": { - "name": "Energy Today Input 2" + "name": "Energy today input 2" }, "tlx_voltage_input_2": { "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_2::name%]" @@ -320,7 +320,7 @@ "name": "Lifetime total energy input 3" }, "tlx_energy_today_input_3": { - "name": "Energy Today Input 3" + "name": "Energy today input 3" }, "tlx_voltage_input_3": { "name": "[%key:component::growatt_server::entity::sensor::inverter_voltage_input_3::name%]" @@ -335,16 +335,16 @@ "name": "Lifetime total energy input 4" }, "tlx_energy_today_input_4": { - "name": "Energy Today Input 4" + "name": "Energy today input 4" }, "tlx_voltage_input_4": { "name": "Input 4 voltage" }, "tlx_amperage_input_4": { - "name": "Input 4 Amperage" + "name": "Input 4 amperage" }, "tlx_wattage_input_4": { - "name": "Input 4 Wattage" + "name": "Input 4 wattage" }, "tlx_solar_generation_total": { "name": "Lifetime total solar energy" @@ -434,10 +434,10 @@ "name": "Money lifetime" }, "total_energy_today": { - "name": "Energy Today" + "name": "Energy today" }, "total_output_power": { - "name": "Output Power" + "name": "Output power" }, "total_energy_output": { "name": "[%key:component::growatt_server::entity::sensor::inverter_energy_total::name%]" diff --git a/homeassistant/components/habitica/const.py b/homeassistant/components/habitica/const.py index 353bcbbd39d..7a5677cb687 100644 --- a/homeassistant/components/habitica/const.py +++ b/homeassistant/components/habitica/const.py @@ -40,6 +40,23 @@ ATTR_ALIAS = "alias" ATTR_PRIORITY = "priority" ATTR_COST = "cost" ATTR_NOTES = "notes" +ATTR_UP_DOWN = "up_down" +ATTR_FREQUENCY = "frequency" +ATTR_COUNTER_UP = "counter_up" +ATTR_COUNTER_DOWN = "counter_down" +ATTR_ADD_CHECKLIST_ITEM = "add_checklist_item" +ATTR_REMOVE_CHECKLIST_ITEM = "remove_checklist_item" +ATTR_SCORE_CHECKLIST_ITEM = "score_checklist_item" +ATTR_UNSCORE_CHECKLIST_ITEM = "unscore_checklist_item" +ATTR_REMINDER = "reminder" +ATTR_REMOVE_REMINDER = "remove_reminder" +ATTR_CLEAR_REMINDER = "clear_reminder" +ATTR_CLEAR_DATE = "clear_date" +ATTR_REPEAT = "repeat" +ATTR_INTERVAL = "every_x" +ATTR_START_DATE = "start_date" +ATTR_REPEAT_MONTHLY = "repeat_monthly" +ATTR_STREAK = "streak" SERVICE_CAST_SKILL = "cast_skill" SERVICE_START_QUEST = "start_quest" @@ -56,6 +73,13 @@ SERVICE_SCORE_REWARD = "score_reward" SERVICE_TRANSFORMATION = "transformation" SERVICE_UPDATE_REWARD = "update_reward" +SERVICE_CREATE_REWARD = "create_reward" +SERVICE_UPDATE_HABIT = "update_habit" +SERVICE_CREATE_HABIT = "create_habit" +SERVICE_UPDATE_TODO = "update_todo" +SERVICE_CREATE_TODO = "create_todo" +SERVICE_UPDATE_DAILY = "update_daily" +SERVICE_CREATE_DAILY = "create_daily" DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf" X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}" @@ -63,3 +87,5 @@ X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}" SECTION_REAUTH_LOGIN = "reauth_login" SECTION_REAUTH_API_KEY = "reauth_api_key" SECTION_DANGER_ZONE = "danger_zone" + +WEEK_DAYS = ["m", "t", "w", "th", "f", "s", "su"] diff --git 
a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index e119b063aa5..aac90814af5 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -224,6 +224,60 @@ "tag_options": "mdi:tag", "developer_options": "mdi:test-tube" } + }, + "create_reward": { + "service": "mdi:treasure-chest-outline", + "sections": { + "developer_options": "mdi:test-tube" + } + }, + "update_habit": { + "service": "mdi:contrast-box", + "sections": { + "tag_options": "mdi:tag", + "developer_options": "mdi:test-tube" + } + }, + "create_habit": { + "service": "mdi:contrast-box", + "sections": { + "developer_options": "mdi:test-tube" + } + }, + "update_todo": { + "service": "mdi:pencil-box-outline", + "sections": { + "checklist_options": "mdi:format-list-checks", + "tag_options": "mdi:tag", + "developer_options": "mdi:test-tube", + "duedate_options": "mdi:calendar-blank", + "reminder_options": "mdi:reminder" + } + }, + "create_todo": { + "service": "mdi:pencil-box-outline", + "sections": { + "developer_options": "mdi:test-tube" + } + }, + "update_daily": { + "service": "mdi:calendar-month", + "sections": { + "checklist_options": "mdi:format-list-checks", + "tag_options": "mdi:tag", + "developer_options": "mdi:test-tube", + "reminder_options": "mdi:reminder", + "repeat_weekly_options": "mdi:calendar-refresh", + "repeat_monthly_options": "mdi:calendar-refresh" + } + }, + "create_daily": { + "service": "mdi:calendar-month", + "sections": { + "developer_options": "mdi:test-tube", + "repeat_weekly_options": "mdi:calendar-refresh", + "repeat_monthly_options": "mdi:calendar-refresh" + } } } } diff --git a/homeassistant/components/habitica/services.py b/homeassistant/components/habitica/services.py index 57005cf2b72..bcbd6caa7a7 100644 --- a/homeassistant/components/habitica/services.py +++ b/homeassistant/components/habitica/services.py @@ -3,16 +3,21 @@ from __future__ import annotations from dataclasses import asdict +from datetime import UTC, date, datetime, time import logging from typing import TYPE_CHECKING, Any, cast -from uuid import UUID +from uuid import UUID, uuid4 from aiohttp import ClientError from habiticalib import ( + Checklist, Direction, + Frequency, HabiticaException, NotAuthorizedError, NotFoundError, + Reminders, + Repeat, Skill, Task, TaskData, @@ -24,7 +29,7 @@ import voluptuous as vol from homeassistant.components.todo import ATTR_RENAME from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_NAME, CONF_NAME +from homeassistant.const import ATTR_DATE, ATTR_NAME, CONF_NAME from homeassistant.core import ( HomeAssistant, ServiceCall, @@ -35,25 +40,43 @@ from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.selector import ConfigEntrySelector +from homeassistant.util import dt as dt_util from .const import ( + ATTR_ADD_CHECKLIST_ITEM, ATTR_ALIAS, ATTR_ARGS, + ATTR_CLEAR_DATE, + ATTR_CLEAR_REMINDER, ATTR_CONFIG_ENTRY, ATTR_COST, + ATTR_COUNTER_DOWN, + ATTR_COUNTER_UP, ATTR_DATA, ATTR_DIRECTION, + ATTR_FREQUENCY, + ATTR_INTERVAL, ATTR_ITEM, ATTR_KEYWORD, ATTR_NOTES, ATTR_PATH, ATTR_PRIORITY, + ATTR_REMINDER, + ATTR_REMOVE_CHECKLIST_ITEM, + ATTR_REMOVE_REMINDER, ATTR_REMOVE_TAG, + ATTR_REPEAT, + ATTR_REPEAT_MONTHLY, + ATTR_SCORE_CHECKLIST_ITEM, ATTR_SKILL, + ATTR_START_DATE, + 
ATTR_STREAK, ATTR_TAG, ATTR_TARGET, ATTR_TASK, ATTR_TYPE, + ATTR_UNSCORE_CHECKLIST_ITEM, + ATTR_UP_DOWN, DOMAIN, EVENT_API_CALL_SUCCESS, SERVICE_ABORT_QUEST, @@ -61,6 +84,10 @@ from .const import ( SERVICE_API_CALL, SERVICE_CANCEL_QUEST, SERVICE_CAST_SKILL, + SERVICE_CREATE_DAILY, + SERVICE_CREATE_HABIT, + SERVICE_CREATE_REWARD, + SERVICE_CREATE_TODO, SERVICE_GET_TASKS, SERVICE_LEAVE_QUEST, SERVICE_REJECT_QUEST, @@ -68,7 +95,11 @@ from .const import ( SERVICE_SCORE_REWARD, SERVICE_START_QUEST, SERVICE_TRANSFORMATION, + SERVICE_UPDATE_DAILY, + SERVICE_UPDATE_HABIT, SERVICE_UPDATE_REWARD, + SERVICE_UPDATE_TODO, + WEEK_DAYS, ) from .coordinator import HabiticaConfigEntry @@ -112,21 +143,65 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema( } ) -SERVICE_UPDATE_TASK_SCHEMA = vol.Schema( +BASE_TASK_SCHEMA = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), - vol.Required(ATTR_TASK): cv.string, vol.Optional(ATTR_RENAME): cv.string, vol.Optional(ATTR_NOTES): cv.string, vol.Optional(ATTR_TAG): vol.All(cv.ensure_list, [str]), - vol.Optional(ATTR_REMOVE_TAG): vol.All(cv.ensure_list, [str]), vol.Optional(ATTR_ALIAS): vol.All( cv.string, cv.matches_regex("^[a-zA-Z0-9-_]*$") ), - vol.Optional(ATTR_COST): vol.Coerce(float), + vol.Optional(ATTR_COST): vol.All(vol.Coerce(float), vol.Range(0)), + vol.Optional(ATTR_PRIORITY): vol.All( + vol.Upper, vol.In(TaskPriority._member_names_) + ), + vol.Optional(ATTR_UP_DOWN): vol.All(cv.ensure_list, [str]), + vol.Optional(ATTR_COUNTER_UP): vol.All(int, vol.Range(0)), + vol.Optional(ATTR_COUNTER_DOWN): vol.All(int, vol.Range(0)), + vol.Optional(ATTR_FREQUENCY): vol.Coerce(Frequency), + vol.Optional(ATTR_DATE): cv.date, + vol.Optional(ATTR_CLEAR_DATE): cv.boolean, + vol.Optional(ATTR_REMINDER): vol.All( + cv.ensure_list, [vol.Any(cv.datetime, cv.time)] + ), + vol.Optional(ATTR_REMOVE_REMINDER): vol.All( + cv.ensure_list, [vol.Any(cv.datetime, cv.time)] + ), + vol.Optional(ATTR_CLEAR_REMINDER): cv.boolean, + vol.Optional(ATTR_ADD_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]), + vol.Optional(ATTR_REMOVE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]), + vol.Optional(ATTR_SCORE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]), + vol.Optional(ATTR_UNSCORE_CHECKLIST_ITEM): vol.All(cv.ensure_list, [str]), + vol.Optional(ATTR_START_DATE): cv.date, + vol.Optional(ATTR_INTERVAL): vol.All(int, vol.Range(0)), + vol.Optional(ATTR_REPEAT): vol.All(cv.ensure_list, [vol.In(WEEK_DAYS)]), + vol.Optional(ATTR_REPEAT_MONTHLY): vol.All( + cv.string, vol.In({"day_of_month", "day_of_week"}) + ), + vol.Optional(ATTR_STREAK): vol.All(int, vol.Range(0)), } ) +SERVICE_UPDATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend( + { + vol.Required(ATTR_TASK): cv.string, + vol.Optional(ATTR_REMOVE_TAG): vol.All(cv.ensure_list, [str]), + } +) + +SERVICE_CREATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend( + { + vol.Required(ATTR_NAME): cv.string, + } +) + +SERVICE_DAILY_SCHEMA = { + vol.Optional(ATTR_REMINDER): vol.All(cv.ensure_list, [cv.time]), + vol.Optional(ATTR_REMOVE_REMINDER): vol.All(cv.ensure_list, [cv.time]), +} + + SERVICE_GET_TASKS_SCHEMA = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}), @@ -161,6 +236,17 @@ ITEMID_MAP = { "shiny_seed": Skill.SHINY_SEED, } +SERVICE_TASK_TYPE_MAP = { + SERVICE_UPDATE_REWARD: TaskType.REWARD, + SERVICE_CREATE_REWARD: TaskType.REWARD, + SERVICE_UPDATE_HABIT: TaskType.HABIT, + SERVICE_CREATE_HABIT: TaskType.HABIT, + SERVICE_UPDATE_TODO: TaskType.TODO, + SERVICE_CREATE_TODO: TaskType.TODO, + SERVICE_UPDATE_DAILY: 
TaskType.DAILY, + SERVICE_CREATE_DAILY: TaskType.DAILY, +} + def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry: """Return config entry or raise if not found or not loaded.""" @@ -539,33 +625,42 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 return result - async def update_task(call: ServiceCall) -> ServiceResponse: - """Update task action.""" + async def create_or_update_task(call: ServiceCall) -> ServiceResponse: # noqa: C901 + """Create or update task action.""" entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) coordinator = entry.runtime_data await coordinator.async_refresh() + is_update = call.service in ( + SERVICE_UPDATE_HABIT, + SERVICE_UPDATE_REWARD, + SERVICE_UPDATE_TODO, + SERVICE_UPDATE_DAILY, + ) + task_type = SERVICE_TASK_TYPE_MAP[call.service] + current_task = None - try: - current_task = next( - task - for task in coordinator.data.tasks - if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text) - and task.Type is TaskType.REWARD - ) - except StopIteration as e: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="task_not_found", - translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, - ) from e + if is_update: + try: + current_task = next( + task + for task in coordinator.data.tasks + if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text) + and task.Type is task_type + ) + except StopIteration as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="task_not_found", + translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, + ) from e - task_id = current_task.id - if TYPE_CHECKING: - assert task_id data = Task() - if rename := call.data.get(ATTR_RENAME): - data["text"] = rename + if not is_update: + data["type"] = task_type + + if (text := call.data.get(ATTR_RENAME)) or (text := call.data.get(ATTR_NAME)): + data["text"] = text if (notes := call.data.get(ATTR_NOTES)) is not None: data["notes"] = notes @@ -574,7 +669,7 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 remove_tags = cast(list[str], call.data.get(ATTR_REMOVE_TAG)) if tags or remove_tags: - update_tags = set(current_task.tags) + update_tags = set(current_task.tags) if current_task else set() user_tags = { tag.name.lower(): tag.id for tag in coordinator.data.user.tags @@ -633,8 +728,159 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 if (cost := call.data.get(ATTR_COST)) is not None: data["value"] = cost + if priority := call.data.get(ATTR_PRIORITY): + data["priority"] = TaskPriority[priority] + + if frequency := call.data.get(ATTR_FREQUENCY): + data["frequency"] = frequency + else: + frequency = current_task.frequency if current_task else Frequency.WEEKLY + + if up_down := call.data.get(ATTR_UP_DOWN): + data["up"] = "up" in up_down + data["down"] = "down" in up_down + + if counter_up := call.data.get(ATTR_COUNTER_UP): + data["counterUp"] = counter_up + + if counter_down := call.data.get(ATTR_COUNTER_DOWN): + data["counterDown"] = counter_down + + if due_date := call.data.get(ATTR_DATE): + data["date"] = datetime.combine(due_date, time()) + + if call.data.get(ATTR_CLEAR_DATE): + data["date"] = None + + checklist = current_task.checklist if current_task else [] + + if add_checklist_item := call.data.get(ATTR_ADD_CHECKLIST_ITEM): + checklist.extend( + Checklist(completed=False, id=uuid4(), text=item) + for item in add_checklist_item + if not any(i.text == item for i in checklist) + ) + if remove_checklist_item := 
call.data.get(ATTR_REMOVE_CHECKLIST_ITEM): + checklist = [ + item for item in checklist if item.text not in remove_checklist_item + ] + + if score_checklist_item := call.data.get(ATTR_SCORE_CHECKLIST_ITEM): + for item in checklist: + if item.text in score_checklist_item: + item.completed = True + + if unscore_checklist_item := call.data.get(ATTR_UNSCORE_CHECKLIST_ITEM): + for item in checklist: + if item.text in unscore_checklist_item: + item.completed = False + if ( + add_checklist_item + or remove_checklist_item + or score_checklist_item + or unscore_checklist_item + ): + data["checklist"] = checklist + + reminders = current_task.reminders if current_task else [] + + if add_reminders := call.data.get(ATTR_REMINDER): + if task_type is TaskType.TODO: + existing_reminder_datetimes = { + r.time.replace(tzinfo=None) for r in reminders + } + + reminders.extend( + Reminders(id=uuid4(), time=r) + for r in add_reminders + if r not in existing_reminder_datetimes + ) + if task_type is TaskType.DAILY: + existing_reminder_times = { + r.time.time().replace(microsecond=0, second=0) for r in reminders + } + + reminders.extend( + Reminders( + id=uuid4(), + time=datetime.combine(date.today(), r, tzinfo=UTC), + ) + for r in add_reminders + if r not in existing_reminder_times + ) + + if remove_reminder := call.data.get(ATTR_REMOVE_REMINDER): + if task_type is TaskType.TODO: + reminders = list( + filter( + lambda r: r.time.replace(tzinfo=None) not in remove_reminder, + reminders, + ) + ) + if task_type is TaskType.DAILY: + reminders = list( + filter( + lambda r: r.time.time().replace(second=0, microsecond=0) + not in remove_reminder, + reminders, + ) + ) + + if clear_reminders := call.data.get(ATTR_CLEAR_REMINDER): + reminders = [] + + if add_reminders or remove_reminder or clear_reminders: + data["reminders"] = reminders + + if start_date := call.data.get(ATTR_START_DATE): + data["startDate"] = datetime.combine(start_date, time()) + else: + start_date = ( + current_task.startDate + if current_task and current_task.startDate + else dt_util.start_of_local_day() + ) + if repeat := call.data.get(ATTR_REPEAT): + if frequency is Frequency.WEEKLY: + data["repeat"] = Repeat(**{d: d in repeat for d in WEEK_DAYS}) + else: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="frequency_not_weekly", + ) + if repeat_monthly := call.data.get(ATTR_REPEAT_MONTHLY): + if frequency is not Frequency.MONTHLY: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="frequency_not_monthly", + ) + + if repeat_monthly == "day_of_week": + weekday = start_date.weekday() + data["weeksOfMonth"] = [(start_date.day - 1) // 7] + data["repeat"] = Repeat( + **{day: i == weekday for i, day in enumerate(WEEK_DAYS)} + ) + data["daysOfMonth"] = [] + + else: + data["daysOfMonth"] = [start_date.day] + data["weeksOfMonth"] = [] + + if interval := call.data.get(ATTR_INTERVAL): + data["everyX"] = interval + + if streak := call.data.get(ATTR_STREAK): + data["streak"] = streak + try: - response = await coordinator.habitica.update_task(task_id, data) + if is_update: + if TYPE_CHECKING: + assert current_task + assert current_task.id + response = await coordinator.habitica.update_task(current_task.id, data) + else: + response = await coordinator.habitica.create_task(data) except TooManyRequestsError as e: raise HomeAssistantError( translation_domain=DOMAIN, @@ -656,13 +902,32 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 else: return response.data.to_dict(omit_none=True) - 
hass.services.async_register( - DOMAIN, + for service in ( + SERVICE_UPDATE_DAILY, + SERVICE_UPDATE_HABIT, SERVICE_UPDATE_REWARD, - update_task, - schema=SERVICE_UPDATE_TASK_SCHEMA, - supports_response=SupportsResponse.ONLY, - ) + SERVICE_UPDATE_TODO, + ): + hass.services.async_register( + DOMAIN, + service, + create_or_update_task, + schema=SERVICE_UPDATE_TASK_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + for service in ( + SERVICE_CREATE_DAILY, + SERVICE_CREATE_HABIT, + SERVICE_CREATE_REWARD, + SERVICE_CREATE_TODO, + ): + hass.services.async_register( + DOMAIN, + service, + create_or_update_task, + schema=SERVICE_CREATE_TASK_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) hass.services.async_register( DOMAIN, SERVICE_API_CALL, diff --git a/homeassistant/components/habitica/services.yaml b/homeassistant/components/habitica/services.yaml index 7b486690ef5..3fb25e2b4b7 100644 --- a/homeassistant/components/habitica/services.yaml +++ b/homeassistant/components/habitica/services.yaml @@ -144,26 +144,26 @@ update_reward: fields: config_entry: *config_entry task: *task - rename: + rename: &rename selector: text: - notes: + notes: ¬es required: false selector: text: multiline: true cost: required: false - selector: + selector: &cost_selector number: min: 0 step: 0.01 unit_of_measurement: "🪙" mode: box - tag_options: + tag_options: &tag_options collapsed: true fields: - tag: + tag: &tag required: false selector: text: @@ -173,10 +173,274 @@ update_reward: selector: text: multiple: true - developer_options: + developer_options: &developer_options collapsed: true fields: - alias: + alias: &alias required: false selector: text: +create_reward: + fields: + config_entry: *config_entry + name: &name + required: true + selector: + text: + notes: *notes + cost: + required: true + selector: *cost_selector + tag: *tag + developer_options: *developer_options +update_habit: + fields: + config_entry: *config_entry + task: *task + rename: *rename + notes: *notes + up_down: &up_down + required: false + selector: + select: + options: + - value: up + label: "➕" + - value: down + label: "➖" + multiple: true + mode: list + priority: &priority + required: false + selector: + select: + options: + - "trivial" + - "easy" + - "medium" + - "hard" + mode: dropdown + translation_key: "priority" + frequency: &frequency + required: false + selector: + select: + options: + - "daily" + - "weekly" + - "monthly" + translation_key: "frequency" + mode: dropdown + tag_options: *tag_options + developer_options: + collapsed: true + fields: + counter_up: + required: false + selector: + number: + min: 0 + step: 1 + unit_of_measurement: "➕" + mode: box + counter_down: + required: false + selector: + number: + min: 0 + step: 1 + unit_of_measurement: "➖" + mode: box + alias: *alias +create_habit: + fields: + config_entry: *config_entry + name: *name + notes: *notes + up_down: *up_down + priority: *priority + frequency: *frequency + tag: *tag + developer_options: *developer_options +update_todo: + fields: + config_entry: *config_entry + task: *task + rename: *rename + notes: *notes + checklist_options: &checklist_options + collapsed: true + fields: + add_checklist_item: &add_checklist_item + required: false + selector: + text: + multiple: true + remove_checklist_item: + required: false + selector: + text: + multiple: true + score_checklist_item: + required: false + selector: + text: + multiple: true + unscore_checklist_item: + required: false + selector: + text: + multiple: true + priority: *priority + duedate_options: + 
collapsed: true + fields: + date: &due_date + required: false + selector: + date: + clear_date: + required: false + selector: + constant: + value: true + label: "🗑️" + reminder_options: + collapsed: true + fields: + reminder: &reminder + required: false + selector: + text: + type: datetime-local + multiple: true + remove_reminder: + required: false + selector: + text: + type: datetime-local + multiple: true + clear_reminder: &clear_reminder + required: false + selector: + constant: + value: true + label: "🗑️" + tag_options: *tag_options + developer_options: *developer_options +create_todo: + fields: + config_entry: *config_entry + name: *name + notes: *notes + add_checklist_item: *add_checklist_item + priority: *priority + date: *due_date + reminder: *reminder + tag: *tag + developer_options: *developer_options +update_daily: + fields: + config_entry: *config_entry + task: *task + rename: *rename + notes: *notes + checklist_options: *checklist_options + priority: *priority + start_date: &start_date + required: false + selector: + date: + frequency: &frequency_daily + required: false + selector: + select: + options: + - "daily" + - "weekly" + - "monthly" + - "yearly" + translation_key: "frequency" + mode: dropdown + every_x: &every_x + required: false + selector: + number: + min: 0 + step: 1 + unit_of_measurement: "🔃" + mode: box + repeat_weekly_options: &repeat_weekly_options + collapsed: true + fields: + repeat: + required: false + selector: + select: + options: + - "m" + - "t" + - "w" + - "th" + - "f" + - "s" + - "su" + mode: list + translation_key: repeat + multiple: true + repeat_monthly_options: &repeat_monthly_options + collapsed: true + fields: + repeat_monthly: + required: false + selector: + select: + options: + - "day_of_month" + - "day_of_week" + translation_key: repeat_monthly + mode: list + reminder_options: + collapsed: true + fields: + reminder: &reminder_daily + required: false + selector: + text: + type: time + multiple: true + remove_reminder: + required: false + selector: + text: + type: time + multiple: true + clear_reminder: *clear_reminder + tag_options: *tag_options + developer_options: + collapsed: true + fields: + streak: &streak + required: false + selector: + number: + min: 0 + step: 1 + unit_of_measurement: "▶▶" + mode: box + alias: *alias +create_daily: + fields: + config_entry: *config_entry + name: *name + notes: *notes + add_checklist_item: *add_checklist_item + priority: *priority + start_date: *start_date + frequency: *frequency_daily + every_x: *every_x + repeat_weekly_options: *repeat_weekly_options + repeat_monthly_options: *repeat_monthly_options + reminder: *reminder_daily + tag: *tag + developer_options: *developer_options diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 1bb2fcbd9d7..695eb1576fe 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -11,9 +11,9 @@ "config_entry_description": "Select the Habitica account to update a task.", "task_description": "The name (or task ID) of the task you want to update.", "rename_name": "Rename", - "rename_description": "The new title for the Habitica task.", - "notes_name": "Update notes", - "notes_description": "The new notes for the Habitica task.", + "rename_description": "The title for the Habitica task.", + "notes_name": "Notes", + "notes_description": "The notes for the Habitica task.", "tag_name": "Add tags", "tag_description": "Add tags to the Habitica task. 
If a tag does not already exist, a new one will be created.", "remove_tag_name": "Remove tags", @@ -23,7 +23,48 @@ "developer_options_name": "Advanced settings", "developer_options_description": "Additional features available in developer mode.", "tag_options_name": "Tags", - "tag_options_description": "Add or remove tags from a task." + "tag_options_description": "Add or remove tags from a task.", + "name_description": "The title for the Habitica task.", + "cost_name": "Cost", + "priority_name": "Difficulty", + "priority_description": "The difficulty of the task.", + "frequency_name": "Counter reset", + "frequency_description": "The frequency at which the habit's counter resets: daily at the start of a new day, weekly after Sunday night, or monthly at the beginning of a new month.", + "up_down_name": "Rewards or losses", + "up_down_description": "Whether the habit is good and rewarding (positive), bad and penalizing (negative), or both.", + "add_checklist_item_name": "Add checklist items", + "add_checklist_item_description": "The items to add to a task's checklist.", + "remove_checklist_item_name": "Delete items", + "remove_checklist_item_description": "Remove items from a task's checklist.", + "score_checklist_item_name": "Complete items", + "score_checklist_item_description": "Mark items from a task's checklist as completed.", + "unscore_checklist_item_name": "Uncomplete items", + "unscore_checklist_item_description": "Undo completion of items of a task's checklist.", + "checklist_options_name": "Checklist", + "checklist_options_description": "Add, remove, or update status of an item on a task's checklist.", + "reminder_name": "Add reminders", + "reminder_description": "Add reminders to a Habitica task.", + "remove_reminder_name": "Remove reminders", + "remove_reminder_description": "Remove specific reminders from a Habitica task.", + "clear_reminder_name": "Clear all reminders", + "clear_reminder_description": "Remove all reminders from a Habitica task.", + "reminder_options_name": "Reminders", + "reminder_options_description": "Add, remove or clear reminders of a Habitica task.", + "date_name": "Due date", + "date_description": "The to-do's due date.", + "repeat_name": "Repeat on", + "start_date_name": "Start date", + "start_date_description": "Defines when the daily task becomes active and specifies the exact weekday or day of the month it repeats on.", + "frequency_daily_name": "Repeat interval", + "frequency_daily_description": "The repetition interval of a daily.", + "every_x_name": "Repeat every X", + "every_x_description": "The number of intervals (days, weeks, months, or years) after which the daily repeats, based on the chosen repetition interval. A value of 0 makes the daily inactive ('Grey Daily').", + "repeat_weekly_description": "The days of the week the daily repeats.", + "repeat_monthly_description": "Whether a monthly recurring task repeats on the same calendar day each month or on the same weekday and week of the month, based on the start date.", + "repeat_weekly_options_name": "Weekly repeat days", + "repeat_weekly_options_description": "Options related to weekly repetition, applicable when the repetition interval is set to weekly.", + "repeat_monthly_options_name": "Monthly repeat day", + "repeat_monthly_options_description": "Options related to monthly repetition, applicable when the repetition interval is set to monthly." }, "config": { "abort": { @@ -651,7 +692,7 @@ "description": "Filter tasks by type." 
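The trickiest part of the new daily services in habitica/services.py above is the monthly repeat: with "day_of_week", Habitica expects the 0-based week of the month plus a weekday mask, both derived from the start date. A self-contained sketch of that computation, with plain dicts standing in for the habiticalib Repeat model:

# Hedged sketch of the weeksOfMonth/daysOfMonth logic added to habitica/services.py.
from datetime import date

WEEK_DAYS = ["m", "t", "w", "th", "f", "s", "su"]  # Monday-first, as in const.py


def monthly_repeat(start_date: date, repeat_monthly: str) -> dict:
    """Return the repeat fields Habitica expects for a monthly daily."""
    if repeat_monthly == "day_of_week":
        weekday = start_date.weekday()  # Monday == 0, matching WEEK_DAYS order
        return {
            "weeksOfMonth": [(start_date.day - 1) // 7],  # 0-based week of the month
            "repeat": {day: i == weekday for i, day in enumerate(WEEK_DAYS)},
            "daysOfMonth": [],
        }
    # "day_of_month": repeat on the same calendar day every month
    return {"daysOfMonth": [start_date.day], "weeksOfMonth": []}


# 2025-03-19 is the third Wednesday of March: week index 2, weekday "w".
print(monthly_repeat(date(2025, 3, 19), "day_of_week"))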
}, "priority": { - "name": "Difficulty", + "name": "[%key:component::habitica::common::priority_name%]", "description": "Filter tasks by difficulty." }, "task": { @@ -707,7 +748,7 @@ "description": "[%key:component::habitica::common::alias_description%]" }, "cost": { - "name": "Cost", + "name": "[%key:component::habitica::common::cost_name%]", "description": "Update the cost of a reward." } }, @@ -721,6 +762,482 @@ "description": "[%key:component::habitica::common::developer_options_description%]" } } + }, + "create_reward": { + "name": "Create reward", + "description": "Adds a new custom reward.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Select the Habitica account to create a reward." + }, + "name": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "[%key:component::habitica::common::name_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "cost": { + "name": "[%key:component::habitica::common::cost_name%]", + "description": "The cost of the reward." + } + }, + "sections": { + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + } + } + }, + "update_habit": { + "name": "Update a habit", + "description": "Updates a specific habit for the selected Habitica character", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Select the Habitica account to update a habit." 
+ }, + "task": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "[%key:component::habitica::common::task_description%]" + }, + "rename": { + "name": "[%key:component::habitica::common::rename_name%]", + "description": "[%key:component::habitica::common::rename_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "remove_tag": { + "name": "[%key:component::habitica::common::remove_tag_name%]", + "description": "[%key:component::habitica::common::remove_tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "frequency": { + "name": "[%key:component::habitica::common::frequency_name%]", + "description": "[%key:component::habitica::common::frequency_description%]" + }, + "up_down": { + "name": "[%key:component::habitica::common::up_down_name%]", + "description": "[%key:component::habitica::common::up_down_description%]" + }, + "counter_up": { + "name": "Adjust positive counter", + "description": "Update the up counter of a positive habit." + }, + "counter_down": { + "name": "Adjust negative counter", + "description": "Update the down counter of a negative habit." + } + }, + "sections": { + "tag_options": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_options_description%]" + }, + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + } + } + }, + "create_habit": { + "name": "Create habit", + "description": "Adds a new habit.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Select the Habitica account to create a habit." 
+ }, + "name": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "[%key:component::habitica::common::name_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "frequency": { + "name": "[%key:component::habitica::common::frequency_name%]", + "description": "[%key:component::habitica::common::frequency_description%]" + }, + "up_down": { + "name": "[%key:component::habitica::common::up_down_name%]", + "description": "[%key:component::habitica::common::up_down_description%]" + } + }, + "sections": { + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + } + } + }, + "update_todo": { + "name": "Update a to-do", + "description": "Updates a specific to-do for a selected Habitica character", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::common::config_entry_description%]" + }, + "task": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "The name (or task ID) of the to-do you want to update." + }, + "rename": { + "name": "[%key:component::habitica::common::rename_name%]", + "description": "[%key:component::habitica::common::rename_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "remove_tag": { + "name": "[%key:component::habitica::common::remove_tag_name%]", + "description": "[%key:component::habitica::common::remove_tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "date": { + "name": "[%key:component::habitica::common::date_name%]", + "description": "[%key:component::habitica::common::date_description%]" + }, + "clear_date": { + "name": "Clear due date", + "description": "Remove the due date from the to-do." 
+ }, + "reminder": { + "name": "[%key:component::habitica::common::reminder_name%]", + "description": "[%key:component::habitica::common::reminder_description%]" + }, + "remove_reminder": { + "name": "[%key:component::habitica::common::remove_reminder_name%]", + "description": "[%key:component::habitica::common::remove_reminder_description%]" + }, + "clear_reminder": { + "name": "[%key:component::habitica::common::clear_reminder_name%]", + "description": "[%key:component::habitica::common::clear_reminder_description%]" + }, + "add_checklist_item": { + "name": "[%key:component::habitica::common::add_checklist_item_name%]", + "description": "[%key:component::habitica::common::add_checklist_item_description%]" + }, + "remove_checklist_item": { + "name": "[%key:component::habitica::common::remove_checklist_item_name%]", + "description": "[%key:component::habitica::common::remove_checklist_item_description%]" + }, + "score_checklist_item": { + "name": "[%key:component::habitica::common::score_checklist_item_name%]", + "description": "[%key:component::habitica::common::score_checklist_item_description%]" + }, + "unscore_checklist_item": { + "name": "[%key:component::habitica::common::unscore_checklist_item_name%]", + "description": "[%key:component::habitica::common::unscore_checklist_item_description%]" + } + }, + "sections": { + "checklist_options": { + "name": "[%key:component::habitica::common::checklist_options_name%]", + "description": "[%key:component::habitica::common::checklist_options_description%]" + }, + "duedate_options": { + "name": "[%key:component::habitica::common::date_name%]", + "description": "Set, update or remove due dates of a to-do." + }, + "reminder_options": { + "name": "[%key:component::habitica::common::reminder_options_name%]", + "description": "[%key:component::habitica::common::reminder_options_description%]" + }, + "tag_options": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_options_description%]" + }, + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + } + } + }, + "create_todo": { + "name": "Create to-do", + "description": "Adds a new to-do.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Select the Habitica account to create a to-do." 
+ }, + "name": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "[%key:component::habitica::common::name_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "date": { + "name": "[%key:component::habitica::common::date_name%]", + "description": "[%key:component::habitica::common::date_description%]" + }, + "reminder": { + "name": "[%key:component::habitica::common::reminder_options_name%]", + "description": "[%key:component::habitica::common::reminder_description%]" + }, + "add_checklist_item": { + "name": "[%key:component::habitica::common::checklist_options_name%]", + "description": "[%key:component::habitica::common::add_checklist_item_description%]" + } + }, + "sections": { + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + } + } + }, + "update_daily": { + "name": "Update a daily", + "description": "Updates a specific daily for a selected Habitica character", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::common::config_entry_description%]" + }, + "task": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "The name (or task ID) of the daily you want to update." 
+ }, + "rename": { + "name": "[%key:component::habitica::common::rename_name%]", + "description": "[%key:component::habitica::common::rename_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "remove_tag": { + "name": "[%key:component::habitica::common::remove_tag_name%]", + "description": "[%key:component::habitica::common::remove_tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "start_date": { + "name": "[%key:component::habitica::common::start_date_name%]", + "description": "[%key:component::habitica::common::start_date_description%]" + }, + "frequency": { + "name": "[%key:component::habitica::common::frequency_daily_name%]", + "description": "[%key:component::habitica::common::frequency_daily_description%]" + }, + "every_x": { + "name": "[%key:component::habitica::common::every_x_name%]", + "description": "[%key:component::habitica::common::every_x_description%]" + }, + "repeat": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_description%]" + }, + "repeat_monthly": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_description%]" + }, + "add_checklist_item": { + "name": "[%key:component::habitica::common::add_checklist_item_name%]", + "description": "[%key:component::habitica::common::add_checklist_item_description%]" + }, + "remove_checklist_item": { + "name": "[%key:component::habitica::common::remove_checklist_item_name%]", + "description": "[%key:component::habitica::common::remove_checklist_item_description%]" + }, + "score_checklist_item": { + "name": "[%key:component::habitica::common::score_checklist_item_name%]", + "description": "[%key:component::habitica::common::score_checklist_item_description%]" + }, + "unscore_checklist_item": { + "name": "[%key:component::habitica::common::unscore_checklist_item_name%]", + "description": "[%key:component::habitica::common::unscore_checklist_item_description%]" + }, + "streak": { + "name": "Adjust streak", + "description": "Adjust or reset the streak counter of the daily." 
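Reminder handling in the services above differs by task type: to-dos keep full datetimes, while dailies store a time of day that the service anchors to today's date in UTC and de-duplicates by bare time. A small sketch of the daily case, using made-up reminder values:

# Hedged sketch of the daily-reminder conversion in habitica/services.py above.
from datetime import UTC, date, datetime, time

reminders = [datetime(2025, 3, 1, 8, 0, tzinfo=UTC)]  # one reminder already at 08:00
existing_times = {r.time().replace(second=0, microsecond=0) for r in reminders}

for new in (time(8, 0), time(20, 30)):
    if new not in existing_times:  # 08:00 is skipped, 20:30 is added
        reminders.append(datetime.combine(date.today(), new, tzinfo=UTC))

print(reminders)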
+ }, + "reminder": { + "name": "[%key:component::habitica::common::reminder_name%]", + "description": "[%key:component::habitica::common::reminder_description%]" + }, + "remove_reminder": { + "name": "[%key:component::habitica::common::remove_reminder_name%]", + "description": "[%key:component::habitica::common::remove_reminder_description%]" + }, + "clear_reminder": { + "name": "[%key:component::habitica::common::clear_reminder_name%]", + "description": "[%key:component::habitica::common::clear_reminder_description%]" + } + }, + "sections": { + "checklist_options": { + "name": "[%key:component::habitica::common::checklist_options_name%]", + "description": "[%key:component::habitica::common::checklist_options_description%]" + }, + "repeat_weekly_options": { + "name": "[%key:component::habitica::common::repeat_weekly_options_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_options_description%]" + }, + "repeat_monthly_options": { + "name": "[%key:component::habitica::common::repeat_monthly_options_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_options_description%]" + }, + "tag_options": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_options_description%]" + }, + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + }, + "reminder_options": { + "name": "[%key:component::habitica::common::reminder_options_name%]", + "description": "[%key:component::habitica::common::reminder_options_description%]" + } + } + }, + "create_daily": { + "name": "Create a daily", + "description": "Adds a new daily.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::common::config_entry_description%]" + }, + "name": { + "name": "[%key:component::habitica::common::task_name%]", + "description": "[%key:component::habitica::common::name_description%]" + }, + "notes": { + "name": "[%key:component::habitica::common::notes_name%]", + "description": "[%key:component::habitica::common::notes_description%]" + }, + "tag": { + "name": "[%key:component::habitica::common::tag_options_name%]", + "description": "[%key:component::habitica::common::tag_description%]" + }, + "alias": { + "name": "[%key:component::habitica::common::alias_name%]", + "description": "[%key:component::habitica::common::alias_description%]" + }, + "priority": { + "name": "[%key:component::habitica::common::priority_name%]", + "description": "[%key:component::habitica::common::priority_description%]" + }, + "start_date": { + "name": "[%key:component::habitica::common::start_date_name%]", + "description": "[%key:component::habitica::common::start_date_description%]" + }, + "frequency": { + "name": "[%key:component::habitica::common::frequency_daily_name%]", + "description": "[%key:component::habitica::common::frequency_daily_description%]" + }, + "every_x": { + "name": "[%key:component::habitica::common::every_x_name%]", + "description": "[%key:component::habitica::common::every_x_description%]" + }, + "repeat": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_description%]" + }, + "repeat_monthly": { + "name": "[%key:component::habitica::common::repeat_name%]", + "description": 
"[%key:component::habitica::common::repeat_monthly_description%]" + }, + "add_checklist_item": { + "name": "[%key:component::habitica::common::checklist_options_name%]", + "description": "[%key:component::habitica::common::add_checklist_item_description%]" + }, + "reminder": { + "name": "[%key:component::habitica::common::reminder_options_name%]", + "description": "[%key:component::habitica::common::reminder_description%]" + } + }, + "sections": { + "repeat_weekly_options": { + "name": "[%key:component::habitica::common::repeat_weekly_options_name%]", + "description": "[%key:component::habitica::common::repeat_weekly_options_description%]" + }, + "repeat_monthly_options": { + "name": "[%key:component::habitica::common::repeat_monthly_options_name%]", + "description": "[%key:component::habitica::common::repeat_monthly_options_description%]" + }, + "developer_options": { + "name": "[%key:component::habitica::common::developer_options_name%]", + "description": "[%key:component::habitica::common::developer_options_description%]" + } + } } }, "selector": { @@ -755,6 +1272,31 @@ "medium": "Medium", "hard": "Hard" } + }, + "frequency": { + "options": { + "daily": "Daily", + "weekly": "Weekly", + "monthly": "Monthly", + "yearly": "Yearly" + } + }, + "repeat": { + "options": { + "m": "[%key:common::time::monday%]", + "t": "[%key:common::time::tuesday%]", + "w": "[%key:common::time::wednesday%]", + "th": "[%key:common::time::thursday%]", + "f": "[%key:common::time::friday%]", + "s": "[%key:common::time::saturday%]", + "su": "[%key:common::time::sunday%]" + } + }, + "repeat_monthly": { + "options": { + "day_of_month": "Day of the month", + "day_of_week": "Day of the week" + } } } } diff --git a/homeassistant/components/harmony/manifest.json b/homeassistant/components/harmony/manifest.json index aab4f51b09a..f67eb4db5aa 100644 --- a/homeassistant/components/harmony/manifest.json +++ b/homeassistant/components/harmony/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/harmony", "iot_class": "local_push", "loggers": ["aioharmony", "slixmpp"], - "requirements": ["aioharmony==0.4.1"], + "requirements": ["aioharmony==0.5.2"], "ssdp": [ { "manufacturer": "Logitech", diff --git a/homeassistant/components/hassio/backup.py b/homeassistant/components/hassio/backup.py index fe69b9e08e5..20f1ec82a7a 100644 --- a/homeassistant/components/hassio/backup.py +++ b/homeassistant/components/hassio/backup.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from collections.abc import AsyncIterator, Callable, Coroutine, Mapping +from contextlib import suppress import logging import os from pathlib import Path, PurePath @@ -173,7 +174,7 @@ class SupervisorBackupAgent(BackupAgent): ), ) except SupervisorNotFoundError as err: - raise BackupNotFound from err + raise BackupNotFound(f"Backup {backup_id} not found") from err async def async_upload_backup( self, @@ -186,13 +187,14 @@ class SupervisorBackupAgent(BackupAgent): The upload will be skipped if the backup already exists in the agent's location. 
""" - if await self.async_get_backup(backup.backup_id): - _LOGGER.debug( - "Backup %s already exists in location %s", - backup.backup_id, - self.location, - ) - return + with suppress(BackupNotFound): + if await self.async_get_backup(backup.backup_id): + _LOGGER.debug( + "Backup %s already exists in location %s", + backup.backup_id, + self.location, + ) + return stream = await open_stream() upload_options = supervisor_backups.UploadBackupOptions( location={self.location}, @@ -218,14 +220,14 @@ class SupervisorBackupAgent(BackupAgent): self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: + ) -> AgentBackup: """Return a backup.""" try: details = await self._client.backups.backup_info(backup_id) - except SupervisorNotFoundError: - return None + except SupervisorNotFoundError as err: + raise BackupNotFound(f"Backup {backup_id} not found") from err if self.location not in details.location_attributes: - return None + raise BackupNotFound(f"Backup {backup_id} not found") return _backup_details_to_agent_backup(details, self.location) async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: @@ -237,8 +239,8 @@ class SupervisorBackupAgent(BackupAgent): location={self.location} ), ) - except SupervisorNotFoundError: - _LOGGER.debug("Backup %s does not exist", backup_id) + except SupervisorNotFoundError as err: + raise BackupNotFound(f"Backup {backup_id} not found") from err class SupervisorBackupReaderWriter(BackupReaderWriter): @@ -492,10 +494,12 @@ class SupervisorBackupReaderWriter(BackupReaderWriter): ) -> None: """Restore a backup.""" manager = self._hass.data[DATA_MANAGER] - # The backup manager has already checked that the backup exists so we don't need to - # check that here. + # The backup manager has already checked that the backup exists so we don't + # need to catch BackupNotFound here. backup = await manager.backup_agents[agent_id].async_get_backup(backup_id) if ( + # Check for None to be backwards compatible with the old BackupAgent API, + # this can be removed in HA Core 2025.10 backup and restore_homeassistant and restore_database != backup.database_included diff --git a/homeassistant/components/hassio/strings.json b/homeassistant/components/hassio/strings.json index 799067b8215..a543dbc7f89 100644 --- a/homeassistant/components/hassio/strings.json +++ b/homeassistant/components/hassio/strings.json @@ -152,7 +152,7 @@ }, "unsupported_connectivity_check": { "title": "Unsupported system - Connectivity check disabled", - "description": "System is unsupported because Home Assistant cannot determine when an internet connection is available. Use the link to learn more and how to fix this." + "description": "System is unsupported because Home Assistant cannot determine when an Internet connection is available. Use the link to learn more and how to fix this." }, "unsupported_content_trust": { "title": "Unsupported system - Content-trust check disabled", @@ -216,7 +216,7 @@ }, "unsupported_systemd_journal": { "title": "Unsupported system - Systemd Journal issues", - "description": "System is unsupported because Systemd Journal and/or the gateway service is missing, inactive or misconfigured . Use the link to learn more and how to fix this." + "description": "System is unsupported because Systemd Journal and/or the gateway service is missing, inactive or misconfigured. Use the link to learn more and how to fix this." 
}, "unsupported_systemd_resolved": { "title": "Unsupported system - Systemd-Resolved issues", @@ -348,7 +348,7 @@ }, "homeassistant_exclude_database": { "name": "Home Assistant exclude database", - "description": "Exclude the Home Assistant database file from backup" + "description": "Exclude the Home Assistant database file from the backup." } } }, @@ -385,8 +385,8 @@ "description": "[%key:component::hassio::services::backup_full::fields::location::description%]" }, "homeassistant_exclude_database": { - "name": "Home Assistant exclude database", - "description": "Exclude the Home Assistant database file from backup" + "name": "[%key:component::hassio::services::backup_full::fields::homeassistant_exclude_database::name%]", + "description": "[%key:component::hassio::services::backup_full::fields::homeassistant_exclude_database::description%]" } } }, diff --git a/homeassistant/components/heos/const.py b/homeassistant/components/heos/const.py index 6d603f7ad30..789fbc12b8e 100644 --- a/homeassistant/components/heos/const.py +++ b/homeassistant/components/heos/const.py @@ -4,6 +4,7 @@ ATTR_PASSWORD = "password" ATTR_USERNAME = "username" DOMAIN = "heos" ENTRY_TITLE = "HEOS System" +SERVICE_GET_QUEUE = "get_queue" SERVICE_GROUP_VOLUME_SET = "group_volume_set" SERVICE_GROUP_VOLUME_DOWN = "group_volume_down" SERVICE_GROUP_VOLUME_UP = "group_volume_up" diff --git a/homeassistant/components/heos/coordinator.py b/homeassistant/components/heos/coordinator.py index 93fe069d9be..5e72eb1427e 100644 --- a/homeassistant/components/heos/coordinator.py +++ b/homeassistant/components/heos/coordinator.py @@ -6,7 +6,6 @@ entities to update. Entities subscribe to entity-specific updates within the ent """ from collections.abc import Callable, Sequence -from datetime import datetime, timedelta import logging from typing import Any @@ -25,10 +24,10 @@ from pyheos import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform -from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN @@ -43,7 +42,6 @@ class HeosCoordinator(DataUpdateCoordinator[None]): def __init__(self, hass: HomeAssistant, config_entry: HeosConfigEntry) -> None: """Set up the coordinator and set in config_entry.""" - self.host: str = config_entry.data[CONF_HOST] credentials: Credentials | None = None if config_entry.options: credentials = Credentials( @@ -53,19 +51,31 @@ class HeosCoordinator(DataUpdateCoordinator[None]): # media position update upon start of playback or when media changes self.heos = Heos( HeosOptions( - self.host, + config_entry.data[CONF_HOST], all_progress_events=False, auto_reconnect=True, + auto_failover=True, credentials=credentials, ) ) self._platform_callbacks: list[Callable[[Sequence[HeosPlayer]], None]] = [] - self._update_sources_pending: bool = False + self._update_sources_debouncer = Debouncer( + hass, + _LOGGER, + immediate=True, + cooldown=2.0, + function=self._async_update_sources, + ) self._source_list: list[str] = [] self._favorites: dict[int, MediaItem] = {} self._inputs: Sequence[MediaItem] = [] 
super().__init__(hass, _LOGGER, config_entry=config_entry, name=DOMAIN) + @property + def host(self) -> str: + """Get the host address of the device.""" + return self.heos.current_host + @property def inputs(self) -> Sequence[MediaItem]: """Get input sources across all devices.""" @@ -159,8 +169,15 @@ class HeosCoordinator(DataUpdateCoordinator[None]): async def _async_on_reconnected(self) -> None: """Handle when reconnected so resources are updated and entities marked available.""" + assert self.config_entry is not None + if self.host != self.config_entry.data[CONF_HOST]: + self.hass.config_entries.async_update_entry( + self.config_entry, data={CONF_HOST: self.host} + ) + _LOGGER.warning("Successfully failed over to HEOS host %s", self.host) + else: + _LOGGER.warning("Successfully reconnected to HEOS host %s", self.host) await self._async_update_sources() - _LOGGER.warning("Successfully reconnected to HEOS host %s", self.host) self.async_update_listeners() async def _async_on_controller_event( @@ -170,31 +187,9 @@ class HeosCoordinator(DataUpdateCoordinator[None]): if event == const.EVENT_PLAYERS_CHANGED: assert data is not None self._async_handle_player_update_result(data) - elif ( - event in (const.EVENT_SOURCES_CHANGED, const.EVENT_USER_CHANGED) - and not self._update_sources_pending - ): - # Update the sources after a brief delay as we may have received multiple qualifying - # events at once and devices cannot handle immediately attempting to refresh sources. - self._update_sources_pending = True - - async def update_sources_job(_: datetime | None = None) -> None: - await self._async_update_sources() - self._update_sources_pending = False - self.async_update_listeners() - - assert self.config_entry is not None - self.config_entry.async_on_unload( - async_call_later( - self.hass, - timedelta(seconds=1), - HassJob( - update_sources_job, - "heos_update_sources", - cancel_on_shutdown=True, - ), - ) - ) + elif event in (const.EVENT_SOURCES_CHANGED, const.EVENT_USER_CHANGED): + # Debounce because we may have received multiple qualifying events in rapid succession. 
+ await self._update_sources_debouncer.async_call() self.async_update_listeners() def _async_update_player_ids(self, updated_player_ids: dict[int, int]) -> None: @@ -256,15 +251,6 @@ class HeosCoordinator(DataUpdateCoordinator[None]): else: self._source_list.extend([source.name for source in self._inputs]) - async def _async_update_players(self) -> None: - """Update players after reconnection.""" - try: - player_updates = await self.heos.load_players() - except HeosError as error: - _LOGGER.error("Unable to refresh players: %s", error) - return - self._async_handle_player_update_result(player_updates) - @callback def async_get_source_list(self) -> list[str]: """Return the list of sources for players.""" diff --git a/homeassistant/components/heos/icons.json b/homeassistant/components/heos/icons.json index d7a998b6aec..c957ac1939c 100644 --- a/homeassistant/components/heos/icons.json +++ b/homeassistant/components/heos/icons.json @@ -1,5 +1,8 @@ { "services": { + "get_queue": { + "service": "mdi:playlist-music" + }, "group_volume_set": { "service": "mdi:volume-medium" }, diff --git a/homeassistant/components/heos/manifest.json b/homeassistant/components/heos/manifest.json index 19feffd8ef1..cbac9f20574 100644 --- a/homeassistant/components/heos/manifest.json +++ b/homeassistant/components/heos/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["pyheos"], "quality_scale": "platinum", - "requirements": ["pyheos==1.0.3"], + "requirements": ["pyheos==1.0.4"], "ssdp": [ { "st": "urn:schemas-denon-com:device:ACT-Denon:1" diff --git a/homeassistant/components/heos/media_player.py b/homeassistant/components/heos/media_player.py index 9edc674d1cf..9cd01051b95 100644 --- a/homeassistant/components/heos/media_player.py +++ b/homeassistant/components/heos/media_player.py @@ -3,27 +3,36 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine, Sequence +from contextlib import suppress +import dataclasses from datetime import datetime from functools import reduce, wraps +import logging from operator import ior -from typing import Any +from typing import Any, Final from pyheos import ( AddCriteriaType, ControlType, HeosError, HeosPlayer, + MediaItem, + MediaMusicSource, + MediaType as HeosMediaType, PlayState, RepeatType, const as heos_const, ) +from pyheos.util import mediauri as heos_source import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ENQUEUE, ATTR_MEDIA_VOLUME_LEVEL, + BrowseError, BrowseMedia, + MediaClass, MediaPlayerEnqueue, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -32,8 +41,14 @@ from homeassistant.components.media_player import ( RepeatMode, async_process_play_media_url, ) +from homeassistant.components.media_source import BrowseMediaSource from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import ( + HomeAssistant, + ServiceResponse, + SupportsResponse, + callback, +) from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_validation as cv, @@ -47,6 +62,7 @@ from homeassistant.util.dt import utcnow from .const import ( DOMAIN as HEOS_DOMAIN, + SERVICE_GET_QUEUE, SERVICE_GROUP_VOLUME_DOWN, SERVICE_GROUP_VOLUME_SET, SERVICE_GROUP_VOLUME_UP, @@ -55,6 +71,8 @@ from .coordinator import HeosConfigEntry, HeosCoordinator PARALLEL_UPDATES = 0 +BROWSE_ROOT: Final = "heos://media" + BASE_SUPPORTED_FEATURES = ( 
MediaPlayerEntityFeature.VOLUME_MUTE | MediaPlayerEntityFeature.VOLUME_SET @@ -97,6 +115,21 @@ HEOS_HA_REPEAT_TYPE_MAP = { } HA_HEOS_REPEAT_TYPE_MAP = {v: k for k, v in HEOS_HA_REPEAT_TYPE_MAP.items()} +HEOS_MEDIA_TYPE_TO_MEDIA_CLASS = { + HeosMediaType.ALBUM: MediaClass.ALBUM, + HeosMediaType.ARTIST: MediaClass.ARTIST, + HeosMediaType.CONTAINER: MediaClass.DIRECTORY, + HeosMediaType.GENRE: MediaClass.GENRE, + HeosMediaType.HEOS_SERVER: MediaClass.DIRECTORY, + HeosMediaType.HEOS_SERVICE: MediaClass.DIRECTORY, + HeosMediaType.MUSIC_SERVICE: MediaClass.DIRECTORY, + HeosMediaType.PLAYLIST: MediaClass.PLAYLIST, + HeosMediaType.SONG: MediaClass.TRACK, + HeosMediaType.STATION: MediaClass.TRACK, +} + +_LOGGER = logging.getLogger(__name__) + async def async_setup_entry( hass: HomeAssistant, @@ -106,6 +139,12 @@ async def async_setup_entry( """Add media players for a config entry.""" # Register custom entity services platform = entity_platform.async_get_current_platform() + platform.async_register_entity_service( + SERVICE_GET_QUEUE, + None, + "async_get_queue", + supports_response=SupportsResponse.ONLY, + ) platform.async_register_entity_service( SERVICE_GROUP_VOLUME_SET, {vol.Required(ATTR_MEDIA_VOLUME_LEVEL): cv.small_float}, @@ -129,20 +168,20 @@ async def async_setup_entry( add_entities_callback(list(coordinator.heos.players.values())) -type _FuncType[**_P] = Callable[_P, Awaitable[Any]] -type _ReturnFuncType[**_P] = Callable[_P, Coroutine[Any, Any, None]] +type _FuncType[**_P, _R] = Callable[_P, Awaitable[_R]] +type _ReturnFuncType[**_P, _R] = Callable[_P, Coroutine[Any, Any, _R]] -def catch_action_error[**_P]( +def catch_action_error[**_P, _R]( action: str, -) -> Callable[[_FuncType[_P]], _ReturnFuncType[_P]]: +) -> Callable[[_FuncType[_P, _R]], _ReturnFuncType[_P, _R]]: """Return decorator that catches errors and raises HomeAssistantError.""" - def decorator(func: _FuncType[_P]) -> _ReturnFuncType[_P]: + def decorator(func: _FuncType[_P, _R]) -> _ReturnFuncType[_P, _R]: @wraps(func) - async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> None: + async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R: try: - await func(*args, **kwargs) + return await func(*args, **kwargs) except (HeosError, ValueError) as ex: raise HomeAssistantError( translation_domain=HEOS_DOMAIN, @@ -242,6 +281,12 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): self.async_on_remove(self._player.add_on_player_event(self._player_update)) await super().async_added_to_hass() + @catch_action_error("get queue") + async def async_get_queue(self) -> ServiceResponse: + """Get the queue for the current player.""" + queue = await self._player.get_queue() + return {"queue": [dataclasses.asdict(item) for item in queue]} + @catch_action_error("clear playlist") async def async_clear_playlist(self) -> None: """Clear players playlist.""" @@ -282,6 +327,16 @@ class HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Play a piece of media.""" + if heos_source.is_media_uri(media_id): + media, data = heos_source.from_media_uri(media_id) + if not isinstance(media, MediaItem): + raise ValueError(f"Invalid media id '{media_id}'") + await self._player.play_media( + media, + HA_HEOS_ENQUEUE_MAP[kwargs.get(ATTR_MEDIA_ENQUEUE)], + ) + return + if media_source.is_media_source_id(media_id): media_type = MediaType.URL play_item = await media_source.async_resolve_media( @@ -534,14 +589,103 @@ class 
HeosMediaPlayer(CoordinatorEntity[HeosCoordinator], MediaPlayerEntity): """Volume level of the media player (0..1).""" return self._player.volume / 100 + async def _async_browse_media_root(self) -> BrowseMedia: + """Return media browsing root.""" + if not self.coordinator.heos.music_sources: + try: + await self.coordinator.heos.get_music_sources() + except HeosError as error: + _LOGGER.debug("Unable to load music sources: %s", error) + children: list[BrowseMedia] = [ + _media_to_browse_media(source) + for source in self.coordinator.heos.music_sources.values() + if source.available or source.source_id == heos_const.MUSIC_SOURCE_TUNEIN + ] + root = BrowseMedia( + title="Music Sources", + media_class=MediaClass.DIRECTORY, + children_media_class=MediaClass.DIRECTORY, + media_content_type="", + media_content_id=BROWSE_ROOT, + can_expand=True, + can_play=False, + children=children, + ) + # Append media source items + with suppress(BrowseError): + browse = await self._async_browse_media_source() + # If domain is None, it's an overview of available sources + if browse.domain is None and browse.children: + children.extend(browse.children) + else: + children.append(browse) + return root + + async def _async_browse_heos_media(self, media_content_id: str) -> BrowseMedia: + """Browse a HEOS media item.""" + media, data = heos_source.from_media_uri(media_content_id) + browse_media = _media_to_browse_media(media) + try: + browse_result = await self.coordinator.heos.browse_media(media) + except HeosError as error: + _LOGGER.debug("Unable to browse media %s: %s", media, error) + else: + browse_media.children = [ + _media_to_browse_media(item) + for item in browse_result.items + if item.browsable or item.playable + ] + return browse_media + + async def _async_browse_media_source( + self, media_content_id: str | None = None + ) -> BrowseMediaSource: + """Browse a media source item.""" + return await media_source.async_browse_media( + self.hass, + media_content_id, + content_filter=lambda item: item.media_content_type.startswith("audio/"), + ) + async def async_browse_media( self, media_content_type: MediaType | str | None = None, media_content_id: str | None = None, ) -> BrowseMedia: """Implement the websocket media browsing helper.""" - return await media_source.async_browse_media( - self.hass, - media_content_id, - content_filter=lambda item: item.media_content_type.startswith("audio/"), + if media_content_id in (None, BROWSE_ROOT): + return await self._async_browse_media_root() + assert media_content_id is not None + if heos_source.is_media_uri(media_content_id): + return await self._async_browse_heos_media(media_content_id) + if media_source.is_media_source_id(media_content_id): + return await self._async_browse_media_source(media_content_id) + raise ServiceValidationError( + translation_domain=HEOS_DOMAIN, + translation_key="unsupported_media_content_id", + translation_placeholders={"media_content_id": media_content_id}, ) + + +def _media_to_browse_media(media: MediaItem | MediaMusicSource) -> BrowseMedia: + """Convert a HEOS media item to a browse media item.""" + can_expand = False + can_play = False + + if isinstance(media, MediaMusicSource): + can_expand = ( + media.source_id == heos_const.MUSIC_SOURCE_TUNEIN or media.available + ) + else: + can_expand = media.browsable + can_play = media.playable + + return BrowseMedia( + can_expand=can_expand, + can_play=can_play, + media_content_id=heos_source.to_media_uri(media), + media_content_type="", + media_class=HEOS_MEDIA_TYPE_TO_MEDIA_CLASS[media.type], 
+ title=media.name, + thumbnail=media.image_url, + ) diff --git a/homeassistant/components/heos/services.yaml b/homeassistant/components/heos/services.yaml index 8f3a43421f6..fa79bd03096 100644 --- a/homeassistant/components/heos/services.yaml +++ b/homeassistant/components/heos/services.yaml @@ -1,3 +1,9 @@ +get_queue: + target: + entity: + integration: heos + domain: media_player + group_volume_set: target: entity: diff --git a/homeassistant/components/heos/strings.json b/homeassistant/components/heos/strings.json index 340eecb9f8b..38e3349b7c0 100644 --- a/homeassistant/components/heos/strings.json +++ b/homeassistant/components/heos/strings.json @@ -86,6 +86,10 @@ } } }, + "get_queue": { + "name": "Get queue", + "description": "Retrieves the queue of the media player." + }, "group_volume_down": { "name": "Turn down group volume", "description": "Turns down the group volume." @@ -146,6 +150,9 @@ }, "unknown_source": { "message": "Unknown source: {source}" + }, + "unsupported_media_content_id": { + "message": "Unsupported media_content_id: {media_content_id}" } }, "issues": { diff --git a/homeassistant/components/hive/strings.json b/homeassistant/components/hive/strings.json index 219776ad7e6..6323a2eecbf 100644 --- a/homeassistant/components/hive/strings.json +++ b/homeassistant/components/hive/strings.json @@ -2,27 +2,27 @@ "config": { "step": { "user": { - "title": "Hive Login", + "title": "Hive login", "description": "Enter your Hive login information.", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "scan_interval": "Scan Interval (seconds)" + "scan_interval": "Scan interval (seconds)" } }, "2fa": { - "title": "Hive Two-factor Authentication.", - "description": "Enter your Hive authentication code. \n \n Please enter code 0000 to request another code.", + "title": "Hive two-factor authentication", + "description": "Enter your Hive authentication code.\n\nPlease enter code 0000 to request another code.", "data": { "2fa": "Two-factor code" } }, "configuration": { + "title": "Hive configuration", + "description": "Enter your Hive configuration.", "data": { - "device_name": "Device Name" - }, - "description": "Enter your Hive configuration", - "title": "Hive Configuration." + "device_name": "Device name" + } }, "reauth": { "title": "[%key:component::hive::config::step::user::title%]", @@ -37,7 +37,7 @@ "invalid_username": "Failed to sign into Hive. Your email address is not recognised.", "invalid_password": "Failed to sign into Hive. Incorrect password, please try again.", "invalid_code": "Failed to sign into Hive. 
Your two-factor authentication code was incorrect.", - "no_internet_available": "An internet connection is required to connect to Hive.", + "no_internet_available": "An Internet connection is required to connect to Hive.", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { diff --git a/homeassistant/components/hko/config_flow.py b/homeassistant/components/hko/config_flow.py index 8548bb4767d..1e2a6230455 100644 --- a/homeassistant/components/hko/config_flow.py +++ b/homeassistant/components/hko/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from asyncio import timeout +import logging from typing import Any from hko import HKO, LOCATIONS, HKOError @@ -15,6 +16,8 @@ from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig from .const import API_RHRREAD, DEFAULT_LOCATION, DOMAIN, KEY_LOCATION +_LOGGER = logging.getLogger(__name__) + def get_loc_name(item): """Return an array of supported locations.""" @@ -54,7 +57,8 @@ class HKOConfigFlow(ConfigFlow, domain=DOMAIN): except HKOError: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: await self.async_set_unique_id( diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index ec47b222370..4c73210c36e 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.68", "babel==2.15.0"] + "requirements": ["holidays==0.69", "babel==2.15.0"] } diff --git a/homeassistant/components/holiday/strings.json b/homeassistant/components/holiday/strings.json index d464f9e8bfd..6e317b8fa7b 100644 --- a/homeassistant/components/holiday/strings.json +++ b/homeassistant/components/holiday/strings.json @@ -8,7 +8,7 @@ "step": { "user": { "data": { - "country": "Country" + "country": "[%key:common::config_flow::data::country%]" } }, "options": { diff --git a/homeassistant/components/home_connect/__init__.py b/homeassistant/components/home_connect/__init__.py index 70b357518da..fe01a3e9564 100644 --- a/homeassistant/components/home_connect/__init__.py +++ b/homeassistant/components/home_connect/__init__.py @@ -2,195 +2,29 @@ from __future__ import annotations -from collections.abc import Awaitable import logging -from typing import Any, cast +from typing import Any from aiohomeconnect.client import Client as HomeConnectClient -from aiohomeconnect.model import ( - ArrayOfOptions, - CommandKey, - Option, - OptionKey, - ProgramKey, - SettingKey, -) -from aiohomeconnect.model.error import HomeConnectError import aiohttp -import voluptuous as vol -from homeassistant.const import ATTR_DEVICE_ID, Platform -from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import ( - ConfigEntryAuthFailed, - ConfigEntryNotReady, - HomeAssistantError, - ServiceValidationError, -) -from homeassistant.helpers import ( - config_entry_oauth2_flow, - config_validation as cv, - device_registry as dr, -) +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv from homeassistant.helpers.entity_registry 
import RegistryEntry, async_migrate_entries -from homeassistant.helpers.issue_registry import ( - IssueSeverity, - async_create_issue, - async_delete_issue, -) +from homeassistant.helpers.issue_registry import async_delete_issue from homeassistant.helpers.typing import ConfigType from .api import AsyncConfigEntryAuth -from .const import ( - AFFECTS_TO_ACTIVE_PROGRAM, - AFFECTS_TO_SELECTED_PROGRAM, - ATTR_AFFECTS_TO, - ATTR_KEY, - ATTR_PROGRAM, - ATTR_UNIT, - ATTR_VALUE, - DOMAIN, - OLD_NEW_UNIQUE_ID_SUFFIX_MAP, - PROGRAM_ENUM_OPTIONS, - SERVICE_OPTION_ACTIVE, - SERVICE_OPTION_SELECTED, - SERVICE_PAUSE_PROGRAM, - SERVICE_RESUME_PROGRAM, - SERVICE_SELECT_PROGRAM, - SERVICE_SET_PROGRAM_AND_OPTIONS, - SERVICE_SETTING, - SERVICE_START_PROGRAM, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_PROGRAM, - SVE_TRANSLATION_PLACEHOLDER_VALUE, - TRANSLATION_KEYS_PROGRAMS_MAP, -) +from .const import DOMAIN, OLD_NEW_UNIQUE_ID_SUFFIX_MAP from .coordinator import HomeConnectConfigEntry, HomeConnectCoordinator -from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error +from .services import register_actions _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) - -PROGRAM_OPTIONS = { - bsh_key_to_translation_key(key): ( - key, - value, - ) - for key, value in { - OptionKey.BSH_COMMON_DURATION: int, - OptionKey.BSH_COMMON_START_IN_RELATIVE: int, - OptionKey.BSH_COMMON_FINISH_IN_RELATIVE: int, - OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_FILL_QUANTITY: int, - OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_MULTIPLE_BEVERAGES: bool, - OptionKey.DISHCARE_DISHWASHER_INTENSIV_ZONE: bool, - OptionKey.DISHCARE_DISHWASHER_BRILLIANCE_DRY: bool, - OptionKey.DISHCARE_DISHWASHER_VARIO_SPEED_PLUS: bool, - OptionKey.DISHCARE_DISHWASHER_SILENCE_ON_DEMAND: bool, - OptionKey.DISHCARE_DISHWASHER_HALF_LOAD: bool, - OptionKey.DISHCARE_DISHWASHER_EXTRA_DRY: bool, - OptionKey.DISHCARE_DISHWASHER_HYGIENE_PLUS: bool, - OptionKey.DISHCARE_DISHWASHER_ECO_DRY: bool, - OptionKey.DISHCARE_DISHWASHER_ZEOLITE_DRY: bool, - OptionKey.COOKING_OVEN_SETPOINT_TEMPERATURE: int, - OptionKey.COOKING_OVEN_FAST_PRE_HEAT: bool, - OptionKey.LAUNDRY_CARE_WASHER_I_DOS_1_ACTIVE: bool, - OptionKey.LAUNDRY_CARE_WASHER_I_DOS_2_ACTIVE: bool, - }.items() -} - - -SERVICE_SETTING_SCHEMA = vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_KEY): vol.All( - vol.Coerce(SettingKey), - vol.NotIn([SettingKey.UNKNOWN]), - ), - vol.Required(ATTR_VALUE): vol.Any(str, int, bool), - } -) - -# DEPRECATED: Remove in 2025.9.0 -SERVICE_OPTION_SCHEMA = vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_KEY): vol.All( - vol.Coerce(OptionKey), - vol.NotIn([OptionKey.UNKNOWN]), - ), - vol.Required(ATTR_VALUE): vol.Any(str, int, bool), - vol.Optional(ATTR_UNIT): str, - } -) - -# DEPRECATED: Remove in 2025.9.0 -SERVICE_PROGRAM_SCHEMA = vol.Any( - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_PROGRAM): vol.All( - vol.Coerce(ProgramKey), - vol.NotIn([ProgramKey.UNKNOWN]), - ), - vol.Required(ATTR_KEY): vol.All( - vol.Coerce(OptionKey), - vol.NotIn([OptionKey.UNKNOWN]), - ), - vol.Required(ATTR_VALUE): vol.Any(int, str), - vol.Optional(ATTR_UNIT): str, - }, - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_PROGRAM): vol.All( - vol.Coerce(ProgramKey), - vol.NotIn([ProgramKey.UNKNOWN]), - ), - }, -) - - -def _require_program_or_at_least_one_option(data: dict) -> dict: - if ATTR_PROGRAM not in data and not any( - option_key in data for option_key in 
(PROGRAM_ENUM_OPTIONS | PROGRAM_OPTIONS) - ): - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="required_program_or_one_option_at_least", - ) - return data - - -SERVICE_PROGRAM_AND_OPTIONS_SCHEMA = vol.All( - vol.Schema( - { - vol.Required(ATTR_DEVICE_ID): str, - vol.Required(ATTR_AFFECTS_TO): vol.In( - [AFFECTS_TO_ACTIVE_PROGRAM, AFFECTS_TO_SELECTED_PROGRAM] - ), - vol.Optional(ATTR_PROGRAM): vol.In(TRANSLATION_KEYS_PROGRAMS_MAP.keys()), - } - ) - .extend( - { - vol.Optional(translation_key): vol.In(allowed_values.keys()) - for translation_key, ( - key, - allowed_values, - ) in PROGRAM_ENUM_OPTIONS.items() - } - ) - .extend( - { - vol.Optional(translation_key): schema - for translation_key, (key, schema) in PROGRAM_OPTIONS.items() - } - ), - _require_program_or_at_least_one_option, -) - -SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str}) - PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, @@ -203,406 +37,9 @@ PLATFORMS = [ ] -async def _get_client_and_ha_id( - hass: HomeAssistant, device_id: str -) -> tuple[HomeConnectClient, str]: - device_registry = dr.async_get(hass) - device_entry = device_registry.async_get(device_id) - if device_entry is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="device_entry_not_found", - translation_placeholders={ - "device_id": device_id, - }, - ) - entry: HomeConnectConfigEntry | None = None - for entry_id in device_entry.config_entries: - _entry = hass.config_entries.async_get_entry(entry_id) - assert _entry - if _entry.domain == DOMAIN: - entry = cast(HomeConnectConfigEntry, _entry) - break - if entry is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="config_entry_not_found", - translation_placeholders={ - "device_id": device_id, - }, - ) - - ha_id = next( - ( - identifier[1] - for identifier in device_entry.identifiers - if identifier[0] == DOMAIN - ), - None, - ) - if ha_id is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="appliance_not_found", - translation_placeholders={ - "device_id": device_id, - }, - ) - return entry.runtime_data.client, ha_id - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: C901 +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Home Connect component.""" - - async def _async_service_program(call: ServiceCall, start: bool) -> None: - """Execute calls to services taking a program.""" - program = call.data[ATTR_PROGRAM] - client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID]) - - option_key = call.data.get(ATTR_KEY) - options = ( - [ - Option( - option_key, - call.data[ATTR_VALUE], - unit=call.data.get(ATTR_UNIT), - ) - ] - if option_key is not None - else None - ) - - async_create_issue( - hass, - DOMAIN, - "deprecated_set_program_and_option_actions", - breaks_in_ha_version="2025.9.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_set_program_and_option_actions", - translation_placeholders={ - "new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS, - "remove_release": "2025.9.0", - "deprecated_action_yaml": "\n".join( - [ - "```yaml", - f"action: {DOMAIN}.{SERVICE_START_PROGRAM if start else SERVICE_SELECT_PROGRAM}", - "data:", - f" {ATTR_DEVICE_ID}: DEVICE_ID", - f" {ATTR_PROGRAM}: {program}", - *([f" {ATTR_KEY}: {options[0].key}"] if options else []), - *([f" {ATTR_VALUE}: {options[0].value}"] if options else []), - *( 
- [f" {ATTR_UNIT}: {options[0].unit}"] - if options and options[0].unit - else [] - ), - "```", - ] - ), - "new_action_yaml": "\n ".join( - [ - "```yaml", - f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}", - "data:", - f" {ATTR_DEVICE_ID}: DEVICE_ID", - f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if start else AFFECTS_TO_SELECTED_PROGRAM}", - f" {ATTR_PROGRAM}: {bsh_key_to_translation_key(program.value)}", - *( - [ - f" {bsh_key_to_translation_key(options[0].key)}: {options[0].value}" - ] - if options - else [] - ), - "```", - ] - ), - "repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)", - }, - ) - - try: - if start: - await client.start_program(ha_id, program_key=program, options=options) - else: - await client.set_selected_program( - ha_id, program_key=program, options=options - ) - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="start_program" if start else "select_program", - translation_placeholders={ - **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program, - }, - ) from err - - async def _async_service_set_program_options( - call: ServiceCall, active: bool - ) -> None: - """Execute calls to services taking a program.""" - option_key = call.data[ATTR_KEY] - value = call.data[ATTR_VALUE] - unit = call.data.get(ATTR_UNIT) - client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID]) - - async_create_issue( - hass, - DOMAIN, - "deprecated_set_program_and_option_actions", - breaks_in_ha_version="2025.9.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_set_program_and_option_actions", - translation_placeholders={ - "new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS, - "remove_release": "2025.9.0", - "deprecated_action_yaml": "\n".join( - [ - "```yaml", - f"action: {DOMAIN}.{SERVICE_OPTION_ACTIVE if active else SERVICE_OPTION_SELECTED}", - "data:", - f" {ATTR_DEVICE_ID}: DEVICE_ID", - f" {ATTR_KEY}: {option_key}", - f" {ATTR_VALUE}: {value}", - *([f" {ATTR_UNIT}: {unit}"] if unit else []), - "```", - ] - ), - "new_action_yaml": "\n ".join( - [ - "```yaml", - f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}", - "data:", - f" {ATTR_DEVICE_ID}: DEVICE_ID", - f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if active else AFFECTS_TO_SELECTED_PROGRAM}", - f" {bsh_key_to_translation_key(option_key)}: {value}", - "```", - ] - ), - "repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)", - }, - ) - try: - if active: - await client.set_active_program_option( - ha_id, - option_key=option_key, - value=value, - unit=unit, - ) - else: - await client.set_selected_program_option( - ha_id, - option_key=option_key, - value=value, - unit=unit, - ) - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="set_options_active_program" - if active - else "set_options_selected_program", - translation_placeholders={ - **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_KEY: option_key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), - }, - ) from err - - async def _async_service_command( - call: ServiceCall, command_key: CommandKey - ) -> None: - """Execute calls to services executing a command.""" - client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID]) - - async_create_issue( - hass, - DOMAIN, - "deprecated_command_actions", - breaks_in_ha_version="2025.9.0", - is_fixable=True, - 
is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_command_actions", - ) - - try: - await client.put_command(ha_id, command_key=command_key, value=True) - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="execute_command", - translation_placeholders={ - **get_dict_from_home_connect_error(err), - "command": command_key.value, - }, - ) from err - - async def async_service_option_active(call: ServiceCall) -> None: - """Service for setting an option for an active program.""" - await _async_service_set_program_options(call, True) - - async def async_service_option_selected(call: ServiceCall) -> None: - """Service for setting an option for a selected program.""" - await _async_service_set_program_options(call, False) - - async def async_service_setting(call: ServiceCall) -> None: - """Service for changing a setting.""" - key = call.data[ATTR_KEY] - value = call.data[ATTR_VALUE] - client, ha_id = await _get_client_and_ha_id(hass, call.data[ATTR_DEVICE_ID]) - - try: - await client.set_setting(ha_id, setting_key=key, value=value) - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="set_setting", - translation_placeholders={ - **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_KEY: key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), - }, - ) from err - - async def async_service_pause_program(call: ServiceCall) -> None: - """Service for pausing a program.""" - await _async_service_command(call, CommandKey.BSH_COMMON_PAUSE_PROGRAM) - - async def async_service_resume_program(call: ServiceCall) -> None: - """Service for resuming a paused program.""" - await _async_service_command(call, CommandKey.BSH_COMMON_RESUME_PROGRAM) - - async def async_service_select_program(call: ServiceCall) -> None: - """Service for selecting a program.""" - await _async_service_program(call, False) - - async def async_service_set_program_and_options(call: ServiceCall) -> None: - """Service for setting a program and options.""" - data = dict(call.data) - program = data.pop(ATTR_PROGRAM, None) - affects_to = data.pop(ATTR_AFFECTS_TO) - client, ha_id = await _get_client_and_ha_id(hass, data.pop(ATTR_DEVICE_ID)) - - options: list[Option] = [] - - for option, value in data.items(): - if option in PROGRAM_ENUM_OPTIONS: - options.append( - Option( - PROGRAM_ENUM_OPTIONS[option][0], - PROGRAM_ENUM_OPTIONS[option][1][value], - ) - ) - elif option in PROGRAM_OPTIONS: - option_key = PROGRAM_OPTIONS[option][0] - options.append(Option(option_key, value)) - - method_call: Awaitable[Any] - exception_translation_key: str - if program: - program = ( - program - if isinstance(program, ProgramKey) - else TRANSLATION_KEYS_PROGRAMS_MAP[program] - ) - - if affects_to == AFFECTS_TO_ACTIVE_PROGRAM: - method_call = client.start_program( - ha_id, program_key=program, options=options - ) - exception_translation_key = "start_program" - elif affects_to == AFFECTS_TO_SELECTED_PROGRAM: - method_call = client.set_selected_program( - ha_id, program_key=program, options=options - ) - exception_translation_key = "select_program" - else: - array_of_options = ArrayOfOptions(options) - if affects_to == AFFECTS_TO_ACTIVE_PROGRAM: - method_call = client.set_active_program_options( - ha_id, array_of_options=array_of_options - ) - exception_translation_key = "set_options_active_program" - else: - # affects_to is AFFECTS_TO_SELECTED_PROGRAM - method_call = client.set_selected_program_options( - ha_id, 
array_of_options=array_of_options - ) - exception_translation_key = "set_options_selected_program" - - try: - await method_call - except HomeConnectError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key=exception_translation_key, - translation_placeholders={ - **get_dict_from_home_connect_error(err), - **( - {SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program} - if program - else {} - ), - }, - ) from err - - async def async_service_start_program(call: ServiceCall) -> None: - """Service for starting a program.""" - await _async_service_program(call, True) - - hass.services.async_register( - DOMAIN, - SERVICE_OPTION_ACTIVE, - async_service_option_active, - schema=SERVICE_OPTION_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_OPTION_SELECTED, - async_service_option_selected, - schema=SERVICE_OPTION_SCHEMA, - ) - hass.services.async_register( - DOMAIN, SERVICE_SETTING, async_service_setting, schema=SERVICE_SETTING_SCHEMA - ) - hass.services.async_register( - DOMAIN, - SERVICE_PAUSE_PROGRAM, - async_service_pause_program, - schema=SERVICE_COMMAND_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_RESUME_PROGRAM, - async_service_resume_program, - schema=SERVICE_COMMAND_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_SELECT_PROGRAM, - async_service_select_program, - schema=SERVICE_PROGRAM_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_START_PROGRAM, - async_service_start_program, - schema=SERVICE_PROGRAM_SCHEMA, - ) - hass.services.async_register( - DOMAIN, - SERVICE_SET_PROGRAM_AND_OPTIONS, - async_service_set_program_and_options, - schema=SERVICE_PROGRAM_AND_OPTIONS_SCHEMA, - ) - + register_actions(hass) return True diff --git a/homeassistant/components/home_connect/binary_sensor.py b/homeassistant/components/home_connect/binary_sensor.py index 1f82aa71766..a28b4ff2b49 100644 --- a/homeassistant/components/home_connect/binary_sensor.py +++ b/homeassistant/components/home_connect/binary_sensor.py @@ -106,8 +106,26 @@ BINARY_SENSORS = ( key=StatusKey.REFRIGERATION_COMMON_DOOR_CHILLER_COMMON, boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, device_class=BinarySensorDeviceClass.DOOR, + translation_key="common_chiller_door", + ), + HomeConnectBinarySensorEntityDescription( + key=StatusKey.REFRIGERATION_COMMON_DOOR_CHILLER, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, translation_key="chiller_door", ), + HomeConnectBinarySensorEntityDescription( + key=StatusKey.REFRIGERATION_COMMON_DOOR_CHILLER_LEFT, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="left_chiller_door", + ), + HomeConnectBinarySensorEntityDescription( + key=StatusKey.REFRIGERATION_COMMON_DOOR_CHILLER_RIGHT, + boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, + device_class=BinarySensorDeviceClass.DOOR, + translation_key="right_chiller_door", + ), HomeConnectBinarySensorEntityDescription( key=StatusKey.REFRIGERATION_COMMON_DOOR_FLEX_COMPARTMENT, boolean_map=REFRIGERATION_DOOR_BOOLEAN_MAP, @@ -226,6 +244,7 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): BSH_DOOR_STATE_LOCKED: False, BSH_DOOR_STATE_OPEN: True, }, + entity_registry_enabled_default=False, ), ) self._attr_unique_id = f"{appliance.info.ha_id}-Door" @@ -265,7 +284,8 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): DOMAIN, f"deprecated_binary_common_door_sensor_{self.entity_id}", breaks_in_ha_version="2025.5.0", - is_fixable=False, + is_fixable=True, + 
is_persistent=True, severity=IssueSeverity.WARNING, translation_key="deprecated_binary_common_door_sensor", translation_placeholders={ diff --git a/homeassistant/components/home_connect/button.py b/homeassistant/components/home_connect/button.py index 0a5538ec588..0bd31c6b7c9 100644 --- a/homeassistant/components/home_connect/button.py +++ b/homeassistant/components/home_connect/button.py @@ -1,6 +1,6 @@ """Provides button entities for Home Connect.""" -from aiohomeconnect.model import CommandKey, EventKey +from aiohomeconnect.model import CommandKey from aiohomeconnect.model.error import HomeConnectError from homeassistant.components.button import ButtonEntity, ButtonEntityDescription @@ -94,15 +94,9 @@ class HomeConnectButtonEntity(HomeConnectEntity, ButtonEntity): super().__init__( coordinator, appliance, - # The entity is subscribed to the appliance connected event, - # but it will receive also the disconnected event - ButtonEntityDescription( - key=EventKey.BSH_COMMON_APPLIANCE_CONNECTED, - ), + desc, + (appliance.info.ha_id,), ) - self.entity_description = desc - self.appliance = appliance - self.unique_id = f"{appliance.info.ha_id}-{desc.key}" def update_native_value(self) -> None: """Set the value of the entity.""" diff --git a/homeassistant/components/home_connect/const.py b/homeassistant/components/home_connect/const.py index 6255a513e39..64bf4af29a4 100644 --- a/homeassistant/components/home_connect/const.py +++ b/homeassistant/components/home_connect/const.py @@ -79,13 +79,6 @@ ATTR_VALUE = "value" AFFECTS_TO_ACTIVE_PROGRAM = "active_program" AFFECTS_TO_SELECTED_PROGRAM = "selected_program" -SVE_TRANSLATION_KEY_SET_SETTING = "set_setting_entity" -SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME = "appliance_name" -SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID = "entity_id" -SVE_TRANSLATION_PLACEHOLDER_PROGRAM = "program" -SVE_TRANSLATION_PLACEHOLDER_KEY = "key" -SVE_TRANSLATION_PLACEHOLDER_VALUE = "value" - TRANSLATION_KEYS_PROGRAMS_MAP = { bsh_key_to_translation_key(program.value): cast(ProgramKey, program) diff --git a/homeassistant/components/home_connect/coordinator.py b/homeassistant/components/home_connect/coordinator.py index 669e31f58c1..079db6b148e 100644 --- a/homeassistant/components/home_connect/coordinator.py +++ b/homeassistant/components/home_connect/coordinator.py @@ -155,7 +155,7 @@ class HomeConnectCoordinator( f"home_connect-events_listener_task-{self.config_entry.entry_id}", ) - async def _event_listener(self) -> None: # noqa: C901 + async def _event_listener(self) -> None: """Match event with listener for event type.""" retry_time = 10 while True: @@ -232,15 +232,15 @@ class HomeConnectCoordinator( self.data[event_message_ha_id].update(appliance_data) else: self.data[event_message_ha_id] = appliance_data - for listener, context in list( - self._special_listeners.values() - ) + list(self._listeners.values()): - assert isinstance(context, tuple) + for listener, context in self._special_listeners.values(): if ( EventKey.BSH_COMMON_APPLIANCE_DEPAIRED not in context ): listener() + self._call_all_event_listeners_for_appliance( + event_message_ha_id + ) case EventType.DISCONNECTED: self.data[event_message_ha_id].info.connected = False @@ -267,7 +267,7 @@ class HomeConnectCoordinator( _LOGGER.debug( "Non-breaking error (%s) while listening for events," " continuing in %s seconds", - type(error).__name__, + error, retry_time, ) await asyncio_sleep(retry_time) @@ -279,13 +279,6 @@ class HomeConnectCoordinator( ) break - # Trigger to delete the possible depaired device entities - 
# from known_entities variable at common.py - for listener, context in self._special_listeners.values(): - assert isinstance(context, tuple) - if EventKey.BSH_COMMON_APPLIANCE_DEPAIRED in context: - listener() - @callback def _call_event_listener(self, event_message: EventMessage) -> None: """Call listener for event.""" @@ -389,6 +382,13 @@ class HomeConnectCoordinator( remove_config_entry_id=self.config_entry.entry_id, ) + # Trigger to delete the possible depaired device entities + # from known_entities variable at common.py + for listener, context in self._special_listeners.values(): + assert isinstance(context, tuple) + if EventKey.BSH_COMMON_APPLIANCE_DEPAIRED in context: + listener() + async def _get_appliance_data( self, appliance: HomeAppliance, @@ -415,9 +415,7 @@ class HomeConnectCoordinator( _LOGGER.debug( "Error fetching settings for %s: %s", appliance.ha_id, - error - if isinstance(error, HomeConnectApiError) - else type(error).__name__, + error, ) settings = {} try: @@ -431,9 +429,7 @@ class HomeConnectCoordinator( _LOGGER.debug( "Error fetching status for %s: %s", appliance.ha_id, - error - if isinstance(error, HomeConnectApiError) - else type(error).__name__, + error, ) status = {} @@ -449,9 +445,7 @@ class HomeConnectCoordinator( _LOGGER.debug( "Error fetching programs for %s: %s", appliance.ha_id, - error - if isinstance(error, HomeConnectApiError) - else type(error).__name__, + error, ) else: programs.extend(all_programs.programs) @@ -545,9 +539,7 @@ class HomeConnectCoordinator( _LOGGER.debug( "Error fetching options for %s: %s", ha_id, - error - if isinstance(error, HomeConnectApiError) - else type(error).__name__, + error, ) return {} diff --git a/homeassistant/components/home_connect/entity.py b/homeassistant/components/home_connect/entity.py index 8a0f9bd7640..facb3b14a9b 100644 --- a/homeassistant/components/home_connect/entity.py +++ b/homeassistant/components/home_connect/entity.py @@ -40,9 +40,13 @@ class HomeConnectEntity(CoordinatorEntity[HomeConnectCoordinator]): coordinator: HomeConnectCoordinator, appliance: HomeConnectApplianceData, desc: EntityDescription, + context_override: Any | None = None, ) -> None: """Initialize the entity.""" - super().__init__(coordinator, (appliance.info.ha_id, EventKey(desc.key))) + context = (appliance.info.ha_id, EventKey(desc.key)) + if context_override is not None: + context = context_override + super().__init__(coordinator, context) self.appliance = appliance self.entity_description = desc self._attr_unique_id = f"{appliance.info.ha_id}-{desc.key}" diff --git a/homeassistant/components/home_connect/icons.json b/homeassistant/components/home_connect/icons.json index 651c00328b6..9b4c9276998 100644 --- a/homeassistant/components/home_connect/icons.json +++ b/homeassistant/components/home_connect/icons.json @@ -49,6 +49,23 @@ "default": "mdi:map-marker-remove-variant" } }, + "button": { + "open_door": { + "default": "mdi:door-open" + }, + "partly_open_door": { + "default": "mdi:door-open" + }, + "pause_program": { + "default": "mdi:pause" + }, + "resume_program": { + "default": "mdi:play" + }, + "stop_program": { + "default": "mdi:stop" + } + }, "sensor": { "operation_state": { "default": "mdi:state-machine", @@ -96,7 +113,7 @@ "milk_counter": { "default": "mdi:cup" }, - "coffee_and_milk": { + "coffee_and_milk_counter": { "default": "mdi:coffee" }, "ristretto_espresso_counter": { diff --git a/homeassistant/components/home_connect/light.py b/homeassistant/components/home_connect/light.py index 72c6b9aaa2b..de55a60bd43 100644 
--- a/homeassistant/components/home_connect/light.py +++ b/homeassistant/components/home_connect/light.py @@ -21,11 +21,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.util import color as color_util from .common import setup_home_connect_entry -from .const import ( - BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, - DOMAIN, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, -) +from .const import BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, DOMAIN from .coordinator import ( HomeConnectApplianceData, HomeConnectConfigEntry, @@ -164,7 +160,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="turn_on_light", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err if self._color_key and self._custom_color_key: @@ -183,7 +179,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="select_light_custom_color", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err @@ -201,7 +197,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="set_light_color", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err return @@ -211,11 +207,13 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): brightness = round( color_util.brightness_to_value( self._brightness_scale, - kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness), + cast(int, kwargs.get(ATTR_BRIGHTNESS, self._attr_brightness)), ) ) - hs_color = kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + hs_color = cast( + tuple[float, float], kwargs.get(ATTR_HS_COLOR, self._attr_hs_color) + ) rgb = color_util.color_hsv_to_RGB(hs_color[0], hs_color[1], brightness) hex_val = color_util.color_rgb_to_hex(*rgb) @@ -231,7 +229,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="set_light_color", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err return @@ -254,7 +252,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="set_light_brightness", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err @@ -272,7 +270,7 @@ class HomeConnectLight(HomeConnectEntity, LightEntity): translation_key="turn_off_light", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, + "entity_id": self.entity_id, }, ) from err diff --git a/homeassistant/components/home_connect/number.py b/homeassistant/components/home_connect/number.py index db0258f2739..1bb793f4015 100644 --- a/homeassistant/components/home_connect/number.py +++ b/homeassistant/components/home_connect/number.py @@ -1,4 +1,4 @@ -"""Provides number enties for Home Connect.""" +"""Provides number entities for Home Connect.""" import logging from typing import cast @@ -16,14 +16,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .common import setup_home_connect_entry -from .const import ( - DOMAIN, - SVE_TRANSLATION_KEY_SET_SETTING, - 
SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_VALUE, - UNIT_MAP, -) +from .const import DOMAIN, UNIT_MAP from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry from .entity import HomeConnectEntity, HomeConnectOptionEntity, constraint_fetcher from .utils import get_dict_from_home_connect_error @@ -33,6 +26,11 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 1 NUMBERS = ( + NumberEntityDescription( + key=SettingKey.BSH_COMMON_ALARM_CLOCK, + device_class=NumberDeviceClass.DURATION, + translation_key="alarm_clock", + ), NumberEntityDescription( key=SettingKey.REFRIGERATION_FRIDGE_FREEZER_SETPOINT_TEMPERATURE_REFRIGERATOR, device_class=NumberDeviceClass.TEMPERATURE, @@ -180,12 +178,12 @@ class HomeConnectNumberEntity(HomeConnectEntity, NumberEntity): except HomeConnectError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key=SVE_TRANSLATION_KEY_SET_SETTING, + translation_key="set_setting_entity", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + "entity_id": self.entity_id, + "key": self.bsh_key, + "value": str(value), }, ) from err diff --git a/homeassistant/components/home_connect/select.py b/homeassistant/components/home_connect/select.py index 001c2e9ec31..c82e0686cb5 100644 --- a/homeassistant/components/home_connect/select.py +++ b/homeassistant/components/home_connect/select.py @@ -31,11 +31,6 @@ from .const import ( INTENSIVE_LEVEL_OPTIONS, PROGRAMS_TRANSLATION_KEYS_MAP, SPIN_SPEED_OPTIONS, - SVE_TRANSLATION_KEY_SET_SETTING, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_PROGRAM, - SVE_TRANSLATION_PLACEHOLDER_VALUE, TEMPERATURE_OPTIONS, TRANSLATION_KEYS_PROGRAMS_MAP, VARIO_PERFECT_OPTIONS, @@ -406,7 +401,7 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity): translation_key=self.entity_description.error_translation_key, translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_PROGRAM: program_key.value, + "program": program_key.value, }, ) from err @@ -443,12 +438,12 @@ class HomeConnectSelectEntity(HomeConnectEntity, SelectEntity): except HomeConnectError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key=SVE_TRANSLATION_KEY_SET_SETTING, + translation_key="set_setting_entity", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: value, + "entity_id": self.entity_id, + "key": self.bsh_key, + "value": value, }, ) from err diff --git a/homeassistant/components/home_connect/sensor.py b/homeassistant/components/home_connect/sensor.py index 796af8260fc..f3c73c8a5ff 100644 --- a/homeassistant/components/home_connect/sensor.py +++ b/homeassistant/components/home_connect/sensor.py @@ -53,7 +53,7 @@ BSH_PROGRAM_SENSORS = ( device_class=SensorDeviceClass.TIMESTAMP, translation_key="program_finish_time", appliance_types=( - "CoffeMaker", + "CoffeeMaker", "CookProcessor", "Dishwasher", "Dryer", @@ -195,28 +195,76 @@ SENSORS = ( EVENT_SENSORS = ( HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, + key=EventKey.BSH_COMMON_EVENT_PROGRAM_ABORTED, 
device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="freezer_door_alarm", - appliance_types=("FridgeFreezer", "Freezer"), + translation_key="program_aborted", + appliance_types=("Dishwasher", "CleaningRobot", "CookProcessor"), ), HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_REFRIGERATOR, + key=EventKey.BSH_COMMON_EVENT_PROGRAM_FINISHED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="refrigerator_door_alarm", - appliance_types=("FridgeFreezer", "Refrigerator"), + translation_key="program_finished", + appliance_types=( + "Oven", + "Dishwasher", + "Washer", + "Dryer", + "WasherDryer", + "CleaningRobot", + "CookProcessor", + ), ), HomeConnectSensorEntityDescription( - key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_TEMPERATURE_ALARM_FREEZER, + key=EventKey.BSH_COMMON_EVENT_ALARM_CLOCK_ELAPSED, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="freezer_temperature_alarm", - appliance_types=("FridgeFreezer", "Freezer"), + translation_key="alarm_clock_elapsed", + appliance_types=("Oven", "Cooktop"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_OVEN_EVENT_PREHEAT_FINISHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="preheat_finished", + appliance_types=("Oven", "Cooktop"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_OVEN_EVENT_REGULAR_PREHEAT_FINISHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="regular_preheat_finished", + appliance_types=("Oven",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.LAUNDRY_CARE_DRYER_EVENT_DRYING_PROCESS_FINISHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="drying_process_finished", + appliance_types=("Dryer",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.DISHCARE_DISHWASHER_EVENT_SALT_NEARLY_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="salt_nearly_empty", + appliance_types=("Dishwasher",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.DISHCARE_DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="rinse_aid_nearly_empty", + appliance_types=("Dishwasher",), ), HomeConnectSensorEntityDescription( key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_BEAN_CONTAINER_EMPTY, @@ -243,20 +291,220 @@ EVENT_SENSORS = ( appliance_types=("CoffeeMaker",), ), HomeConnectSensorEntityDescription( - key=EventKey.DISHCARE_DISHWASHER_EVENT_SALT_NEARLY_EMPTY, + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_KEEP_MILK_TANK_COOL, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="salt_nearly_empty", - appliance_types=("Dishwasher",), + translation_key="keep_milk_tank_cool", + appliance_types=("CoffeeMaker",), ), HomeConnectSensorEntityDescription( - key=EventKey.DISHCARE_DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_20_CUPS, device_class=SensorDeviceClass.ENUM, options=EVENT_OPTIONS, default_value="off", - translation_key="rinse_aid_nearly_empty", - appliance_types=("Dishwasher",), + translation_key="descaling_in_20_cups", + appliance_types=("CoffeeMaker",), + ), + 
HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_15_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="descaling_in_15_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_10_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="descaling_in_10_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DESCALING_IN_5_CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="descaling_in_5_cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_DESCALED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_should_be_descaled", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_DESCALING_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_descaling_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_DESCALING_BLOCKAGE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_descaling_blockage", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_CLEANED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_should_be_cleaned", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CLEANING_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_cleaning_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN20CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="calc_n_clean_in20cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN15CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="calc_n_clean_in15cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN10CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="calc_n_clean_in10cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_CALC_N_CLEAN_IN5CUPS, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="calc_n_clean_in5cups", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_SHOULD_BE_CALC_N_CLEANED, + 
device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_should_be_calc_n_cleaned", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CALC_N_CLEAN_OVERDUE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_calc_n_clean_overdue", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_COFFEE_MAKER_EVENT_DEVICE_CALC_N_CLEAN_BLOCKAGE, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="device_calc_n_clean_blockage", + appliance_types=("CoffeeMaker",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_FREEZER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="freezer_door_alarm", + appliance_types=("FridgeFreezer", "Freezer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_DOOR_ALARM_REFRIGERATOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="refrigerator_door_alarm", + appliance_types=("FridgeFreezer", "Refrigerator"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.REFRIGERATION_FRIDGE_FREEZER_EVENT_TEMPERATURE_ALARM_FREEZER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="freezer_temperature_alarm", + appliance_types=("FridgeFreezer", "Freezer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_EMPTY_DUST_BOX_AND_CLEAN_FILTER, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="empty_dust_box_and_clean_filter", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_ROBOT_IS_STUCK, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="robot_is_stuck", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.CONSUMER_PRODUCTS_CLEANING_ROBOT_EVENT_DOCKING_STATION_NOT_FOUND, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="docking_station_not_found", + appliance_types=("CleaningRobot",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.LAUNDRY_CARE_WASHER_EVENT_I_DOS_1_FILL_LEVEL_POOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="poor_i_dos_1_fill_level", + appliance_types=("Washer", "WasherDryer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.LAUNDRY_CARE_WASHER_EVENT_I_DOS_2_FILL_LEVEL_POOR, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="poor_i_dos_2_fill_level", + appliance_types=("Washer", "WasherDryer"), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_COMMON_EVENT_HOOD_GREASE_FILTER_MAX_SATURATION_NEARLY_REACHED, + device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="grease_filter_max_saturation_nearly_reached", + appliance_types=("Hood",), + ), + HomeConnectSensorEntityDescription( + key=EventKey.COOKING_COMMON_EVENT_HOOD_GREASE_FILTER_MAX_SATURATION_REACHED, + 
device_class=SensorDeviceClass.ENUM, + options=EVENT_OPTIONS, + default_value="off", + translation_key="grease_filter_max_saturation_reached", + appliance_types=("Hood",), ), ) diff --git a/homeassistant/components/home_connect/services.py b/homeassistant/components/home_connect/services.py new file mode 100644 index 00000000000..fac1c5fe1a9 --- /dev/null +++ b/homeassistant/components/home_connect/services.py @@ -0,0 +1,572 @@ +"""Custom actions (previously known as services) for the Home Connect integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable +from typing import Any, cast + +from aiohomeconnect.client import Client as HomeConnectClient +from aiohomeconnect.model import ( + ArrayOfOptions, + CommandKey, + Option, + OptionKey, + ProgramKey, + SettingKey, +) +from aiohomeconnect.model.error import HomeConnectError +import voluptuous as vol + +from homeassistant.const import ATTR_DEVICE_ID +from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue + +from .const import ( + AFFECTS_TO_ACTIVE_PROGRAM, + AFFECTS_TO_SELECTED_PROGRAM, + ATTR_AFFECTS_TO, + ATTR_KEY, + ATTR_PROGRAM, + ATTR_UNIT, + ATTR_VALUE, + DOMAIN, + PROGRAM_ENUM_OPTIONS, + SERVICE_OPTION_ACTIVE, + SERVICE_OPTION_SELECTED, + SERVICE_PAUSE_PROGRAM, + SERVICE_RESUME_PROGRAM, + SERVICE_SELECT_PROGRAM, + SERVICE_SET_PROGRAM_AND_OPTIONS, + SERVICE_SETTING, + SERVICE_START_PROGRAM, + TRANSLATION_KEYS_PROGRAMS_MAP, +) +from .coordinator import HomeConnectConfigEntry +from .utils import bsh_key_to_translation_key, get_dict_from_home_connect_error + +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +PROGRAM_OPTIONS = { + bsh_key_to_translation_key(key): ( + key, + value, + ) + for key, value in { + OptionKey.BSH_COMMON_DURATION: int, + OptionKey.BSH_COMMON_START_IN_RELATIVE: int, + OptionKey.BSH_COMMON_FINISH_IN_RELATIVE: int, + OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_FILL_QUANTITY: int, + OptionKey.CONSUMER_PRODUCTS_COFFEE_MAKER_MULTIPLE_BEVERAGES: bool, + OptionKey.DISHCARE_DISHWASHER_INTENSIV_ZONE: bool, + OptionKey.DISHCARE_DISHWASHER_BRILLIANCE_DRY: bool, + OptionKey.DISHCARE_DISHWASHER_VARIO_SPEED_PLUS: bool, + OptionKey.DISHCARE_DISHWASHER_SILENCE_ON_DEMAND: bool, + OptionKey.DISHCARE_DISHWASHER_HALF_LOAD: bool, + OptionKey.DISHCARE_DISHWASHER_EXTRA_DRY: bool, + OptionKey.DISHCARE_DISHWASHER_HYGIENE_PLUS: bool, + OptionKey.DISHCARE_DISHWASHER_ECO_DRY: bool, + OptionKey.DISHCARE_DISHWASHER_ZEOLITE_DRY: bool, + OptionKey.COOKING_OVEN_SETPOINT_TEMPERATURE: int, + OptionKey.COOKING_OVEN_FAST_PRE_HEAT: bool, + OptionKey.LAUNDRY_CARE_WASHER_I_DOS_1_ACTIVE: bool, + OptionKey.LAUNDRY_CARE_WASHER_I_DOS_2_ACTIVE: bool, + }.items() +} + + +SERVICE_SETTING_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_KEY): vol.All( + vol.Coerce(SettingKey), + vol.NotIn([SettingKey.UNKNOWN]), + ), + vol.Required(ATTR_VALUE): vol.Any(str, int, bool), + } +) + +# DEPRECATED: Remove in 2025.9.0 +SERVICE_OPTION_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_KEY): vol.All( + vol.Coerce(OptionKey), + vol.NotIn([OptionKey.UNKNOWN]), + ), + vol.Required(ATTR_VALUE): vol.Any(str, int, bool), + vol.Optional(ATTR_UNIT): str, + } +) + +# DEPRECATED: Remove in 2025.9.0 +SERVICE_PROGRAM_SCHEMA = 
vol.Any( + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_PROGRAM): vol.All( + vol.Coerce(ProgramKey), + vol.NotIn([ProgramKey.UNKNOWN]), + ), + vol.Required(ATTR_KEY): vol.All( + vol.Coerce(OptionKey), + vol.NotIn([OptionKey.UNKNOWN]), + ), + vol.Required(ATTR_VALUE): vol.Any(int, str), + vol.Optional(ATTR_UNIT): str, + }, + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_PROGRAM): vol.All( + vol.Coerce(ProgramKey), + vol.NotIn([ProgramKey.UNKNOWN]), + ), + }, +) + + +def _require_program_or_at_least_one_option(data: dict) -> dict: + if ATTR_PROGRAM not in data and not any( + option_key in data for option_key in (PROGRAM_ENUM_OPTIONS | PROGRAM_OPTIONS) + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="required_program_or_one_option_at_least", + ) + return data + + +SERVICE_PROGRAM_AND_OPTIONS_SCHEMA = vol.All( + vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): str, + vol.Required(ATTR_AFFECTS_TO): vol.In( + [AFFECTS_TO_ACTIVE_PROGRAM, AFFECTS_TO_SELECTED_PROGRAM] + ), + vol.Optional(ATTR_PROGRAM): vol.In(TRANSLATION_KEYS_PROGRAMS_MAP.keys()), + } + ) + .extend( + { + vol.Optional(translation_key): vol.In(allowed_values.keys()) + for translation_key, ( + key, + allowed_values, + ) in PROGRAM_ENUM_OPTIONS.items() + } + ) + .extend( + { + vol.Optional(translation_key): schema + for translation_key, (key, schema) in PROGRAM_OPTIONS.items() + } + ), + _require_program_or_at_least_one_option, +) + +SERVICE_COMMAND_SCHEMA = vol.Schema({vol.Required(ATTR_DEVICE_ID): str}) + + +async def _get_client_and_ha_id( + hass: HomeAssistant, device_id: str +) -> tuple[HomeConnectClient, str]: + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get(device_id) + if device_entry is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="device_entry_not_found", + translation_placeholders={ + "device_id": device_id, + }, + ) + entry: HomeConnectConfigEntry | None = None + for entry_id in device_entry.config_entries: + _entry = hass.config_entries.async_get_entry(entry_id) + assert _entry + if _entry.domain == DOMAIN: + entry = cast(HomeConnectConfigEntry, _entry) + break + if entry is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="config_entry_not_found", + translation_placeholders={ + "device_id": device_id, + }, + ) + + ha_id = next( + ( + identifier[1] + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + ), + None, + ) + if ha_id is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="appliance_not_found", + translation_placeholders={ + "device_id": device_id, + }, + ) + return entry.runtime_data.client, ha_id + + +async def _async_service_program(call: ServiceCall, start: bool) -> None: + """Execute calls to services taking a program.""" + program = call.data[ATTR_PROGRAM] + client, ha_id = await _get_client_and_ha_id(call.hass, call.data[ATTR_DEVICE_ID]) + + option_key = call.data.get(ATTR_KEY) + options = ( + [ + Option( + option_key, + call.data[ATTR_VALUE], + unit=call.data.get(ATTR_UNIT), + ) + ] + if option_key is not None + else None + ) + + async_create_issue( + call.hass, + DOMAIN, + "deprecated_set_program_and_option_actions", + breaks_in_ha_version="2025.9.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_set_program_and_option_actions", + translation_placeholders={ + "new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS, + 
"remove_release": "2025.9.0", + "deprecated_action_yaml": "\n".join( + [ + "```yaml", + f"action: {DOMAIN}.{SERVICE_START_PROGRAM if start else SERVICE_SELECT_PROGRAM}", + "data:", + f" {ATTR_DEVICE_ID}: DEVICE_ID", + f" {ATTR_PROGRAM}: {program}", + *([f" {ATTR_KEY}: {options[0].key}"] if options else []), + *([f" {ATTR_VALUE}: {options[0].value}"] if options else []), + *( + [f" {ATTR_UNIT}: {options[0].unit}"] + if options and options[0].unit + else [] + ), + "```", + ] + ), + "new_action_yaml": "\n ".join( + [ + "```yaml", + f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}", + "data:", + f" {ATTR_DEVICE_ID}: DEVICE_ID", + f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if start else AFFECTS_TO_SELECTED_PROGRAM}", + f" {ATTR_PROGRAM}: {bsh_key_to_translation_key(program.value)}", + *( + [ + f" {bsh_key_to_translation_key(options[0].key)}: {options[0].value}" + ] + if options + else [] + ), + "```", + ] + ), + "repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)", + }, + ) + + try: + if start: + await client.start_program(ha_id, program_key=program, options=options) + else: + await client.set_selected_program( + ha_id, program_key=program, options=options + ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="start_program" if start else "select_program", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "program": program, + }, + ) from err + + +async def _async_service_set_program_options(call: ServiceCall, active: bool) -> None: + """Execute calls to services taking a program.""" + option_key = call.data[ATTR_KEY] + value = call.data[ATTR_VALUE] + unit = call.data.get(ATTR_UNIT) + client, ha_id = await _get_client_and_ha_id(call.hass, call.data[ATTR_DEVICE_ID]) + + async_create_issue( + call.hass, + DOMAIN, + "deprecated_set_program_and_option_actions", + breaks_in_ha_version="2025.9.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_set_program_and_option_actions", + translation_placeholders={ + "new_action_key": SERVICE_SET_PROGRAM_AND_OPTIONS, + "remove_release": "2025.9.0", + "deprecated_action_yaml": "\n".join( + [ + "```yaml", + f"action: {DOMAIN}.{SERVICE_OPTION_ACTIVE if active else SERVICE_OPTION_SELECTED}", + "data:", + f" {ATTR_DEVICE_ID}: DEVICE_ID", + f" {ATTR_KEY}: {option_key}", + f" {ATTR_VALUE}: {value}", + *([f" {ATTR_UNIT}: {unit}"] if unit else []), + "```", + ] + ), + "new_action_yaml": "\n ".join( + [ + "```yaml", + f"action: {DOMAIN}.{SERVICE_SET_PROGRAM_AND_OPTIONS}", + "data:", + f" {ATTR_DEVICE_ID}: DEVICE_ID", + f" {ATTR_AFFECTS_TO}: {AFFECTS_TO_ACTIVE_PROGRAM if active else AFFECTS_TO_SELECTED_PROGRAM}", + f" {bsh_key_to_translation_key(option_key)}: {value}", + "```", + ] + ), + "repo_link": "[aiohomeconnect](https://github.com/MartinHjelmare/aiohomeconnect)", + }, + ) + try: + if active: + await client.set_active_program_option( + ha_id, + option_key=option_key, + value=value, + unit=unit, + ) + else: + await client.set_selected_program_option( + ha_id, + option_key=option_key, + value=value, + unit=unit, + ) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_options_active_program" + if active + else "set_options_selected_program", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "key": option_key, + "value": str(value), + }, + ) from err + + +async def _async_service_command(call: ServiceCall, 
command_key: CommandKey) -> None: + """Execute calls to services executing a command.""" + client, ha_id = await _get_client_and_ha_id(call.hass, call.data[ATTR_DEVICE_ID]) + + async_create_issue( + call.hass, + DOMAIN, + "deprecated_command_actions", + breaks_in_ha_version="2025.9.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_command_actions", + ) + + try: + await client.put_command(ha_id, command_key=command_key, value=True) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="execute_command", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "command": command_key.value, + }, + ) from err + + +async def async_service_option_active(call: ServiceCall) -> None: + """Service for setting an option for an active program.""" + await _async_service_set_program_options(call, True) + + +async def async_service_option_selected(call: ServiceCall) -> None: + """Service for setting an option for a selected program.""" + await _async_service_set_program_options(call, False) + + +async def async_service_setting(call: ServiceCall) -> None: + """Service for changing a setting.""" + key = call.data[ATTR_KEY] + value = call.data[ATTR_VALUE] + client, ha_id = await _get_client_and_ha_id(call.hass, call.data[ATTR_DEVICE_ID]) + + try: + await client.set_setting(ha_id, setting_key=key, value=value) + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_setting", + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "key": key, + "value": str(value), + }, + ) from err + + +async def async_service_pause_program(call: ServiceCall) -> None: + """Service for pausing a program.""" + await _async_service_command(call, CommandKey.BSH_COMMON_PAUSE_PROGRAM) + + +async def async_service_resume_program(call: ServiceCall) -> None: + """Service for resuming a paused program.""" + await _async_service_command(call, CommandKey.BSH_COMMON_RESUME_PROGRAM) + + +async def async_service_select_program(call: ServiceCall) -> None: + """Service for selecting a program.""" + await _async_service_program(call, False) + + +async def async_service_set_program_and_options(call: ServiceCall) -> None: + """Service for setting a program and options.""" + data = dict(call.data) + program = data.pop(ATTR_PROGRAM, None) + affects_to = data.pop(ATTR_AFFECTS_TO) + client, ha_id = await _get_client_and_ha_id(call.hass, data.pop(ATTR_DEVICE_ID)) + + options: list[Option] = [] + + for option, value in data.items(): + if option in PROGRAM_ENUM_OPTIONS: + options.append( + Option( + PROGRAM_ENUM_OPTIONS[option][0], + PROGRAM_ENUM_OPTIONS[option][1][value], + ) + ) + elif option in PROGRAM_OPTIONS: + option_key = PROGRAM_OPTIONS[option][0] + options.append(Option(option_key, value)) + + method_call: Awaitable[Any] + exception_translation_key: str + if program: + program = ( + program + if isinstance(program, ProgramKey) + else TRANSLATION_KEYS_PROGRAMS_MAP[program] + ) + + if affects_to == AFFECTS_TO_ACTIVE_PROGRAM: + method_call = client.start_program( + ha_id, program_key=program, options=options + ) + exception_translation_key = "start_program" + elif affects_to == AFFECTS_TO_SELECTED_PROGRAM: + method_call = client.set_selected_program( + ha_id, program_key=program, options=options + ) + exception_translation_key = "select_program" + else: + array_of_options = ArrayOfOptions(options) + if affects_to == AFFECTS_TO_ACTIVE_PROGRAM: 
+ method_call = client.set_active_program_options( + ha_id, array_of_options=array_of_options + ) + exception_translation_key = "set_options_active_program" + else: + # affects_to is AFFECTS_TO_SELECTED_PROGRAM + method_call = client.set_selected_program_options( + ha_id, array_of_options=array_of_options + ) + exception_translation_key = "set_options_selected_program" + + try: + await method_call + except HomeConnectError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key=exception_translation_key, + translation_placeholders={ + **get_dict_from_home_connect_error(err), + **({"program": program} if program else {}), + }, + ) from err + + +async def async_service_start_program(call: ServiceCall) -> None: + """Service for starting a program.""" + await _async_service_program(call, True) + + +def register_actions(hass: HomeAssistant) -> None: + """Register custom actions.""" + + hass.services.async_register( + DOMAIN, + SERVICE_OPTION_ACTIVE, + async_service_option_active, + schema=SERVICE_OPTION_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_OPTION_SELECTED, + async_service_option_selected, + schema=SERVICE_OPTION_SCHEMA, + ) + hass.services.async_register( + DOMAIN, SERVICE_SETTING, async_service_setting, schema=SERVICE_SETTING_SCHEMA + ) + hass.services.async_register( + DOMAIN, + SERVICE_PAUSE_PROGRAM, + async_service_pause_program, + schema=SERVICE_COMMAND_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_RESUME_PROGRAM, + async_service_resume_program, + schema=SERVICE_COMMAND_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_SELECT_PROGRAM, + async_service_select_program, + schema=SERVICE_PROGRAM_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_START_PROGRAM, + async_service_start_program, + schema=SERVICE_PROGRAM_SCHEMA, + ) + hass.services.async_register( + DOMAIN, + SERVICE_SET_PROGRAM_AND_OPTIONS, + async_service_set_program_and_options, + schema=SERVICE_PROGRAM_AND_OPTIONS_SCHEMA, + ) diff --git a/homeassistant/components/home_connect/services.yaml b/homeassistant/components/home_connect/services.yaml index 2b53090fd34..e07e8e91457 100644 --- a/homeassistant/components/home_connect/services.yaml +++ b/homeassistant/components/home_connect/services.yaml @@ -64,7 +64,6 @@ set_program_and_options: - selected_program program: example: dishcare_dishwasher_program_auto2 - required: true selector: select: mode: dropdown diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index d615d9fc091..5072a4d49a7 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -110,17 +110,71 @@ } }, "issues": { + "deprecated_time_alarm_clock_in_automations_scripts": { + "title": "Deprecated alarm clock entity detected in some automations or scripts", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_time_alarm_clock_in_automations_scripts::title%]", + "description": "The alarm clock entity `{entity_id}`, which is deprecated because it's being moved to the `number` platform, is used in the following automations or scripts:\n{items}\n\nPlease, fix this issue by updating your automations or scripts to use the new `number` entity." 
+ } + } + } + }, + "deprecated_time_alarm_clock": { + "title": "Deprecated alarm clock entity", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_time_alarm_clock::title%]", + "description": "The alarm clock entity `{entity_id}` is deprecated because it's being moved to the `number` platform.\n\nPlease use the new `number` entity." + } + } + } + }, "deprecated_binary_common_door_sensor": { "title": "Deprecated binary door sensor detected in some automations or scripts", - "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_binary_common_door_sensor::title%]", + "description": "The binary door sensor `{entity}`, which is deprecated, is used in the following automations or scripts:\n{items}\n\nA sensor entity with additional possible states is available and should be used going forward; Please use it on the above automations or scripts to fix this issue." + } + } + } }, "deprecated_command_actions": { "title": "The command related actions are deprecated in favor of the new buttons", - "description": "The `pause_program` and `resume_program` actions have been deprecated in favor of new button entities, if the command is available for your appliance. Please update your automations, scripts and panels that use this action to use the button entities instead, and press on submit to fix the issue." + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_command_actions::title%]", + "description": "The `pause_program` and `resume_program` actions have been deprecated in favor of new button entities, if the command is available for your appliance. Please update your automations, scripts and panels that use this action to use the button entities instead, and press on submit to fix the issue." + } + } + } + }, + "deprecated_program_switch_in_automations_scripts": { + "title": "Deprecated program switch detected in some automations or scripts", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_program_switch_in_automations_scripts::title%]", + "description": "Program switches are deprecated and {entity_id} is used in the following automations or scripts:\n{items}\n\nYou can use the active program select entity to run the program without any additional options and get the current running program on the above automations or scripts to fix this issue." + } + } + } }, "deprecated_program_switch": { - "title": "Deprecated program switch detected in some automations or scripts", - "description": "Program switches are deprecated and {entity_id} is used in the following automations or scripts:\n{items}\n\nYou can use the active program select entity to run the program without any additional options and get the current running program on the above automations or scripts to fix this issue." 
+ "title": "Deprecated program switch entities", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::home_connect::issues::deprecated_program_switch::title%]", + "description": "The switch entity `{entity_id}` and all the other program switches are deprecated.\n\nPlease use the active program select entity instead." + } + } + } }, "deprecated_set_program_and_option_actions": { "title": "The executed action is deprecated", @@ -354,7 +408,7 @@ "options": { "consumer_products_coffee_maker_enum_type_flow_rate_normal": "Normal", "consumer_products_coffee_maker_enum_type_flow_rate_intense": "Intense", - "consumer_products_coffee_maker_enum_type_flow_rate_intense_plus": "Intense plus" + "consumer_products_coffee_maker_enum_type_flow_rate_intense_plus": "Intense +" } }, "coffee_milk_ratio": { @@ -410,7 +464,7 @@ "laundry_care_dryer_enum_type_drying_target_iron_dry": "Iron dry", "laundry_care_dryer_enum_type_drying_target_gentle_dry": "Gentle dry", "laundry_care_dryer_enum_type_drying_target_cupboard_dry": "Cupboard dry", - "laundry_care_dryer_enum_type_drying_target_cupboard_dry_plus": "Cupboard dry plus", + "laundry_care_dryer_enum_type_drying_target_cupboard_dry_plus": "Cupboard dry +", "laundry_care_dryer_enum_type_drying_target_extra_dry": "Extra dry" } }, @@ -594,7 +648,7 @@ "description": "Defines if the program sequence is optimized with a special drying cycle to ensure more shine on glasses and plastic items." }, "dishcare_dishwasher_option_vario_speed_plus": { - "name": "Vario speed plus", + "name": "Vario speed +", "description": "Defines if the program run time is reduced by up to 66% with the usual optimum cleaning and drying." }, "dishcare_dishwasher_option_silence_on_demand": { @@ -610,7 +664,7 @@ "description": "Defines if improved drying for glasses and plasticware is enabled." }, "dishcare_dishwasher_option_hygiene_plus": { - "name": "Hygiene plus", + "name": "Hygiene +", "description": "Defines if the cleaning is done with increased temperature. This ensures maximum hygienic cleanliness for regular use." 
}, "dishcare_dishwasher_option_eco_dry": { @@ -811,9 +865,18 @@ "bottle_cooler_door": { "name": "Bottle cooler door" }, + "common_chiller_door": { + "name": "Common chiller door" + }, "chiller_door": { "name": "Chiller door" }, + "left_chiller_door": { + "name": "Left chiller door" + }, + "right_chiller_door": { + "name": "Right chiller door" + }, "flex_compartment_door": { "name": "Flex compartment door" }, @@ -859,6 +922,9 @@ } }, "number": { + "alarm_clock": { + "name": "Alarm clock" + }, "refrigerator_setpoint_temperature": { "name": "Refrigerator temperature" }, @@ -1466,7 +1532,7 @@ "inactive": "Inactive", "ready": "Ready", "delayedstart": "Delayed start", - "run": "Run", + "run": "Running", "pause": "[%key:common::state::paused%]", "actionrequired": "Action required", "finished": "Finished", @@ -1536,23 +1602,64 @@ "oven_current_cavity_temperature": { "name": "Current oven cavity temperature" }, - "freezer_door_alarm": { - "name": "Freezer door alarm", - "state": { - "confirmed": "[%key:component::home_connect::common::confirmed%]", - "present": "[%key:component::home_connect::common::present%]" - } - }, - "refrigerator_door_alarm": { - "name": "Refrigerator door alarm", + "program_aborted": { + "name": "Program aborted", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": "[%key:component::home_connect::common::present%]" } }, - "freezer_temperature_alarm": { - "name": "Freezer temperature alarm", + "program_finished": { + "name": "Program finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "alarm_clock_elapsed": { + "name": "Alarm clock elapsed", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "preheat_finished": { + "name": "Pre-heat finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "regular_preheat_finished": { + "name": "Regular pre-heat finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "drying_process_finished": { + "name": "Drying process finished", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "salt_nearly_empty": { + "name": "Salt nearly empty", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "rinse_aid_nearly_empty": { + "name": "Rinse aid nearly empty", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", @@ -1583,16 +1690,216 @@ "present": "[%key:component::home_connect::common::present%]" } }, - "salt_nearly_empty": { - "name": "Salt nearly empty", + "keep_milk_tank_cool": { + "name": "Keep milk tank cool", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", "present": 
"[%key:component::home_connect::common::present%]" } }, - "rinse_aid_nearly_empty": { - "name": "Rinse aid nearly empty", + "descaling_in_20_cups": { + "name": "Descaling in 20 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_15_cups": { + "name": "Descaling in 15 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_10_cups": { + "name": "Descaling in 10 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "descaling_in_5_cups": { + "name": "Descaling in 5 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_descaled": { + "name": "Device should be descaled", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_descaling_overdue": { + "name": "Device descaling overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_descaling_blockage": { + "name": "Device descaling blockage", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_cleaned": { + "name": "Device should be cleaned", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_cleaning_overdue": { + "name": "Device cleaning overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in20cups": { + "name": "Calc'N'Clean in 20 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in15cups": { + "name": "Calc'N'Clean in 15 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in10cups": { + "name": "Calc'N'Clean in 10 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "calc_n_clean_in5cups": { + "name": "Calc'N'Clean in 5 cups", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_should_be_calc_n_cleaned": { + "name": 
"Device should be Calc'N'Cleaned", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_calc_n_clean_overdue": { + "name": "Device Calc'N'Clean overdue", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "device_calc_n_clean_blockage": { + "name": "Device Calc'N'Clean blockage", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "freezer_door_alarm": { + "name": "Freezer door alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "refrigerator_door_alarm": { + "name": "Refrigerator door alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "freezer_temperature_alarm": { + "name": "Freezer temperature alarm", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "empty_dust_box_and_clean_filter": { + "name": "Empty dust box and clean filter", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "robot_is_stuck": { + "name": "Robot is stuck", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "docking_station_not_found": { + "name": "Docking station not found", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "poor_i_dos_1_fill_level": { + "name": "Poor i-Dos 1 fill level", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "poor_i_dos_2_fill_level": { + "name": "Poor i-Dos 2 fill level", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "grease_filter_max_saturation_nearly_reached": { + "name": "Grease filter max saturation nearly reached", + "state": { + "off": "[%key:common::state::off%]", + "confirmed": "[%key:component::home_connect::common::confirmed%]", + "present": "[%key:component::home_connect::common::present%]" + } + }, + "grease_filter_max_saturation_reached": { + "name": "Grease filter max saturation reached", "state": { "off": "[%key:common::state::off%]", "confirmed": "[%key:component::home_connect::common::confirmed%]", diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 6f9aa0e679f..05f0ed2ddc3 100644 --- 
a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -22,16 +22,7 @@ from homeassistant.helpers.issue_registry import ( from homeassistant.helpers.typing import UNDEFINED, UndefinedType from .common import setup_home_connect_entry -from .const import ( - BSH_POWER_OFF, - BSH_POWER_ON, - BSH_POWER_STANDBY, - DOMAIN, - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_VALUE, -) +from .const import BSH_POWER_OFF, BSH_POWER_ON, BSH_POWER_STANDBY, DOMAIN from .coordinator import ( HomeConnectApplianceData, HomeConnectConfigEntry, @@ -226,8 +217,8 @@ class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): translation_key="turn_on", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, + "entity_id": self.entity_id, + "key": self.bsh_key, }, ) from err @@ -246,8 +237,8 @@ class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): translation_key="turn_off", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, + "entity_id": self.entity_id, + "key": self.bsh_key, }, ) from err @@ -275,7 +266,10 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): super().__init__( coordinator, appliance, - SwitchEntityDescription(key=EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM), + SwitchEntityDescription( + key=EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM, + entity_registry_enabled_default=False, + ), ) self._attr_name = f"{appliance.info.name} {desc}" self._attr_unique_id = f"{appliance.info.ha_id}-{desc}" @@ -313,11 +307,12 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): async_create_issue( self.hass, DOMAIN, - f"deprecated_program_switch_{self.entity_id}", + f"deprecated_program_switch_in_automations_scripts_{self.entity_id}", breaks_in_ha_version="2025.6.0", - is_fixable=False, + is_fixable=True, + is_persistent=True, severity=IssueSeverity.WARNING, - translation_key="deprecated_program_switch", + translation_key="deprecated_program_switch_in_automations_scripts", translation_placeholders={ "entity_id": self.entity_id, "items": "\n".join(items_list), @@ -326,12 +321,34 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): async def async_will_remove_from_hass(self) -> None: """Call when entity will be removed from hass.""" + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_program_switch_in_automations_scripts_{self.entity_id}", + ) async_delete_issue( self.hass, DOMAIN, f"deprecated_program_switch_{self.entity_id}" ) + def create_action_handler_issue(self) -> None: + """Create deprecation issue.""" + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_program_switch_{self.entity_id}", + breaks_in_ha_version="2025.6.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_program_switch", + translation_placeholders={ + "entity_id": self.entity_id, + }, + ) + async def async_turn_on(self, **kwargs: Any) -> None: """Start the program.""" + self.create_action_handler_issue() try: await self.coordinator.client.start_program( self.appliance.info.ha_id, program_key=self.program.key @@ -348,6 +365,7 @@ class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: 
"""Stop the program.""" + self.create_action_handler_issue() try: await self.coordinator.client.stop_program(self.appliance.info.ha_id) except HomeConnectError as err: @@ -385,7 +403,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): translation_key="power_on", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.appliance.info.name, + "appliance_name": self.appliance.info.name, }, ) from err @@ -398,7 +416,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): translation_domain=DOMAIN, translation_key="unable_to_retrieve_turn_off", translation_placeholders={ - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.appliance.info.name + "appliance_name": self.appliance.info.name }, ) @@ -406,9 +424,7 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): raise HomeAssistantError( translation_domain=DOMAIN, translation_key="turn_off_not_supported", - translation_placeholders={ - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.appliance.info.name - }, + translation_placeholders={"appliance_name": self.appliance.info.name}, ) try: await self.coordinator.client.set_setting( @@ -423,8 +439,8 @@ class HomeConnectPowerSwitch(HomeConnectEntity, SwitchEntity): translation_key="power_off", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_APPLIANCE_NAME: self.appliance.info.name, - SVE_TRANSLATION_PLACEHOLDER_VALUE: self.power_off_state, + "appliance_name": self.appliance.info.name, + "value": self.power_off_state, }, ) from err diff --git a/homeassistant/components/home_connect/time.py b/homeassistant/components/home_connect/time.py index a1761219d30..adf26d2d973 100644 --- a/homeassistant/components/home_connect/time.py +++ b/homeassistant/components/home_connect/time.py @@ -1,4 +1,4 @@ -"""Provides time enties for Home Connect.""" +"""Provides time entities for Home Connect.""" from datetime import time from typing import cast @@ -6,19 +6,21 @@ from typing import cast from aiohomeconnect.model import SettingKey from aiohomeconnect.model.error import HomeConnectError +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity from homeassistant.components.time import TimeEntity, TimeEntityDescription from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from .common import setup_home_connect_entry -from .const import ( - DOMAIN, - SVE_TRANSLATION_KEY_SET_SETTING, - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID, - SVE_TRANSLATION_PLACEHOLDER_KEY, - SVE_TRANSLATION_PLACEHOLDER_VALUE, -) +from .const import DOMAIN from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry from .entity import HomeConnectEntity from .utils import get_dict_from_home_connect_error @@ -29,6 +31,7 @@ TIME_ENTITIES = ( TimeEntityDescription( key=SettingKey.BSH_COMMON_ALARM_CLOCK, translation_key="alarm_clock", + entity_registry_enabled_default=False, ), ) @@ -73,8 +76,78 @@ def time_to_seconds(t: time) -> int: class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity): """Time setting class for Home Connect.""" + async def async_added_to_hass(self) -> None: + """Call when entity is added to 
hass.""" + await super().async_added_to_hass() + if self.bsh_key == SettingKey.BSH_COMMON_ALARM_CLOCK: + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + scripts + if not items: + return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ + script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- [{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_time_alarm_clock", + translation_placeholders={ + "entity_id": self.entity_id, + "items": "\n".join(items_list), + }, + ) + + async def async_will_remove_from_hass(self) -> None: + """Call when entity will be removed from hass.""" + if self.bsh_key == SettingKey.BSH_COMMON_ALARM_CLOCK: + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_in_automations_scripts_{self.entity_id}", + ) + async_delete_issue( + self.hass, DOMAIN, f"deprecated_time_alarm_clock_{self.entity_id}" + ) + async def async_set_value(self, value: time) -> None: """Set the native value of the entity.""" + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_time_alarm_clock_{self.entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=True, + is_persistent=True, + severity=IssueSeverity.WARNING, + translation_key="deprecated_time_alarm_clock", + translation_placeholders={ + "entity_id": self.entity_id, + }, + ) try: await self.coordinator.client.set_setting( self.appliance.info.ha_id, @@ -84,12 +157,12 @@ class HomeConnectTimeEntity(HomeConnectEntity, TimeEntity): except HomeConnectError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key=SVE_TRANSLATION_KEY_SET_SETTING, + translation_key="set_setting_entity", translation_placeholders={ **get_dict_from_home_connect_error(err), - SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID: self.entity_id, - SVE_TRANSLATION_PLACEHOLDER_KEY: self.bsh_key, - SVE_TRANSLATION_PLACEHOLDER_VALUE: str(value), + "entity_id": self.entity_id, + "key": self.bsh_key, + "value": str(value), }, ) from err diff --git a/homeassistant/components/home_connect/utils.py b/homeassistant/components/home_connect/utils.py index 108465072e1..ee5febb3cf7 100644 --- a/homeassistant/components/home_connect/utils.py +++ b/homeassistant/components/home_connect/utils.py @@ -2,7 +2,7 @@ import re -from aiohomeconnect.model.error import HomeConnectApiError, HomeConnectError +from aiohomeconnect.model.error import HomeConnectError RE_CAMEL_CASE = re.compile(r"(?<!^)(?=[A-Z])") def get_dict_from_home_connect_error(err: HomeConnectError) ->
dict[str, str]: """Return a translation string from a Home Connect error.""" - return { - "error": str(err) - if isinstance(err, HomeConnectApiError) - else type(err).__name__ - } + return {"error": str(err)} def bsh_key_to_translation_key(bsh_key: str) -> str: diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 590afd697b5..b8b5f77cf52 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -12,7 +12,7 @@ }, "imperial_unit_system": { "title": "The imperial unit system is deprecated", - "description": "The imperial unit system is deprecated and your system is currently using us customary. Please update your configuration to use the us customary unit system and reload the core configuration to fix this issue." + "description": "The imperial unit system is deprecated and your system is currently using US customary. Please update your configuration to use the US customary unit system and reload the Core configuration to fix this issue." }, "deprecated_yaml": { "title": "The {integration_title} YAML configuration is being removed", @@ -111,8 +111,8 @@ "description": "Checks the Home Assistant YAML-configuration files for errors. Errors will be shown in the Home Assistant logs." }, "reload_core_config": { - "name": "Reload core configuration", - "description": "Reloads the core configuration from the YAML-configuration." + "name": "Reload Core configuration", + "description": "Reloads the Core configuration from the YAML-configuration." }, "restart": { "name": "[%key:common::action::restart%]", @@ -160,7 +160,7 @@ }, "update_entity": { "name": "Update entity", - "description": "Forces one or more entities to update its data.", + "description": "Forces one or more entities to update their data.", "fields": { "entity_id": { "name": "Entities to update", @@ -188,7 +188,7 @@ }, "reload_all": { "name": "Reload all", - "description": "Reload all YAML configuration that can be reloaded without restarting Home Assistant." + "description": "Reloads all YAML configuration that can be reloaded without restarting Home Assistant." 
} }, "exceptions": { diff --git a/homeassistant/components/homeassistant_hardware/coordinator.py b/homeassistant/components/homeassistant_hardware/coordinator.py new file mode 100644 index 00000000000..c9a5c891328 --- /dev/null +++ b/homeassistant/components/homeassistant_hardware/coordinator.py @@ -0,0 +1,46 @@ +"""Home Assistant hardware firmware update coordinator.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from aiohttp import ClientSession +from ha_silabs_firmware_client import ( + FirmwareManifest, + FirmwareUpdateClient, + ManifestMissing, +) + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + + +FIRMWARE_REFRESH_INTERVAL = timedelta(hours=8) + + +class FirmwareUpdateCoordinator(DataUpdateCoordinator[FirmwareManifest]): + """Coordinator to manage firmware updates.""" + + def __init__(self, hass: HomeAssistant, session: ClientSession, url: str) -> None: + """Initialize the firmware update coordinator.""" + super().__init__( + hass, + _LOGGER, + name="firmware update coordinator", + update_interval=FIRMWARE_REFRESH_INTERVAL, + ) + self.hass = hass + self.session = session + + self.client = FirmwareUpdateClient(url, session) + + async def _async_update_data(self) -> FirmwareManifest: + try: + return await self.client.async_update_data() + except ManifestMissing as err: + raise UpdateFailed( + "GitHub release assets haven't been uploaded yet" + ) from err diff --git a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py index 83031587712..1b4840e5a98 100644 --- a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py +++ b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py @@ -33,6 +33,7 @@ from .util import ( OwningIntegration, get_otbr_addon_manager, get_zigbee_flasher_addon_manager, + guess_firmware_info, guess_hardware_owners, probe_silabs_firmware_info, ) @@ -511,6 +512,16 @@ class BaseFirmwareConfigFlow(BaseFirmwareInstallFlow, ConfigFlow): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Confirm a discovery.""" + assert self._device is not None + fw_info = await guess_firmware_info(self.hass, self._device) + + # If our guess for the firmware type is actually running, we can save the user + # an unnecessary confirmation and silently confirm the flow + for owner in fw_info.owners: + if await owner.is_running(self.hass): + self._probed_firmware_info = fw_info + return self._async_flow_finished() + return await self.async_step_pick_firmware() diff --git a/homeassistant/components/homeassistant_hardware/manifest.json b/homeassistant/components/homeassistant_hardware/manifest.json index 8f59ab61600..f3a02185b83 100644 --- a/homeassistant/components/homeassistant_hardware/manifest.json +++ b/homeassistant/components/homeassistant_hardware/manifest.json @@ -5,5 +5,8 @@ "codeowners": ["@home-assistant/core"], "documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware", "integration_type": "system", - "requirements": ["universal-silabs-flasher==0.0.29"] + "requirements": [ + "universal-silabs-flasher==0.0.30", + "ha-silabs-firmware-client==0.2.0" + ] } diff --git a/homeassistant/components/homeassistant_hardware/strings.json b/homeassistant/components/homeassistant_hardware/strings.json index de328a54bb7..6dda01561f1 100644 --- 
a/homeassistant/components/homeassistant_hardware/strings.json +++ b/homeassistant/components/homeassistant_hardware/strings.json @@ -39,8 +39,8 @@ "description": "The OpenThread Border Router (OTBR) add-on is now starting." }, "otbr_failed": { - "title": "Failed to setup OpenThread Border Router", - "description": "The OpenThread Border Router add-on installation was unsuccessful. Ensure no other software is trying to communicate with the {model}, you have access to the internet and can install other add-ons, and try again. Check the Supervisor logs if the problem persists." + "title": "Failed to set up OpenThread Border Router", + "description": "The OpenThread Border Router add-on installation was unsuccessful. Ensure no other software is trying to communicate with the {model}, you have access to the Internet and can install other add-ons, and try again. Check the Supervisor logs if the problem persists." }, "confirm_otbr": { "title": "OpenThread Border Router setup complete", @@ -48,16 +48,16 @@ } }, "abort": { - "not_hassio_thread": "The OpenThread Border Router addon can only be installed with Home Assistant OS. If you would like to use the {model} as an Thread border router, please flash the firmware manually using the [web flasher]({docs_web_flasher_url}) and set up OpenThread Border Router to communicate with it.", + "not_hassio_thread": "The OpenThread Border Router add-on can only be installed with Home Assistant OS. If you would like to use the {model} as a Thread border router, please flash the firmware manually using the [web flasher]({docs_web_flasher_url}) and set up OpenThread Border Router to communicate with it.", "otbr_addon_already_running": "The OpenThread Border Router add-on is already running, it cannot be installed again.", "zha_still_using_stick": "This {model} is in use by the Zigbee Home Automation integration. Please migrate your Zigbee network to another adapter or delete the integration and try again.", "otbr_still_using_stick": "This {model} is in use by the OpenThread Border Router add-on. If you use the Thread network, make sure you have alternative border routers. Uninstall the add-on and try again.", - "unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or addon is currently trying to communicate with the device. If you are running Home Assistant OS in a virtual machine or in Docker, please make sure that permissions are set correctly for the device." + "unsupported_firmware": "The radio firmware on your {model} could not be determined. Make sure that no other integration or add-on is currently trying to communicate with the device. If you are running Home Assistant OS in a virtual machine or in Docker, please make sure that permissions are set correctly for the device." }, "progress": { - "install_zigbee_flasher_addon": "The Silicon Labs Flasher addon is installed, this may take a few minutes.", + "install_zigbee_flasher_addon": "The Silicon Labs Flasher add-on is installed, this may take a few minutes.", "run_zigbee_flasher_addon": "Please wait while Zigbee firmware is installed to your {model}, this will take a few minutes. Do not make any changes to your hardware or software until this finishes.", - "uninstall_zigbee_flasher_addon": "The Silicon Labs Flasher addon is being removed." + "uninstall_zigbee_flasher_addon": "The Silicon Labs Flasher add-on is being removed." 
} } }, diff --git a/homeassistant/components/homeassistant_hardware/update.py b/homeassistant/components/homeassistant_hardware/update.py new file mode 100644 index 00000000000..1b0f15ca021 --- /dev/null +++ b/homeassistant/components/homeassistant_hardware/update.py @@ -0,0 +1,326 @@ +"""Home Assistant Hardware base firmware update entity.""" + +from __future__ import annotations + +from collections.abc import AsyncIterator, Callable +from contextlib import AsyncExitStack, asynccontextmanager +from dataclasses import dataclass +import logging +from typing import Any, cast + +from ha_silabs_firmware_client import FirmwareManifest, FirmwareMetadata +from universal_silabs_flasher.firmware import parse_firmware_image +from universal_silabs_flasher.flasher import Flasher +from yarl import URL + +from homeassistant.components.update import ( + UpdateEntity, + UpdateEntityDescription, + UpdateEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import CALLBACK_TYPE, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.restore_state import ExtraStoredData +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .coordinator import FirmwareUpdateCoordinator +from .helpers import async_register_firmware_info_callback +from .util import ( + ApplicationType, + FirmwareInfo, + guess_firmware_info, + probe_silabs_firmware_info, +) + +_LOGGER = logging.getLogger(__name__) + +type FirmwareChangeCallbackType = Callable[ + [ApplicationType | None, ApplicationType | None], None +] + + +@dataclass(kw_only=True, frozen=True) +class FirmwareUpdateEntityDescription(UpdateEntityDescription): + """Describes Home Assistant Hardware firmware update entity.""" + + version_parser: Callable[[str], str] + fw_type: str | None + version_key: str | None + expected_firmware_type: ApplicationType | None + firmware_name: str | None + + +@dataclass +class FirmwareUpdateExtraStoredData(ExtraStoredData): + """Extra stored data for Home Assistant Hardware firmware update entity.""" + + firmware_manifest: FirmwareManifest | None = None + + def as_dict(self) -> dict[str, Any]: + """Return a dict representation of the extra data.""" + return { + "firmware_manifest": ( + self.firmware_manifest.as_dict() + if self.firmware_manifest is not None + else None + ) + } + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> FirmwareUpdateExtraStoredData: + """Initialize the extra data from a dict.""" + if data["firmware_manifest"] is None: + return cls(firmware_manifest=None) + + return cls( + FirmwareManifest.from_json( + data["firmware_manifest"], + # This data is not technically part of the manifest and is loaded externally + url=URL(data["firmware_manifest"]["url"]), + html_url=URL(data["firmware_manifest"]["html_url"]), + ) + ) + + +class BaseFirmwareUpdateEntity( + CoordinatorEntity[FirmwareUpdateCoordinator], UpdateEntity +): + """Base Home Assistant Hardware firmware update entity.""" + + # Subclasses provide the mapping between firmware types and entity descriptions + entity_description: FirmwareUpdateEntityDescription + bootloader_reset_type: str | None = None + + _attr_supported_features = ( + UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + ) + _attr_has_entity_name = True + + def __init__( + self, + device: str, + config_entry: ConfigEntry, + update_coordinator: FirmwareUpdateCoordinator, + entity_description: FirmwareUpdateEntityDescription, + ) -> None: + """Initialize the Hardware firmware update 
entity.""" + super().__init__(update_coordinator) + + self.entity_description = entity_description + self._current_device = device + self._config_entry = config_entry + self._current_firmware_info: FirmwareInfo | None = None + self._firmware_type_change_callbacks: set[FirmwareChangeCallbackType] = set() + + self._latest_manifest: FirmwareManifest | None = None + self._latest_firmware: FirmwareMetadata | None = None + + def add_firmware_type_changed_callback( + self, + change_callback: FirmwareChangeCallbackType, + ) -> CALLBACK_TYPE: + """Add a callback for when the firmware type changes.""" + self._firmware_type_change_callbacks.add(change_callback) + + @callback + def remove_callback() -> None: + self._firmware_type_change_callbacks.discard(change_callback) + + return remove_callback + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + + self.async_on_remove( + async_register_firmware_info_callback( + self.hass, + self._current_device, + self._firmware_info_callback, + ) + ) + + self.async_on_remove( + self._config_entry.async_on_state_change(self._on_config_entry_change) + ) + + if (extra_data := await self.async_get_last_extra_data()) and ( + hardware_extra_data := FirmwareUpdateExtraStoredData.from_dict( + extra_data.as_dict() + ) + ): + self._latest_manifest = hardware_extra_data.firmware_manifest + + self._update_attributes() + + @property + def extra_restore_state_data(self) -> FirmwareUpdateExtraStoredData: + """Return state data to be restored.""" + return FirmwareUpdateExtraStoredData(firmware_manifest=self._latest_manifest) + + @callback + def _on_config_entry_change(self) -> None: + """Handle config entry changes.""" + self._update_attributes() + self.async_write_ha_state() + + @callback + def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: + """Handle updated firmware info being pushed by an integration.""" + self._current_firmware_info = firmware_info + + # If the firmware type does not change, we can just update the attributes + if ( + self._current_firmware_info.firmware_type + == self.entity_description.expected_firmware_type + ): + self._update_attributes() + self.async_write_ha_state() + return + + # Otherwise, fire the firmware type change callbacks. They are expected to + # replace the entity so there is no purpose in firing other callbacks. 
+ for change_callback in self._firmware_type_change_callbacks.copy(): + try: + change_callback( + self.entity_description.expected_firmware_type, + self._current_firmware_info.firmware_type, + ) + except Exception: # noqa: BLE001 + _LOGGER.warning( + "Failed to call firmware type changed callback", exc_info=True + ) + + def _update_attributes(self) -> None: + """Recompute the attributes of the entity.""" + self._attr_title = self.entity_description.firmware_name or "Unknown" + + if ( + self._current_firmware_info is None + or self._current_firmware_info.firmware_version is None + ): + self._attr_installed_version = None + else: + self._attr_installed_version = self.entity_description.version_parser( + self._current_firmware_info.firmware_version + ) + + self._latest_firmware = None + self._attr_latest_version = None + self._attr_release_summary = None + self._attr_release_url = None + + if ( + self._latest_manifest is None + or self.entity_description.fw_type is None + or self.entity_description.version_key is None + ): + return + + try: + self._latest_firmware = next( + f + for f in self._latest_manifest.firmwares + if f.filename.startswith(self.entity_description.fw_type) + ) + except StopIteration: + pass + else: + version = cast( + str, self._latest_firmware.metadata[self.entity_description.version_key] + ) + self._attr_latest_version = self.entity_description.version_parser(version) + self._attr_release_summary = self._latest_firmware.release_notes + self._attr_release_url = str(self._latest_manifest.html_url) + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._latest_manifest = self.coordinator.data + self._update_attributes() + self.async_write_ha_state() + + def _update_progress(self, offset: int, total_size: int) -> None: + """Handle update progress.""" + + # Firmware updates in ~30s so we still get responsive update progress even + # without decimal places + self._attr_update_percentage = round((offset * 100) / total_size) + self.async_write_ha_state() + + @asynccontextmanager + async def _temporarily_stop_hardware_owners( + self, device: str + ) -> AsyncIterator[None]: + """Temporarily stop addons and integrations communicating with the device.""" + firmware_info = await guess_firmware_info(self.hass, device) + _LOGGER.debug("Identified firmware info: %s", firmware_info) + + async with AsyncExitStack() as stack: + for owner in firmware_info.owners: + await stack.enter_async_context(owner.temporarily_stop(self.hass)) + + yield + + async def async_install( + self, version: str | None, backup: bool, **kwargs: Any + ) -> None: + """Install an update.""" + assert self._latest_firmware is not None + assert self.entity_description.expected_firmware_type is not None + + # Start off by setting the progress bar to an indeterminate state + self._attr_in_progress = True + self._attr_update_percentage = None + self.async_write_ha_state() + + fw_data = await self.coordinator.client.async_fetch_firmware( + self._latest_firmware + ) + fw_image = await self.hass.async_add_executor_job(parse_firmware_image, fw_data) + + device = self._current_device + + flasher = Flasher( + device=device, + probe_methods=( + ApplicationType.GECKO_BOOTLOADER.as_flasher_application_type(), + ApplicationType.EZSP.as_flasher_application_type(), + ApplicationType.SPINEL.as_flasher_application_type(), + ApplicationType.CPC.as_flasher_application_type(), + ), + bootloader_reset=self.bootloader_reset_type, + ) + + async with 
self._temporarily_stop_hardware_owners(device): + try: + try: + # Enter the bootloader with indeterminate progress + await flasher.enter_bootloader() + + # Flash the firmware, with progress + await flasher.flash_firmware( + fw_image, progress_callback=self._update_progress + ) + except Exception as err: + raise HomeAssistantError("Failed to flash firmware") from err + + # Probe the running application type with indeterminate progress + self._attr_update_percentage = None + self.async_write_ha_state() + + firmware_info = await probe_silabs_firmware_info( + device, + probe_methods=(self.entity_description.expected_firmware_type,), + ) + + if firmware_info is None: + raise HomeAssistantError( + "Failed to probe the firmware after flashing" + ) + + self._firmware_info_callback(firmware_info) + finally: + self._attr_in_progress = False + self.async_write_ha_state() diff --git a/homeassistant/components/homeassistant_hardware/util.py b/homeassistant/components/homeassistant_hardware/util.py index 1afb786369e..64f363e4f23 100644 --- a/homeassistant/components/homeassistant_hardware/util.py +++ b/homeassistant/components/homeassistant_hardware/util.py @@ -4,7 +4,8 @@ from __future__ import annotations import asyncio from collections import defaultdict -from collections.abc import Iterable +from collections.abc import AsyncIterator, Iterable +from contextlib import asynccontextmanager from dataclasses import dataclass from enum import StrEnum import logging @@ -105,6 +106,28 @@ class OwningAddon: else: return addon_info.state == AddonState.RUNNING + @asynccontextmanager + async def temporarily_stop(self, hass: HomeAssistant) -> AsyncIterator[None]: + """Temporarily stop the add-on, restarting it after completion.""" + addon_manager = self._get_addon_manager(hass) + + try: + addon_info = await addon_manager.async_get_addon_info() + except AddonError: + yield + return + + if addon_info.state != AddonState.RUNNING: + yield + return + + try: + await addon_manager.async_stop_addon() + await addon_manager.async_wait_until_addon_state(AddonState.NOT_RUNNING) + yield + finally: + await addon_manager.async_start_addon_waiting() + @dataclass(kw_only=True) class OwningIntegration: @@ -123,6 +146,23 @@ class OwningIntegration: ConfigEntryState.SETUP_IN_PROGRESS, ) + @asynccontextmanager + async def temporarily_stop(self, hass: HomeAssistant) -> AsyncIterator[None]: + """Temporarily stop the integration, restarting it after completion.""" + if (entry := hass.config_entries.async_get_entry(self.config_entry_id)) is None: + yield + return + + if entry.state != ConfigEntryState.LOADED: + yield + return + + try: + await hass.config_entries.async_unload(entry.entry_id) + yield + finally: + await hass.config_entries.async_setup(entry.entry_id) + @dataclass(kw_only=True) class FirmwareInfo: diff --git a/homeassistant/components/homeassistant_sky_connect/__init__.py b/homeassistant/components/homeassistant_sky_connect/__init__.py index 758f0c1e1ef..dfc129ddc75 100644 --- a/homeassistant/components/homeassistant_sky_connect/__init__.py +++ b/homeassistant/components/homeassistant_sky_connect/__init__.py @@ -3,21 +3,87 @@ from __future__ import annotations import logging +import os.path from homeassistant.components.homeassistant_hardware.util import guess_firmware_info +from homeassistant.components.usb import ( + USBDevice, + async_register_port_event_callback, + scan_serial_ports, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import 
HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.typing import ConfigType + +from .const import ( + DESCRIPTION, + DEVICE, + DOMAIN, + FIRMWARE, + FIRMWARE_VERSION, + MANUFACTURER, + PID, + PRODUCT, + SERIAL_NUMBER, + VID, +) _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the ZBT-1 integration.""" + + @callback + def async_port_event_callback( + added: set[USBDevice], removed: set[USBDevice] + ) -> None: + """Handle USB port events.""" + current_entries_by_path = { + entry.data[DEVICE]: entry + for entry in hass.config_entries.async_entries(DOMAIN) + } + + for device in added | removed: + path = device.device + entry = current_entries_by_path.get(path) + + if entry is not None: + _LOGGER.debug( + "Device %r has changed state, reloading config entry %s", + path, + entry, + ) + hass.config_entries.async_schedule_reload(entry.entry_id) + + async_register_port_event_callback(hass, async_port_event_callback) + + return True + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a Home Assistant SkyConnect config entry.""" + + # Postpone loading the config entry if the device is missing + device_path = entry.data[DEVICE] + if not await hass.async_add_executor_job(os.path.exists, device_path): + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_disconnected", + ) + + await hass.config_entries.async_forward_entry_setups(entry, ["update"]) + return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" + await hass.config_entries.async_unload_platforms(entry, ["update"]) return True @@ -25,7 +91,7 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> """Migrate old entry.""" _LOGGER.debug( - "Migrating from version %s:%s", config_entry.version, config_entry.minor_version + "Migrating from version %s.%s", config_entry.version, config_entry.minor_version ) if config_entry.version == 1: @@ -33,15 +99,13 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> # Add-on startup with type service get started before Core, always (e.g. the # Multi-Protocol add-on). Probing the firmware would interfere with the add-on, # so we can't safely probe here. Instead, we must make an educated guess! 
- firmware_guess = await guess_firmware_info( - hass, config_entry.data["device"] - ) + firmware_guess = await guess_firmware_info(hass, config_entry.data[DEVICE]) new_data = {**config_entry.data} - new_data["firmware"] = firmware_guess.firmware_type.value + new_data[FIRMWARE] = firmware_guess.firmware_type.value # Copy `description` to `product` - new_data["product"] = new_data["description"] + new_data[PRODUCT] = new_data[DESCRIPTION] hass.config_entries.async_update_entry( config_entry, @@ -50,6 +114,55 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> minor_version=2, ) + if config_entry.minor_version == 2: + # Add a `firmware_version` key + hass.config_entries.async_update_entry( + config_entry, + data={ + **config_entry.data, + FIRMWARE_VERSION: None, + }, + version=1, + minor_version=3, + ) + + if config_entry.minor_version == 3: + # Old SkyConnect config entries were missing keys + if any( + key not in config_entry.data + for key in (VID, PID, MANUFACTURER, PRODUCT, SERIAL_NUMBER) + ): + serial_ports = await hass.async_add_executor_job(scan_serial_ports) + serial_ports_info = {port.device: port for port in serial_ports} + device = config_entry.data[DEVICE] + + if not (usb_info := serial_ports_info.get(device)): + raise HomeAssistantError( + f"USB device {device} is missing, cannot migrate" + ) + + hass.config_entries.async_update_entry( + config_entry, + data={ + **config_entry.data, + VID: usb_info.vid, + PID: usb_info.pid, + MANUFACTURER: usb_info.manufacturer, + PRODUCT: usb_info.description, + DESCRIPTION: usb_info.description, + SERIAL_NUMBER: usb_info.serial_number, + }, + version=1, + minor_version=4, + ) + else: + # Existing entries are migrated by just incrementing the version + hass.config_entries.async_update_entry( + config_entry, + version=1, + minor_version=4, + ) + _LOGGER.debug( "Migration to version %s.%s successful", config_entry.version, diff --git a/homeassistant/components/homeassistant_sky_connect/config_flow.py b/homeassistant/components/homeassistant_sky_connect/config_flow.py index d8446c2d3f9..eb5ea214b3e 100644 --- a/homeassistant/components/homeassistant_sky_connect/config_flow.py +++ b/homeassistant/components/homeassistant_sky_connect/config_flow.py @@ -24,7 +24,20 @@ from homeassistant.config_entries import ( from homeassistant.core import callback from homeassistant.helpers.service_info.usb import UsbServiceInfo -from .const import DOCS_WEB_FLASHER_URL, DOMAIN, HardwareVariant +from .const import ( + DESCRIPTION, + DEVICE, + DOCS_WEB_FLASHER_URL, + DOMAIN, + FIRMWARE, + FIRMWARE_VERSION, + MANUFACTURER, + PID, + PRODUCT, + SERIAL_NUMBER, + VID, + HardwareVariant, +) from .util import get_hardware_variant, get_usb_service_info _LOGGER = logging.getLogger(__name__) @@ -37,6 +50,7 @@ if TYPE_CHECKING: def _get_translation_placeholders(self) -> dict[str, str]: return {} + else: # Multiple inheritance with `Protocol` seems to break TranslationPlaceholderProtocol = object @@ -67,7 +81,7 @@ class HomeAssistantSkyConnectConfigFlow( """Handle a config flow for Home Assistant SkyConnect.""" VERSION = 1 - MINOR_VERSION = 2 + MINOR_VERSION = 4 def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize the config flow.""" @@ -82,7 +96,7 @@ class HomeAssistantSkyConnectConfigFlow( config_entry: ConfigEntry, ) -> OptionsFlow: """Return the options flow.""" - firmware_type = ApplicationType(config_entry.data["firmware"]) + firmware_type = ApplicationType(config_entry.data[FIRMWARE]) if firmware_type is ApplicationType.CPC: 
return HomeAssistantSkyConnectMultiPanOptionsFlowHandler(config_entry) @@ -100,7 +114,7 @@ class HomeAssistantSkyConnectConfigFlow( unique_id = f"{vid}:{pid}_{serial_number}_{manufacturer}_{description}" if await self.async_set_unique_id(unique_id): - self._abort_if_unique_id_configured(updates={"device": device}) + self._abort_if_unique_id_configured(updates={DEVICE: device}) discovery_info.device = await self.hass.async_add_executor_job( usb.get_serial_by_id, discovery_info.device @@ -126,14 +140,15 @@ class HomeAssistantSkyConnectConfigFlow( return self.async_create_entry( title=self._hw_variant.full_name, data={ - "vid": self._usb_info.vid, - "pid": self._usb_info.pid, - "serial_number": self._usb_info.serial_number, - "manufacturer": self._usb_info.manufacturer, - "description": self._usb_info.description, # For backwards compatibility - "product": self._usb_info.description, - "device": self._usb_info.device, - "firmware": self._probed_firmware_info.firmware_type.value, + VID: self._usb_info.vid, + PID: self._usb_info.pid, + SERIAL_NUMBER: self._usb_info.serial_number, + MANUFACTURER: self._usb_info.manufacturer, + DESCRIPTION: self._usb_info.description, # For backwards compatibility + PRODUCT: self._usb_info.description, + DEVICE: self._usb_info.device, + FIRMWARE: self._probed_firmware_info.firmware_type.value, + FIRMWARE_VERSION: self._probed_firmware_info.firmware_version, }, ) @@ -148,7 +163,7 @@ class HomeAssistantSkyConnectMultiPanOptionsFlowHandler( ) -> silabs_multiprotocol_addon.SerialPortSettings: """Return the radio serial port settings.""" return silabs_multiprotocol_addon.SerialPortSettings( - device=self.config_entry.data["device"], + device=self.config_entry.data[DEVICE], baudrate="115200", flow_control=True, ) @@ -182,7 +197,8 @@ class HomeAssistantSkyConnectMultiPanOptionsFlowHandler( entry=self.config_entry, data={ **self.config_entry.data, - "firmware": ApplicationType.EZSP.value, + FIRMWARE: ApplicationType.EZSP.value, + FIRMWARE_VERSION: None, }, options=self.config_entry.options, ) @@ -201,15 +217,15 @@ class HomeAssistantSkyConnectOptionsFlowHandler( self._usb_info = get_usb_service_info(self.config_entry) self._hw_variant = HardwareVariant.from_usb_product_name( - self.config_entry.data["product"] + self.config_entry.data[PRODUCT] ) self._hardware_name = self._hw_variant.full_name self._device = self._usb_info.device self._probed_firmware_info = FirmwareInfo( device=self._device, - firmware_type=ApplicationType(self.config_entry.data["firmware"]), - firmware_version=None, + firmware_type=ApplicationType(self.config_entry.data[FIRMWARE]), + firmware_version=self.config_entry.data[FIRMWARE_VERSION], source="guess", owners=[], ) @@ -225,7 +241,8 @@ class HomeAssistantSkyConnectOptionsFlowHandler( entry=self.config_entry, data={ **self.config_entry.data, - "firmware": self._probed_firmware_info.firmware_type.value, + FIRMWARE: self._probed_firmware_info.firmware_type.value, + FIRMWARE_VERSION: self._probed_firmware_info.firmware_version, }, options=self.config_entry.options, ) diff --git a/homeassistant/components/homeassistant_sky_connect/const.py b/homeassistant/components/homeassistant_sky_connect/const.py index cae0b98a25b..70ff047366d 100644 --- a/homeassistant/components/homeassistant_sky_connect/const.py +++ b/homeassistant/components/homeassistant_sky_connect/const.py @@ -7,6 +7,20 @@ from typing import Self DOMAIN = "homeassistant_sky_connect" DOCS_WEB_FLASHER_URL = "https://skyconnect.home-assistant.io/firmware-update/" +NABU_CASA_FIRMWARE_RELEASES_URL 
= ( + "https://api.github.com/repos/NabuCasa/silabs-firmware-builder/releases/latest" +) + +FIRMWARE = "firmware" +FIRMWARE_VERSION = "firmware_version" +SERIAL_NUMBER = "serial_number" +MANUFACTURER = "manufacturer" +PRODUCT = "product" +DESCRIPTION = "description" +PID = "pid" +VID = "vid" +DEVICE = "device" + @dataclasses.dataclass(frozen=True) class VariantInfo: diff --git a/homeassistant/components/homeassistant_sky_connect/strings.json b/homeassistant/components/homeassistant_sky_connect/strings.json index a596b9846ce..a990f025e8d 100644 --- a/homeassistant/components/homeassistant_sky_connect/strings.json +++ b/homeassistant/components/homeassistant_sky_connect/strings.json @@ -195,5 +195,10 @@ "run_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::run_zigbee_flasher_addon%]", "uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::uninstall_zigbee_flasher_addon%]" } + }, + "exceptions": { + "device_disconnected": { + "message": "The device is not plugged in" + } } } diff --git a/homeassistant/components/homeassistant_sky_connect/update.py b/homeassistant/components/homeassistant_sky_connect/update.py new file mode 100644 index 00000000000..74c28b37eaf --- /dev/null +++ b/homeassistant/components/homeassistant_sky_connect/update.py @@ -0,0 +1,228 @@ +"""Home Assistant SkyConnect firmware update entity.""" + +from __future__ import annotations + +import logging + +import aiohttp + +from homeassistant.components.homeassistant_hardware.coordinator import ( + FirmwareUpdateCoordinator, +) +from homeassistant.components.homeassistant_hardware.update import ( + BaseFirmwareUpdateEntity, + FirmwareUpdateEntityDescription, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, +) +from homeassistant.components.update import UpdateDeviceClass +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import ( + DOMAIN, + FIRMWARE, + FIRMWARE_VERSION, + NABU_CASA_FIRMWARE_RELEASES_URL, + PRODUCT, + SERIAL_NUMBER, + HardwareVariant, +) + +_LOGGER = logging.getLogger(__name__) + + +FIRMWARE_ENTITY_DESCRIPTIONS: dict[ + ApplicationType | None, FirmwareUpdateEntityDescription +] = { + ApplicationType.EZSP: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split(" ", 1)[0], + fw_type="skyconnect_zigbee_ncp", + version_key="ezsp_version", + expected_firmware_type=ApplicationType.EZSP, + firmware_name="EmberZNet Zigbee", + ), + ApplicationType.SPINEL: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split("/", 1)[1].split("_", 1)[0], + fw_type="skyconnect_openthread_rcp", + version_key="ot_rcp_version", + expected_firmware_type=ApplicationType.SPINEL, + firmware_name="OpenThread RCP", + ), + ApplicationType.CPC: FirmwareUpdateEntityDescription( + key="firmware", + 
display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type="skyconnect_multipan", + version_key="cpc_version", + expected_firmware_type=ApplicationType.CPC, + firmware_name="Multiprotocol", + ), + ApplicationType.GECKO_BOOTLOADER: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, # We don't want to update the bootloader + version_key="gecko_bootloader_version", + expected_firmware_type=ApplicationType.GECKO_BOOTLOADER, + firmware_name="Gecko Bootloader", + ), + None: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, + version_key=None, + expected_firmware_type=None, + firmware_name=None, + ), +} + + +def _async_create_update_entity( + hass: HomeAssistant, + config_entry: ConfigEntry, + session: aiohttp.ClientSession, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> FirmwareUpdateEntity: + """Create an update entity that handles firmware type changes.""" + firmware_type = config_entry.data[FIRMWARE] + + try: + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[ + ApplicationType(firmware_type) + ] + except (KeyError, ValueError): + _LOGGER.debug( + "Unknown firmware type %r, using default entity description", firmware_type + ) + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[None] + + entity = FirmwareUpdateEntity( + device=config_entry.data["device"], + config_entry=config_entry, + update_coordinator=FirmwareUpdateCoordinator( + hass, + session, + NABU_CASA_FIRMWARE_RELEASES_URL, + ), + entity_description=entity_description, + ) + + def firmware_type_changed( + old_type: ApplicationType | None, new_type: ApplicationType | None + ) -> None: + """Replace the current entity when the firmware type changes.""" + er.async_get(hass).async_remove(entity.entity_id) + async_add_entities( + [ + _async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + ] + ) + + entity.async_on_remove( + entity.add_firmware_type_changed_callback(firmware_type_changed) + ) + + return entity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the firmware update config entry.""" + session = async_get_clientsession(hass) + entity = _async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + + async_add_entities([entity]) + + +class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): + """SkyConnect firmware update entity.""" + + bootloader_reset_type = None + + def __init__( + self, + device: str, + config_entry: ConfigEntry, + update_coordinator: FirmwareUpdateCoordinator, + entity_description: FirmwareUpdateEntityDescription, + ) -> None: + """Initialize the SkyConnect firmware update entity.""" + super().__init__(device, config_entry, update_coordinator, entity_description) + + variant = HardwareVariant.from_usb_product_name( + self._config_entry.data[PRODUCT] + ) + serial_number = self._config_entry.data[SERIAL_NUMBER] + + self._attr_unique_id = f"{serial_number}_{self.entity_description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, serial_number)}, + name=f"{variant.full_name} ({serial_number[:8]})", + model=variant.full_name, + 
manufacturer="Nabu Casa", + serial_number=serial_number, + ) + + # Use the cached firmware info if it exists + if self._config_entry.data[FIRMWARE] is not None: + self._current_firmware_info = FirmwareInfo( + device=device, + firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]), + firmware_version=self._config_entry.data[FIRMWARE_VERSION], + owners=[], + source="homeassistant_sky_connect", + ) + + def _update_attributes(self) -> None: + """Recompute the attributes of the entity.""" + super()._update_attributes() + + assert self.device_entry is not None + device_registry = dr.async_get(self.hass) + device_registry.async_update_device( + device_id=self.device_entry.id, + sw_version=f"{self.entity_description.firmware_name} {self._attr_installed_version}", + ) + + @callback + def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: + """Handle updated firmware info being pushed by an integration.""" + self.hass.config_entries.async_update_entry( + self._config_entry, + data={ + **self._config_entry.data, + FIRMWARE: firmware_info.firmware_type, + FIRMWARE_VERSION: firmware_info.firmware_version, + }, + ) + super()._firmware_info_callback(firmware_info) diff --git a/homeassistant/components/homeassistant_yellow/__init__.py b/homeassistant/components/homeassistant_yellow/__init__.py index b0837eeedbe..71aa8ef99b7 100644 --- a/homeassistant/components/homeassistant_yellow/__init__.py +++ b/homeassistant/components/homeassistant_yellow/__init__.py @@ -18,7 +18,7 @@ from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import discovery_flow from homeassistant.helpers.hassio import is_hassio -from .const import FIRMWARE, RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA +from .const import FIRMWARE, FIRMWARE_VERSION, RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA _LOGGER = logging.getLogger(__name__) @@ -55,11 +55,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: data=ZHA_HW_DISCOVERY_DATA, ) + await hass.config_entries.async_forward_entry_setups(entry, ["update"]) + return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" + await hass.config_entries.async_unload_platforms(entry, ["update"]) return True @@ -87,6 +90,18 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> minor_version=2, ) + if config_entry.minor_version == 2: + # Add a `firmware_version` key + hass.config_entries.async_update_entry( + config_entry, + data={ + **config_entry.data, + FIRMWARE_VERSION: None, + }, + version=1, + minor_version=3, + ) + _LOGGER.debug( "Migration to version %s.%s successful", config_entry.version, diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py b/homeassistant/components/homeassistant_yellow/config_flow.py index b916c6e46ca..5472c346e94 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -37,7 +37,14 @@ from homeassistant.config_entries import ( from homeassistant.core import HomeAssistant, async_get_hass, callback from homeassistant.helpers import discovery_flow, selector -from .const import DOMAIN, FIRMWARE, RADIO_DEVICE, ZHA_DOMAIN, ZHA_HW_DISCOVERY_DATA +from .const import ( + DOMAIN, + FIRMWARE, + FIRMWARE_VERSION, + RADIO_DEVICE, + ZHA_DOMAIN, + ZHA_HW_DISCOVERY_DATA, +) from .hardware import BOARD_NAME _LOGGER = logging.getLogger(__name__) @@ -55,7 +62,7 @@ class 
HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN): """Handle a config flow for Home Assistant Yellow.""" VERSION = 1 - MINOR_VERSION = 2 + MINOR_VERSION = 3 def __init__(self, *args: Any, **kwargs: Any) -> None: """Instantiate config flow.""" @@ -310,6 +317,7 @@ class HomeAssistantYellowOptionsFlowHandler( data={ **self.config_entry.data, FIRMWARE: self._probed_firmware_info.firmware_type.value, + FIRMWARE_VERSION: self._probed_firmware_info.firmware_version, }, ) diff --git a/homeassistant/components/homeassistant_yellow/const.py b/homeassistant/components/homeassistant_yellow/const.py index 79753ae9b9e..b8bf17391f9 100644 --- a/homeassistant/components/homeassistant_yellow/const.py +++ b/homeassistant/components/homeassistant_yellow/const.py @@ -2,7 +2,11 @@ DOMAIN = "homeassistant_yellow" +MODEL = "Home Assistant Yellow" +MANUFACTURER = "Nabu Casa" + RADIO_DEVICE = "/dev/ttyAMA1" + ZHA_HW_DISCOVERY_DATA = { "name": "Yellow", "port": { @@ -14,4 +18,9 @@ ZHA_HW_DISCOVERY_DATA = { } FIRMWARE = "firmware" +FIRMWARE_VERSION = "firmware_version" ZHA_DOMAIN = "zha" + +NABU_CASA_FIRMWARE_RELEASES_URL = ( + "https://api.github.com/repos/NabuCasa/silabs-firmware-builder/releases/latest" +) diff --git a/homeassistant/components/homeassistant_yellow/strings.json b/homeassistant/components/homeassistant_yellow/strings.json index b089e483899..41c1438b234 100644 --- a/homeassistant/components/homeassistant_yellow/strings.json +++ b/homeassistant/components/homeassistant_yellow/strings.json @@ -149,5 +149,12 @@ "run_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::run_zigbee_flasher_addon%]", "uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::uninstall_zigbee_flasher_addon%]" } + }, + "entity": { + "update": { + "radio_firmware": { + "name": "Radio firmware" + } + } } } diff --git a/homeassistant/components/homeassistant_yellow/update.py b/homeassistant/components/homeassistant_yellow/update.py new file mode 100644 index 00000000000..9531bd456cb --- /dev/null +++ b/homeassistant/components/homeassistant_yellow/update.py @@ -0,0 +1,226 @@ +"""Home Assistant Yellow firmware update entity.""" + +from __future__ import annotations + +import logging + +import aiohttp + +from homeassistant.components.homeassistant_hardware.coordinator import ( + FirmwareUpdateCoordinator, +) +from homeassistant.components.homeassistant_hardware.update import ( + BaseFirmwareUpdateEntity, + FirmwareUpdateEntityDescription, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, +) +from homeassistant.components.update import UpdateDeviceClass +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import ( + DOMAIN, + FIRMWARE, + FIRMWARE_VERSION, + MANUFACTURER, + MODEL, + NABU_CASA_FIRMWARE_RELEASES_URL, + RADIO_DEVICE, +) + +_LOGGER = logging.getLogger(__name__) + + +FIRMWARE_ENTITY_DESCRIPTIONS: dict[ + ApplicationType | None, FirmwareUpdateEntityDescription +] = { + ApplicationType.EZSP: FirmwareUpdateEntityDescription( + 
key="radio_firmware", + translation_key="radio_firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split(" ", 1)[0], + fw_type="yellow_zigbee_ncp", + version_key="ezsp_version", + expected_firmware_type=ApplicationType.EZSP, + firmware_name="EmberZNet Zigbee", + ), + ApplicationType.SPINEL: FirmwareUpdateEntityDescription( + key="radio_firmware", + translation_key="radio_firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split("/", 1)[1].split("_", 1)[0], + fw_type="yellow_openthread_rcp", + version_key="ot_rcp_version", + expected_firmware_type=ApplicationType.SPINEL, + firmware_name="OpenThread RCP", + ), + ApplicationType.CPC: FirmwareUpdateEntityDescription( + key="radio_firmware", + translation_key="radio_firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type="yellow_multipan", + version_key="cpc_version", + expected_firmware_type=ApplicationType.CPC, + firmware_name="Multiprotocol", + ), + ApplicationType.GECKO_BOOTLOADER: FirmwareUpdateEntityDescription( + key="radio_firmware", + translation_key="radio_firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, # We don't want to update the bootloader + version_key="gecko_bootloader_version", + expected_firmware_type=ApplicationType.GECKO_BOOTLOADER, + firmware_name="Gecko Bootloader", + ), + None: FirmwareUpdateEntityDescription( + key="radio_firmware", + translation_key="radio_firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, + version_key=None, + expected_firmware_type=None, + firmware_name=None, + ), +} + + +def _async_create_update_entity( + hass: HomeAssistant, + config_entry: ConfigEntry, + session: aiohttp.ClientSession, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> FirmwareUpdateEntity: + """Create an update entity that handles firmware type changes.""" + firmware_type = config_entry.data[FIRMWARE] + + try: + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[ + ApplicationType(firmware_type) + ] + except (KeyError, ValueError): + _LOGGER.debug( + "Unknown firmware type %r, using default entity description", firmware_type + ) + entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[None] + + entity = FirmwareUpdateEntity( + device=RADIO_DEVICE, + config_entry=config_entry, + update_coordinator=FirmwareUpdateCoordinator( + hass, + session, + NABU_CASA_FIRMWARE_RELEASES_URL, + ), + entity_description=entity_description, + ) + + def firmware_type_changed( + old_type: ApplicationType | None, new_type: ApplicationType | None + ) -> None: + """Replace the current entity when the firmware type changes.""" + er.async_get(hass).async_remove(entity.entity_id) + async_add_entities( + [ + _async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + ] + ) + + entity.async_on_remove( + entity.add_firmware_type_changed_callback(firmware_type_changed) + ) + + return entity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the firmware update config entry.""" + session = 
async_get_clientsession(hass) + entity = _async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + + async_add_entities([entity]) + + +class FirmwareUpdateEntity(BaseFirmwareUpdateEntity): + """Yellow firmware update entity.""" + + bootloader_reset_type = "yellow" # Triggers a GPIO reset + + def __init__( + self, + device: str, + config_entry: ConfigEntry, + update_coordinator: FirmwareUpdateCoordinator, + entity_description: FirmwareUpdateEntityDescription, + ) -> None: + """Initialize the Yellow firmware update entity.""" + super().__init__(device, config_entry, update_coordinator, entity_description) + self._attr_unique_id = self.entity_description.key + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, "yellow")}, + name=MODEL, + model=MODEL, + manufacturer=MANUFACTURER, + ) + + # Use the cached firmware info if it exists + if self._config_entry.data[FIRMWARE] is not None: + self._current_firmware_info = FirmwareInfo( + device=device, + firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]), + firmware_version=self._config_entry.data[FIRMWARE_VERSION], + owners=[], + source="homeassistant_yellow", + ) + + def _update_attributes(self) -> None: + """Recompute the attributes of the entity.""" + super()._update_attributes() + + assert self.device_entry is not None + device_registry = dr.async_get(self.hass) + device_registry.async_update_device( + device_id=self.device_entry.id, + sw_version=f"{self.entity_description.firmware_name} {self._attr_installed_version}", + ) + + @callback + def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: + """Handle updated firmware info being pushed by an integration.""" + self.hass.config_entries.async_update_entry( + self._config_entry, + data={ + **self._config_entry.data, + FIRMWARE: firmware_info.firmware_type, + FIRMWARE_VERSION: firmware_info.firmware_version, + }, + ) + super()._firmware_info_callback(firmware_info) diff --git a/homeassistant/components/homee/__init__.py b/homeassistant/components/homee/__init__.py index d7785ad9104..9fd88ee40aa 100644 --- a/homeassistant/components/homee/__init__.py +++ b/homeassistant/components/homee/__init__.py @@ -15,10 +15,13 @@ from .const import DOMAIN _LOGGER = logging.getLogger(__name__) PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.BUTTON, Platform.COVER, Platform.LIGHT, + Platform.LOCK, Platform.NUMBER, + Platform.SELECT, Platform.SENSOR, Platform.SWITCH, Platform.VALVE, diff --git a/homeassistant/components/homee/binary_sensor.py b/homeassistant/components/homee/binary_sensor.py new file mode 100644 index 00000000000..3f5f5c46a29 --- /dev/null +++ b/homeassistant/components/homee/binary_sensor.py @@ -0,0 +1,190 @@ +"""The Homee binary sensor platform.""" + +from pyHomee.const import AttributeType +from pyHomee.model import HomeeAttribute + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import HomeeConfigEntry +from .entity import HomeeEntity + +PARALLEL_UPDATES = 0 + +BINARY_SENSOR_DESCRIPTIONS: dict[AttributeType, BinarySensorEntityDescription] = { + AttributeType.BATTERY_LOW_ALARM: BinarySensorEntityDescription( + key="battery", + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.BLACKOUT_ALARM: BinarySensorEntityDescription( + key="blackout_alarm", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.COALARM: BinarySensorEntityDescription( + key="carbon_monoxide", device_class=BinarySensorDeviceClass.CO + ), + AttributeType.CO2ALARM: BinarySensorEntityDescription( + key="carbon_dioxide", device_class=BinarySensorDeviceClass.PROBLEM + ), + AttributeType.FLOOD_ALARM: BinarySensorEntityDescription( + key="flood", + device_class=BinarySensorDeviceClass.MOISTURE, + ), + AttributeType.HIGH_TEMPERATURE_ALARM: BinarySensorEntityDescription( + key="high_temperature", + device_class=BinarySensorDeviceClass.HEAT, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.LEAK_ALARM: BinarySensorEntityDescription( + key="leak_alarm", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + AttributeType.LOAD_ALARM: BinarySensorEntityDescription( + key="load_alarm", + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.LOCK_STATE: BinarySensorEntityDescription( + key="lock", + device_class=BinarySensorDeviceClass.LOCK, + ), + AttributeType.LOW_TEMPERATURE_ALARM: BinarySensorEntityDescription( + key="low_temperature", + device_class=BinarySensorDeviceClass.COLD, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.MALFUNCTION_ALARM: BinarySensorEntityDescription( + key="malfunction", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.MAXIMUM_ALARM: BinarySensorEntityDescription( + key="maximum", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.MINIMUM_ALARM: BinarySensorEntityDescription( + key="minimum", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.MOTION_ALARM: BinarySensorEntityDescription( + key="motion", + device_class=BinarySensorDeviceClass.MOTION, + ), + AttributeType.MOTOR_BLOCKED_ALARM: BinarySensorEntityDescription( + key="motor_blocked", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.ON_OFF: BinarySensorEntityDescription( + key="plug", + device_class=BinarySensorDeviceClass.PLUG, + ), + AttributeType.OPEN_CLOSE: BinarySensorEntityDescription( + key="opening", + device_class=BinarySensorDeviceClass.OPENING, + ), + AttributeType.OVER_CURRENT_ALARM: BinarySensorEntityDescription( + key="overcurrent", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.OVERLOAD_ALARM: BinarySensorEntityDescription( + key="overload", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.PRESENCE_ALARM: BinarySensorEntityDescription( + key="presence", + device_class=BinarySensorDeviceClass.PRESENCE, + ), + AttributeType.POWER_SUPPLY_ALARM: BinarySensorEntityDescription( + key="power", + device_class=BinarySensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.RAIN_FALL: BinarySensorEntityDescription( + key="rain", + 
device_class=BinarySensorDeviceClass.MOISTURE, + ), + AttributeType.REPLACE_FILTER_ALARM: BinarySensorEntityDescription( + key="replace_filter", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.SMOKE_ALARM: BinarySensorEntityDescription( + key="smoke", + device_class=BinarySensorDeviceClass.SMOKE, + ), + AttributeType.STORAGE_ALARM: BinarySensorEntityDescription( + key="storage", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.SURGE_ALARM: BinarySensorEntityDescription( + key="surge", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.TAMPER_ALARM: BinarySensorEntityDescription( + key="tamper", + device_class=BinarySensorDeviceClass.TAMPER, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.VOLTAGE_DROP_ALARM: BinarySensorEntityDescription( + key="voltage_drop", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + ), + AttributeType.WATER_ALARM: BinarySensorEntityDescription( + key="water", + device_class=BinarySensorDeviceClass.MOISTURE, + entity_category=EntityCategory.DIAGNOSTIC, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HomeeConfigEntry, + async_add_devices: AddConfigEntryEntitiesCallback, +) -> None: + """Add the Homee platform for the binary sensor component.""" + + async_add_devices( + HomeeBinarySensor( + attribute, config_entry, BINARY_SENSOR_DESCRIPTIONS[attribute.type] + ) + for node in config_entry.runtime_data.nodes + for attribute in node.attributes + if attribute.type in BINARY_SENSOR_DESCRIPTIONS and not attribute.editable + ) + + +class HomeeBinarySensor(HomeeEntity, BinarySensorEntity): + """Representation of a Homee binary sensor.""" + + def __init__( + self, + attribute: HomeeAttribute, + entry: HomeeConfigEntry, + description: BinarySensorEntityDescription, + ) -> None: + """Initialize a Homee binary sensor entity.""" + super().__init__(attribute, entry) + + self.entity_description = description + self._attr_translation_key = description.key + + @property + def is_on(self) -> bool: + """Return true if the binary sensor is on.""" + return bool(self._attribute.current_value) diff --git a/homeassistant/components/homee/button.py b/homeassistant/components/homee/button.py index af6d769c1dc..33a8b5f23c8 100644 --- a/homeassistant/components/homee/button.py +++ b/homeassistant/components/homee/button.py @@ -15,6 +15,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import HomeeConfigEntry from .entity import HomeeEntity +PARALLEL_UPDATES = 0 + BUTTON_DESCRIPTIONS: dict[AttributeType, ButtonEntityDescription] = { AttributeType.AUTOMATIC_MODE_IMPULSE: ButtonEntityDescription(key="automatic_mode"), AttributeType.BRIEFLY_OPEN_IMPULSE: ButtonEntityDescription(key="briefly_open"), diff --git a/homeassistant/components/homee/config_flow.py b/homeassistant/components/homee/config_flow.py index 61d2a3f25a5..1a3c5011f82 100644 --- a/homeassistant/components/homee/config_flow.py +++ b/homeassistant/components/homee/config_flow.py @@ -52,7 +52,7 @@ class HomeeConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except HomeeAuthenticationFailedException: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/homee/cover.py b/homeassistant/components/homee/cover.py index 6e7e4fd5c55..79a9b00ffba 100644 --- a/homeassistant/components/homee/cover.py +++ b/homeassistant/components/homee/cover.py @@ -21,6 +21,8 @@ from .entity import HomeeNodeEntity _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + OPEN_CLOSE_ATTRIBUTES = [ AttributeType.OPEN_CLOSE, AttributeType.SLAT_ROTATION_IMPULSE, diff --git a/homeassistant/components/homee/light.py b/homeassistant/components/homee/light.py index b9c4460075a..9c66764760e 100644 --- a/homeassistant/components/homee/light.py +++ b/homeassistant/components/homee/light.py @@ -32,6 +32,8 @@ LIGHT_ATTRIBUTES = [ AttributeType.DIMMING_LEVEL, ] +PARALLEL_UPDATES = 0 + def is_light_node(node: HomeeNode) -> bool: """Determine if a node is controllable as a homee light based on its profile and attributes.""" diff --git a/homeassistant/components/homee/lock.py b/homeassistant/components/homee/lock.py new file mode 100644 index 00000000000..4cfc34e11fe --- /dev/null +++ b/homeassistant/components/homee/lock.py @@ -0,0 +1,73 @@ +"""The Homee lock platform.""" + +from typing import Any + +from pyHomee.const import AttributeChangedBy, AttributeType + +from homeassistant.components.lock import LockEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import HomeeConfigEntry +from .entity import HomeeEntity +from .helpers import get_name_for_enum + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HomeeConfigEntry, + async_add_devices: AddConfigEntryEntitiesCallback, +) -> None: + """Add the Homee platform for the lock component.""" + + async_add_devices( + HomeeLock(attribute, config_entry) + for node in config_entry.runtime_data.nodes + for attribute in node.attributes + if (attribute.type == AttributeType.LOCK_STATE and attribute.editable) + ) + + +class HomeeLock(HomeeEntity, LockEntity): + """Representation of a Homee lock.""" + + _attr_name = None + + @property + def is_locked(self) -> bool: + """Return if lock is locked.""" + return self._attribute.current_value == 1.0 + + @property + def is_locking(self) -> bool: + """Return if lock is locking.""" + return self._attribute.target_value > self._attribute.current_value + + @property + def is_unlocking(self) -> bool: + """Return if lock is unlocking.""" + return self._attribute.target_value < self._attribute.current_value + + @property + def changed_by(self) -> str: + """Return by whom or what the lock was last changed.""" + changed_id = str(self._attribute.changed_by_id) + changed_by_name = get_name_for_enum( + AttributeChangedBy, self._attribute.changed_by + ) + if self._attribute.changed_by == AttributeChangedBy.USER: + changed_id = self._entry.runtime_data.get_user_by_id( + self._attribute.changed_by_id + ).username + + return f"{changed_by_name}-{changed_id}" + + async def async_lock(self, **kwargs: Any) -> None: + """Lock specified lock. A code to lock the lock with may be specified.""" + await self.async_set_homee_value(1) + + async def async_unlock(self, **kwargs: Any) -> None: + """Unlock specified lock. A code to unlock the lock with may be specified.""" + await self.async_set_homee_value(0) diff --git a/homeassistant/components/homee/manifest.json b/homeassistant/components/homee/manifest.json index e4622222be1..3c2a99c30dc 100644 --- a/homeassistant/components/homee/manifest.json +++ b/homeassistant/components/homee/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["homee"], "quality_scale": "bronze", - "requirements": ["pyHomee==1.2.7"] + "requirements": ["pyHomee==1.2.8"] } diff --git a/homeassistant/components/homee/number.py b/homeassistant/components/homee/number.py index 3f1f08a6618..5f76b826fcf 100644 --- a/homeassistant/components/homee/number.py +++ b/homeassistant/components/homee/number.py @@ -16,6 +16,8 @@ from . 
import HomeeConfigEntry from .const import HOMEE_UNIT_TO_HA_UNIT from .entity import HomeeEntity +PARALLEL_UPDATES = 0 + NUMBER_DESCRIPTIONS = { AttributeType.DOWN_POSITION: NumberEntityDescription( key="down_position", diff --git a/homeassistant/components/homee/quality_scale.yaml b/homeassistant/components/homee/quality_scale.yaml index ff99d177018..906218cf823 100644 --- a/homeassistant/components/homee/quality_scale.yaml +++ b/homeassistant/components/homee/quality_scale.yaml @@ -35,7 +35,7 @@ rules: entity-unavailable: done integration-owner: done log-when-unavailable: done - parallel-updates: todo + parallel-updates: done reauthentication-flow: todo test-coverage: todo diff --git a/homeassistant/components/homee/select.py b/homeassistant/components/homee/select.py new file mode 100644 index 00000000000..70c7972bbda --- /dev/null +++ b/homeassistant/components/homee/select.py @@ -0,0 +1,63 @@ +"""The Homee select platform.""" + +from pyHomee.const import AttributeType +from pyHomee.model import HomeeAttribute + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import HomeeConfigEntry +from .entity import HomeeEntity + +PARALLEL_UPDATES = 0 + +SELECT_DESCRIPTIONS: dict[AttributeType, SelectEntityDescription] = { + AttributeType.REPEATER_MODE: SelectEntityDescription( + key="repeater_mode", + options=["off", "level1", "level2"], + entity_category=EntityCategory.CONFIG, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HomeeConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add the Homee platform for the select component.""" + + async_add_entities( + HomeeSelect(attribute, config_entry, SELECT_DESCRIPTIONS[attribute.type]) + for node in config_entry.runtime_data.nodes + for attribute in node.attributes + if attribute.type in SELECT_DESCRIPTIONS and attribute.editable + ) + + +class HomeeSelect(HomeeEntity, SelectEntity): + """Representation of a Homee select entity.""" + + def __init__( + self, + attribute: HomeeAttribute, + entry: HomeeConfigEntry, + description: SelectEntityDescription, + ) -> None: + """Initialize a Homee select entity.""" + super().__init__(attribute, entry) + self.entity_description = description + assert description.options is not None + self._attr_options = description.options + self._attr_translation_key = description.key + + @property + def current_option(self) -> str: + """Return the current selected option.""" + return self.options[int(self._attribute.current_value)] + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.async_set_homee_value(self.options.index(option)) diff --git a/homeassistant/components/homee/sensor.py b/homeassistant/components/homee/sensor.py index 410f87f2168..e65b73b4a67 100644 --- a/homeassistant/components/homee/sensor.py +++ b/homeassistant/components/homee/sensor.py @@ -27,6 +27,8 @@ from .const import ( from .entity import HomeeEntity, HomeeNodeEntity from .helpers import get_name_for_enum +PARALLEL_UPDATES = 0 + def get_open_close_value(attribute: HomeeAttribute) -> str | None: """Return the open/close value.""" diff --git a/homeassistant/components/homee/strings.json b/homeassistant/components/homee/strings.json index 94f85824280..da8357d16bc 100644 --- 
a/homeassistant/components/homee/strings.json +++ b/homeassistant/components/homee/strings.json @@ -26,6 +26,76 @@ } }, "entity": { + "binary_sensor": { + "blackout_alarm": { + "name": "Blackout" + }, + "carbon_dioxide": { + "name": "Carbon dioxide" + }, + "flood": { + "name": "Flood" + }, + "high_temperature": { + "name": "High temperature" + }, + "leak_alarm": { + "name": "Leak" + }, + "load_alarm": { + "name": "Load", + "state": { + "off": "Normal", + "on": "Overload" + } + }, + "low_temperature": { + "name": "Low temperature" + }, + "malfunction": { + "name": "Malfunction" + }, + "maximum": { + "name": "Maximum level" + }, + "minimum": { + "name": "Minimum level" + }, + "motor_blocked": { + "name": "Motor blocked" + }, + "overcurrent": { + "name": "Overcurrent" + }, + "overload": { + "name": "Overload" + }, + "rain": { + "name": "Rain" + }, + "replace_filter": { + "name": "Replace filter", + "state": { + "on": "Replace" + } + }, + "storage": { + "name": "Storage", + "state": { + "off": "Space available", + "on": "Storage full" + } + }, + "surge": { + "name": "Surge" + }, + "voltage_drop": { + "name": "Voltage drop" + }, + "water": { + "name": "Water" + } + }, "button": { "automatic_mode": { "name": "Automatic mode" @@ -110,6 +180,16 @@ "name": "Wake-up interval" } }, + "select": { + "repeater_mode": { + "name": "Repeater mode", + "state": { + "off": "[%key:common::state::off%]", + "level1": "Level 1", + "level2": "Level 2" + } + } + }, "sensor": { "brightness": { "name": "Illuminance" diff --git a/homeassistant/components/homee/switch.py b/homeassistant/components/homee/switch.py index 86c7acdbf11..041b96963f1 100644 --- a/homeassistant/components/homee/switch.py +++ b/homeassistant/components/homee/switch.py @@ -20,6 +20,8 @@ from . import HomeeConfigEntry from .const import CLIMATE_PROFILES, LIGHT_PROFILES from .entity import HomeeEntity +PARALLEL_UPDATES = 0 + def get_device_class( attribute: HomeeAttribute, config_entry: HomeeConfigEntry diff --git a/homeassistant/components/homee/valve.py b/homeassistant/components/homee/valve.py index 9a4ff446a10..995716d7ef8 100644 --- a/homeassistant/components/homee/valve.py +++ b/homeassistant/components/homee/valve.py @@ -15,6 +15,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import HomeeConfigEntry from .entity import HomeeEntity +PARALLEL_UPDATES = 0 + VALVE_DESCRIPTIONS = { AttributeType.CURRENT_VALVE_POSITION: ValveEntityDescription( key="valve_position", diff --git a/homeassistant/components/homekit/__init__.py b/homeassistant/components/homekit/__init__.py index 97fb17d7db5..9bd5711832c 100644 --- a/homeassistant/components/homekit/__init__.py +++ b/homeassistant/components/homekit/__init__.py @@ -221,6 +221,34 @@ UNPAIR_SERVICE_SCHEMA = vol.All( ) +@callback +def _async_update_entries_from_yaml( + hass: HomeAssistant, config: ConfigType, start_import_flow: bool +) -> None: + current_entries = hass.config_entries.async_entries(DOMAIN) + entries_by_name, entries_by_port = _async_get_imported_entries_indices( + current_entries + ) + hk_config: list[dict[str, Any]] = config[DOMAIN] + + for index, conf in enumerate(hk_config): + if _async_update_config_entry_from_yaml( + hass, entries_by_name, entries_by_port, conf + ): + continue + + if start_import_flow: + conf[CONF_ENTRY_INDEX] = index + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=conf, + ), + eager_start=True, + ) + + def _async_all_homekit_instances(hass: HomeAssistant) -> list[HomeKit]: """All active HomeKit instances.""" hk_data: HomeKitEntryData | None @@ -258,31 +286,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await hass.async_add_executor_job(get_loader) _async_register_events_and_services(hass) - if DOMAIN not in config: return True - current_entries = hass.config_entries.async_entries(DOMAIN) - entries_by_name, entries_by_port = _async_get_imported_entries_indices( - current_entries - ) - - for index, conf in enumerate(config[DOMAIN]): - if _async_update_config_entry_from_yaml( - hass, entries_by_name, entries_by_port, conf - ): - continue - - conf[CONF_ENTRY_INDEX] = index - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=conf, - ), - eager_start=True, - ) - + _async_update_entries_from_yaml(hass, config, start_import_flow=True) return True @@ -326,13 +333,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeKitConfigEntry) -> b conf = entry.data options = entry.options - name = conf[CONF_NAME] - port = conf[CONF_PORT] - _LOGGER.debug("Begin setup HomeKit for %s", name) - + name: str = conf[CONF_NAME] + port: int = conf[CONF_PORT] # ip_address and advertise_ip are yaml only - ip_address = conf.get(CONF_IP_ADDRESS, _DEFAULT_BIND) - advertise_ips: list[str] = conf.get( + ip_address: str | list[str] | None = conf.get(CONF_IP_ADDRESS, _DEFAULT_BIND) + advertise_ips: list[str] + advertise_ips = conf.get( CONF_ADVERTISE_IP ) or await network.async_get_announce_addresses(hass) @@ -344,13 +350,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: HomeKitConfigEntry) -> b # with users who have not migrated yet we do not do exclude # these entities by default as we cannot migrate automatically # since it requires a re-pairing. 
- exclude_accessory_mode = conf.get( + exclude_accessory_mode: bool = conf.get( CONF_EXCLUDE_ACCESSORY_MODE, DEFAULT_EXCLUDE_ACCESSORY_MODE ) - homekit_mode = options.get(CONF_HOMEKIT_MODE, DEFAULT_HOMEKIT_MODE) - entity_config = options.get(CONF_ENTITY_CONFIG, {}).copy() - entity_filter = FILTER_SCHEMA(options.get(CONF_FILTER, {})) - devices = options.get(CONF_DEVICES, []) + homekit_mode: str = options.get(CONF_HOMEKIT_MODE, DEFAULT_HOMEKIT_MODE) + entity_config: dict[str, Any] = options.get(CONF_ENTITY_CONFIG, {}).copy() + entity_filter: EntityFilter = FILTER_SCHEMA(options.get(CONF_FILTER, {})) + devices: list[str] = options.get(CONF_DEVICES, []) homekit = HomeKit( hass, @@ -500,26 +506,15 @@ def _async_register_events_and_services(hass: HomeAssistant) -> None: async def _handle_homekit_reload(service: ServiceCall) -> None: """Handle start HomeKit service call.""" config = await async_integration_yaml_config(hass, DOMAIN) - if not config or DOMAIN not in config: return - - current_entries = hass.config_entries.async_entries(DOMAIN) - entries_by_name, entries_by_port = _async_get_imported_entries_indices( - current_entries - ) - - for conf in config[DOMAIN]: - _async_update_config_entry_from_yaml( - hass, entries_by_name, entries_by_port, conf + _async_update_entries_from_yaml(hass, config, start_import_flow=False) + await asyncio.gather( + *( + create_eager_task(hass.config_entries.async_reload(entry.entry_id)) + for entry in hass.config_entries.async_entries(DOMAIN) ) - - reload_tasks = [ - create_eager_task(hass.config_entries.async_reload(entry.entry_id)) - for entry in current_entries - ] - - await asyncio.gather(*reload_tasks) + ) async_register_admin_service( hass, @@ -537,7 +532,7 @@ class HomeKit: hass: HomeAssistant, name: str, port: int, - ip_address: str | None, + ip_address: list[str] | str | None, entity_filter: EntityFilter, exclude_accessory_mode: bool, entity_config: dict[str, Any], diff --git a/homeassistant/components/homekit/accessories.py b/homeassistant/components/homekit/accessories.py index 8d10387e239..0d810d6986d 100644 --- a/homeassistant/components/homekit/accessories.py +++ b/homeassistant/components/homekit/accessories.py @@ -15,6 +15,7 @@ from pyhap.service import Service from pyhap.util import callback as pyhap_callback from homeassistant.components.cover import CoverDeviceClass, CoverEntityFeature +from homeassistant.components.lawn_mower import LawnMowerEntityFeature from homeassistant.components.media_player import MediaPlayerDeviceClass from homeassistant.components.remote import RemoteEntityFeature from homeassistant.components.sensor import SensorDeviceClass @@ -250,6 +251,13 @@ def get_accessory( # noqa: C901 elif state.domain == "vacuum": a_type = "Vacuum" + elif ( + state.domain == "lawn_mower" + and features & LawnMowerEntityFeature.DOCK + and features & LawnMowerEntityFeature.START_MOWING + ): + a_type = "LawnMower" + elif state.domain == "remote" and features & RemoteEntityFeature.ACTIVITY: a_type = "ActivityRemote" diff --git a/homeassistant/components/homekit/config_flow.py b/homeassistant/components/homekit/config_flow.py index 53db7774821..0ef2e8563bc 100644 --- a/homeassistant/components/homekit/config_flow.py +++ b/homeassistant/components/homekit/config_flow.py @@ -106,6 +106,7 @@ SUPPORTED_DOMAINS = [ "sensor", "switch", "vacuum", + "lawn_mower", "water_heater", VALVE_DOMAIN, ] @@ -123,6 +124,7 @@ DEFAULT_DOMAINS = [ REMOTE_DOMAIN, "switch", "vacuum", + "lawn_mower", "water_heater", ] diff --git 
a/homeassistant/components/homekit/manifest.json b/homeassistant/components/homekit/manifest.json index f9a31489ca4..4ae2e43dfb2 100644 --- a/homeassistant/components/homekit/manifest.json +++ b/homeassistant/components/homekit/manifest.json @@ -10,7 +10,7 @@ "loggers": ["pyhap"], "requirements": [ "HAP-python==4.9.2", - "fnv-hash-fast==1.2.6", + "fnv-hash-fast==1.4.0", "PyQRCode==1.2.1", "base36==0.1.1" ], diff --git a/homeassistant/components/homekit/type_switches.py b/homeassistant/components/homekit/type_switches.py index 0482a5956ac..8c6fc1ed672 100644 --- a/homeassistant/components/homekit/type_switches.py +++ b/homeassistant/components/homekit/type_switches.py @@ -16,6 +16,12 @@ from pyhap.const import ( from homeassistant.components import button, input_button from homeassistant.components.input_select import ATTR_OPTIONS, SERVICE_SELECT_OPTION +from homeassistant.components.lawn_mower import ( + DOMAIN as LAWN_MOWER_DOMAIN, + SERVICE_DOCK, + SERVICE_START_MOWING, + LawnMowerActivity, +) from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, @@ -218,6 +224,29 @@ class Vacuum(Switch): self.char_on.set_value(current_state) +@TYPES.register("LawnMower") +class LawnMower(Switch): + """Generate a Switch accessory.""" + + def set_state(self, value: bool) -> None: + """Move switch state to value if call came from HomeKit.""" + _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value) + state = self.hass.states.get(self.entity_id) + assert state + + service = SERVICE_START_MOWING if value else SERVICE_DOCK + self.async_call_service( + LAWN_MOWER_DOMAIN, service, {ATTR_ENTITY_ID: self.entity_id} + ) + + @callback + def async_update_state(self, new_state: State) -> None: + """Update switch state after state changed.""" + current_state = new_state.state in (LawnMowerActivity.MOWING, STATE_ON) + _LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state) + self.char_on.set_value(current_state) + + class ValveBase(HomeAccessory): """Valve base class.""" diff --git a/homeassistant/components/homekit_controller/manifest.json b/homeassistant/components/homekit_controller/manifest.json index 98db9a397d3..6562a3edcc9 100644 --- a/homeassistant/components/homekit_controller/manifest.json +++ b/homeassistant/components/homekit_controller/manifest.json @@ -14,6 +14,6 @@ "documentation": "https://www.home-assistant.io/integrations/homekit_controller", "iot_class": "local_push", "loggers": ["aiohomekit", "commentjson"], - "requirements": ["aiohomekit==3.2.8"], + "requirements": ["aiohomekit==3.2.13"], "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."] } diff --git a/homeassistant/components/homekit_controller/media_player.py b/homeassistant/components/homekit_controller/media_player.py index 5315c7c89f3..e3b4a760680 100644 --- a/homeassistant/components/homekit_controller/media_player.py +++ b/homeassistant/components/homekit_controller/media_player.py @@ -83,7 +83,7 @@ class HomeKitTelevision(HomeKitEntity, MediaPlayerEntity): @property def supported_features(self) -> MediaPlayerEntityFeature: """Flag media player features that are supported.""" - features = MediaPlayerEntityFeature(0) + features = MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON if self.service.has(CharacteristicsTypes.ACTIVE_IDENTIFIER): features |= MediaPlayerEntityFeature.SELECT_SOURCE @@ -177,6 +177,14 @@ class HomeKitTelevision(HomeKitEntity, MediaPlayerEntity): return MediaPlayerState.ON + async def 
async_turn_on(self) -> None: + """Turn the tv on.""" + await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: 1}) + + async def async_turn_off(self) -> None: + """Turn the tv off.""" + await self.async_put_characteristics({CharacteristicsTypes.ACTIVE: 0}) + async def async_media_play(self) -> None: """Send play command.""" if self.state == MediaPlayerState.PLAYING: diff --git a/homeassistant/components/homematic/sensor.py b/homeassistant/components/homematic/sensor.py index 24172e196c1..bdd446d7091 100644 --- a/homeassistant/components/homematic/sensor.py +++ b/homeassistant/components/homematic/sensor.py @@ -178,6 +178,7 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = { key="WIND_DIRECTION", native_unit_of_measurement=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), "WIND_DIRECTION_RANGE": SensorEntityDescription( key="WIND_DIRECTION_RANGE", diff --git a/homeassistant/components/homematic/strings.json b/homeassistant/components/homematic/strings.json index d962a218a4f..78159189db8 100644 --- a/homeassistant/components/homematic/strings.json +++ b/homeassistant/components/homematic/strings.json @@ -2,7 +2,7 @@ "services": { "virtualkey": { "name": "Virtual key", - "description": "Presses a virtual key from CCU/Homegear or simulate keypress.", + "description": "Simulates a keypress (or other valid action) on CCU/Homegear with virtual or device keys.", "fields": { "address": { "name": "Address", @@ -24,7 +24,7 @@ }, "set_variable_value": { "name": "Set variable value", - "description": "Sets the name of a node.", + "description": "Sets the value of a system variable.", "fields": { "entity_id": { "name": "Entity", diff --git a/homeassistant/components/homematicip_cloud/strings.json b/homeassistant/components/homematicip_cloud/strings.json index 228ebc7500e..7b1b08ac4e2 100644 --- a/homeassistant/components/homematicip_cloud/strings.json +++ b/homeassistant/components/homematicip_cloud/strings.json @@ -35,7 +35,7 @@ "services": { "activate_eco_mode_with_duration": { "name": "Activate eco mode with duration", - "description": "Activates eco mode with period.", + "description": "Activates the eco mode for a specified duration.", "fields": { "duration": { "name": "Duration", @@ -49,7 +49,7 @@ }, "activate_eco_mode_with_period": { "name": "Activate eco more with period", - "description": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_duration::description%]", + "description": "Activates the eco mode until a given time.", "fields": { "endtime": { "name": "Endtime", @@ -63,7 +63,7 @@ }, "activate_vacation": { "name": "Activate vacation", - "description": "Activates the vacation mode until the given time.", + "description": "Activates the vacation mode until a given time.", "fields": { "endtime": { "name": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_period::fields::endtime::name%]", diff --git a/homeassistant/components/huawei_lte/config_flow.py b/homeassistant/components/huawei_lte/config_flow.py index 96e160ece7b..4ca9e7531e3 100644 --- a/homeassistant/components/huawei_lte/config_flow.py +++ b/homeassistant/components/huawei_lte/config_flow.py @@ -178,8 +178,8 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): except Timeout: _LOGGER.warning("Connection timeout", exc_info=True) errors[CONF_URL] = "connection_timeout" - except Exception: # noqa: BLE001 - _LOGGER.warning("Unknown error connecting to device", exc_info=True) + except Exception: + 
_LOGGER.exception("Unknown error connecting to device") errors[CONF_URL] = "unknown" return conn @@ -188,8 +188,8 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): try: conn.close() conn.requests_session.close() - except Exception: # noqa: BLE001 - _LOGGER.debug("Disconnect error", exc_info=True) + except Exception: + _LOGGER.exception("Disconnect error") async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/hue/icons.json b/homeassistant/components/hue/icons.json index 31464308b0a..646c420f1fe 100644 --- a/homeassistant/components/hue/icons.json +++ b/homeassistant/components/hue/icons.json @@ -1,4 +1,28 @@ { + "entity": { + "light": { + "hue_light": { + "state_attributes": { + "effect": { + "state": { + "candle": "mdi:candle", + "sparkle": "mdi:shimmer", + "glisten": "mdi:creation", + "sunrise": "mdi:weather-sunset-up", + "sunset": "mdi:weather-sunset", + "fire": "mdi:fire", + "prism": "mdi:triangle-outline", + "opal": "mdi:diamond-stone", + "underwater": "mdi:waves", + "cosmos": "mdi:star-shooting", + "sunbeam": "mdi:spotlight-beam", + "enchant": "mdi:magic-staff" + } + } + } + } + } + }, "services": { "hue_activate_scene": { "service": "mdi:palette" diff --git a/homeassistant/components/hue/strings.json b/homeassistant/components/hue/strings.json index 2f7f2e55561..3326dd1043f 100644 --- a/homeassistant/components/hue/strings.json +++ b/homeassistant/components/hue/strings.json @@ -11,7 +11,7 @@ } }, "manual": { - "title": "Manual configure a Hue bridge", + "title": "Manually configure a Hue bridge", "data": { "host": "[%key:common::config_flow::data::host%]" }, @@ -46,8 +46,8 @@ "button_2": "Second button", "button_3": "Third button", "button_4": "Fourth button", - "double_buttons_1_3": "First and Third buttons", - "double_buttons_2_4": "Second and Fourth buttons", + "double_buttons_1_3": "First and third button", + "double_buttons_2_4": "Second and fourth button", "dim_down": "Dim down", "dim_up": "Dim up", "turn_off": "[%key:common::action::turn_off%]", @@ -102,6 +102,28 @@ } } }, + "light": { + "hue_light": { + "state_attributes": { + "effect": { + "state": { + "candle": "Candle", + "sparkle": "Sparkle", + "glisten": "Glisten", + "sunrise": "Sunrise", + "sunset": "Sunset", + "fire": "Fire", + "prism": "Prism", + "opal": "Opal", + "underwater": "Underwater", + "cosmos": "Cosmos", + "sunbeam": "Sunbeam", + "enchant": "Enchant" + } + } + } + } + }, "sensor": { "zigbee_connectivity": { "name": "Zigbee connectivity", @@ -175,5 +197,11 @@ } } } + }, + "issues": { + "deprecated_effect_none": { + "title": "Light turned on with deprecated effect", + "description": "A light was turned on with the deprecated effect `None`. This has been replaced with `off`. Please update any automations, scenes, or scripts that use this effect." 
+ } } } diff --git a/homeassistant/components/hue/v2/light.py b/homeassistant/components/hue/v2/light.py index 4b00299bc9d..8eb7ec8936e 100644 --- a/homeassistant/components/hue/v2/light.py +++ b/homeassistant/components/hue/v2/light.py @@ -18,6 +18,7 @@ from homeassistant.components.light import ( ATTR_FLASH, ATTR_TRANSITION, ATTR_XY_COLOR, + EFFECT_OFF, FLASH_SHORT, ColorMode, LightEntity, @@ -28,6 +29,7 @@ from homeassistant.components.light import ( from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.util import color as color_util from ..bridge import HueBridge @@ -39,11 +41,13 @@ from .helpers import ( normalize_hue_transition, ) -EFFECT_NONE = "None" FALLBACK_MIN_KELVIN = 6500 FALLBACK_MAX_KELVIN = 2000 FALLBACK_KELVIN = 5800 # halfway +# HA 2025.4 replaced the deprecated effect "None" with HA default "off" +DEPRECATED_EFFECT_NONE = "None" + async def async_setup_entry( hass: HomeAssistant, @@ -75,7 +79,7 @@ class HueLight(HueBaseEntity, LightEntity): _fixed_color_mode: ColorMode | None = None entity_description = LightEntityDescription( - key="hue_light", has_entity_name=True, name=None + key="hue_light", translation_key="hue_light", has_entity_name=True, name=None ) def __init__( @@ -118,7 +122,7 @@ class HueLight(HueBaseEntity, LightEntity): if x != TimedEffectStatus.NO_EFFECT ] if len(self._attr_effect_list) > 0: - self._attr_effect_list.insert(0, EFFECT_NONE) + self._attr_effect_list.insert(0, EFFECT_OFF) self._attr_supported_features |= LightEntityFeature.EFFECT @property @@ -211,7 +215,7 @@ class HueLight(HueBaseEntity, LightEntity): if timed_effects := self.resource.timed_effects: if timed_effects.status != TimedEffectStatus.NO_EFFECT: return timed_effects.status.value - return EFFECT_NONE + return EFFECT_OFF async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" @@ -233,12 +237,29 @@ class HueLight(HueBaseEntity, LightEntity): self._color_temp_active = color_temp is not None flash = kwargs.get(ATTR_FLASH) effect = effect_str = kwargs.get(ATTR_EFFECT) - if effect_str in (EFFECT_NONE, EFFECT_NONE.lower()): - # ignore effect if set to "None" and we have no effect active - # the special effect "None" is only used to stop an active effect + if effect_str == DEPRECATED_EFFECT_NONE: + # deprecated effect "None" is now "off" + effect_str = EFFECT_OFF + async_create_issue( + self.hass, + DOMAIN, + "deprecated_effect_none", + breaks_in_ha_version="2025.10.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_effect_none", + ) + self.logger.warning( + "Detected deprecated effect 'None' in %s, use 'off' instead. 
" + "This will stop working in HA 2025.10", + self.entity_id, + ) + if effect_str == EFFECT_OFF: + # ignore effect if set to "off" and we have no effect active + # the special effect "off" is only used to stop an active effect # but sending it while no effect is active can actually result in issues # https://github.com/home-assistant/core/issues/122165 - effect = None if self.effect == EFFECT_NONE else EffectStatus.NO_EFFECT + effect = None if self.effect == EFFECT_OFF else EffectStatus.NO_EFFECT elif effect_str is not None: # work out if we got a regular effect or timed effect effect = EffectStatus(effect_str) diff --git a/homeassistant/components/humidifier/strings.json b/homeassistant/components/humidifier/strings.json index 753368dc572..436f7df8312 100644 --- a/homeassistant/components/humidifier/strings.json +++ b/homeassistant/components/humidifier/strings.json @@ -89,7 +89,7 @@ "fields": { "mode": { "name": "Mode", - "description": "Operation mode. For example, _normal_, _eco_, or _away_. For a list of possible values, refer to the integration documentation." + "description": "Operation mode. For example, \"normal\", \"eco\", or \"away\". For a list of possible values, refer to the integration documentation." } } }, diff --git a/homeassistant/components/husqvarna_automower/config_flow.py b/homeassistant/components/husqvarna_automower/config_flow.py index 7efed529453..31ca5eef0cd 100644 --- a/homeassistant/components/husqvarna_automower/config_flow.py +++ b/homeassistant/components/husqvarna_automower/config_flow.py @@ -54,7 +54,8 @@ class HusqvarnaConfigFlowHandler( automower_api = AutomowerSession(AsyncConfigFlowAuth(websession, token), tz) try: status_data = await automower_api.get_status() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return self.async_abort(reason="unknown") if status_data == {}: return self.async_abort(reason="no_mower_connected") diff --git a/homeassistant/components/husqvarna_automower/coordinator.py b/homeassistant/components/husqvarna_automower/coordinator.py index 819ee41a43d..9456074596a 100644 --- a/homeassistant/components/husqvarna_automower/coordinator.py +++ b/homeassistant/components/husqvarna_automower/coordinator.py @@ -13,7 +13,7 @@ from aioautomower.exceptions import ( HusqvarnaTimeoutError, HusqvarnaWSServerHandshakeError, ) -from aioautomower.model import MowerAttributes +from aioautomower.model import MowerDictionary from aioautomower.session import AutomowerSession from homeassistant.config_entries import ConfigEntry @@ -32,7 +32,7 @@ DEFAULT_RECONNECT_TIME = 2 # Define a default reconnect time type AutomowerConfigEntry = ConfigEntry[AutomowerDataUpdateCoordinator] -class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttributes]]): +class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[MowerDictionary]): """Class to manage fetching Husqvarna data.""" config_entry: AutomowerConfigEntry @@ -61,7 +61,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib self._zones_last_update: dict[str, set[str]] = {} self._areas_last_update: dict[str, set[int]] = {} - async def _async_update_data(self) -> dict[str, MowerAttributes]: + async def _async_update_data(self) -> MowerDictionary: """Subscribe for websocket and poll data from the API.""" if not self.ws_connected: await self.api.connect() @@ -84,7 +84,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib return data @callback - def callback(self, ws_data: 
dict[str, MowerAttributes]) -> None: + def callback(self, ws_data: MowerDictionary) -> None: """Process websocket callbacks and write them to the DataUpdateCoordinator.""" self.async_set_updated_data(ws_data) @@ -119,7 +119,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib "reconnect_task", ) - def _async_add_remove_devices(self, data: dict[str, MowerAttributes]) -> None: + def _async_add_remove_devices(self, data: MowerDictionary) -> None: """Add new device, remove non-existing device.""" current_devices = set(data) @@ -159,9 +159,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib for mower_callback in self.new_devices_callbacks: mower_callback(new_devices) - def _async_add_remove_stay_out_zones( - self, data: dict[str, MowerAttributes] - ) -> None: + def _async_add_remove_stay_out_zones(self, data: MowerDictionary) -> None: """Add new stay-out zones, remove non-existing stay-out zones.""" current_zones = { mower_id: set(mower_data.stay_out_zones.zones) @@ -207,7 +205,7 @@ class AutomowerDataUpdateCoordinator(DataUpdateCoordinator[dict[str, MowerAttrib return current_zones - def _async_add_remove_work_areas(self, data: dict[str, MowerAttributes]) -> None: + def _async_add_remove_work_areas(self, data: MowerDictionary) -> None: """Add new work areas, remove non-existing work areas.""" current_areas = { mower_id: set(mower_data.work_areas) diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index 0eabf5ec0d6..7f728148be3 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_push", "loggers": ["aioautomower"], "quality_scale": "silver", - "requirements": ["aioautomower==2025.1.1"] + "requirements": ["aioautomower==2025.3.2"] } diff --git a/homeassistant/components/husqvarna_automower/number.py b/homeassistant/components/husqvarna_automower/number.py index cdcf4b45a2d..9ed00113d4b 100644 --- a/homeassistant/components/husqvarna_automower/number.py +++ b/homeassistant/components/husqvarna_automower/number.py @@ -44,7 +44,7 @@ async def async_set_work_area_cutting_height( ) -> None: """Set cutting height for work area.""" await coordinator.api.commands.workarea_settings( - mower_id, int(cheight), work_area_id + mower_id, work_area_id, cutting_height=int(cheight) ) diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 2e1d4041e5a..d7a83c82185 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -227,12 +227,16 @@ def _get_work_area_names(data: MowerAttributes) -> list[str]: @callback def _get_current_work_area_name(data: MowerAttributes) -> str: """Return the name of the current work area.""" - if data.mower.work_area_id is None: - return STATE_NO_WORK_AREA_ACTIVE if TYPE_CHECKING: # Sensor does not get created if values are None assert data.work_areas is not None - return data.work_areas[data.mower.work_area_id].name + if ( + data.mower.work_area_id is not None + and data.mower.work_area_id in data.work_areas + ): + return data.work_areas[data.mower.work_area_id].name + + return STATE_NO_WORK_AREA_ACTIVE @callback @@ -295,6 +299,18 @@ MOWER_SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] 
= ( exists_fn=lambda data: data.statistics.cutting_blade_usage_time is not None, value_fn=attrgetter("statistics.cutting_blade_usage_time"), ), + AutomowerSensorEntityDescription( + key="downtime", + translation_key="downtime", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_display_precision=0, + suggested_unit_of_measurement=UnitOfTime.HOURS, + exists_fn=lambda data: data.statistics.downtime is not None, + value_fn=attrgetter("statistics.downtime"), + ), AutomowerSensorEntityDescription( key="total_charging_time", translation_key="total_charging_time", @@ -367,6 +383,18 @@ MOWER_SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( exists_fn=lambda data: data.statistics.total_drive_distance is not None, value_fn=attrgetter("statistics.total_drive_distance"), ), + AutomowerSensorEntityDescription( + key="uptime", + translation_key="uptime", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_display_precision=0, + suggested_unit_of_measurement=UnitOfTime.HOURS, + exists_fn=lambda data: data.statistics.uptime is not None, + value_fn=attrgetter("statistics.uptime"), + ), AutomowerSensorEntityDescription( key="next_start_timestamp", translation_key="next_start_timestamp", diff --git a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index 9bd0bb06b3e..35ce342867f 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -221,6 +221,9 @@ "cutting_blade_usage_time": { "name": "Cutting blade usage time" }, + "downtime": { + "name": "Downtime" + }, "restricted_reason": { "name": "Restricted reason", "state": { @@ -263,6 +266,9 @@ "demo": "Demo" } }, + "uptime": { + "name": "Uptime" + }, "work_area": { "name": "Work area", "state": { diff --git a/homeassistant/components/iaqualink/manifest.json b/homeassistant/components/iaqualink/manifest.json index 2531632075c..7e05bd72f0b 100644 --- a/homeassistant/components/iaqualink/manifest.json +++ b/homeassistant/components/iaqualink/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/iaqualink", "iot_class": "cloud_polling", "loggers": ["iaqualink"], - "requirements": ["iaqualink==0.5.0", "h2==4.1.0"], + "requirements": ["iaqualink==0.5.3", "h2==4.1.0"], "single_config_entry": true } diff --git a/homeassistant/components/icloud/strings.json b/homeassistant/components/icloud/strings.json index adc96043d66..fc78e8c2ba6 100644 --- a/homeassistant/components/icloud/strings.json +++ b/homeassistant/components/icloud/strings.json @@ -46,7 +46,7 @@ "services": { "update": { "name": "Update", - "description": "Updates iCloud devices.", + "description": "Asks for a state update of all devices linked to an iCloud account.", "fields": { "account": { "name": "Account", diff --git a/homeassistant/components/idasen_desk/strings.json b/homeassistant/components/idasen_desk/strings.json index 7486973638b..ccac87a75e0 100644 --- a/homeassistant/components/idasen_desk/strings.json +++ b/homeassistant/components/idasen_desk/strings.json @@ -7,7 +7,7 @@ "address": "Device" }, "data_description": { - "address": "The bluetooth device for the desk." + "address": "The Bluetooth device for the desk." 
} } }, diff --git a/homeassistant/components/igloohome/__init__.py b/homeassistant/components/igloohome/__init__.py index 5e5e21452cf..a3907fcbcf3 100644 --- a/homeassistant/components/igloohome/__init__.py +++ b/homeassistant/components/igloohome/__init__.py @@ -19,7 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.LOCK, Platform.SENSOR] @dataclass @@ -35,7 +35,6 @@ type IgloohomeConfigEntry = ConfigEntry[IgloohomeRuntimeData] async def async_setup_entry(hass: HomeAssistant, entry: IgloohomeConfigEntry) -> bool: """Set up igloohome from a config entry.""" - authentication = IgloohomeAuth( session=async_get_clientsession(hass), client_id=entry.data[CONF_CLIENT_ID], diff --git a/homeassistant/components/igloohome/lock.py b/homeassistant/components/igloohome/lock.py new file mode 100644 index 00000000000..b434c055145 --- /dev/null +++ b/homeassistant/components/igloohome/lock.py @@ -0,0 +1,91 @@ +"""Implementation of the lock platform.""" + +from datetime import timedelta + +from aiohttp import ClientError +from igloohome_api import ( + BRIDGE_JOB_LOCK, + BRIDGE_JOB_UNLOCK, + DEVICE_TYPE_LOCK, + Api as IgloohomeApi, + ApiException, + GetDeviceInfoResponse, +) + +from homeassistant.components.lock import LockEntity, LockEntityFeature +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import IgloohomeConfigEntry +from .entity import IgloohomeBaseEntity +from .utils import get_linked_bridge + +# Scan interval set to allow the lock entity to update the bridge linked to it. +SCAN_INTERVAL = timedelta(hours=1) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IgloohomeConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up lock entities.""" + async_add_entities( + IgloohomeLockEntity( + api_device_info=device, + api=entry.runtime_data.api, + bridge_id=str(bridge), + ) + for device in entry.runtime_data.devices + if device.type == DEVICE_TYPE_LOCK + and (bridge := get_linked_bridge(device.deviceId, entry.runtime_data.devices)) + is not None + ) + + +class IgloohomeLockEntity(IgloohomeBaseEntity, LockEntity): + """Implementation of a device that has locking capabilities.""" + + # Operating on assumed state because there is no API to query the state. 
+ _attr_assumed_state = True + _attr_supported_features = LockEntityFeature.OPEN + _attr_name = None + + def __init__( + self, api_device_info: GetDeviceInfoResponse, api: IgloohomeApi, bridge_id: str + ) -> None: + """Initialize the class.""" + super().__init__( + api_device_info=api_device_info, + api=api, + unique_key="lock", + ) + self.bridge_id = bridge_id + + async def async_lock(self, **kwargs): + """Lock this lock.""" + try: + await self.api.create_bridge_proxied_job( + self.api_device_info.deviceId, self.bridge_id, BRIDGE_JOB_LOCK + ) + except (ApiException, ClientError) as err: + raise HomeAssistantError from err + + async def async_unlock(self, **kwargs): + """Unlock this lock.""" + try: + await self.api.create_bridge_proxied_job( + self.api_device_info.deviceId, self.bridge_id, BRIDGE_JOB_UNLOCK + ) + except (ApiException, ClientError) as err: + raise HomeAssistantError from err + + async def async_open(self, **kwargs): + """Open (unlatch) this lock.""" + try: + await self.api.create_bridge_proxied_job( + self.api_device_info.deviceId, self.bridge_id, BRIDGE_JOB_UNLOCK + ) + except (ApiException, ClientError) as err: + raise HomeAssistantError from err diff --git a/homeassistant/components/igloohome/utils.py b/homeassistant/components/igloohome/utils.py new file mode 100644 index 00000000000..be17912b8b8 --- /dev/null +++ b/homeassistant/components/igloohome/utils.py @@ -0,0 +1,16 @@ +"""House utility functions.""" + +from igloohome_api import DEVICE_TYPE_BRIDGE, GetDeviceInfoResponse + + +def get_linked_bridge( + device_id: str, devices: list[GetDeviceInfoResponse] +) -> str | None: + """Return the ID of the bridge that is linked to the device. None if no bridge is linked.""" + bridges = (bridge for bridge in devices if bridge.type == DEVICE_TYPE_BRIDGE) + for bridge in bridges: + if device_id in ( + linked_device.deviceId for linked_device in bridge.linkedDevices + ): + return bridge.deviceId + return None diff --git a/homeassistant/components/imgw_pib/__init__.py b/homeassistant/components/imgw_pib/__init__.py index f9524316570..4bceee51f8e 100644 --- a/homeassistant/components/imgw_pib/__init__.py +++ b/homeassistant/components/imgw_pib/__init__.py @@ -38,7 +38,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ImgwPibConfigEntry) -> b hydrological_details=False, ) except (ClientError, TimeoutError, ApiError) as err: - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "entry": entry.title, + "error": repr(err), + }, + ) from err coordinator = ImgwPibDataUpdateCoordinator(hass, entry, imgwpib, station_id) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/imgw_pib/config_flow.py b/homeassistant/components/imgw_pib/config_flow.py index 558528fcbef..805bfa2ccb3 100644 --- a/homeassistant/components/imgw_pib/config_flow.py +++ b/homeassistant/components/imgw_pib/config_flow.py @@ -50,7 +50,7 @@ class ImgwPibFlowHandler(ConfigFlow, domain=DOMAIN): hydrological_data = await imgwpib.get_hydrological_data() except (ClientError, TimeoutError, ApiError): errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/imgw_pib/coordinator.py b/homeassistant/components/imgw_pib/coordinator.py index fbe470ca953..f74878d672c 100644 --- 
a/homeassistant/components/imgw_pib/coordinator.py +++ b/homeassistant/components/imgw_pib/coordinator.py @@ -63,4 +63,11 @@ class ImgwPibDataUpdateCoordinator(DataUpdateCoordinator[HydrologicalData]): try: return await self.imgwpib.get_hydrological_data() except ApiError as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "entry": self.config_entry.title, + "error": repr(err), + }, + ) from err diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index 0ecc1b4b7d0..3d8b34055fd 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", - "requirements": ["imgw_pib==1.0.9"] + "requirements": ["imgw_pib==1.0.10"] } diff --git a/homeassistant/components/imgw_pib/sensor.py b/homeassistant/components/imgw_pib/sensor.py index 33b82bbb43b..7871006b2ae 100644 --- a/homeassistant/components/imgw_pib/sensor.py +++ b/homeassistant/components/imgw_pib/sensor.py @@ -24,7 +24,8 @@ from .const import DOMAIN from .coordinator import ImgwPibConfigEntry, ImgwPibDataUpdateCoordinator from .entity import ImgwPibEntity -PARALLEL_UPDATES = 1 +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/imgw_pib/strings.json b/homeassistant/components/imgw_pib/strings.json index 9a17dcf7087..33cd3cb3917 100644 --- a/homeassistant/components/imgw_pib/strings.json +++ b/homeassistant/components/imgw_pib/strings.json @@ -4,6 +4,9 @@ "user": { "data": { "station_id": "Hydrological station" + }, + "data_description": { + "station_id": "Select a hydrological station from the list." 
} } }, @@ -25,5 +28,13 @@ "name": "Water temperature" } } + }, + "exceptions": { + "cannot_connect": { + "message": "An error occurred while connecting to the IMGW-PIB API for {entry}: {error}" + }, + "update_error": { + "message": "An error occurred while retrieving data from the IMGW-PIB API for {entry}: {error}" + } } } diff --git a/homeassistant/components/improv_ble/__init__.py b/homeassistant/components/improv_ble/__init__.py index 985684cb5b8..ff40b65a8d0 100644 --- a/homeassistant/components/improv_ble/__init__.py +++ b/homeassistant/components/improv_ble/__init__.py @@ -1 +1,11 @@ """The Improv BLE integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up improv_ble from a config entry.""" + raise NotImplementedError diff --git a/homeassistant/components/improv_ble/config_flow.py b/homeassistant/components/improv_ble/config_flow.py index 22f2bf3623c..0dcefba6428 100644 --- a/homeassistant/components/improv_ble/config_flow.py +++ b/homeassistant/components/improv_ble/config_flow.py @@ -83,12 +83,9 @@ class ImprovBLEConfigFlow(ConfigFlow, domain=DOMAIN): self._discovery_info = self._discovered_devices[address] return await self.async_step_start_improv() - current_addresses = self._async_current_ids() for discovery in bluetooth.async_discovered_service_info(self.hass): - if ( - discovery.address in current_addresses - or discovery.address in self._discovered_devices - or not device_filter(discovery.advertisement) + if discovery.address in self._discovered_devices or not device_filter( + discovery.advertisement ): continue self._discovered_devices[discovery.address] = discovery @@ -364,6 +361,18 @@ class ImprovBLEConfigFlow(ConfigFlow, domain=DOMAIN): assert self._provision_result is not None result = self._provision_result + if result["type"] == "abort" and result["reason"] in ( + "provision_successful", + "provision_successful_url", + ): + # Delete ignored config entry, if it exists + address = self.context["unique_id"] + current_entries = self._async_current_entries(include_ignore=True) + for entry in current_entries: + if entry.unique_id == address: + _LOGGER.debug("Removing ignored entry: %s", entry) + await self.hass.config_entries.async_remove(entry.entry_id) + break self._provision_result = None return result diff --git a/homeassistant/components/incomfort/config_flow.py b/homeassistant/components/incomfort/config_flow.py index 875bc25bd2f..027c3ad4691 100644 --- a/homeassistant/components/incomfort/config_flow.py +++ b/homeassistant/components/incomfort/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from incomfortclient import InvalidGateway, InvalidHeaterList @@ -31,6 +32,7 @@ from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo from .const import CONF_LEGACY_SETPOINT_STATUS, DOMAIN from .coordinator import InComfortConfigEntry, async_connect_gateway +_LOGGER = logging.getLogger(__name__) TITLE = "Intergas InComfort/Intouch Lan2RF gateway" CONFIG_SCHEMA = vol.Schema( @@ -88,7 +90,8 @@ async def async_try_connect_gateway( return {"base": "no_heaters"} except TimeoutError: return {"base": "timeout_error"} - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return {"base": "unknown"} return None diff --git 
a/homeassistant/components/incomfort/manifest.json index d02b1d27554..825f198dd30 100644 --- a/homeassistant/components/incomfort/manifest.json +++ b/homeassistant/components/incomfort/manifest.json @@ -10,5 +10,6 @@ "documentation": "https://www.home-assistant.io/integrations/incomfort", "iot_class": "local_polling", "loggers": ["incomfortclient"], + "quality_scale": "platinum", "requirements": ["incomfort-client==0.6.7"] } diff --git a/homeassistant/components/incomfort/quality_scale.yaml b/homeassistant/components/incomfort/quality_scale.yaml new file mode 100644 index 00000000000..f5af3c9d061 --- /dev/null +++ b/homeassistant/components/incomfort/quality_scale.yaml @@ -0,0 +1,77 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No actions implemented. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No actions implemented. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: done + comment: | + Entities are set up and updated through the data coordinator. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: done + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: + status: exempt + comment: > + There is a maximum of 3 heaters that can be discovered by the gateway. + The user must manually remove any heater devices that have been replaced. + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + dynamic-devices: done + discovery-update-info: done + repair-issues: + status: exempt + comment: | + No current issues to repair. 
+ docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: + status: done + comment: There are no known limitations. + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/inkbird/manifest.json b/homeassistant/components/inkbird/manifest.json index acc7414edac..aaa9c4b3473 100644 --- a/homeassistant/components/inkbird/manifest.json +++ b/homeassistant/components/inkbird/manifest.json @@ -21,6 +21,18 @@ { "local_name": "tps", "connectable": false + }, + { + "local_name": "ITH-11-B", + "connectable": false + }, + { + "local_name": "ITH-13-B", + "connectable": false + }, + { + "local_name": "ITH-21-B", + "connectable": false } ], "codeowners": ["@bdraco"], @@ -28,5 +40,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/inkbird", "iot_class": "local_push", - "requirements": ["inkbird-ble==0.7.1"] + "requirements": ["inkbird-ble==0.9.0"] } diff --git a/homeassistant/components/insteon/strings.json b/homeassistant/components/insteon/strings.json index 538107dd816..3a15d667ca7 100644 --- a/homeassistant/components/insteon/strings.json +++ b/homeassistant/components/insteon/strings.json @@ -111,7 +111,7 @@ }, "services": { "add_all_link": { - "name": "Add all link", + "name": "Add All-Link", "description": "Tells the Insteon Modem (IM) start All-Linking mode. Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.", "fields": { "group": { @@ -120,13 +120,13 @@ }, "mode": { "name": "[%key:common::config_flow::data::mode%]", - "description": "Linking mode controller - IM is controller responder - IM is responder." + "description": "Linking mode of the Insteon Modem." } } }, "delete_all_link": { - "name": "Delete all link", - "description": "Tells the Insteon Modem (IM) to remove an All-Link record from the All-Link Database of the IM and a device. Once the IM is set to delete the link, press the link button on the corresponding device to complete the process.", + "name": "Delete All-Link", + "description": "Tells the Insteon Modem (IM) to remove an All-Link record from the All-Link database of the IM and a device. Once the IM is set to delete the link, press the link button on the corresponding device to complete the process.", "fields": { "group": { "name": "Group", @@ -135,8 +135,8 @@ } }, "load_all_link_database": { - "name": "Load all link database", - "description": "Load the All-Link Database for a device. WARNING - Loading a device All-LInk database is very time consuming and inconsistent. This may take a LONG time and may need to be repeated to obtain all records.", + "name": "Load All-Link database", + "description": "Loads the All-Link database for a device. WARNING - Loading a device All-Link database is very time consuming and inconsistent. This may take a LONG time and may need to be repeated to obtain all records.", "fields": { "entity_id": { "name": "Entity", @@ -149,8 +149,8 @@ } }, "print_all_link_database": { - "name": "Print all link database", - "description": "Prints the All-Link Database for a device. Requires that the All-Link Database is loaded into memory.", + "name": "Print All-Link database", + "description": "Prints the All-Link database for a device. 
Requires that the All-Link database is loaded into memory.", "fields": { "entity_id": { "name": "Entity", @@ -159,8 +159,8 @@ } }, "print_im_all_link_database": { - "name": "Print IM all link database", - "description": "Prints the All-Link Database for the INSTEON Modem (IM)." + "name": "Print IM All-Link database", + "description": "Prints the All-Link database for the INSTEON Modem (IM)." }, "x10_all_units_off": { "name": "X10 all units off", diff --git a/homeassistant/components/intent/__init__.py b/homeassistant/components/intent/__init__.py index 2f9587e2173..922fa376903 100644 --- a/homeassistant/components/intent/__init__.py +++ b/homeassistant/components/intent/__init__.py @@ -2,13 +2,14 @@ from __future__ import annotations +from collections.abc import Collection import logging from typing import Any, Protocol from aiohttp import web import voluptuous as vol -from homeassistant.components import http +from homeassistant.components import http, sensor from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN from homeassistant.components.cover import ( ATTR_POSITION, @@ -40,7 +41,12 @@ from homeassistant.const import ( SERVICE_TURN_ON, ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State -from homeassistant.helpers import config_validation as cv, integration_platform, intent +from homeassistant.helpers import ( + area_registry as ar, + config_validation as cv, + integration_platform, + intent, +) from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util @@ -454,6 +460,9 @@ class GetTemperatureIntent(intent.IntentHandler): slot_schema = { vol.Optional("area"): intent.non_empty_string, vol.Optional("name"): intent.non_empty_string, + vol.Optional("floor"): intent.non_empty_string, + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, } platforms = {CLIMATE_DOMAIN} @@ -470,13 +479,71 @@ class GetTemperatureIntent(intent.IntentHandler): if "area" in slots: area = slots["area"]["value"] + floor_name: str | None = None + if "floor" in slots: + floor_name = slots["floor"]["value"] + + match_preferences = intent.MatchTargetsPreferences( + area_id=slots.get("preferred_area_id", {}).get("value"), + floor_id=slots.get("preferred_floor_id", {}).get("value"), + ) + + if (not name) and (area or match_preferences.area_id): + # Look for temperature sensors assigned to an area + area_registry = ar.async_get(hass) + area_temperature_ids: dict[str, str] = {} + + # Keep candidates that are registered as area temperature sensors + def area_candidate_filter( + candidate: intent.MatchTargetsCandidate, + possible_area_ids: Collection[str], + ) -> bool: + for area_id in possible_area_ids: + temperature_id = area_temperature_ids.get(area_id) + if (temperature_id is None) and ( + area_entry := area_registry.async_get_area(area_id) + ): + temperature_id = area_entry.temperature_entity_id or "" + area_temperature_ids[area_id] = temperature_id + + if candidate.state.entity_id == temperature_id: + return True + + return False + + match_constraints = intent.MatchTargetsConstraints( + area_name=area, + floor_name=floor_name, + domains=[sensor.DOMAIN], + device_classes=[sensor.SensorDeviceClass.TEMPERATURE], + assistant=intent_obj.assistant, + single_target=True, + ) + match_result = intent.async_match_targets( + hass, + match_constraints, + match_preferences, + area_candidate_filter=area_candidate_filter, + ) + if match_result.is_match: + # Found temperature sensor + response = 
intent_obj.create_response() + response.response_type = intent.IntentResponseType.QUERY_ANSWER + response.async_set_states(matched_states=match_result.states) + return response + + # Look for climate devices match_constraints = intent.MatchTargetsConstraints( name=name, area_name=area, + floor_name=floor_name, domains=[CLIMATE_DOMAIN], assistant=intent_obj.assistant, + single_target=True, + ) + match_result = intent.async_match_targets( + hass, match_constraints, match_preferences ) - match_result = intent.async_match_targets(hass, match_constraints) if not match_result.is_match: raise intent.MatchFailedError( result=match_result, constraints=match_constraints diff --git a/homeassistant/components/iometer/__init__.py b/homeassistant/components/iometer/__init__.py index bbf046e70e9..feb7ce9b8cf 100644 --- a/homeassistant/components/iometer/__init__.py +++ b/homeassistant/components/iometer/__init__.py @@ -12,7 +12,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .coordinator import IOmeterConfigEntry, IOMeterCoordinator -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: IOmeterConfigEntry) -> bool: diff --git a/homeassistant/components/iometer/binary_sensor.py b/homeassistant/components/iometer/binary_sensor.py new file mode 100644 index 00000000000..f443c4ae94a --- /dev/null +++ b/homeassistant/components/iometer/binary_sensor.py @@ -0,0 +1,87 @@ +"""IOmeter binary sensor.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import IOMeterCoordinator, IOmeterData +from .entity import IOmeterEntity + + +@dataclass(frozen=True, kw_only=True) +class IOmeterBinarySensorDescription(BinarySensorEntityDescription): + """Describes Iometer binary sensor entity.""" + + value_fn: Callable[[IOmeterData], bool | None] + + +SENSOR_TYPES: list[IOmeterBinarySensorDescription] = [ + IOmeterBinarySensorDescription( + key="connection_status", + translation_key="connection_status", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_registry_enabled_default=False, + value_fn=lambda data: ( + data.status.device.core.connection_status == "connected" + if data.status.device.core.connection_status is not None + else None + ), + ), + IOmeterBinarySensorDescription( + key="attachment_status", + translation_key="attachment_status", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_registry_enabled_default=False, + value_fn=lambda data: ( + data.status.device.core.attachment_status == "attached" + if data.status.device.core.attachment_status is not None + else None + ), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Sensors.""" + coordinator: IOMeterCoordinator = config_entry.runtime_data + + async_add_entities( + IOmeterBinarySensor( + coordinator=coordinator, + description=description, + ) + for description in SENSOR_TYPES + ) + + +class IOmeterBinarySensor(IOmeterEntity, BinarySensorEntity): + """Defines a IOmeter binary sensor.""" + + 
entity_description: IOmeterBinarySensorDescription + + def __init__( + self, + coordinator: IOMeterCoordinator, + description: IOmeterBinarySensorDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.identifier}_{description.key}" + + @property + def is_on(self) -> bool | None: + """Return the binary sensor state.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/iometer/coordinator.py b/homeassistant/components/iometer/coordinator.py index 708983fb28e..4050341151b 100644 --- a/homeassistant/components/iometer/coordinator.py +++ b/homeassistant/components/iometer/coordinator.py @@ -8,6 +8,7 @@ from iometer import IOmeterClient, IOmeterConnectionError, Reading, Status from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -31,6 +32,7 @@ class IOMeterCoordinator(DataUpdateCoordinator[IOmeterData]): config_entry: IOmeterConfigEntry client: IOmeterClient + current_fw_version: str = "" def __init__( self, @@ -58,4 +60,17 @@ class IOMeterCoordinator(DataUpdateCoordinator[IOmeterData]): except IOmeterConnectionError as error: raise UpdateFailed(f"Error communicating with IOmeter: {error}") from error + fw_version = f"{status.device.core.version}/{status.device.bridge.version}" + if self.current_fw_version and fw_version != self.current_fw_version: + device_registry = dr.async_get(self.hass) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, status.device.id)} + ) + assert device_entry + device_registry.async_update_device( + device_entry.id, + sw_version=fw_version, + ) + self.current_fw_version = fw_version + return IOmeterData(reading=reading, status=status) diff --git a/homeassistant/components/iometer/entity.py b/homeassistant/components/iometer/entity.py index 86494857e18..a52ef1c66ed 100644 --- a/homeassistant/components/iometer/entity.py +++ b/homeassistant/components/iometer/entity.py @@ -20,5 +20,5 @@ class IOmeterEntity(CoordinatorEntity[IOMeterCoordinator]): identifiers={(DOMAIN, status.device.id)}, manufacturer="IOmeter GmbH", model="IOmeter", - sw_version=f"{status.device.core.version}/{status.device.bridge.version}", + sw_version=coordinator.current_fw_version, ) diff --git a/homeassistant/components/iometer/strings.json b/homeassistant/components/iometer/strings.json index 31deb16aa9c..b3878dd1b53 100644 --- a/homeassistant/components/iometer/strings.json +++ b/homeassistant/components/iometer/strings.json @@ -60,6 +60,14 @@ "wifi_rssi": { "name": "Signal strength Wi-Fi" } + }, + "binary_sensor": { + "connection_status": { + "name": "Core/Bridge connection status" + }, + "attachment_status": { + "name": "Core attachment status" + } } } } diff --git a/homeassistant/components/iqvia/strings.json b/homeassistant/components/iqvia/strings.json index 5dc0dea53d5..a0697a6c210 100644 --- a/homeassistant/components/iqvia/strings.json +++ b/homeassistant/components/iqvia/strings.json @@ -4,7 +4,7 @@ "user": { "description": "Fill out your U.S. 
or Canadian ZIP code.", "data": { - "zip_code": "ZIP Code" + "zip_code": "ZIP code" } } }, diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index fc89ecea43c..84c9b895766 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -8,6 +8,7 @@ from enum import Enum import logging from typing import cast +from awesomeversion import AwesomeVersion from pynecil import ( CharSetting, CommunicationError, @@ -34,6 +35,8 @@ SCAN_INTERVAL = timedelta(seconds=5) SCAN_INTERVAL_GITHUB = timedelta(hours=3) SCAN_INTERVAL_SETTINGS = timedelta(seconds=60) +V223 = AwesomeVersion("v2.23") + @dataclass class IronOSCoordinators: @@ -72,6 +75,7 @@ class IronOSBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]): ), ) self.device = device + self.v223_features = False async def _async_setup(self) -> None: """Set up the coordinator.""" @@ -81,6 +85,8 @@ class IronOSBaseCoordinator[_DataT](DataUpdateCoordinator[_DataT]): except CommunicationError as e: raise UpdateFailed("Cannot connect to device") from e + self.v223_features = AwesomeVersion(self.device_info.build) >= V223 + class IronOSLiveDataCoordinator(IronOSBaseCoordinator[LiveDataResponse]): """IronOS coordinator.""" diff --git a/homeassistant/components/iron_os/icons.json b/homeassistant/components/iron_os/icons.json index 6410c561b9d..695b9d16849 100644 --- a/homeassistant/components/iron_os/icons.json +++ b/homeassistant/components/iron_os/icons.json @@ -73,6 +73,9 @@ }, "power_limit": { "default": "mdi:flash-alert" + }, + "hall_effect_sleep_time": { + "default": "mdi:timer-sand" } }, "select": { @@ -105,6 +108,9 @@ }, "usb_pd_mode": { "default": "mdi:meter-electric-outline" + }, + "tip_type": { + "default": "mdi:pencil-outline" } }, "sensor": { @@ -154,7 +160,16 @@ "soldering": "mdi:soldering-iron", "sleeping": "mdi:sleep", "settings": "mdi:menu-open", - "debug": "mdi:bug-play" + "debug": "mdi:bug-play", + "soldering_profile": "mdi:chart-box-outline", + "temperature_adjust": "mdi:thermostat-box", + "usb_pd_debug": "mdi:bug-play", + "thermal_runaway": "mdi:fire-alert", + "startup_logo": "mdi:dots-circle", + "cjc_calibration": "mdi:tune-vertical", + "startup_warnings": "mdi:alert", + "initialisation_done": "mdi:check-circle", + "hibernating": "mdi:sleep" } }, "estimated_power": { diff --git a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index 462e75c5b6e..58cbdaa3bc6 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -13,5 +13,6 @@ "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", "loggers": ["pynecil"], - "requirements": ["pynecil==4.0.1"] + "quality_scale": "platinum", + "requirements": ["pynecil==4.1.0"] } diff --git a/homeassistant/components/iron_os/number.py b/homeassistant/components/iron_os/number.py index b8bb3c7d999..6ad5947cb6f 100644 --- a/homeassistant/components/iron_os/number.py +++ b/homeassistant/components/iron_os/number.py @@ -65,6 +65,7 @@ class PinecilNumber(StrEnum): VOLTAGE_DIV = "voltage_div" TEMP_INCREMENT_SHORT = "temp_increment_short" TEMP_INCREMENT_LONG = "temp_increment_long" + HALL_EFFECT_SLEEP_TIME = "hall_effect_sleep_time" def multiply(value: float | None, multiplier: float) -> float | None: @@ -323,6 +324,23 @@ PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] 
= ( ), ) +PINECIL_NUMBER_DESCRIPTIONS_V223: tuple[IronOSNumberEntityDescription, ...] = ( + IronOSNumberEntityDescription( + key=PinecilNumber.HALL_EFFECT_SLEEP_TIME, + translation_key=PinecilNumber.HALL_EFFECT_SLEEP_TIME, + value_fn=(lambda _, settings: settings.get("hall_sleep_time")), + characteristic=CharSetting.HALL_SLEEP_TIME, + raw_value_fn=lambda value: value, + mode=NumberMode.BOX, + native_min_value=0, + native_max_value=60, + native_step=5, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfTime.SECONDS, + entity_registry_enabled_default=False, + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -331,10 +349,13 @@ async def async_setup_entry( ) -> None: """Set up number entities from a config entry.""" coordinators = entry.runtime_data + descriptions = PINECIL_NUMBER_DESCRIPTIONS + + if coordinators.live_data.v223_features: + descriptions += PINECIL_NUMBER_DESCRIPTIONS_V223 async_add_entities( - IronOSNumberEntity(coordinators, description) - for description in PINECIL_NUMBER_DESCRIPTIONS + IronOSNumberEntity(coordinators, description) for description in descriptions ) diff --git a/homeassistant/components/iron_os/quality_scale.yaml b/homeassistant/components/iron_os/quality_scale.yaml index c80b8b5adfe..8f7eb5ff36a 100644 --- a/homeassistant/components/iron_os/quality_scale.yaml +++ b/homeassistant/components/iron_os/quality_scale.yaml @@ -21,8 +21,10 @@ rules: entity-unique-id: done has-entity-name: done runtime-data: done - test-before-configure: todo - test-before-setup: todo + test-before-configure: + status: exempt + comment: Device is set up from a Bluetooth discovery + test-before-setup: done unique-config-entry: done # Silver @@ -70,7 +72,9 @@ rules: repair-issues: status: exempt comment: no repairs/issues - stale-devices: todo + stale-devices: + status: exempt + comment: Stale devices are removed with the config entry as there is only one device per entry # Platinum async-dependency: done diff --git a/homeassistant/components/iron_os/select.py b/homeassistant/components/iron_os/select.py index a005bf29af2..32652829531 100644 --- a/homeassistant/components/iron_os/select.py +++ b/homeassistant/components/iron_os/select.py @@ -17,6 +17,7 @@ from pynecil import ( ScrollSpeed, SettingsDataResponse, TempUnit, + TipType, USBPDMode, ) @@ -53,6 +54,7 @@ class PinecilSelect(StrEnum): LOCKING_MODE = "locking_mode" LOGO_DURATION = "logo_duration" USB_PD_MODE = "usb_pd_mode" + TIP_TYPE = "tip_type" def enum_to_str(enum: Enum | None) -> str | None: @@ -138,6 +140,8 @@ PINECIL_SELECT_DESCRIPTIONS: tuple[IronOSSelectEntityDescription, ...] = ( entity_category=EntityCategory.CONFIG, entity_registry_enabled_default=False, ), +) +PINECIL_SELECT_DESCRIPTIONS_V222: tuple[IronOSSelectEntityDescription, ...] = ( IronOSSelectEntityDescription( key=PinecilSelect.USB_PD_MODE, translation_key=PinecilSelect.USB_PD_MODE, @@ -149,6 +153,27 @@ PINECIL_SELECT_DESCRIPTIONS: tuple[IronOSSelectEntityDescription, ...] = ( entity_registry_enabled_default=False, ), ) +PINECIL_SELECT_DESCRIPTIONS_V223: tuple[IronOSSelectEntityDescription, ...] 
= ( + IronOSSelectEntityDescription( + key=PinecilSelect.USB_PD_MODE, + translation_key=PinecilSelect.USB_PD_MODE, + characteristic=CharSetting.USB_PD_MODE, + value_fn=lambda x: enum_to_str(x.get("usb_pd_mode")), + raw_value_fn=lambda value: USBPDMode[value.upper()], + options=[x.name.lower() for x in USBPDMode], + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + ), + IronOSSelectEntityDescription( + key=PinecilSelect.TIP_TYPE, + translation_key=PinecilSelect.TIP_TYPE, + characteristic=CharSetting.TIP_TYPE, + value_fn=lambda x: enum_to_str(x.get("tip_type")), + raw_value_fn=lambda value: TipType[value.upper()], + options=[x.name.lower() for x in TipType], + entity_category=EntityCategory.CONFIG, + ), +) async def async_setup_entry( @@ -157,11 +182,17 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up select entities from a config entry.""" - coordinator = entry.runtime_data + coordinators = entry.runtime_data + descriptions = PINECIL_SELECT_DESCRIPTIONS + + descriptions += ( + PINECIL_SELECT_DESCRIPTIONS_V223 + if coordinators.live_data.v223_features + else PINECIL_SELECT_DESCRIPTIONS_V222 + ) async_add_entities( - IronOSSelectEntity(coordinator, description) - for description in PINECIL_SELECT_DESCRIPTIONS + IronOSSelectEntity(coordinators, description) for description in descriptions ) diff --git a/homeassistant/components/iron_os/strings.json b/homeassistant/components/iron_os/strings.json index 60168699427..ddae9a3020f 100644 --- a/homeassistant/components/iron_os/strings.json +++ b/homeassistant/components/iron_os/strings.json @@ -94,6 +94,9 @@ }, "temp_increment_long": { "name": "Long-press temperature step" + }, + "hall_effect_sleep_time": { + "name": "Hall sensor sleep timeout" } }, "select": { @@ -173,6 +176,15 @@ "off": "[%key:common::state::off%]", "on": "[%key:common::state::on%]" } + }, + "tip_type": { + "name": "Soldering tip type", + "state": { + "auto": "Auto sense", + "ts100_long": "TS100 long/Hakko T12 tip", + "pine_short": "Pinecil short tip", + "pts200": "PTS200 short tip" + } } }, "sensor": { @@ -223,7 +235,16 @@ "sleeping": "Sleeping", "settings": "Settings", "debug": "Debug", - "boost": "Boost" + "boost": "Boost", + "soldering_profile": "Soldering profile", + "temperature_adjust": "Temperature adjust", + "usb_pd_debug": "USB PD debug", + "thermal_runaway": "Thermal runaway", + "startup_logo": "Booting", + "cjc_calibration": "CJC calibration", + "startup_warnings": "Startup warnings", + "initialisation_done": "Initialisation done", + "hibernating": "Hibernating" } }, "estimated_power": { diff --git a/homeassistant/components/ista_ecotrend/sensor.py b/homeassistant/components/ista_ecotrend/sensor.py index ee54e502c26..0a8ed6e9ddb 100644 --- a/homeassistant/components/ista_ecotrend/sensor.py +++ b/homeassistant/components/ista_ecotrend/sensor.py @@ -8,6 +8,7 @@ import datetime from enum import StrEnum import logging +from homeassistant.components.recorder.models import StatisticMeanType from homeassistant.components.recorder.models.statistics import ( StatisticData, StatisticMetaData, @@ -270,7 +271,7 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity): ] metadata: StatisticMetaData = { - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": f"{self.device_entry.name} {self.name}", "source": DOMAIN, diff --git a/homeassistant/components/isy994/__init__.py b/homeassistant/components/isy994/__init__.py index 738c7e2d5ad..e387196ba94 100644 
--- a/homeassistant/components/isy994/__init__.py +++ b/homeassistant/components/isy994/__init__.py @@ -138,7 +138,7 @@ async def async_setup_entry( for vtype, _, vid in isy.variables.children: numbers.append(isy.variables[vtype][vid]) if ( - isy.conf[CONFIG_NETWORKING] or isy.conf[CONFIG_PORTAL] + isy.conf[CONFIG_NETWORKING] or isy.conf.get(CONFIG_PORTAL) ) and isy.networking.nobjs: isy_data.devices[CONF_NETWORK] = _create_service_device_info( isy, name=CONFIG_NETWORKING, unique_id=CONF_NETWORK diff --git a/homeassistant/components/isy994/manifest.json b/homeassistant/components/isy994/manifest.json index 3aa81027b4f..eb804d7af09 100644 --- a/homeassistant/components/isy994/manifest.json +++ b/homeassistant/components/isy994/manifest.json @@ -24,7 +24,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pyisy"], - "requirements": ["pyisy==3.1.14"], + "requirements": ["pyisy==3.1.15"], "ssdp": [ { "manufacturer": "Universal Devices Inc.", diff --git a/homeassistant/components/isy994/sensor.py b/homeassistant/components/isy994/sensor.py index 2655f4d3c4e..2d27f4602c6 100644 --- a/homeassistant/components/isy994/sensor.py +++ b/homeassistant/components/isy994/sensor.py @@ -97,9 +97,9 @@ ISY_CONTROL_TO_DEVICE_CLASS = { "WEIGHT": SensorDeviceClass.WEIGHT, "WINDCH": SensorDeviceClass.TEMPERATURE, } -ISY_CONTROL_TO_STATE_CLASS = { - control: SensorStateClass.MEASUREMENT for control in ISY_CONTROL_TO_DEVICE_CLASS -} +ISY_CONTROL_TO_STATE_CLASS = dict.fromkeys( + ISY_CONTROL_TO_DEVICE_CLASS, SensorStateClass.MEASUREMENT +) ISY_CONTROL_TO_ENTITY_CATEGORY = { PROP_RAMP_RATE: EntityCategory.DIAGNOSTIC, PROP_ON_LEVEL: EntityCategory.DIAGNOSTIC, diff --git a/homeassistant/components/jellyfin/client_wrapper.py b/homeassistant/components/jellyfin/client_wrapper.py index ab5d5e7d7f8..91fe0885e4c 100644 --- a/homeassistant/components/jellyfin/client_wrapper.py +++ b/homeassistant/components/jellyfin/client_wrapper.py @@ -97,16 +97,27 @@ def get_artwork_url( client: JellyfinClient, item: dict[str, Any], max_width: int = 600 ) -> str | None: """Find a suitable thumbnail for an item.""" - artwork_id: str = item["Id"] - artwork_type = "Primary" + artwork_id: str | None = None + artwork_type: str | None = None parent_backdrop_id: str | None = item.get("ParentBackdropItemId") - if "Backdrop" in item[ITEM_KEY_IMAGE_TAGS]: + if "AlbumPrimaryImageTag" in item: + # jellyfin_apiclient_python doesn't support passing a specific tag to `.artwork`, + # so we don't use the actual value of AlbumPrimaryImageTag. + # However, its mere presence tells us that the album does have primary artwork, + # and the resulting URL will pull the primary album art even if the tag is not specified. 
+ artwork_type = "Primary" + artwork_id = item["AlbumId"] + elif "Backdrop" in item[ITEM_KEY_IMAGE_TAGS]: artwork_type = "Backdrop" + artwork_id = item["Id"] elif parent_backdrop_id: artwork_type = "Backdrop" artwork_id = parent_backdrop_id - elif "Primary" not in item[ITEM_KEY_IMAGE_TAGS]: + elif "Primary" in item[ITEM_KEY_IMAGE_TAGS]: + artwork_type = "Primary" + artwork_id = item["Id"] + else: return None return str(client.jellyfin.artwork(artwork_id, artwork_type, max_width)) diff --git a/homeassistant/components/jewish_calendar/__init__.py b/homeassistant/components/jewish_calendar/__init__.py index 823e9bd59be..47d60d74938 100644 --- a/homeassistant/components/jewish_calendar/__init__.py +++ b/homeassistant/components/jewish_calendar/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations from functools import partial +import logging from hdate import Location @@ -14,7 +15,9 @@ from homeassistant.const import ( CONF_TIME_ZONE, Platform, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers.typing import ConfigType from .const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -24,10 +27,21 @@ from .const import ( DEFAULT_DIASPORA, DEFAULT_HAVDALAH_OFFSET_MINUTES, DEFAULT_LANGUAGE, + DOMAIN, ) from .entity import JewishCalendarConfigEntry, JewishCalendarData +from .service import async_setup_services +_LOGGER = logging.getLogger(__name__) PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Jewish Calendar service.""" + async_setup_services(hass) + + return True async def async_setup_entry( @@ -80,3 +94,49 @@ async def async_unload_entry( ) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) + + +async def async_migrate_entry( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry +) -> bool: + """Migrate old entry.""" + + _LOGGER.debug("Migrating from version %s", config_entry.version) + + @callback + def update_unique_id( + entity_entry: er.RegistryEntry, + ) -> dict[str, str] | None: + """Update unique ID of entity entry.""" + key_translations = { + "first_light": "alot_hashachar", + "talit": "talit_and_tefillin", + "sunrise": "netz_hachama", + "gra_end_shma": "sof_zman_shema_gra", + "mga_end_shma": "sof_zman_shema_mga", + "gra_end_tfila": "sof_zman_tfilla_gra", + "mga_end_tfila": "sof_zman_tfilla_mga", + "midday": "chatzot_hayom", + "big_mincha": "mincha_gedola", + "small_mincha": "mincha_ketana", + "plag_mincha": "plag_hamincha", + "sunset": "shkia", + "first_stars": "tset_hakohavim_tsom", + "three_stars": "tset_hakohavim_shabbat", + } + old_keys = tuple(key_translations.keys()) + if entity_entry.unique_id.endswith(old_keys): + old_key = entity_entry.unique_id.split("-")[1] + new_unique_id = f"{config_entry.entry_id}-{key_translations[old_key]}" + return {"new_unique_id": new_unique_id} + return None + + if config_entry.version > 1: + # This means the user has downgraded from a future version + return False + + if config_entry.version == 1: + await er.async_migrate_entries(hass, config_entry.entry_id, update_unique_id) + hass.config_entries.async_update_entry(config_entry, version=2) + + return True diff --git a/homeassistant/components/jewish_calendar/binary_sensor.py 
b/homeassistant/components/jewish_calendar/binary_sensor.py index 5ff3171b7de..f33d79a01f5 100644 --- a/homeassistant/components/jewish_calendar/binary_sensor.py +++ b/homeassistant/components/jewish_calendar/binary_sensor.py @@ -5,9 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass import datetime as dt -from datetime import datetime -import hdate from hdate.zmanim import Zmanim from homeassistant.components.binary_sensor import ( @@ -27,7 +25,7 @@ from .entity import JewishCalendarConfigEntry, JewishCalendarEntity class JewishCalendarBinarySensorMixIns(BinarySensorEntityDescription): """Binary Sensor description mixin class for Jewish Calendar.""" - is_on: Callable[[Zmanim], bool] = lambda _: False + is_on: Callable[[Zmanim, dt.datetime], bool] = lambda _, __: False @dataclass(frozen=True) @@ -42,18 +40,18 @@ BINARY_SENSORS: tuple[JewishCalendarBinarySensorEntityDescription, ...] = ( key="issur_melacha_in_effect", name="Issur Melacha in Effect", icon="mdi:power-plug-off", - is_on=lambda state: bool(state.issur_melacha_in_effect), + is_on=lambda state, now: bool(state.issur_melacha_in_effect(now)), ), JewishCalendarBinarySensorEntityDescription( key="erev_shabbat_hag", name="Erev Shabbat/Hag", - is_on=lambda state: bool(state.erev_shabbat_chag), + is_on=lambda state, now: bool(state.erev_shabbat_chag(now)), entity_registry_enabled_default=False, ), JewishCalendarBinarySensorEntityDescription( key="motzei_shabbat_hag", name="Motzei Shabbat/Hag", - is_on=lambda state: bool(state.motzei_shabbat_chag), + is_on=lambda state, now: bool(state.motzei_shabbat_chag(now)), entity_registry_enabled_default=False, ), ) @@ -84,16 +82,16 @@ class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity): def is_on(self) -> bool: """Return true if sensor is on.""" zmanim = self._get_zmanim() - return self.entity_description.is_on(zmanim) + return self.entity_description.is_on(zmanim, dt_util.now()) def _get_zmanim(self) -> Zmanim: """Return the Zmanim object for now().""" - return hdate.Zmanim( - date=dt_util.now(), + return Zmanim( + date=dt.date.today(), location=self._location, candle_lighting_offset=self._candle_lighting_offset, havdalah_offset=self._havdalah_offset, - hebrew=self._hebrew, + language=self._language, ) async def async_added_to_hass(self) -> None: @@ -109,7 +107,7 @@ class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity): return await super().async_will_remove_from_hass() @callback - def _update(self, now: datetime | None = None) -> None: + def _update(self, now: dt.datetime | None = None) -> None: """Update the state of the sensor.""" self._update_unsub = None self._schedule_update() @@ -119,7 +117,7 @@ class JewishCalendarBinarySensor(JewishCalendarEntity, BinarySensorEntity): """Schedule the next update of the sensor.""" now = dt_util.now() zmanim = self._get_zmanim() - update = zmanim.zmanim["sunrise"] + dt.timedelta(days=1) + update = zmanim.netz_hachama.local + dt.timedelta(days=1) candle_lighting = zmanim.candle_lighting if candle_lighting is not None and now < candle_lighting < update: update = candle_lighting diff --git a/homeassistant/components/jewish_calendar/config_flow.py b/homeassistant/components/jewish_calendar/config_flow.py index a2eadbf57bd..23bcb23435b 100644 --- a/homeassistant/components/jewish_calendar/config_flow.py +++ b/homeassistant/components/jewish_calendar/config_flow.py @@ -86,7 +86,7 @@ def _get_data_schema(hass: HomeAssistant) -> vol.Schema: class 
JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Jewish calendar.""" - VERSION = 1 + VERSION = 2 @staticmethod @callback diff --git a/homeassistant/components/jewish_calendar/const.py b/homeassistant/components/jewish_calendar/const.py index 4af76a8927b..0d5455fcd86 100644 --- a/homeassistant/components/jewish_calendar/const.py +++ b/homeassistant/components/jewish_calendar/const.py @@ -2,6 +2,9 @@ DOMAIN = "jewish_calendar" +ATTR_DATE = "date" +ATTR_NUSACH = "nusach" + CONF_DIASPORA = "diaspora" CONF_CANDLE_LIGHT_MINUTES = "candle_lighting_minutes_before_sunset" CONF_HAVDALAH_OFFSET_MINUTES = "havdalah_minutes_after_sunset" @@ -11,3 +14,5 @@ DEFAULT_CANDLE_LIGHT = 18 DEFAULT_DIASPORA = False DEFAULT_HAVDALAH_OFFSET_MINUTES = 0 DEFAULT_LANGUAGE = "english" + +SERVICE_COUNT_OMER = "count_omer" diff --git a/homeassistant/components/jewish_calendar/entity.py b/homeassistant/components/jewish_calendar/entity.py index 1d2a6e45c0a..2c031f0d160 100644 --- a/homeassistant/components/jewish_calendar/entity.py +++ b/homeassistant/components/jewish_calendar/entity.py @@ -3,6 +3,7 @@ from dataclasses import dataclass from hdate import Location +from hdate.translator import Language from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo @@ -17,7 +18,7 @@ type JewishCalendarConfigEntry = ConfigEntry[JewishCalendarData] class JewishCalendarData: """Jewish Calendar runtime dataclass.""" - language: str + language: Language diaspora: bool location: Location candle_lighting_offset: int @@ -43,7 +44,6 @@ class JewishCalendarEntity(Entity): ) data = config_entry.runtime_data self._location = data.location - self._hebrew = data.language == "hebrew" self._language = data.language self._candle_lighting_offset = data.candle_lighting_offset self._havdalah_offset = data.havdalah_offset diff --git a/homeassistant/components/jewish_calendar/icons.json b/homeassistant/components/jewish_calendar/icons.json new file mode 100644 index 00000000000..24b922df7a2 --- /dev/null +++ b/homeassistant/components/jewish_calendar/icons.json @@ -0,0 +1,7 @@ +{ + "services": { + "count_omer": { + "service": "mdi:counter" + } + } +} diff --git a/homeassistant/components/jewish_calendar/manifest.json b/homeassistant/components/jewish_calendar/manifest.json index aca45320002..877c4cf9a99 100644 --- a/homeassistant/components/jewish_calendar/manifest.json +++ b/homeassistant/components/jewish_calendar/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/jewish_calendar", "iot_class": "calculated", "loggers": ["hdate"], - "requirements": ["hdate==0.11.1"], + "requirements": ["hdate[astral]==1.0.3"], "single_config_entry": true } diff --git a/homeassistant/components/jewish_calendar/sensor.py b/homeassistant/components/jewish_calendar/sensor.py index eee1d966ae6..7cb281b3af4 100644 --- a/homeassistant/components/jewish_calendar/sensor.py +++ b/homeassistant/components/jewish_calendar/sensor.py @@ -2,12 +2,13 @@ from __future__ import annotations -from datetime import date as Date +import datetime as dt import logging -from typing import Any, cast +from typing import Any -from hdate import HDate, HebrewDate, htables -from hdate.zmanim import Zmanim +from hdate import HDateInfo, Zmanim +from hdate.holidays import HolidayDatabase +from hdate.parasha import Parasha from homeassistant.components.sensor import ( SensorDeviceClass, @@ -59,83 +60,83 @@ INFO_SENSORS: tuple[SensorEntityDescription, ...] 
= ( TIME_SENSORS: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( - key="first_light", + key="alot_hashachar", name="Alot Hashachar", # codespell:ignore alot icon="mdi:weather-sunset-up", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="talit", + key="talit_and_tefillin", name="Talit and Tefillin", icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="sunrise", + key="netz_hachama", name="Hanetz Hachama", icon="mdi:calendar-clock", ), SensorEntityDescription( - key="gra_end_shma", + key="sof_zman_shema_gra", name='Latest time for Shma Gr"a', icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="mga_end_shma", + key="sof_zman_shema_mga", name='Latest time for Shma MG"A', icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="gra_end_tfila", + key="sof_zman_tfilla_gra", name='Latest time for Tefilla Gr"a', icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="mga_end_tfila", + key="sof_zman_tfilla_mga", name='Latest time for Tefilla MG"A', icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="midday", + key="chatzot_hayom", name="Chatzot Hayom", icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="big_mincha", + key="mincha_gedola", name="Mincha Gedola", icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="small_mincha", + key="mincha_ketana", name="Mincha Ketana", icon="mdi:calendar-clock", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="plag_mincha", + key="plag_hamincha", name="Plag Hamincha", icon="mdi:weather-sunset-down", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="sunset", + key="shkia", name="Shkia", icon="mdi:weather-sunset", ), SensorEntityDescription( - key="first_stars", + key="tset_hakohavim_tsom", name="T'set Hakochavim", icon="mdi:weather-night", entity_registry_enabled_default=False, ), SensorEntityDescription( - key="three_stars", + key="tset_hakohavim_shabbat", name="T'set Hakochavim, 3 stars", icon="mdi:weather-night", entity_registry_enabled_default=False, @@ -212,7 +213,9 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): _LOGGER.debug("Now: %s Sunset: %s", now, sunset) - daytime_date = HDate(today, diaspora=self._diaspora, hebrew=self._hebrew) + daytime_date = HDateInfo( + today, diaspora=self._diaspora, language=self._language + ) # The Jewish day starts after darkness (called "tzais") and finishes at # sunset ("shkia"). 
The time in between is a gray area @@ -238,14 +241,14 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): "New value for %s: %s", self.entity_description.key, self._attr_native_value ) - def make_zmanim(self, date: Date) -> Zmanim: + def make_zmanim(self, date: dt.date) -> Zmanim: """Create a Zmanim object.""" return Zmanim( date=date, location=self._location, candle_lighting_offset=self._candle_lighting_offset, havdalah_offset=self._havdalah_offset, - hebrew=self._hebrew, + language=self._language, ) @property @@ -254,43 +257,40 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): return self._attrs def get_state( - self, daytime_date: HDate, after_shkia_date: HDate, after_tzais_date: HDate + self, + daytime_date: HDateInfo, + after_shkia_date: HDateInfo, + after_tzais_date: HDateInfo, ) -> Any | None: """For a given type of sensor, return the state.""" # Terminology note: by convention in py-libhdate library, "upcoming" # refers to "current" or "upcoming" dates. if self.entity_description.key == "date": - hdate = cast(HebrewDate, after_shkia_date.hdate) - month = htables.MONTHS[hdate.month.value - 1] + hdate = after_shkia_date.hdate + hdate.month.set_language(self._language) self._attrs = { - "hebrew_year": hdate.year, - "hebrew_month_name": month.hebrew if self._hebrew else month.english, - "hebrew_day": hdate.day, + "hebrew_year": str(hdate.year), + "hebrew_month_name": str(hdate.month), + "hebrew_day": str(hdate.day), } - return after_shkia_date.hebrew_date + return after_shkia_date.hdate if self.entity_description.key == "weekly_portion": - self._attr_options = [ - (p.hebrew if self._hebrew else p.english) for p in htables.PARASHAOT - ] + self._attr_options = list(Parasha) # Compute the weekly portion based on the upcoming shabbat. 
return after_tzais_date.upcoming_shabbat.parasha if self.entity_description.key == "holiday": - _id = _type = _type_id = "" - _holiday_type = after_shkia_date.holiday_type - if isinstance(_holiday_type, list): - _id = ", ".join(after_shkia_date.holiday_name) - _type = ", ".join([_htype.name for _htype in _holiday_type]) - _type_id = ", ".join([str(_htype.value) for _htype in _holiday_type]) - else: - _id = after_shkia_date.holiday_name - _type = _holiday_type.name - _type_id = _holiday_type.value - self._attrs = {"id": _id, "type": _type, "type_id": _type_id} - self._attr_options = htables.get_all_holidays(self._language) - - return after_shkia_date.holiday_description + _holidays = after_shkia_date.holidays + _id = ", ".join(holiday.name for holiday in _holidays) + _type = ", ".join( + dict.fromkeys(_holiday.type.name for _holiday in _holidays) + ) + self._attrs = {"id": _id, "type": _type} + self._attr_options = HolidayDatabase(self._diaspora).get_all_names( + self._language + ) + return ", ".join(str(holiday) for holiday in _holidays) if _holidays else "" if self.entity_description.key == "omer_count": - return after_shkia_date.omer_day + return after_shkia_date.omer.total_days if after_shkia_date.omer else 0 if self.entity_description.key == "daf_yomi": return daytime_date.daf_yomi @@ -303,7 +303,10 @@ class JewishCalendarTimeSensor(JewishCalendarSensor): _attr_device_class = SensorDeviceClass.TIMESTAMP def get_state( - self, daytime_date: HDate, after_shkia_date: HDate, after_tzais_date: HDate + self, + daytime_date: HDateInfo, + after_shkia_date: HDateInfo, + after_tzais_date: HDateInfo, ) -> Any | None: """For a given type of sensor, return the state.""" if self.entity_description.key == "upcoming_shabbat_candle_lighting": @@ -325,5 +328,5 @@ class JewishCalendarTimeSensor(JewishCalendarSensor): ) return times.havdalah - times = self.make_zmanim(dt_util.now()).zmanim - return times[self.entity_description.key] + times = self.make_zmanim(dt_util.now().date()) + return times.zmanim[self.entity_description.key].local diff --git a/homeassistant/components/jewish_calendar/service.py b/homeassistant/components/jewish_calendar/service.py new file mode 100644 index 00000000000..7c3c7a21f1c --- /dev/null +++ b/homeassistant/components/jewish_calendar/service.py @@ -0,0 +1,63 @@ +"""Services for Jewish Calendar.""" + +import datetime +from typing import cast + +from hdate import HebrewDate +from hdate.omer import Nusach, Omer +from hdate.translator import Language +import voluptuous as vol + +from homeassistant.const import CONF_LANGUAGE +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.selector import LanguageSelector, LanguageSelectorConfig + +from .const import ATTR_DATE, ATTR_NUSACH, DOMAIN, SERVICE_COUNT_OMER + +SUPPORTED_LANGUAGES = {"en": "english", "fr": "french", "he": "hebrew"} +OMER_SCHEMA = vol.Schema( + { + vol.Required(ATTR_DATE, default=datetime.date.today): cv.date, + vol.Required(ATTR_NUSACH, default="sfarad"): vol.In( + [nusach.name.lower() for nusach in Nusach] + ), + vol.Required(CONF_LANGUAGE, default="he"): LanguageSelector( + LanguageSelectorConfig(languages=list(SUPPORTED_LANGUAGES.keys())) + ), + } +) + + +def async_setup_services(hass: HomeAssistant) -> None: + """Set up the Jewish Calendar services.""" + + async def get_omer_count(call: ServiceCall) -> ServiceResponse: + """Return the Omer blessing for a given date.""" + 
hebrew_date = HebrewDate.from_gdate(call.data["date"]) + nusach = Nusach[call.data["nusach"].upper()] + + # Currently Omer only supports Hebrew, English, and French and requires + # the full language name + language = cast(Language, SUPPORTED_LANGUAGES[call.data[CONF_LANGUAGE]]) + + omer = Omer(date=hebrew_date, nusach=nusach, language=language) + return { + "message": str(omer.count_str()), + "weeks": omer.week, + "days": omer.day, + "total_days": omer.total_days, + } + + hass.services.async_register( + DOMAIN, + SERVICE_COUNT_OMER, + get_omer_count, + schema=OMER_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/jewish_calendar/services.yaml b/homeassistant/components/jewish_calendar/services.yaml new file mode 100644 index 00000000000..894fa30fee3 --- /dev/null +++ b/homeassistant/components/jewish_calendar/services.yaml @@ -0,0 +1,29 @@ +count_omer: + fields: + date: + required: true + example: "2025-04-14" + selector: + date: + nusach: + required: true + example: "sfarad" + default: "sfarad" + selector: + select: + translation_key: "nusach" + options: + - "sfarad" + - "ashkenaz" + - "adot_mizrah" + - "italian" + language: + required: true + default: "he" + example: "he" + selector: + language: + languages: + - "en" + - "he" + - "fr" diff --git a/homeassistant/components/jewish_calendar/strings.json b/homeassistant/components/jewish_calendar/strings.json index 1b7b86c0056..41e666b1e5d 100644 --- a/homeassistant/components/jewish_calendar/strings.json +++ b/homeassistant/components/jewish_calendar/strings.json @@ -45,5 +45,35 @@ } } } + }, + "selector": { + "nusach": { + "options": { + "sfarad": "Sfarad", + "ashkenaz": "Ashkenaz", + "adot_mizrah": "Adot Mizrah", + "italian": "Italian" + } + } + }, + "services": { + "count_omer": { + "name": "Count the Omer", + "description": "Returns the phrase for counting the Omer on a given date.", + "fields": { + "date": { + "name": "Date", + "description": "Date to count the Omer for." + }, + "nusach": { + "name": "Nusach", + "description": "Nusach to count the Omer in." + }, + "language": { + "name": "Language", + "description": "Language to count the Omer in." + } + } + } } } diff --git a/homeassistant/components/keymitt_ble/strings.json b/homeassistant/components/keymitt_ble/strings.json index 2a1f428603e..5e7e895d222 100644 --- a/homeassistant/components/keymitt_ble/strings.json +++ b/homeassistant/components/keymitt_ble/strings.json @@ -34,7 +34,7 @@ "services": { "calibrate": { "name": "Calibrate", - "description": "Calibration - Set depth, press & hold duration, and operation mode. Warning - this will send a push command to the device.", + "description": "Sets the depth, press or release duration, and operation mode. Warning - this will send a push command to the device.", "fields": { "entity_id": { "name": "Entity", @@ -42,15 +42,15 @@ }, "depth": { "name": "Depth", - "description": "Depth in percent." + "description": "How far to extend the push arm." }, "duration": { "name": "Duration", - "description": "Duration in seconds." + "description": "How long to press or release." }, "mode": { "name": "[%key:common::config_flow::data::mode%]", - "description": "Normal | invert | toggle." + "description": "The operation mode of the arm." 
} } } diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index de8e521f0e8..2f876ca855d 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -12,14 +12,24 @@ from random import random import voluptuous as vol from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, async_import_statistics, get_last_statistics, ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import Platform, UnitOfEnergy, UnitOfTemperature, UnitOfVolume +from homeassistant.const import ( + DEGREE, + Platform, + UnitOfEnergy, + UnitOfTemperature, + UnitOfVolume, +) from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -72,6 +82,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set the config entry up.""" + if "recorder" in hass.config.components: + # Insert stats for mean_type_changed issue + await _insert_wrong_wind_direction_statistics(hass) + # Set up demo platforms with config entry await hass.config_entries.async_forward_entry_setups( entry, COMPONENTS_WITH_DEMO_PLATFORM @@ -233,7 +247,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Outdoor temperature", "statistic_id": f"{DOMAIN}:temperature_outdoor", "unit_of_measurement": UnitOfTemperature.CELSIUS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -246,7 +260,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Energy consumption 1", "statistic_id": f"{DOMAIN}:energy_consumption_kwh", "unit_of_measurement": UnitOfEnergy.KILO_WATT_HOUR, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics(hass, metadata, yesterday_midnight, today_midnight, 1) @@ -258,7 +272,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Energy consumption 2", "statistic_id": f"{DOMAIN}:energy_consumption_mwh", "unit_of_measurement": UnitOfEnergy.MEGA_WATT_HOUR, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics( @@ -272,7 +286,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Gas consumption 1", "statistic_id": f"{DOMAIN}:gas_consumption_m3", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics( @@ -286,7 +300,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": "Gas consumption 2", "statistic_id": f"{DOMAIN}:gas_consumption_ft3", "unit_of_measurement": UnitOfVolume.CUBIC_FEET, - "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, } await _insert_sum_statistics(hass, metadata, yesterday_midnight, today_midnight, 15) @@ -298,7 +312,7 @@ async def _insert_statistics(hass: 
HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_1", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -310,7 +324,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_2", "unit_of_measurement": "cats", - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -322,7 +336,7 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_3", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) @@ -334,8 +348,28 @@ async def _insert_statistics(hass: HomeAssistant) -> None: "name": None, "statistic_id": "sensor.statistics_issues_issue_4", "unit_of_measurement": UnitOfVolume.CUBIC_METERS, - "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, } statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 15, 1) async_import_statistics(hass, metadata, statistics) + + +async def _insert_wrong_wind_direction_statistics(hass: HomeAssistant) -> None: + """Insert some fake wind direction statistics.""" + now = dt_util.now() + yesterday = now - datetime.timedelta(days=1) + yesterday_midnight = yesterday.replace(hour=0, minute=0, second=0, microsecond=0) + today_midnight = yesterday_midnight + datetime.timedelta(days=1) + + # Add some statistics required to raise the mean_type_changed issue later + metadata: StatisticMetaData = { + "source": RECORDER_DOMAIN, + "name": None, + "statistic_id": "sensor.statistics_issues_issue_5", + "unit_of_measurement": DEGREE, + "mean_type": StatisticMeanType.ARITHMETIC, + "has_sum": False, + } + statistics = _generate_mean_statistics(yesterday_midnight, today_midnight, 0, 360) + async_import_statistics(hass, metadata, statistics) diff --git a/homeassistant/components/kitchen_sink/backup.py b/homeassistant/components/kitchen_sink/backup.py index 44ac0456105..46b204845ad 100644 --- a/homeassistant/components/kitchen_sink/backup.py +++ b/homeassistant/components/kitchen_sink/backup.py @@ -7,7 +7,13 @@ from collections.abc import AsyncIterator, Callable, Coroutine import logging from typing import Any -from homeassistant.components.backup import AddonInfo, AgentBackup, BackupAgent, Folder +from homeassistant.components.backup import ( + AddonInfo, + AgentBackup, + BackupAgent, + BackupNotFound, + Folder, +) from homeassistant.core import HomeAssistant, callback from . 
import DATA_BACKUP_AGENT_LISTENERS, DOMAIN @@ -110,9 +116,9 @@ class KitchenSinkBackupAgent(BackupAgent): self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: + ) -> AgentBackup: """Return a backup.""" for backup in self._uploads: if backup.backup_id == backup_id: return backup - return None + raise BackupNotFound(f"Backup {backup_id} not found") diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index e1ffe334038..aa722d27944 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -17,6 +17,7 @@ from homeassistant.config_entries import ( SubentryFlowResult, ) from homeassistant.core import callback +from homeassistant.helpers import config_validation as cv from . import DOMAIN @@ -80,30 +81,30 @@ class OptionsFlowHandler(OptionsFlow): if user_input is not None: return self.async_create_entry(data=self.config_entry.options | user_input) - return self.async_show_form( - step_id="options_1", - data_schema=vol.Schema( - { - vol.Required("section_1"): data_entry_flow.section( - vol.Schema( - { - vol.Optional( - CONF_BOOLEAN, - default=self.config_entry.options.get( - CONF_BOOLEAN, False - ), - ): bool, - vol.Optional( - CONF_INT, - default=self.config_entry.options.get(CONF_INT, 10), - ): int, - } - ), - {"collapsed": False}, + data_schema = vol.Schema( + { + vol.Required("section_1"): data_entry_flow.section( + vol.Schema( + { + vol.Optional( + CONF_BOOLEAN, + default=self.config_entry.options.get( + CONF_BOOLEAN, False + ), + ): bool, + vol.Optional(CONF_INT): cv.positive_int, + } ), - } - ), + {"collapsed": False}, + ), + } ) + self.add_suggested_values_to_schema( + data_schema, + {"section_1": {"int": self.config_entry.options.get(CONF_INT, 10)}}, + ) + + return self.async_show_form(step_id="options_1", data_schema=data_schema) class SubentryFlowHandler(ConfigSubentryFlow): @@ -146,7 +147,7 @@ class SubentryFlowHandler(ConfigSubentryFlow): if user_input is not None: title = user_input.pop("name") return self.async_update_and_abort( - self._get_reconfigure_entry(), + self._get_entry(), self._get_reconfigure_subentry(), data=user_input, title=title, diff --git a/homeassistant/components/kitchen_sink/sensor.py b/homeassistant/components/kitchen_sink/sensor.py index 19d1b31aeab..04cb833f0df 100644 --- a/homeassistant/components/kitchen_sink/sensor.py +++ b/homeassistant/components/kitchen_sink/sensor.py @@ -8,7 +8,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfPower +from homeassistant.const import DEGREE, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -87,6 +87,16 @@ async def async_setup_entry( state_class=None, unit_of_measurement=UnitOfPower.WATT, ), + DemoSensor( + device_unique_id="statistics_issues", + unique_id="statistics_issue_5", + device_name="Statistics issues", + entity_name="Issue 5", + state=100, + device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + unit_of_measurement=DEGREE, + ), ] ) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index fa3439b02f4..8ad16642e45 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ 
-486,7 +486,7 @@ class KNXModule: transcoder := DPTBase.parse_transcoder(dpt) ): self._address_filter_transcoder.update( - {_filter: transcoder for _filter in _filters} + dict.fromkeys(_filters, transcoder) ) return self.xknx.telegram_queue.register_telegram_received_cb( diff --git a/homeassistant/components/knx/expose.py b/homeassistant/components/knx/expose.py index 6585b848d8a..461e6f25879 100644 --- a/homeassistant/components/knx/expose.py +++ b/homeassistant/components/knx/expose.py @@ -30,6 +30,7 @@ from homeassistant.exceptions import TemplateError from homeassistant.helpers.event import async_track_state_change_event from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, StateType +from homeassistant.util import dt as dt_util from .const import CONF_RESPOND_TO_READ, KNX_ADDRESS from .schema import ExposeSchema @@ -217,7 +218,7 @@ class KNXExposeTime: self.device = xknx_device_cls( self.xknx, name=expose_type.capitalize(), - localtime=True, + localtime=dt_util.get_default_time_zone(), group_address=config[KNX_ADDRESS], ) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 98e3a6a5242..bde6dfa226f 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -12,7 +12,7 @@ "requirements": [ "xknx==3.6.0", "xknxproject==3.8.2", - "knx-frontend==2025.1.30.194235" + "knx-frontend==2025.3.8.214559" ], "single_config_entry": true } diff --git a/homeassistant/components/knx/services.py b/homeassistant/components/knx/services.py index f0f760180f4..fc28e0850ed 100644 --- a/homeassistant/components/knx/services.py +++ b/homeassistant/components/knx/services.py @@ -126,7 +126,7 @@ async def service_event_register_modify(call: ServiceCall) -> None: transcoder := DPTBase.parse_transcoder(dpt) ): knx_module.group_address_transcoder.update( - {_address: transcoder for _address in group_addresses} + dict.fromkeys(group_addresses, transcoder) ) for group_address in group_addresses: if group_address in knx_module.knx_event_callback.group_addresses: diff --git a/homeassistant/components/knx/storage/entity_store_schema.py b/homeassistant/components/knx/storage/entity_store_schema.py index d99ffa86f52..cde18a181ec 100644 --- a/homeassistant/components/knx/storage/entity_store_schema.py +++ b/homeassistant/components/knx/storage/entity_store_schema.py @@ -114,7 +114,7 @@ BINARY_SENSOR_SCHEMA = vol.Schema( ), vol.Optional(CONF_RESET_AFTER): selector.NumberSelector( selector.NumberSelectorConfig( - min=0, max=10, step=0.1, unit_of_measurement="s" + min=0, max=600, step=0.1, unit_of_measurement="s" ) ), }, diff --git a/homeassistant/components/konnected/strings.json b/homeassistant/components/konnected/strings.json index e1a6863a199..df92e014f12 100644 --- a/homeassistant/components/konnected/strings.json +++ b/homeassistant/components/konnected/strings.json @@ -2,19 +2,19 @@ "config": { "step": { "import_confirm": { - "title": "Import Konnected Device", - "description": "A Konnected Alarm Panel with ID {id} has been discovered in configuration.yaml. This flow will allow you to import it into a config entry." + "title": "Import Konnected device", + "description": "A Konnected alarm panel with ID {id} has been discovered in configuration.yaml. This flow will allow you to import it into a config entry." 
}, "user": { - "description": "Please enter the host information for your Konnected Panel.", + "description": "Please enter the host information for your Konnected panel.", "data": { "host": "[%key:common::config_flow::data::ip%]", "port": "[%key:common::config_flow::data::port%]" } }, "confirm": { - "title": "Konnected Device Ready", - "description": "Model: {model}\nID: {id}\nHost: {host}\nPort: {port}\n\nYou can configure the IO and panel behavior in the Konnected Alarm Panel settings." + "title": "Konnected device ready", + "description": "Model: {model}\nID: {id}\nHost: {host}\nPort: {port}\n\nYou can configure the IO and panel behavior in the Konnected alarm panel settings." } }, "error": { @@ -45,8 +45,8 @@ } }, "options_io_ext": { - "title": "Configure Extended I/O", - "description": "Select the configuration of the remaining I/O below. You'll be able to configure detailed options in the next steps.", + "title": "Configure extended I/O", + "description": "Select the configuration of the remaining I/O below. You'll be able to configure detailed options in the next steps.", "data": { "8": "Zone 8", "9": "Zone 9", @@ -59,25 +59,25 @@ } }, "options_binary": { - "title": "Configure Binary Sensor", + "title": "Configure binary sensor", "description": "{zone} options", "data": { - "type": "Binary Sensor Type", + "type": "Binary sensor type", "name": "[%key:common::config_flow::data::name%]", "inverse": "Invert the open/close state" } }, "options_digital": { - "title": "Configure Digital Sensor", + "title": "Configure digital sensor", "description": "[%key:component::konnected::options::step::options_binary::description%]", "data": { - "type": "Sensor Type", + "type": "Sensor type", "name": "[%key:common::config_flow::data::name%]", - "poll_interval": "Poll Interval (minutes)" + "poll_interval": "Poll interval (minutes)" } }, "options_switch": { - "title": "Configure Switchable Output", + "title": "Configure switchable output", "description": "{zone} options: state {state}", "data": { "name": "[%key:common::config_flow::data::name%]", @@ -89,18 +89,18 @@ } }, "options_misc": { - "title": "Configure Misc", + "title": "Configure misc", "description": "Please select the desired behavior for your panel", "data": { "discovery": "Respond to discovery requests on your network", "blink": "Blink panel LED on when sending state change", - "override_api_host": "Override default Home Assistant API host panel URL", - "api_host": "Override API host URL" + "override_api_host": "Override default Home Assistant API host URL", + "api_host": "Custom API host URL" } } }, "error": { - "bad_host": "Invalid Override API host URL" + "bad_host": "Invalid custom API host URL" }, "abort": { "not_konn_panel": "[%key:component::konnected::config::abort::not_konn_panel%]" diff --git a/homeassistant/components/lacrosse_view/sensor.py b/homeassistant/components/lacrosse_view/sensor.py index 667fcbb8dcc..dde8dfd54a2 100644 --- a/homeassistant/components/lacrosse_view/sensor.py +++ b/homeassistant/components/lacrosse_view/sensor.py @@ -106,6 +106,7 @@ SENSOR_DESCRIPTIONS = { native_unit_of_measurement=DEGREE, suggested_display_precision=2, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), "WetDry": LaCrosseSensorEntityDescription( key="WetDry", diff --git a/homeassistant/components/lastfm/config_flow.py b/homeassistant/components/lastfm/config_flow.py index 0e1f680dd63..ca40aebd0d4 100644 --- a/homeassistant/components/lastfm/config_flow.py +++ 
b/homeassistant/components/lastfm/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from pylast import LastFMNetwork, PyLastError, User, WSError @@ -32,6 +33,8 @@ CONFIG_SCHEMA: vol.Schema = vol.Schema( } ) +_LOGGER = logging.getLogger(__name__) + def get_lastfm_user(api_key: str, username: str) -> tuple[User, dict[str, str]]: """Get and validate lastFM User.""" @@ -49,7 +52,8 @@ def get_lastfm_user(api_key: str, username: str) -> tuple[User, dict[str, str]]: errors["base"] = "invalid_auth" else: errors["base"] = "unknown" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" return user, errors diff --git a/homeassistant/components/lawn_mower/__init__.py b/homeassistant/components/lawn_mower/__init__.py index 0680bfc9d71..f8c3e0cd67d 100644 --- a/homeassistant/components/lawn_mower/__init__.py +++ b/homeassistant/components/lawn_mower/__init__.py @@ -28,6 +28,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) DATA_COMPONENT: HassKey[EntityComponent[LawnMowerEntity]] = HassKey(DOMAIN) +ENTITY_ID_FORMAT = DOMAIN + ".{}" PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE SCAN_INTERVAL = timedelta(seconds=60) diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json index 0bdd85a3678..0a8112d997a 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -396,19 +396,19 @@ }, "address_to_device_id": { "name": "Address to device ID", - "description": "Convert LCN address to device ID.", + "description": "Converts an LCN address into a device ID.", "fields": { "id": { "name": "Module or group ID", - "description": "Target module or group ID." + "description": "Module or group number of the target." }, "segment_id": { "name": "Segment ID", - "description": "Target segment ID." + "description": "Segment number of the target." }, "type": { "name": "Type", - "description": "Target type." + "description": "Module type of the target." 
}, "host": { "name": "Host name", diff --git a/homeassistant/components/ld2410_ble/manifest.json b/homeassistant/components/ld2410_ble/manifest.json index 36d0150642e..764345710dd 100644 --- a/homeassistant/components/ld2410_ble/manifest.json +++ b/homeassistant/components/ld2410_ble/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/ld2410_ble", "integration_type": "device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.23.4", "ld2410-ble==0.1.1"] + "requirements": ["bluetooth-data-tools==1.26.5", "ld2410-ble==0.1.1"] } diff --git a/homeassistant/components/led_ble/light.py b/homeassistant/components/led_ble/light.py index 14f2f228e13..2facda734d5 100644 --- a/homeassistant/components/led_ble/light.py +++ b/homeassistant/components/led_ble/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from led_ble import LEDBLE @@ -83,7 +83,7 @@ class LEDBLEEntity(CoordinatorEntity[DataUpdateCoordinator[None]], LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" - brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness) + brightness = cast(int, kwargs.get(ATTR_BRIGHTNESS, self.brightness)) if effect := kwargs.get(ATTR_EFFECT): await self._async_set_effect(effect, brightness) return diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index 309399e6958..b88ef3f029a 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.23.4", "led-ble==1.1.6"] + "requirements": ["bluetooth-data-tools==1.26.5", "led-ble==1.1.6"] } diff --git a/homeassistant/components/lektrico/strings.json b/homeassistant/components/lektrico/strings.json index 3b4417c346a..eb0203e0661 100644 --- a/homeassistant/components/lektrico/strings.json +++ b/homeassistant/components/lektrico/strings.json @@ -24,7 +24,7 @@ "entity": { "binary_sensor": { "state_e_activated": { - "name": "Ev error" + "name": "EV error" }, "overtemp": { "name": "Thermal throttling" @@ -45,10 +45,10 @@ "name": "Overvoltage" }, "rcd_error": { - "name": "Rcd error" + "name": "RCD error" }, "cp_diode_failure": { - "name": "Ev diode short" + "name": "EV diode short" }, "contactor_failure": { "name": "Relay contacts welded" @@ -64,7 +64,7 @@ }, "number": { "led_max_brightness": { - "name": "Led brightness" + "name": "LED brightness" }, "dynamic_limit": { "name": "Dynamic limit" diff --git a/homeassistant/components/lg_thinq/__init__.py b/homeassistant/components/lg_thinq/__init__.py index 72d81af4ff0..f83cbadf925 100644 --- a/homeassistant/components/lg_thinq/__init__.py +++ b/homeassistant/components/lg_thinq/__init__.py @@ -47,6 +47,7 @@ PLATFORMS = [ Platform.SENSOR, Platform.SWITCH, Platform.VACUUM, + Platform.WATER_HEATER, ] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/lg_thinq/coordinator.py b/homeassistant/components/lg_thinq/coordinator.py index 513cd27a7b2..9f84c422277 100644 --- a/homeassistant/components/lg_thinq/coordinator.py +++ b/homeassistant/components/lg_thinq/coordinator.py @@ -63,10 +63,12 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): # Add a callback to handle core config update. 
self.unit_system: str | None = None - self.hass.bus.async_listen( - event_type=EVENT_CORE_CONFIG_UPDATE, - listener=self._handle_update_config, - event_filter=self.async_config_update_filter, + self.config_entry.async_on_unload( + self.hass.bus.async_listen( + event_type=EVENT_CORE_CONFIG_UPDATE, + listener=self._handle_update_config, + event_filter=self.async_config_update_filter, + ) ) async def _handle_update_config(self, _: Event) -> None: diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json index b00d28c1d4f..cffc61cb1c4 100644 --- a/homeassistant/components/lg_thinq/manifest.json +++ b/homeassistant/components/lg_thinq/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/lg_thinq", "iot_class": "cloud_push", "loggers": ["thinqconnect"], - "requirements": ["thinqconnect==1.0.4"] + "requirements": ["thinqconnect==1.0.5"] } diff --git a/homeassistant/components/lg_thinq/water_heater.py b/homeassistant/components/lg_thinq/water_heater.py new file mode 100644 index 00000000000..5a5c8d024b6 --- /dev/null +++ b/homeassistant/components/lg_thinq/water_heater.py @@ -0,0 +1,201 @@ +"""Support for waterheater entities.""" + +from __future__ import annotations + +import logging +from typing import Any + +from thinqconnect import DeviceType +from thinqconnect.integration import ExtendedProperty + +from homeassistant.components.water_heater import ( + ATTR_OPERATION_MODE, + STATE_ECO, + STATE_HEAT_PUMP, + STATE_OFF, + STATE_PERFORMANCE, + WaterHeaterEntity, + WaterHeaterEntityDescription, + WaterHeaterEntityFeature, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import ThinqConfigEntry +from .coordinator import DeviceDataUpdateCoordinator +from .entity import ThinQEntity + +DEVICE_TYPE_WH_MAP: dict[DeviceType, WaterHeaterEntityDescription] = { + DeviceType.WATER_HEATER: WaterHeaterEntityDescription( + key=ExtendedProperty.WATER_HEATER, + name=None, + ), + DeviceType.SYSTEM_BOILER: WaterHeaterEntityDescription( + key=ExtendedProperty.WATER_BOILER, + name=None, + ), +} + +# Mapping between device and HA operation modes +DEVICE_OP_MODE_TO_HA = { + "auto": STATE_ECO, + "heat_pump": STATE_HEAT_PUMP, + "turbo": STATE_PERFORMANCE, + "vacation": STATE_OFF, +} +HA_STATE_TO_DEVICE_OP_MODE = {v: k for k, v in DEVICE_OP_MODE_TO_HA.items()} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up an entry for water_heater platform.""" + entities: list[ThinQWaterHeaterEntity] = [] + for coordinator in entry.runtime_data.coordinators.values(): + if ( + description := DEVICE_TYPE_WH_MAP.get(coordinator.api.device.device_type) + ) is not None: + if coordinator.api.device.device_type == DeviceType.WATER_HEATER: + entities.append( + ThinQWaterHeaterEntity( + coordinator, description, ExtendedProperty.WATER_HEATER + ) + ) + elif coordinator.api.device.device_type == DeviceType.SYSTEM_BOILER: + entities.append( + ThinQWaterBoilerEntity( + coordinator, description, ExtendedProperty.WATER_BOILER + ) + ) + if entities: + async_add_entities(entities) + + +class ThinQWaterHeaterEntity(ThinQEntity, WaterHeaterEntity): + """Represent a ThinQ water heater entity.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: WaterHeaterEntityDescription, + property_id: str, + ) -> None: + """Initialize a water_heater entity.""" + super().__init__(coordinator, entity_description, property_id) + self._attr_supported_features = ( + WaterHeaterEntityFeature.TARGET_TEMPERATURE + | WaterHeaterEntityFeature.OPERATION_MODE + ) + self._attr_temperature_unit = ( + self._get_unit_of_measurement(self.data.unit) or UnitOfTemperature.CELSIUS + ) + if modes := self.data.job_modes: + self._attr_operation_list = [ + DEVICE_OP_MODE_TO_HA.get(mode, mode) for mode in modes + ] + else: + self._attr_operation_list = [STATE_HEAT_PUMP] + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + self._attr_current_temperature = self.data.current_temp + self._attr_target_temperature = self.data.target_temp + + if self.data.max is not None: + self._attr_max_temp = self.data.max + if self.data.min is not None: + self._attr_min_temp = self.data.min + if self.data.step is not None: + self._attr_target_temperature_step = self.data.step + + self._attr_temperature_unit = ( + self._get_unit_of_measurement(self.data.unit) or UnitOfTemperature.CELSIUS + ) + if self.data.is_on: + self._attr_current_operation = ( + DEVICE_OP_MODE_TO_HA.get(job_mode, job_mode) + if (job_mode := self.data.job_mode) is not None + else STATE_HEAT_PUMP + ) + else: + self._attr_current_operation = STATE_OFF + + _LOGGER.debug( + "[%s:%s] update status: c:%s, t:%s, op_mode:%s, op_list:%s, is_on:%s", + self.coordinator.device_name, + self.property_id, + self.current_temperature, + self.target_temperature, + self.current_operation, + self.operation_list, + self.data.is_on, + ) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperatures.""" + _LOGGER.debug( + "[%s:%s] async_set_temperature: 
%s", + self.coordinator.device_name, + self.property_id, + kwargs, + ) + if (operation_mode := kwargs.get(ATTR_OPERATION_MODE)) is not None: + await self.async_set_operation_mode(str(operation_mode)) + if operation_mode == STATE_OFF: + return + + if ( + temperature := kwargs.get(ATTR_TEMPERATURE) + ) is not None and temperature != self.target_temperature: + await self.async_call_api( + self.coordinator.api.async_set_target_temperature( + self.property_id, temperature + ) + ) + + async def async_set_operation_mode(self, operation_mode: str) -> None: + """Set new operation mode.""" + mode = HA_STATE_TO_DEVICE_OP_MODE.get(operation_mode, operation_mode) + _LOGGER.debug( + "[%s:%s] async_set_operation_mode: %s", + self.coordinator.device_name, + self.property_id, + mode, + ) + await self.async_call_api( + self.coordinator.api.async_set_job_mode(self.property_id, mode) + ) + + +class ThinQWaterBoilerEntity(ThinQWaterHeaterEntity): + """Represent a ThinQ water boiler entity.""" + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: WaterHeaterEntityDescription, + property_id: str, + ) -> None: + """Initialize a water_heater entity.""" + super().__init__(coordinator, entity_description, property_id) + self._attr_supported_features |= WaterHeaterEntityFeature.ON_OFF + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + _LOGGER.debug( + "[%s:%s] async_turn_on", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_on(self.property_id)) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + _LOGGER.debug( + "[%s:%s] async_turn_off", self.coordinator.device_name, self.property_id + ) + await self.async_call_api(self.coordinator.api.async_turn_off(self.property_id)) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 8d460c25322..18b9457ebf4 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -51,7 +51,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.1.2", + "aiolifx==1.1.4", "aiolifx-effects==0.3.2", "aiolifx-themes==0.6.4" ] diff --git a/homeassistant/components/lifx/strings.json b/homeassistant/components/lifx/strings.json index c407489d52d..be0485c6dff 100644 --- a/homeassistant/components/lifx/strings.json +++ b/homeassistant/components/lifx/strings.json @@ -201,7 +201,7 @@ }, "effect_morph": { "name": "Morph effect", - "description": "Starts the firmware-based Morph effect on LIFX Tiles on Candle.", + "description": "Starts the firmware-based Morph effect on LIFX Tiles or Candle.", "fields": { "speed": { "name": "Speed", @@ -223,23 +223,23 @@ }, "effect_sky": { "name": "Sky effect", - "description": "Starts the firmware-based Sky effect on LIFX Ceiling.", + "description": "Starts a firmware-based effect on LIFX Ceiling lights that animates a sky scene across the device.", "fields": { "speed": { "name": "Speed", - "description": "How long the Sunrise and Sunset sky types will take to complete. For the Cloud sky type, it is the speed of the clouds across the device." + "description": "How long the Sunrise and Sunset sky types will take to complete. For the Clouds sky type, it is the speed of the clouds across the device." }, "sky_type": { "name": "Sky type", "description": "The style of sky that will be animated by the effect." 
}, "cloud_saturation_min": { - "name": "Cloud saturation Minimum", - "description": "Minimum cloud saturation." + "name": "Cloud saturation minimum", + "description": "The minimum cloud saturation for the Clouds sky type." }, "cloud_saturation_max": { - "name": "Cloud Saturation maximum", - "description": "Maximum cloud saturation." + "name": "Cloud saturation maximum", + "description": "The maximum cloud saturation for the Clouds sky type." }, "palette": { "name": "Palette", diff --git a/homeassistant/components/light/__init__.py b/homeassistant/components/light/__init__.py index 637ba45c7d9..7b548533058 100644 --- a/homeassistant/components/light/__init__.py +++ b/homeassistant/components/light/__init__.py @@ -465,7 +465,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa: ): params.pop(_DEPRECATED_ATTR_COLOR_TEMP.value) color_temp = params.pop(ATTR_COLOR_TEMP_KELVIN) - brightness = params.get(ATTR_BRIGHTNESS, light.brightness) + brightness = cast(int, params.get(ATTR_BRIGHTNESS, light.brightness)) params[ATTR_RGBWW_COLOR] = color_util.color_temperature_to_rgbww( color_temp, brightness, diff --git a/homeassistant/components/light/icons.json b/homeassistant/components/light/icons.json index df98def090e..6218c733f4c 100644 --- a/homeassistant/components/light/icons.json +++ b/homeassistant/components/light/icons.json @@ -1,7 +1,15 @@ { "entity_component": { "_": { - "default": "mdi:lightbulb" + "default": "mdi:lightbulb", + "state_attributes": { + "effect": { + "default": "mdi:circle-medium", + "state": { + "off": "mdi:star-off" + } + } + } } }, "services": { diff --git a/homeassistant/components/light/intent.py b/homeassistant/components/light/intent.py index 83f2ee58b5e..250e1f5b2c1 100644 --- a/homeassistant/components/light/intent.py +++ b/homeassistant/components/light/intent.py @@ -28,13 +28,21 @@ async def async_setup_intents(hass: HomeAssistant) -> None: DOMAIN, SERVICE_TURN_ON, optional_slots={ - ("color", ATTR_RGB_COLOR): color_util.color_name_to_rgb, - ("temperature", ATTR_COLOR_TEMP_KELVIN): cv.positive_int, - ("brightness", ATTR_BRIGHTNESS_PCT): vol.All( - vol.Coerce(int), vol.Range(0, 100) + "color": intent.IntentSlotInfo( + service_data_name=ATTR_RGB_COLOR, + value_schema=color_util.color_name_to_rgb, + ), + "temperature": intent.IntentSlotInfo( + service_data_name=ATTR_COLOR_TEMP_KELVIN, + value_schema=cv.positive_int, + ), + "brightness": intent.IntentSlotInfo( + service_data_name=ATTR_BRIGHTNESS_PCT, + description="The brightness percentage of the light between 0 and 100, where 0 is off and 100 is fully lit", + value_schema=vol.All(vol.Coerce(int), vol.Range(0, 100)), ), }, - description="Sets the brightness or color of a light", + description="Sets the brightness percentage or color of a light", platforms={DOMAIN}, ), ) diff --git a/homeassistant/components/light/strings.json b/homeassistant/components/light/strings.json index c0f658c3a44..4a3b98ded46 100644 --- a/homeassistant/components/light/strings.json +++ b/homeassistant/components/light/strings.json @@ -93,7 +93,10 @@ "name": "Color temperature (Kelvin)" }, "effect": { - "name": "Effect" + "name": "Effect", + "state": { + "off": "[%key:common::state::off%]" + } }, "effect_list": { "name": "Available effects" diff --git a/homeassistant/components/linkedgo/__init__.py b/homeassistant/components/linkedgo/__init__.py new file mode 100644 index 00000000000..e26fefa6b96 --- /dev/null +++ b/homeassistant/components/linkedgo/__init__.py @@ -0,0 +1 @@ +"""LinkedGo virtual integration.""" 
diff --git a/homeassistant/components/linkedgo/manifest.json b/homeassistant/components/linkedgo/manifest.json new file mode 100644 index 00000000000..03c650cac08 --- /dev/null +++ b/homeassistant/components/linkedgo/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "linkedgo", + "name": "LinkedGo", + "integration_type": "virtual", + "supported_by": "shelly" +} diff --git a/homeassistant/components/linkplay/entity.py b/homeassistant/components/linkplay/entity.py index 74e067f5eb3..0bfb34af42c 100644 --- a/homeassistant/components/linkplay/entity.py +++ b/homeassistant/components/linkplay/entity.py @@ -4,13 +4,13 @@ from collections.abc import Callable, Coroutine from typing import Any, Concatenate from linkplay.bridge import LinkPlayBridge +from linkplay.manufacturers import MANUFACTURER_GENERIC, get_info_from_project from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.entity import Entity from . import DOMAIN, LinkPlayRequestException -from .utils import MANUFACTURER_GENERIC, get_info_from_project def exception_wrap[_LinkPlayEntityT: LinkPlayBaseEntity, **_P, _R]( diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index ec9a8759a30..02acd0f04f4 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["linkplay"], - "requirements": ["python-linkplay==0.1.3"], + "requirements": ["python-linkplay==0.2.2"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 2986db76520..16b0d5f75f1 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -86,16 +86,10 @@ REPEAT_MAP: dict[LoopMode, RepeatMode] = { REPEAT_MAP_INV: dict[RepeatMode, LoopMode] = {v: k for k, v in REPEAT_MAP.items()} -EQUALIZER_MAP: dict[EqualizerMode, str] = { - EqualizerMode.NONE: "None", - EqualizerMode.CLASSIC: "Classic", - EqualizerMode.POP: "Pop", - EqualizerMode.JAZZ: "Jazz", - EqualizerMode.VOCAL: "Vocal", +EQUALIZER_MAP_INV: dict[str, EqualizerMode] = { + mode.value: mode for mode in EqualizerMode } -EQUALIZER_MAP_INV: dict[str, EqualizerMode] = {v: k for k, v in EQUALIZER_MAP.items()} - DEFAULT_FEATURES: MediaPlayerEntityFeature = ( MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PLAY_MEDIA @@ -125,6 +119,8 @@ SERVICE_PLAY_PRESET_SCHEMA = cv.make_entity_service_schema( } ) +RETRY_POLL_MAXIMUM = 3 + async def async_setup_entry( hass: HomeAssistant, @@ -146,7 +142,6 @@ async def async_setup_entry( class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity): """Representation of a LinkPlay media player.""" - _attr_sound_mode_list = list(EQUALIZER_MAP.values()) _attr_device_class = MediaPlayerDeviceClass.RECEIVER _attr_media_content_type = MediaType.MUSIC _attr_name = None @@ -156,19 +151,26 @@ class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity): super().__init__(bridge) self._attr_unique_id = bridge.device.uuid + self._retry_count = 0 self._attr_source_list = [ SOURCE_MAP[playing_mode] for playing_mode in bridge.device.playmode_support ] + self._attr_sound_mode_list = [ + mode.value for mode in bridge.player.available_equalizer_modes + ] @exception_wrap async def async_update(self) -> None: """Update the state of the media player.""" 
try: await self._bridge.player.update_status() + self._retry_count = 0 self._update_properties() except LinkPlayRequestException: - self._attr_available = False + self._retry_count += 1 + if self._retry_count >= RETRY_POLL_MAXIMUM: + self._attr_available = False @exception_wrap async def async_select_source(self, source: str) -> None: @@ -342,7 +344,7 @@ class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity): self._attr_is_volume_muted = self._bridge.player.muted self._attr_repeat = REPEAT_MAP[self._bridge.player.loop_mode] self._attr_shuffle = self._bridge.player.loop_mode == LoopMode.RANDOM_PLAYBACK - self._attr_sound_mode = EQUALIZER_MAP[self._bridge.player.equalizer_mode] + self._attr_sound_mode = self._bridge.player.equalizer_mode.value self._attr_supported_features = DEFAULT_FEATURES if self._bridge.player.status == PlayingStatus.PLAYING: diff --git a/homeassistant/components/linkplay/strings.json b/homeassistant/components/linkplay/strings.json index 31b4649e131..5d68754879c 100644 --- a/homeassistant/components/linkplay/strings.json +++ b/homeassistant/components/linkplay/strings.json @@ -11,7 +11,7 @@ } }, "discovery_confirm": { - "description": "Do you want to setup {name}?" + "description": "Do you want to set up {name}?" } }, "abort": { @@ -26,11 +26,11 @@ "services": { "play_preset": { "name": "Play preset", - "description": "Play the preset number on the device.", + "description": "Plays a preset on the device.", "fields": { "preset_number": { "name": "Preset number", - "description": "The preset number on the device to play." + "description": "The number of the preset to play." } } } @@ -44,7 +44,7 @@ }, "exceptions": { "invalid_grouping_entity": { - "message": "Entity with id {entity_id} can't be added to the LinkPlay multiroom. Is the entity a LinkPlay mediaplayer?" + "message": "Entity with ID {entity_id} can't be added to the LinkPlay multiroom. Is the entity a LinkPlay media player?" 
} } } diff --git a/homeassistant/components/linkplay/utils.py b/homeassistant/components/linkplay/utils.py index 7151ed1537a..63d04a3afc4 100644 --- a/homeassistant/components/linkplay/utils.py +++ b/homeassistant/components/linkplay/utils.py @@ -1,7 +1,5 @@ """Utilities for the LinkPlay component.""" -from typing import Final - from aiohttp import ClientSession from linkplay.utils import async_create_unverified_client_session @@ -10,75 +8,6 @@ from homeassistant.core import Event, HomeAssistant, callback from .const import DATA_SESSION, DOMAIN -MANUFACTURER_ARTSOUND: Final[str] = "ArtSound" -MANUFACTURER_ARYLIC: Final[str] = "Arylic" -MANUFACTURER_IEAST: Final[str] = "iEAST" -MANUFACTURER_WIIM: Final[str] = "WiiM" -MANUFACTURER_GGMM: Final[str] = "GGMM" -MANUFACTURER_MEDION: Final[str] = "Medion" -MANUFACTURER_GENERIC: Final[str] = "Generic" -MODELS_ARTSOUND_SMART_ZONE4: Final[str] = "Smart Zone 4 AMP" -MODELS_ARTSOUND_SMART_HYDE: Final[str] = "Smart Hyde" -MODELS_ARYLIC_S50: Final[str] = "S50+" -MODELS_ARYLIC_S50_PRO: Final[str] = "S50 Pro" -MODELS_ARYLIC_A30: Final[str] = "A30" -MODELS_ARYLIC_A50: Final[str] = "A50" -MODELS_ARYLIC_A50S: Final[str] = "A50+" -MODELS_ARYLIC_UP2STREAM_AMP: Final[str] = "Up2Stream Amp 2.0" -MODELS_ARYLIC_UP2STREAM_AMP_2P1: Final[str] = "Up2Stream Amp 2.1" -MODELS_ARYLIC_UP2STREAM_AMP_V3: Final[str] = "Up2Stream Amp v3" -MODELS_ARYLIC_UP2STREAM_AMP_V4: Final[str] = "Up2Stream Amp v4" -MODELS_ARYLIC_UP2STREAM_PRO: Final[str] = "Up2Stream Pro v1" -MODELS_ARYLIC_UP2STREAM_PRO_V3: Final[str] = "Up2Stream Pro v3" -MODELS_ARYLIC_S10P: Final[str] = "Arylic S10+" -MODELS_ARYLIC_UP2STREAM_PLATE_AMP: Final[str] = "Up2Stream Plate Amp" -MODELS_IEAST_AUDIOCAST_M5: Final[str] = "AudioCast M5" -MODELS_WIIM_AMP: Final[str] = "WiiM Amp" -MODELS_WIIM_MINI: Final[str] = "WiiM Mini" -MODELS_GGMM_GGMM_E2: Final[str] = "GGMM E2" -MODELS_MEDION_MD_43970: Final[str] = "Life P66970 (MD 43970)" -MODELS_GENERIC: Final[str] = "Generic" - -PROJECTID_LOOKUP: Final[dict[str, tuple[str, str]]] = { - "SMART_ZONE4_AMP": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4), - "SMART_HYDE": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE), - "ARYLIC_S50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50), - "RP0016_S50PRO_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50_PRO), - "RP0011_WB60_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30), - "X-50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50), - "ARYLIC_A50S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S), - "RP0011_WB60": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP), - "UP2STREAM_AMP_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3), - "UP2STREAM_AMP_V4": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4), - "UP2STREAM_PRO_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3), - "S10P_WIFI": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S10P), - "ARYLIC_V20": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PLATE_AMP), - "UP2STREAM_MINI_V3": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "UP2STREAM_AMP_2P1": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_2P1), - "RP0014_A50C_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_A30": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_SUBWOOFER": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_S50A": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0010_D5_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0001": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0013_WA31S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0010_D5": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "RP0013_WA31S_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), 
- "RP0014_A50D_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_A50TE": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "ARYLIC_A50N": (MANUFACTURER_ARYLIC, MODELS_GENERIC), - "iEAST-02": (MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5), - "WiiM_Amp_4layer": (MANUFACTURER_WIIM, MODELS_WIIM_AMP), - "Muzo_Mini": (MANUFACTURER_WIIM, MODELS_WIIM_MINI), - "GGMM_E2A": (MANUFACTURER_GGMM, MODELS_GGMM_GGMM_E2), - "A16": (MANUFACTURER_MEDION, MODELS_MEDION_MD_43970), -} - - -def get_info_from_project(project: str) -> tuple[str, str]: - """Get manufacturer and model info based on given project.""" - return PROJECTID_LOOKUP.get(project, (MANUFACTURER_GENERIC, MODELS_GENERIC)) - async def async_get_client_session(hass: HomeAssistant) -> ClientSession: """Get a ClientSession that can be used with LinkPlay devices.""" diff --git a/homeassistant/components/litterrobot/strings.json b/homeassistant/components/litterrobot/strings.json index 19b007de068..052427f3032 100644 --- a/homeassistant/components/litterrobot/strings.json +++ b/homeassistant/components/litterrobot/strings.json @@ -77,31 +77,31 @@ "status_code": { "name": "Status code", "state": { - "br": "Bonnet Removed", - "ccc": "Clean Cycle Complete", - "ccp": "Clean Cycle In Progress", - "cd": "Cat Detected", - "csf": "Cat Sensor Fault", - "csi": "Cat Sensor Interrupted", - "cst": "Cat Sensor Timing", - "df1": "Drawer Almost Full - 2 Cycles Left", - "df2": "Drawer Almost Full - 1 Cycle Left", - "dfs": "Drawer Full", - "dhf": "Dump + Home Position Fault", - "dpf": "Dump Position Fault", - "ec": "Empty Cycle", - "hpf": "Home Position Fault", + "br": "Bonnet removed", + "ccc": "Clean cycle complete", + "ccp": "Clean cycle in progress", + "cd": "Cat detected", + "csf": "Cat sensor fault", + "csi": "Cat sensor interrupted", + "cst": "Cat sensor timing", + "df1": "Drawer almost full - 2 cycles left", + "df2": "Drawer almost full - 1 cycle left", + "dfs": "Drawer full", + "dhf": "Dump + home position fault", + "dpf": "Dump position fault", + "ec": "Empty cycle", + "hpf": "Home position fault", "off": "[%key:common::state::off%]", "offline": "Offline", - "otf": "Over Torque Fault", + "otf": "Over torque fault", "p": "[%key:common::state::paused%]", - "pd": "Pinch Detect", - "pwrd": "Powering Down", - "pwru": "Powering Up", + "pd": "Pinch detect", + "pwrd": "Powering down", + "pwru": "Powering up", "rdy": "Ready", - "scf": "Cat Sensor Fault At Startup", - "sdf": "Drawer Full At Startup", - "spf": "Pinch Detect At Startup" + "scf": "Cat sensor fault at startup", + "sdf": "Drawer full at startup", + "spf": "Pinch detect at startup" } }, "waste_drawer": { diff --git a/homeassistant/components/local_calendar/manifest.json b/homeassistant/components/local_calendar/manifest.json index 21a4134a8b6..528552aaa57 100644 --- a/homeassistant/components/local_calendar/manifest.json +++ b/homeassistant/components/local_calendar/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/local_calendar", "iot_class": "local_polling", "loggers": ["ical"], - "requirements": ["ical==8.3.0"] + "requirements": ["ical==9.0.3"] } diff --git a/homeassistant/components/local_todo/manifest.json b/homeassistant/components/local_todo/manifest.json index 68154f10885..6f117131c20 100644 --- a/homeassistant/components/local_todo/manifest.json +++ b/homeassistant/components/local_todo/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/local_todo", "iot_class": "local_polling", - "requirements": 
["ical==8.3.0"] + "requirements": ["ical==9.0.3"] } diff --git a/homeassistant/components/local_todo/strings.json b/homeassistant/components/local_todo/strings.json index 2403fae60a5..ebf7810494c 100644 --- a/homeassistant/components/local_todo/strings.json +++ b/homeassistant/components/local_todo/strings.json @@ -6,7 +6,8 @@ "description": "Please choose a name for your new To-do list", "data": { "todo_list_name": "To-do list name" - } + }, + "submit": "Create" } }, "abort": { diff --git a/homeassistant/components/logger/helpers.py b/homeassistant/components/logger/helpers.py index 034266428a3..00cea7e8aa5 100644 --- a/homeassistant/components/logger/helpers.py +++ b/homeassistant/components/logger/helpers.py @@ -203,7 +203,7 @@ class LoggerSettings: else: loggers = {domain} - combined_logs = {logger: LOGSEVERITY[settings.level] for logger in loggers} + combined_logs = dict.fromkeys(loggers, LOGSEVERITY[settings.level]) # Don't override the log levels with the ones from YAML # since we want whatever the user is asking for to be honored. diff --git a/homeassistant/components/lovelace/__init__.py b/homeassistant/components/lovelace/__init__.py index 4d8472da9a2..c0262f42f6c 100644 --- a/homeassistant/components/lovelace/__init__.py +++ b/homeassistant/components/lovelace/__init__.py @@ -6,7 +6,7 @@ from typing import Any import voluptuous as vol -from homeassistant.components import frontend, websocket_api +from homeassistant.components import frontend, onboarding, websocket_api from homeassistant.config import ( async_hass_config_yaml, async_process_component_and_handle_errors, @@ -17,6 +17,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import collection, config_validation as cv from homeassistant.helpers.frame import report_usage from homeassistant.helpers.service import async_register_admin_service +from homeassistant.helpers.translation import async_get_translations from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_integration from homeassistant.util import slugify @@ -282,6 +283,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: STORAGE_DASHBOARD_UPDATE_FIELDS, ).async_setup(hass) + def create_map_dashboard() -> None: + """Create a map dashboard.""" + hass.async_create_task(_create_map_dashboard(hass, dashboards_collection)) + + if not onboarding.async_is_onboarded(hass): + onboarding.async_add_listener(hass, create_map_dashboard) + return True @@ -323,3 +331,25 @@ def _register_panel( kwargs["sidebar_icon"] = config.get(CONF_ICON, DEFAULT_ICON) frontend.async_register_built_in_panel(hass, DOMAIN, **kwargs) + + +async def _create_map_dashboard( + hass: HomeAssistant, dashboards_collection: dashboard.DashboardsCollection +) -> None: + """Create a map dashboard.""" + translations = await async_get_translations( + hass, hass.config.language, "dashboard", {onboarding.DOMAIN} + ) + title = translations["component.onboarding.dashboard.map.title"] + + await dashboards_collection.async_create_item( + { + CONF_ALLOW_SINGLE_WORD: True, + CONF_ICON: "mdi:map", + CONF_TITLE: title, + CONF_URL_PATH: "map", + } + ) + + map_store = hass.data[LOVELACE_DATA].dashboards["map"] + await map_store.async_save({"strategy": {"type": "map"}}) diff --git a/homeassistant/components/lutron_caseta/__init__.py b/homeassistant/components/lutron_caseta/__init__.py index d697d6244b5..b489fe9dba7 100644 --- a/homeassistant/components/lutron_caseta/__init__.py +++ 
b/homeassistant/components/lutron_caseta/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -import contextlib from itertools import chain import logging import ssl @@ -37,11 +36,12 @@ from .const import ( ATTR_SERIAL, ATTR_TYPE, BRIDGE_DEVICE_ID, - BRIDGE_TIMEOUT, CONF_CA_CERTS, CONF_CERTFILE, CONF_KEYFILE, CONF_SUBTYPE, + CONFIGURE_TIMEOUT, + CONNECT_TIMEOUT, DOMAIN, LUTRON_CASETA_BUTTON_EVENT, MANUFACTURER, @@ -161,28 +161,40 @@ async def async_setup_entry( keyfile = hass.config.path(entry.data[CONF_KEYFILE]) certfile = hass.config.path(entry.data[CONF_CERTFILE]) ca_certs = hass.config.path(entry.data[CONF_CA_CERTS]) - bridge = None + connected_future: asyncio.Future[None] = hass.loop.create_future() + + def _on_connect() -> None: + nonlocal connected_future + if not connected_future.done(): + connected_future.set_result(None) try: bridge = Smartbridge.create_tls( - hostname=host, keyfile=keyfile, certfile=certfile, ca_certs=ca_certs + hostname=host, + keyfile=keyfile, + certfile=certfile, + ca_certs=ca_certs, + on_connect_callback=_on_connect, ) except ssl.SSLError: _LOGGER.error("Invalid certificate used to connect to bridge at %s", host) return False - timed_out = True - with contextlib.suppress(TimeoutError): - async with asyncio.timeout(BRIDGE_TIMEOUT): - await bridge.connect() - timed_out = False + connect_task = hass.async_create_task(bridge.connect()) + for future, name, timeout in ( + (connected_future, "connect", CONNECT_TIMEOUT), + (connect_task, "configure", CONFIGURE_TIMEOUT), + ): + try: + async with asyncio.timeout(timeout): + await future + except TimeoutError as ex: + connect_task.cancel() + await bridge.close() + raise ConfigEntryNotReady(f"Timed out on {name} for {host}") from ex - if timed_out or not bridge.is_connected(): - await bridge.close() - if timed_out: - raise ConfigEntryNotReady(f"Timed out while trying to connect to {host}") - if not bridge.is_connected(): - raise ConfigEntryNotReady(f"Cannot connect to {host}") + if not bridge.is_connected(): + raise ConfigEntryNotReady(f"Connection failed to {host}") _LOGGER.debug("Connected to Lutron Caseta bridge via LEAP at %s", host) await _async_migrate_unique_ids(hass, entry) diff --git a/homeassistant/components/lutron_caseta/config_flow.py b/homeassistant/components/lutron_caseta/config_flow.py index 767c3d2f2b7..45e7a04bdc9 100644 --- a/homeassistant/components/lutron_caseta/config_flow.py +++ b/homeassistant/components/lutron_caseta/config_flow.py @@ -20,10 +20,11 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from .const import ( ABORT_REASON_CANNOT_CONNECT, BRIDGE_DEVICE_ID, - BRIDGE_TIMEOUT, CONF_CA_CERTS, CONF_CERTFILE, CONF_KEYFILE, + CONFIGURE_TIMEOUT, + CONNECT_TIMEOUT, DOMAIN, ERROR_CANNOT_CONNECT, STEP_IMPORT_FAILED, @@ -232,7 +233,7 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): return None try: - async with asyncio.timeout(BRIDGE_TIMEOUT): + async with asyncio.timeout(CONNECT_TIMEOUT + CONFIGURE_TIMEOUT): await bridge.connect() except TimeoutError: _LOGGER.error( diff --git a/homeassistant/components/lutron_caseta/const.py b/homeassistant/components/lutron_caseta/const.py index 809b9e8d007..26a83de6f4b 100644 --- a/homeassistant/components/lutron_caseta/const.py +++ b/homeassistant/components/lutron_caseta/const.py @@ -34,7 +34,8 @@ ACTION_RELEASE = "release" CONF_SUBTYPE = "subtype" -BRIDGE_TIMEOUT = 35 +CONNECT_TIMEOUT = 9 +CONFIGURE_TIMEOUT = 50 UNASSIGNED_AREA = "Unassigned" diff --git 
a/homeassistant/components/lutron_caseta/cover.py b/homeassistant/components/lutron_caseta/cover.py index 3727dbf17ba..e05fddb996f 100644 --- a/homeassistant/components/lutron_caseta/cover.py +++ b/homeassistant/components/lutron_caseta/cover.py @@ -108,6 +108,7 @@ PYLUTRON_TYPE_TO_CLASSES = { "QsWirelessHorizontalSheerBlind": LutronCasetaShade, "Shade": LutronCasetaShade, "PalladiomWireFreeShade": LutronCasetaShade, + "SerenaEssentialsRollerShade": LutronCasetaShade, } diff --git a/homeassistant/components/lutron_caseta/manifest.json b/homeassistant/components/lutron_caseta/manifest.json index bbb6df41a89..96b00a1f392 100644 --- a/homeassistant/components/lutron_caseta/manifest.json +++ b/homeassistant/components/lutron_caseta/manifest.json @@ -9,7 +9,7 @@ }, "iot_class": "local_push", "loggers": ["pylutron_caseta"], - "requirements": ["pylutron-caseta==0.23.0"], + "requirements": ["pylutron-caseta==0.24.0"], "zeroconf": [ { "type": "_lutron._tcp.local.", diff --git a/homeassistant/components/lyric/strings.json b/homeassistant/components/lyric/strings.json index 83c65359643..bc48a791e70 100644 --- a/homeassistant/components/lyric/strings.json +++ b/homeassistant/components/lyric/strings.json @@ -53,12 +53,12 @@ }, "services": { "set_hold_time": { - "name": "Set Hold Time", - "description": "Sets the time to hold until.", + "name": "Set hold time", + "description": "Sets the time period to keep the temperature and override the schedule.", "fields": { "time_period": { - "name": "Time Period", - "description": "Time to hold until." + "name": "Time period", + "description": "Duration for which to override the schedule." } } } diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index ab8514c8321..17b8614a2e9 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -2,7 +2,7 @@ from __future__ import annotations -from mastodon.Mastodon import Mastodon, MastodonError +from mastodon.Mastodon import Account, Instance, InstanceV2, Mastodon, MastodonError from homeassistant.const import ( CONF_ACCESS_TOKEN, @@ -107,7 +107,9 @@ async def async_migrate_entry(hass: HomeAssistant, entry: MastodonConfigEntry) - return True -def setup_mastodon(entry: MastodonConfigEntry) -> tuple[Mastodon, dict, dict]: +def setup_mastodon( + entry: MastodonConfigEntry, +) -> tuple[Mastodon, InstanceV2 | Instance, Account]: """Get mastodon details.""" client = create_mastodon_client( entry.data[CONF_BASE_URL], diff --git a/homeassistant/components/mastodon/config_flow.py b/homeassistant/components/mastodon/config_flow.py index 1b93cbecd98..1ae1e6b229e 100644 --- a/homeassistant/components/mastodon/config_flow.py +++ b/homeassistant/components/mastodon/config_flow.py @@ -4,7 +4,12 @@ from __future__ import annotations from typing import Any -from mastodon.Mastodon import MastodonNetworkError, MastodonUnauthorizedError +from mastodon.Mastodon import ( + Account, + Instance, + MastodonNetworkError, + MastodonUnauthorizedError, +) import voluptuous as vol from yarl import URL @@ -56,8 +61,8 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): client_secret: str, access_token: str, ) -> tuple[ - dict[str, str] | None, - dict[str, str] | None, + Instance | None, + Account | None, dict[str, str], ]: """Check connection to the Mastodon instance.""" diff --git a/homeassistant/components/mastodon/const.py b/homeassistant/components/mastodon/const.py index a4af49a27a6..2efda329467 100644 --- 
a/homeassistant/components/mastodon/const.py +++ b/homeassistant/components/mastodon/const.py @@ -12,14 +12,6 @@ DATA_HASS_CONFIG = "mastodon_hass_config" DEFAULT_URL: Final = "https://mastodon.social" DEFAULT_NAME: Final = "Mastodon" -INSTANCE_VERSION: Final = "version" -INSTANCE_URI: Final = "uri" -INSTANCE_DOMAIN: Final = "domain" -ACCOUNT_USERNAME: Final = "username" -ACCOUNT_FOLLOWERS_COUNT: Final = "followers_count" -ACCOUNT_FOLLOWING_COUNT: Final = "following_count" -ACCOUNT_STATUSES_COUNT: Final = "statuses_count" - ATTR_CONFIG_ENTRY_ID = "config_entry_id" ATTR_STATUS = "status" ATTR_VISIBILITY = "visibility" diff --git a/homeassistant/components/mastodon/coordinator.py b/homeassistant/components/mastodon/coordinator.py index 5d2b193b4a8..99785eca80b 100644 --- a/homeassistant/components/mastodon/coordinator.py +++ b/homeassistant/components/mastodon/coordinator.py @@ -4,10 +4,9 @@ from __future__ import annotations from dataclasses import dataclass from datetime import timedelta -from typing import Any from mastodon import Mastodon -from mastodon.Mastodon import MastodonError +from mastodon.Mastodon import Account, Instance, InstanceV2, MastodonError from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -21,15 +20,15 @@ class MastodonData: """Mastodon data type.""" client: Mastodon - instance: dict - account: dict + instance: InstanceV2 | Instance + account: Account coordinator: MastodonCoordinator type MastodonConfigEntry = ConfigEntry[MastodonData] -class MastodonCoordinator(DataUpdateCoordinator[dict[str, Any]]): +class MastodonCoordinator(DataUpdateCoordinator[Account]): """Class to manage fetching Mastodon data.""" config_entry: MastodonConfigEntry @@ -47,9 +46,9 @@ class MastodonCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) self.client = client - async def _async_update_data(self) -> dict[str, Any]: + async def _async_update_data(self) -> Account: try: - account: dict = await self.hass.async_add_executor_job( + account: Account = await self.hass.async_add_executor_job( self.client.account_verify_credentials ) except MastodonError as ex: diff --git a/homeassistant/components/mastodon/diagnostics.py b/homeassistant/components/mastodon/diagnostics.py index dc7c1b785ab..31444413dfd 100644 --- a/homeassistant/components/mastodon/diagnostics.py +++ b/homeassistant/components/mastodon/diagnostics.py @@ -4,6 +4,8 @@ from __future__ import annotations from typing import Any +from mastodon.Mastodon import Account, Instance + from homeassistant.core import HomeAssistant from .coordinator import MastodonConfigEntry @@ -25,7 +27,7 @@ async def async_get_config_entry_diagnostics( } -def get_diagnostics(config_entry: MastodonConfigEntry) -> tuple[dict, dict]: +def get_diagnostics(config_entry: MastodonConfigEntry) -> tuple[Instance, Account]: """Get mastodon diagnostics.""" client = config_entry.runtime_data.client diff --git a/homeassistant/components/mastodon/entity.py b/homeassistant/components/mastodon/entity.py index 2ae8c0d852e..60224e75e41 100644 --- a/homeassistant/components/mastodon/entity.py +++ b/homeassistant/components/mastodon/entity.py @@ -4,7 +4,7 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DEFAULT_NAME, DOMAIN, INSTANCE_VERSION +from .const import DEFAULT_NAME, DOMAIN from .coordinator import MastodonConfigEntry, MastodonCoordinator 
from .utils import construct_mastodon_username @@ -40,7 +40,7 @@ class MastodonEntity(CoordinatorEntity[MastodonCoordinator]): manufacturer="Mastodon gGmbH", model=full_account_name, entry_type=DeviceEntryType.SERVICE, - sw_version=data.runtime_data.instance[INSTANCE_VERSION], + sw_version=data.runtime_data.instance.version, name=name, ) diff --git a/homeassistant/components/mastodon/manifest.json b/homeassistant/components/mastodon/manifest.json index 20c506e7766..d7b21ad3a0c 100644 --- a/homeassistant/components/mastodon/manifest.json +++ b/homeassistant/components/mastodon/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["mastodon"], - "requirements": ["Mastodon.py==1.8.1"] + "requirements": ["Mastodon.py==2.0.1"] } diff --git a/homeassistant/components/mastodon/notify.py b/homeassistant/components/mastodon/notify.py index 8e7e9dc1947..149ef1f6a48 100644 --- a/homeassistant/components/mastodon/notify.py +++ b/homeassistant/components/mastodon/notify.py @@ -5,7 +5,7 @@ from __future__ import annotations from typing import Any, cast from mastodon import Mastodon -from mastodon.Mastodon import MastodonAPIError +from mastodon.Mastodon import MastodonAPIError, MediaAttachment import voluptuous as vol from homeassistant.components.notify import ( @@ -52,7 +52,7 @@ async def async_get_service( if discovery_info is None: return None - client: Mastodon = discovery_info.get("client") + client = cast(Mastodon, discovery_info.get("client")) return MastodonNotificationService(hass, client) @@ -114,7 +114,7 @@ class MastodonNotificationService(BaseNotificationService): message, visibility=target, spoiler_text=content_warning, - media_ids=mediadata["id"], + media_ids=mediadata.id, sensitive=sensitive, ) except MastodonAPIError as err: @@ -134,12 +134,14 @@ class MastodonNotificationService(BaseNotificationService): translation_key="unable_to_send_message", ) from err - def _upload_media(self, media_path: Any = None) -> Any: + def _upload_media(self, media_path: Any = None) -> MediaAttachment: """Upload media.""" with open(media_path, "rb"): media_type = get_media_type(media_path) try: - mediadata = self.client.media_post(media_path, mime_type=media_type) + mediadata: MediaAttachment = self.client.media_post( + media_path, mime_type=media_type + ) except MastodonAPIError as err: raise HomeAssistantError( translation_domain=DOMAIN, diff --git a/homeassistant/components/mastodon/quality_scale.yaml b/homeassistant/components/mastodon/quality_scale.yaml index 43636ed6924..f07f7e0a8ad 100644 --- a/homeassistant/components/mastodon/quality_scale.yaml +++ b/homeassistant/components/mastodon/quality_scale.yaml @@ -93,7 +93,4 @@ rules: # Platinum async-dependency: todo inject-websession: todo - strict-typing: - status: todo - comment: | - Requirement 'Mastodon.py==1.8.1' appears untyped + strict-typing: done diff --git a/homeassistant/components/mastodon/sensor.py b/homeassistant/components/mastodon/sensor.py index 74537e33cae..bfdc9c90333 100644 --- a/homeassistant/components/mastodon/sensor.py +++ b/homeassistant/components/mastodon/sensor.py @@ -4,7 +4,8 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Any + +from mastodon.Mastodon import Account from homeassistant.components.sensor import ( SensorEntity, @@ -15,11 +16,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from 
homeassistant.helpers.typing import StateType -from .const import ( - ACCOUNT_FOLLOWERS_COUNT, - ACCOUNT_FOLLOWING_COUNT, - ACCOUNT_STATUSES_COUNT, -) from .coordinator import MastodonConfigEntry from .entity import MastodonEntity @@ -31,7 +27,7 @@ PARALLEL_UPDATES = 0 class MastodonSensorEntityDescription(SensorEntityDescription): """Describes Mastodon sensor entity.""" - value_fn: Callable[[dict[str, Any]], StateType] + value_fn: Callable[[Account], StateType] ENTITY_DESCRIPTIONS = ( @@ -39,19 +35,19 @@ ENTITY_DESCRIPTIONS = ( key="followers", translation_key="followers", state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.get(ACCOUNT_FOLLOWERS_COUNT), + value_fn=lambda data: data.followers_count, ), MastodonSensorEntityDescription( key="following", translation_key="following", state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.get(ACCOUNT_FOLLOWING_COUNT), + value_fn=lambda data: data.following_count, ), MastodonSensorEntityDescription( key="posts", translation_key="posts", state_class=SensorStateClass.TOTAL, - value_fn=lambda data: data.get(ACCOUNT_STATUSES_COUNT), + value_fn=lambda data: data.statuses_count, ), ) diff --git a/homeassistant/components/mastodon/services.py b/homeassistant/components/mastodon/services.py index 7ab351f8c29..68e95e726a1 100644 --- a/homeassistant/components/mastodon/services.py +++ b/homeassistant/components/mastodon/services.py @@ -5,7 +5,7 @@ from functools import partial from typing import Any, cast from mastodon import Mastodon -from mastodon.Mastodon import MastodonAPIError +from mastodon.Mastodon import MastodonAPIError, MediaAttachment import voluptuous as vol from homeassistant.config_entries import ConfigEntryState @@ -104,7 +104,7 @@ def setup_services(hass: HomeAssistant) -> None: def _post(client: Mastodon, **kwargs: Any) -> None: """Post to Mastodon.""" - media_data: dict[str, Any] | None = None + media_data: MediaAttachment | None = None media_path = kwargs.get("media_path") if media_path: @@ -137,7 +137,7 @@ def setup_services(hass: HomeAssistant) -> None: try: media_ids: str | None = None if media_data: - media_ids = media_data["id"] + media_ids = media_data.id client.status_post(media_ids=media_ids, **kwargs) except MastodonAPIError as err: raise HomeAssistantError( diff --git a/homeassistant/components/mastodon/utils.py b/homeassistant/components/mastodon/utils.py index e9c2567b675..898578c931b 100644 --- a/homeassistant/components/mastodon/utils.py +++ b/homeassistant/components/mastodon/utils.py @@ -6,8 +6,9 @@ import mimetypes from typing import Any from mastodon import Mastodon +from mastodon.Mastodon import Account, Instance, InstanceV2 -from .const import ACCOUNT_USERNAME, DEFAULT_NAME, INSTANCE_DOMAIN, INSTANCE_URI +from .const import DEFAULT_NAME def create_mastodon_client( @@ -23,14 +24,13 @@ def create_mastodon_client( def construct_mastodon_username( - instance: dict[str, str] | None, account: dict[str, str] | None + instance: InstanceV2 | Instance | None, account: Account | None ) -> str: """Construct a mastodon username from the account and instance.""" if instance and account: - return ( - f"@{account[ACCOUNT_USERNAME]}@" - f"{instance.get(INSTANCE_URI, instance.get(INSTANCE_DOMAIN))}" - ) + if type(instance) is InstanceV2: + return f"@{account.username}@{instance.domain}" + return f"@{account.username}@{instance.uri}" return DEFAULT_NAME diff --git a/homeassistant/components/matter/event.py b/homeassistant/components/matter/event.py index 6fa775fd1b9..fa7d96ed1ae 100644 --- 
a/homeassistant/components/matter/event.py +++ b/homeassistant/components/matter/event.py @@ -69,7 +69,7 @@ class MatterEventEntity(MatterEntity, EventEntity): max_presses_supported = self.get_matter_attribute_value( clusters.Switch.Attributes.MultiPressMax ) - max_presses_supported = min(max_presses_supported or 1, 8) + max_presses_supported = min(max_presses_supported or 2, 8) for i in range(max_presses_supported): event_types.append(f"multi_press_{i + 1}") # noqa: PERF401 elif feature_map & SwitchFeature.kMomentarySwitch: diff --git a/homeassistant/components/matter/number.py b/homeassistant/components/matter/number.py index 44538f46856..2c7a9651c60 100644 --- a/homeassistant/components/matter/number.py +++ b/homeassistant/components/matter/number.py @@ -169,8 +169,8 @@ DISCOVERY_SCHEMAS = [ device_class=NumberDeviceClass.TEMPERATURE, entity_category=EntityCategory.CONFIG, translation_key="temperature_offset", - native_max_value=25, - native_min_value=-25, + native_max_value=50, + native_min_value=-50, native_step=0.5, native_unit_of_measurement=UnitOfTemperature.CELSIUS, measurement_to_ha=lambda x: None if x is None else x / 10, diff --git a/homeassistant/components/mcp/__init__.py b/homeassistant/components/mcp/__init__.py index 4a2b4da990d..41b6a260d9f 100644 --- a/homeassistant/components/mcp/__init__.py +++ b/homeassistant/components/mcp/__init__.py @@ -39,7 +39,6 @@ async def async_setup_entry( entry.async_on_unload(unsub) entry.runtime_data = coordinator - entry.async_on_unload(coordinator.close) return True diff --git a/homeassistant/components/mcp/coordinator.py b/homeassistant/components/mcp/coordinator.py index a5c5ee55dbf..6e66036c548 100644 --- a/homeassistant/components/mcp/coordinator.py +++ b/homeassistant/components/mcp/coordinator.py @@ -40,6 +40,7 @@ async def mcp_client(url: str) -> AsyncGenerator[ClientSession]: await session.initialize() yield session except ExceptionGroup as err: + _LOGGER.debug("Error creating MCP client: %s", err) raise err.exceptions[0] from err @@ -51,13 +52,13 @@ class ModelContextProtocolTool(llm.Tool): name: str, description: str | None, parameters: vol.Schema, - session: ClientSession, + server_url: str, ) -> None: """Initialize the tool.""" self.name = name self.description = description self.parameters = parameters - self.session = session + self.server_url = server_url async def async_call( self, @@ -67,10 +68,16 @@ class ModelContextProtocolTool(llm.Tool): ) -> JsonObjectType: """Call the tool.""" try: - result = await self.session.call_tool( - tool_input.tool_name, tool_input.tool_args - ) + async with asyncio.timeout(TIMEOUT): + async with mcp_client(self.server_url) as session: + result = await session.call_tool( + tool_input.tool_name, tool_input.tool_args + ) + except TimeoutError as error: + _LOGGER.debug("Timeout when calling tool: %s", error) + raise HomeAssistantError(f"Timeout when calling tool: {error}") from error except httpx.HTTPStatusError as error: + _LOGGER.debug("Error when calling tool: %s", error) raise HomeAssistantError(f"Error when calling tool: {error}") from error return result.model_dump(exclude_unset=True, exclude_none=True) @@ -79,8 +86,6 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]): """Define an object to hold MCP data.""" config_entry: ConfigEntry - _session: ClientSession | None = None - _setup_error: Exception | None = None def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize ModelContextProtocolCoordinator.""" @@ -91,52 +96,6 @@ 
class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]): config_entry=config_entry, update_interval=UPDATE_INTERVAL, ) - self._stop = asyncio.Event() - - async def _async_setup(self) -> None: - """Set up the client connection.""" - connected = asyncio.Event() - stop = asyncio.Event() - self.config_entry.async_create_background_task( - self.hass, self._connect(connected, stop), "mcp-client" - ) - try: - async with asyncio.timeout(TIMEOUT): - await connected.wait() - self._stop = stop - finally: - if self._setup_error is not None: - raise self._setup_error - - async def _connect(self, connected: asyncio.Event, stop: asyncio.Event) -> None: - """Create a server-sent event MCP client.""" - url = self.config_entry.data[CONF_URL] - try: - async with ( - sse_client(url=url) as streams, - ClientSession(*streams) as session, - ): - await session.initialize() - self._session = session - connected.set() - await stop.wait() - except httpx.HTTPStatusError as err: - self._setup_error = err - _LOGGER.debug("Error connecting to MCP server: %s", err) - raise UpdateFailed(f"Error connecting to MCP server: {err}") from err - except ExceptionGroup as err: - self._setup_error = err.exceptions[0] - _LOGGER.debug("Error connecting to MCP server: %s", err) - raise UpdateFailed( - "Error connecting to MCP server: {err.exceptions[0]}" - ) from err.exceptions[0] - finally: - self._session = None - - async def close(self) -> None: - """Close the client connection.""" - if self._stop is not None: - self._stop.set() async def _async_update_data(self) -> list[llm.Tool]: """Fetch data from API endpoint. @@ -144,11 +103,15 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]): This is the place to pre-process the data to lookup tables so entities can quickly look up their data. """ - if self._session is None: - raise UpdateFailed("No session available") try: - result = await self._session.list_tools() + async with asyncio.timeout(TIMEOUT): + async with mcp_client(self.config_entry.data[CONF_URL]) as session: + result = await session.list_tools() + except TimeoutError as error: + _LOGGER.debug("Timeout when listing tools: %s", error) + raise UpdateFailed(f"Timeout when listing tools: {error}") from error except httpx.HTTPError as err: + _LOGGER.debug("Error communicating with API: %s", err) raise UpdateFailed(f"Error communicating with API: {err}") from err _LOGGER.debug("Received tools: %s", result.tools) @@ -165,7 +128,7 @@ class ModelContextProtocolCoordinator(DataUpdateCoordinator[list[llm.Tool]]): tool.name, tool.description, parameters, - self._session, + self.config_entry.data[CONF_URL], ) ) return tools diff --git a/homeassistant/components/mcp/manifest.json b/homeassistant/components/mcp/manifest.json index ee4baf04802..9cd1e2899a6 100644 --- a/homeassistant/components/mcp/manifest.json +++ b/homeassistant/components/mcp/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mcp", "iot_class": "local_polling", "quality_scale": "silver", - "requirements": ["mcp==1.1.2"] + "requirements": ["mcp==1.5.0"] } diff --git a/homeassistant/components/mcp_server/__init__.py b/homeassistant/components/mcp_server/__init__.py index 941eccbe528..e523f46228f 100644 --- a/homeassistant/components/mcp_server/__init__.py +++ b/homeassistant/components/mcp_server/__init__.py @@ -6,7 +6,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType -from . 
import http, llm_api +from . import http from .const import DOMAIN from .session import SessionManager from .types import MCPServerConfigEntry @@ -25,7 +25,6 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Model Context Protocol component.""" http.async_register(hass) - llm_api.async_register_api(hass) return True diff --git a/homeassistant/components/mcp_server/config_flow.py b/homeassistant/components/mcp_server/config_flow.py index 8d8d311b874..e8df68de5e2 100644 --- a/homeassistant/components/mcp_server/config_flow.py +++ b/homeassistant/components/mcp_server/config_flow.py @@ -16,7 +16,7 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) -from .const import DOMAIN, LLM_API, LLM_API_NAME +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -33,13 +33,6 @@ class ModelContextServerProtocolConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle the initial step.""" llm_apis = {api.id: api.name for api in llm.async_get_apis(self.hass)} - if LLM_API not in llm_apis: - # MCP server component is not loaded yet, so make the LLM API a choice. - llm_apis = { - LLM_API: LLM_API_NAME, - **llm_apis, - } - if user_input is not None: return self.async_create_entry( title=llm_apis[user_input[CONF_LLM_HASS_API]], data=user_input diff --git a/homeassistant/components/mcp_server/const.py b/homeassistant/components/mcp_server/const.py index 8958ac36616..3f2e12cbb6a 100644 --- a/homeassistant/components/mcp_server/const.py +++ b/homeassistant/components/mcp_server/const.py @@ -2,5 +2,6 @@ DOMAIN = "mcp_server" TITLE = "Model Context Protocol Server" -LLM_API = "stateless_assist" -LLM_API_NAME = "Stateless Assist" +# The Stateless API is no longer registered explicitly, but this name may still exist in the +# users config entry. +STATELESS_LLM_API = "stateless_assist" diff --git a/homeassistant/components/mcp_server/llm_api.py b/homeassistant/components/mcp_server/llm_api.py deleted file mode 100644 index 5c29b29153e..00000000000 --- a/homeassistant/components/mcp_server/llm_api.py +++ /dev/null @@ -1,48 +0,0 @@ -"""LLM API for MCP Server.""" - -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import llm -from homeassistant.util import yaml as yaml_util - -from .const import LLM_API, LLM_API_NAME - -EXPOSED_ENTITY_FIELDS = {"name", "domain", "description", "areas", "names"} - - -def async_register_api(hass: HomeAssistant) -> None: - """Register the LLM API.""" - llm.async_register_api(hass, StatelessAssistAPI(hass)) - - -class StatelessAssistAPI(llm.AssistAPI): - """LLM API for MCP Server that provides the Assist API without state information in the prompt. - - Syncing the state information is possible, but may put unnecessary load on - the system so we are instead providing the prompt without entity state. Since - actions don't care about the current state, there is little quality loss. 
- """ - - def __init__(self, hass: HomeAssistant) -> None: - """Initialize the StatelessAssistAPI.""" - super().__init__(hass) - self.id = LLM_API - self.name = LLM_API_NAME - - @callback - def _async_get_exposed_entities_prompt( - self, llm_context: llm.LLMContext, exposed_entities: dict | None - ) -> list[str]: - """Return the prompt for the exposed entities.""" - prompt = [] - - if exposed_entities and exposed_entities["entities"]: - prompt.append( - "An overview of the areas and the devices in this smart home:" - ) - entities = [ - {k: v for k, v in entity_info.items() if k in EXPOSED_ENTITY_FIELDS} - for entity_info in exposed_entities["entities"].values() - ] - prompt.append(yaml_util.dump(list(entities))) - - return prompt diff --git a/homeassistant/components/mcp_server/manifest.json b/homeassistant/components/mcp_server/manifest.json index 18b2e5bc417..b5fb1bdcd87 100644 --- a/homeassistant/components/mcp_server/manifest.json +++ b/homeassistant/components/mcp_server/manifest.json @@ -8,6 +8,6 @@ "integration_type": "service", "iot_class": "local_push", "quality_scale": "silver", - "requirements": ["mcp==1.1.2", "aiohttp_sse==2.2.0", "anyio==4.8.0"], + "requirements": ["mcp==1.5.0", "aiohttp_sse==2.2.0", "anyio==4.9.0"], "single_config_entry": true } diff --git a/homeassistant/components/mcp_server/server.py b/homeassistant/components/mcp_server/server.py index ba21abd722c..affa4faecd6 100644 --- a/homeassistant/components/mcp_server/server.py +++ b/homeassistant/components/mcp_server/server.py @@ -21,6 +21,8 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import llm +from .const import STATELESS_LLM_API + _LOGGER = logging.getLogger(__name__) @@ -47,16 +49,23 @@ async def create_server( A Model Context Protocol Server object is associated with a single session. The MCP SDK handles the details of the protocol. 
""" + if llm_api_id == STATELESS_LLM_API: + llm_api_id = llm.LLM_API_ASSIST - server = Server("home-assistant") + server = Server[Any]("home-assistant") + + async def get_api_instance() -> llm.APIInstance: + """Get the LLM API selected.""" + # Backwards compatibility with old MCP Server config + return await llm.async_get_api(hass, llm_api_id, llm_context) @server.list_prompts() # type: ignore[no-untyped-call, misc] async def handle_list_prompts() -> list[types.Prompt]: - llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) + llm_api = await get_api_instance() return [ types.Prompt( name=llm_api.api.name, - description=f"Default prompt for the Home Assistant LLM API {llm_api.api.name}", + description=f"Default prompt for Home Assistant {llm_api.api.name} API", ) ] @@ -64,12 +73,12 @@ async def create_server( async def handle_get_prompt( name: str, arguments: dict[str, str] | None ) -> types.GetPromptResult: - llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) + llm_api = await get_api_instance() if name != llm_api.api.name: raise ValueError(f"Unknown prompt: {name}") return types.GetPromptResult( - description=f"Default prompt for the Home Assistant LLM API {llm_api.api.name}", + description=f"Default prompt for Home Assistant {llm_api.api.name} API", messages=[ types.PromptMessage( role="assistant", @@ -84,13 +93,13 @@ async def create_server( @server.list_tools() # type: ignore[no-untyped-call, misc] async def list_tools() -> list[types.Tool]: """List available time tools.""" - llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) + llm_api = await get_api_instance() return [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools] @server.call_tool() # type: ignore[no-untyped-call, misc] async def call_tool(name: str, arguments: dict) -> Sequence[types.TextContent]: """Handle calling tools.""" - llm_api = await llm.async_get_api(hass, llm_api_id, llm_context) + llm_api = await get_api_instance() tool_input = llm.ToolInput(tool_name=name, tool_args=arguments) _LOGGER.debug("Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args) diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index fa63252e837..186fc4c4ac0 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -146,11 +146,11 @@ "services": { "get_mealplan": { "name": "Get mealplan", - "description": "Get mealplan from Mealie", + "description": "Gets a mealplan from Mealie", "fields": { "config_entry_id": { "name": "Mealie instance", - "description": "Select the Mealie instance to get mealplan from" + "description": "The Mealie instance to use for this action." 
}, "start_date": { "name": "Start date", @@ -164,7 +164,7 @@ }, "get_recipe": { "name": "Get recipe", - "description": "Get recipe from Mealie", + "description": "Gets a recipe from Mealie", "fields": { "config_entry_id": { "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", @@ -178,7 +178,7 @@ }, "import_recipe": { "name": "Import recipe", - "description": "Import recipe from an URL", + "description": "Imports a recipe from an URL", "fields": { "config_entry_id": { "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", @@ -196,7 +196,7 @@ }, "set_random_mealplan": { "name": "Set random mealplan", - "description": "Set a random mealplan for a specific date", + "description": "Sets a random mealplan for a specific date", "fields": { "config_entry_id": { "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", @@ -214,7 +214,7 @@ }, "set_mealplan": { "name": "Set a mealplan", - "description": "Set a mealplan for a specific date", + "description": "Sets a mealplan for a specific date", "fields": { "config_entry_id": { "name": "[%key:component::mealie::services::get_mealplan::fields::config_entry_id::name%]", diff --git a/homeassistant/components/meater/config_flow.py b/homeassistant/components/meater/config_flow.py index a7ba3ba1498..5c11b10755c 100644 --- a/homeassistant/components/meater/config_flow.py +++ b/homeassistant/components/meater/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from meater import AuthenticationError, MeaterApi, ServiceUnavailableError @@ -14,6 +15,8 @@ from homeassistant.helpers import aiohttp_client from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) USER_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} @@ -84,7 +87,8 @@ class MeaterConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except ServiceUnavailableError: errors["base"] = "service_unavailable_error" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown_auth_error" else: data = {"username": username, "password": password} diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index 575c0fa878d..e049a827c75 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2025.02.19"], + "requirements": ["yt-dlp[default]==2025.03.26"], "single_config_entry": true } diff --git a/homeassistant/components/media_extractor/strings.json b/homeassistant/components/media_extractor/strings.json index 125aa08337a..11b5a884e4d 100644 --- a/homeassistant/components/media_extractor/strings.json +++ b/homeassistant/components/media_extractor/strings.json @@ -17,12 +17,12 @@ }, "media_content_type": { "name": "Media content type", - "description": "The type of the content to play. Must be one of MUSIC, TVSHOW, VIDEO, EPISODE, CHANNEL or PLAYLIST MUSIC." + "description": "The type of the content to play." 
} } }, "extract_media_url": { - "name": "Get Media URL", + "name": "Get media URL", "description": "Extract media URL from a service.", "fields": { "url": { diff --git a/homeassistant/components/media_player/__init__.py b/homeassistant/components/media_player/__init__.py index a30b01694fa..45d08bea7ce 100644 --- a/homeassistant/components/media_player/__init__.py +++ b/homeassistant/components/media_player/__init__.py @@ -1031,7 +1031,6 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if self.state in { MediaPlayerState.OFF, - MediaPlayerState.IDLE, MediaPlayerState.STANDBY, }: await self.async_turn_on() diff --git a/homeassistant/components/media_player/browse_media.py b/homeassistant/components/media_player/browse_media.py index c917164a2ee..d234050c1b2 100644 --- a/homeassistant/components/media_player/browse_media.py +++ b/homeassistant/components/media_player/browse_media.py @@ -23,7 +23,11 @@ from homeassistant.helpers.network import ( from .const import CONTENT_AUTH_EXPIRY_TIME, MediaClass, MediaType # Paths that we don't need to sign -PATHS_WITHOUT_AUTH = ("/api/tts_proxy/", "/api/esphome/ffmpeg_proxy/") +PATHS_WITHOUT_AUTH = ( + "/api/tts_proxy/", + "/api/esphome/ffmpeg_proxy/", + "/api/assist_satellite/static/", +) @callback diff --git a/homeassistant/components/media_player/intent.py b/homeassistant/components/media_player/intent.py index edfab2a668f..af37c0d68bb 100644 --- a/homeassistant/components/media_player/intent.py +++ b/homeassistant/components/media_player/intent.py @@ -96,11 +96,16 @@ async def async_setup_intents(hass: HomeAssistant) -> None: required_states={MediaPlayerState.PLAYING}, required_features=MediaPlayerEntityFeature.VOLUME_SET, required_slots={ - ATTR_MEDIA_VOLUME_LEVEL: vol.All( - vol.Coerce(int), vol.Range(min=0, max=100), lambda val: val / 100 - ) + ATTR_MEDIA_VOLUME_LEVEL: intent.IntentSlotInfo( + description="The volume percentage of the media player", + value_schema=vol.All( + vol.Coerce(int), + vol.Range(min=0, max=100), + lambda val: val / 100, + ), + ), }, - description="Sets the volume of a media player", + description="Sets the volume percentage of a media player", platforms={DOMAIN}, device_classes={MediaPlayerDeviceClass}, ), diff --git a/homeassistant/components/melcloud/climate.py b/homeassistant/components/melcloud/climate.py index 9c2ee60b12c..682a28ea080 100644 --- a/homeassistant/components/melcloud/climate.py +++ b/homeassistant/components/melcloud/climate.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import timedelta -from typing import Any +from typing import Any, cast from pymelcloud import DEVICE_TYPE_ATA, DEVICE_TYPE_ATW, AtaDevice, AtwDevice import pymelcloud.ata_device as ata @@ -236,7 +236,7 @@ class AtaDeviceClimate(MelCloudClimate): set_dict: dict[str, Any] = {} if ATTR_HVAC_MODE in kwargs: self._apply_set_hvac_mode( - kwargs.get(ATTR_HVAC_MODE, self.hvac_mode), set_dict + cast(HVACMode, kwargs.get(ATTR_HVAC_MODE, self.hvac_mode)), set_dict ) if ATTR_TEMPERATURE in kwargs: diff --git a/homeassistant/components/melcloud/strings.json b/homeassistant/components/melcloud/strings.json index 19ef0b76aad..8c168295e88 100644 --- a/homeassistant/components/melcloud/strings.json +++ b/homeassistant/components/melcloud/strings.json @@ -11,20 +11,20 @@ }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", - "description": "The Melcloud integration needs to re-authenticate your connection details", + "description": "The MELCloud integration needs to 
re-authenticate your connection details", "data": { "username": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" } }, "reconfigure": { - "title": "Reconfigure your MelCloud", + "title": "Reconfigure your MELCloud", "description": "Reconfigure the entry to obtain a new token, for your account: `{username}`.", "data": { "password": "[%key:common::config_flow::data::password%]" }, "data_description": { - "password": "Enter the (new) password for MelCloud." + "password": "Enter the (new) password for MELCloud." } } }, @@ -70,7 +70,7 @@ }, "deprecated_yaml_import_issue_cannot_connect": { "title": "The MELCloud YAML configuration import failed", - "description": "Configuring MELCloud using YAML is being removed but there was an connection error importing your YAML configuration.\n\nEnsure connection to MELCloud works and restart Home Assistant to try again or remove the MELCloud YAML configuration from your configuration.yaml file and continue to [set up the integration](/config/integrations/dashboard/add?domain=melcoud) manually." + "description": "Configuring MELCloud using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to MELCloud works and restart Home Assistant to try again or remove the MELCloud YAML configuration from your configuration.yaml file and continue to [set up the integration](/config/integrations/dashboard/add?domain=melcoud) manually." } }, "entity": { diff --git a/homeassistant/components/meteo_france/__init__.py b/homeassistant/components/meteo_france/__init__.py index 5c4ada6b5f1..5f1d5269538 100644 --- a/homeassistant/components/meteo_france/__init__.py +++ b/homeassistant/components/meteo_france/__init__.py @@ -57,7 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Fetch data from API endpoint.""" assert isinstance(department, str) return await hass.async_add_executor_job( - client.get_warning_current_phenomenoms, department, 0, True + client.get_warning_current_phenomenons, department, 0, True ) coordinator_forecast = DataUpdateCoordinator( diff --git a/homeassistant/components/meteo_france/manifest.json b/homeassistant/components/meteo_france/manifest.json index 567788ec479..d82d0c3f91b 100644 --- a/homeassistant/components/meteo_france/manifest.json +++ b/homeassistant/components/meteo_france/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/meteo_france", "iot_class": "cloud_polling", "loggers": ["meteofrance_api"], - "requirements": ["meteofrance-api==1.3.0"] + "requirements": ["meteofrance-api==1.4.0"] } diff --git a/homeassistant/components/meteo_france/sensor.py b/homeassistant/components/meteo_france/sensor.py index c29cc1ceda9..7333f7b0c19 100644 --- a/homeassistant/components/meteo_france/sensor.py +++ b/homeassistant/components/meteo_france/sensor.py @@ -7,7 +7,7 @@ from typing import Any from meteofrance_api.helpers import ( get_warning_text_status_from_indice_color, - readeable_phenomenoms_dict, + readable_phenomenons_dict, ) from meteofrance_api.model.forecast import Forecast from meteofrance_api.model.rain import Rain @@ -336,7 +336,7 @@ class MeteoFranceAlertSensor(MeteoFranceSensor[CurrentPhenomenons]): def extra_state_attributes(self): """Return the state attributes.""" return { - **readeable_phenomenoms_dict(self.coordinator.data.phenomenons_max_colors), + **readable_phenomenons_dict(self.coordinator.data.phenomenons_max_colors), } diff --git 
a/homeassistant/components/meteoclimatic/sensor.py b/homeassistant/components/meteoclimatic/sensor.py index 169da7a0a18..6e508bd63d8 100644 --- a/homeassistant/components/meteoclimatic/sensor.py +++ b/homeassistant/components/meteoclimatic/sensor.py @@ -102,6 +102,7 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( native_unit_of_measurement=DEGREE, icon="mdi:weather-windy", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), SensorEntityDescription( key="rain", diff --git a/homeassistant/components/modbus/__init__.py b/homeassistant/components/modbus/__init__.py index 61df7206402..52642cc32e3 100644 --- a/homeassistant/components/modbus/__init__.py +++ b/homeassistant/components/modbus/__init__.py @@ -79,6 +79,16 @@ from .const import ( CONF_FAN_MODE_TOP, CONF_FAN_MODE_VALUES, CONF_FANS, + CONF_HVAC_ACTION_COOLING, + CONF_HVAC_ACTION_DEFROSTING, + CONF_HVAC_ACTION_DRYING, + CONF_HVAC_ACTION_FAN, + CONF_HVAC_ACTION_HEATING, + CONF_HVAC_ACTION_IDLE, + CONF_HVAC_ACTION_OFF, + CONF_HVAC_ACTION_PREHEATING, + CONF_HVAC_ACTION_REGISTER, + CONF_HVAC_ACTION_VALUES, CONF_HVAC_MODE_AUTO, CONF_HVAC_MODE_COOL, CONF_HVAC_MODE_DRY, @@ -297,6 +307,45 @@ CLIMATE_SCHEMA = vol.All( vol.Optional(CONF_WRITE_REGISTERS, default=False): cv.boolean, } ), + vol.Optional(CONF_HVAC_ACTION_REGISTER): vol.Maybe( + { + CONF_ADDRESS: cv.positive_int, + CONF_HVAC_ACTION_VALUES: { + vol.Optional(CONF_HVAC_ACTION_COOLING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_DEFROSTING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_DRYING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_FAN): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_HEATING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_IDLE): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_OFF): vol.Any( + cv.positive_int, [cv.positive_int] + ), + vol.Optional(CONF_HVAC_ACTION_PREHEATING): vol.Any( + cv.positive_int, [cv.positive_int] + ), + }, + vol.Optional( + CONF_INPUT_TYPE, default=CALL_TYPE_REGISTER_HOLDING + ): vol.In( + [ + CALL_TYPE_REGISTER_HOLDING, + CALL_TYPE_REGISTER_INPUT, + ] + ), + } + ), vol.Optional(CONF_FAN_MODE_REGISTER): vol.Maybe( vol.All( { diff --git a/homeassistant/components/modbus/climate.py b/homeassistant/components/modbus/climate.py index fca1b94611a..be10a9495c6 100644 --- a/homeassistant/components/modbus/climate.py +++ b/homeassistant/components/modbus/climate.py @@ -24,6 +24,7 @@ from homeassistant.components.climate import ( SWING_VERTICAL, ClimateEntity, ClimateEntityFeature, + HVACAction, HVACMode, ) from homeassistant.const import ( @@ -61,6 +62,16 @@ from .const import ( CONF_FAN_MODE_REGISTER, CONF_FAN_MODE_TOP, CONF_FAN_MODE_VALUES, + CONF_HVAC_ACTION_COOLING, + CONF_HVAC_ACTION_DEFROSTING, + CONF_HVAC_ACTION_DRYING, + CONF_HVAC_ACTION_FAN, + CONF_HVAC_ACTION_HEATING, + CONF_HVAC_ACTION_IDLE, + CONF_HVAC_ACTION_OFF, + CONF_HVAC_ACTION_PREHEATING, + CONF_HVAC_ACTION_REGISTER, + CONF_HVAC_ACTION_VALUES, CONF_HVAC_MODE_AUTO, CONF_HVAC_MODE_COOL, CONF_HVAC_MODE_DRY, @@ -74,6 +85,7 @@ from .const import ( CONF_HVAC_ON_VALUE, CONF_HVAC_ONOFF_COIL, CONF_HVAC_ONOFF_REGISTER, + CONF_INPUT_TYPE, CONF_MAX_TEMP, CONF_MIN_TEMP, CONF_STEP, @@ -188,6 +200,34 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): self._attr_hvac_mode = HVACMode.AUTO 
self._attr_hvac_modes = [HVACMode.AUTO] + if CONF_HVAC_ACTION_REGISTER in config: + action_config = config[CONF_HVAC_ACTION_REGISTER] + self._hvac_action_register = action_config[CONF_ADDRESS] + self._hvac_action_type = action_config[CONF_INPUT_TYPE] + + self._attr_hvac_action = None + self._hvac_action_mapping: list[tuple[int, HVACAction]] = [] + action_value_config = action_config[CONF_HVAC_ACTION_VALUES] + + for hvac_action_kw, hvac_action in ( + (CONF_HVAC_ACTION_COOLING, HVACAction.COOLING), + (CONF_HVAC_ACTION_DEFROSTING, HVACAction.DEFROSTING), + (CONF_HVAC_ACTION_DRYING, HVACAction.DRYING), + (CONF_HVAC_ACTION_FAN, HVACAction.FAN), + (CONF_HVAC_ACTION_HEATING, HVACAction.HEATING), + (CONF_HVAC_ACTION_IDLE, HVACAction.IDLE), + (CONF_HVAC_ACTION_OFF, HVACAction.OFF), + (CONF_HVAC_ACTION_PREHEATING, HVACAction.PREHEATING), + ): + if hvac_action_kw in action_value_config: + values = action_value_config[hvac_action_kw] + if not isinstance(values, list): + values = [values] + for value in values: + self._hvac_action_mapping.append((value, hvac_action)) + else: + self._hvac_action_register = None + if CONF_FAN_MODE_REGISTER in config: self._attr_supported_features = ( self._attr_supported_features | ClimateEntityFeature.FAN_MODE @@ -216,7 +256,6 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): self._fan_mode_mapping_from_modbus[value] = fan_mode self._fan_mode_mapping_to_modbus[fan_mode] = value self._attr_fan_modes.append(fan_mode) - else: # No FAN modes defined self._fan_mode_register = None @@ -457,6 +496,20 @@ class ModbusThermostat(BaseStructPlatform, RestoreEntity, ClimateEntity): self._attr_hvac_mode = mode break + # Read the HVAC action register if defined + if self._hvac_action_register is not None: + hvac_action = await self._async_read_register( + self._hvac_action_type, self._hvac_action_register, raw=True + ) + + # Translate the value received + if hvac_action is not None: + self._attr_hvac_action = None + for value, action in self._hvac_action_mapping: + if hvac_action == value: + self._attr_hvac_action = action + break + # Read the Fan mode register if defined if self._fan_mode_register is not None: fan_mode = await self._async_read_register( diff --git a/homeassistant/components/modbus/const.py b/homeassistant/components/modbus/const.py index 5926569040d..634637a6b08 100644 --- a/homeassistant/components/modbus/const.py +++ b/homeassistant/components/modbus/const.py @@ -63,6 +63,16 @@ CONF_HVAC_ONOFF_REGISTER = "hvac_onoff_register" CONF_HVAC_ON_VALUE = "hvac_on_value" CONF_HVAC_OFF_VALUE = "hvac_off_value" CONF_HVAC_ONOFF_COIL = "hvac_onoff_coil" +CONF_HVAC_ACTION_REGISTER = "hvac_action_register" +CONF_HVAC_ACTION_COOLING = "action_cooling" +CONF_HVAC_ACTION_DEFROSTING = "action_defrosting" +CONF_HVAC_ACTION_DRYING = "action_drying" +CONF_HVAC_ACTION_FAN = "action_fan" +CONF_HVAC_ACTION_HEATING = "action_heating" +CONF_HVAC_ACTION_IDLE = "action_idle" +CONF_HVAC_ACTION_OFF = "action_off" +CONF_HVAC_ACTION_PREHEATING = "action_preheating" +CONF_HVAC_ACTION_VALUES = "values" CONF_HVAC_MODE_OFF = "state_off" CONF_HVAC_MODE_HEAT = "state_heat" CONF_HVAC_MODE_COOL = "state_cool" diff --git a/homeassistant/components/moehlenhoff_alpha2/manifest.json b/homeassistant/components/moehlenhoff_alpha2/manifest.json index 14f40991a84..45b7f8c9565 100644 --- a/homeassistant/components/moehlenhoff_alpha2/manifest.json +++ b/homeassistant/components/moehlenhoff_alpha2/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": 
"https://www.home-assistant.io/integrations/moehlenhoff_alpha2", "iot_class": "local_push", - "requirements": ["moehlenhoff-alpha2==1.3.1"] + "requirements": ["moehlenhoff-alpha2==1.4.0"] } diff --git a/homeassistant/components/mopeka/strings.json b/homeassistant/components/mopeka/strings.json index 2455eea2f76..23feb554772 100644 --- a/homeassistant/components/mopeka/strings.json +++ b/homeassistant/components/mopeka/strings.json @@ -6,7 +6,7 @@ "description": "[%key:component::bluetooth::config::step::user::description%]", "data": { "address": "[%key:common::config_flow::data::device%]", - "medium_type": "Medium Type" + "medium_type": "Medium type" } }, "bluetooth_confirm": { diff --git a/homeassistant/components/motion_blinds/__init__.py b/homeassistant/components/motion_blinds/__init__.py index df06ffb75fc..2abcc273e23 100644 --- a/homeassistant/components/motion_blinds/__init__.py +++ b/homeassistant/components/motion_blinds/__init__.py @@ -12,6 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from .const import ( + CONF_BLIND_TYPE_LIST, CONF_INTERFACE, CONF_WAIT_FOR_PUSH, DEFAULT_INTERFACE, @@ -39,6 +40,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: key = entry.data[CONF_API_KEY] multicast_interface = entry.data.get(CONF_INTERFACE, DEFAULT_INTERFACE) wait_for_push = entry.options.get(CONF_WAIT_FOR_PUSH, DEFAULT_WAIT_FOR_PUSH) + blind_type_list = entry.data.get(CONF_BLIND_TYPE_LIST) # Create multicast Listener async with setup_lock: @@ -81,7 +83,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Connect to motion gateway multicast = hass.data[DOMAIN][KEY_MULTICAST_LISTENER] connect_gateway_class = ConnectMotionGateway(hass, multicast) - if not await connect_gateway_class.async_connect_gateway(host, key): + if not await connect_gateway_class.async_connect_gateway( + host, key, blind_type_list + ): raise ConfigEntryNotReady motion_gateway = connect_gateway_class.gateway_device api_lock = asyncio.Lock() @@ -95,6 +99,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass, entry, _LOGGER, coordinator_info ) + # store blind type list for next time + if entry.data.get(CONF_BLIND_TYPE_LIST) != motion_gateway.blind_type_list: + data = { + **entry.data, + CONF_BLIND_TYPE_LIST: motion_gateway.blind_type_list, + } + hass.config_entries.async_update_entry(entry, data=data) + # Fetch initial data so we have data when entities subscribe await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/motion_blinds/config_flow.py b/homeassistant/components/motion_blinds/config_flow.py index d8d1e7c21f1..954f9e25c21 100644 --- a/homeassistant/components/motion_blinds/config_flow.py +++ b/homeassistant/components/motion_blinds/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from motionblinds import MotionDiscovery, MotionGateway @@ -28,6 +29,8 @@ from .const import ( ) from .gateway import ConnectMotionGateway +_LOGGER = logging.getLogger(__name__) + CONFIG_SCHEMA = vol.Schema( { vol.Optional(CONF_HOST): str, @@ -93,7 +96,8 @@ class MotionBlindsFlowHandler(ConfigFlow, domain=DOMAIN): try: # key not needed for GetDeviceList request await self.hass.async_add_executor_job(gateway.GetDeviceList) - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Failed to connect to Motion Gateway") return self.async_abort(reason="not_motionblinds") if 
not gateway.available: @@ -156,6 +160,7 @@ class MotionBlindsFlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: key = user_input[CONF_API_KEY] + assert self._host connect_gateway_class = ConnectMotionGateway(self.hass) if not await connect_gateway_class.async_connect_gateway(self._host, key): diff --git a/homeassistant/components/motion_blinds/const.py b/homeassistant/components/motion_blinds/const.py index 96067d7ceb0..950fa3ab4c7 100644 --- a/homeassistant/components/motion_blinds/const.py +++ b/homeassistant/components/motion_blinds/const.py @@ -8,6 +8,7 @@ DEFAULT_GATEWAY_NAME = "Motionblinds Gateway" PLATFORMS = [Platform.BUTTON, Platform.COVER, Platform.SENSOR] +CONF_BLIND_TYPE_LIST = "blind_type_list" CONF_WAIT_FOR_PUSH = "wait_for_push" CONF_INTERFACE = "interface" DEFAULT_WAIT_FOR_PUSH = False diff --git a/homeassistant/components/motion_blinds/gateway.py b/homeassistant/components/motion_blinds/gateway.py index 44f7caa74b2..9826557919c 100644 --- a/homeassistant/components/motion_blinds/gateway.py +++ b/homeassistant/components/motion_blinds/gateway.py @@ -42,11 +42,16 @@ class ConnectMotionGateway: for blind in self.gateway_device.device_list.values(): blind.Update_from_cache() - async def async_connect_gateway(self, host, key): + async def async_connect_gateway( + self, + host: str, + key: str, + blind_type_list: dict[str, int] | None = None, + ) -> bool: """Connect to the Motion Gateway.""" _LOGGER.debug("Initializing with host %s (key %s)", host, key[:3]) self._gateway_device = MotionGateway( - ip=host, key=key, multicast=self._multicast + ip=host, key=key, multicast=self._multicast, blind_type_list=blind_type_list ) try: # update device info and get the connected sub devices diff --git a/homeassistant/components/motion_blinds/manifest.json b/homeassistant/components/motion_blinds/manifest.json index b327c146300..1654d5b5937 100644 --- a/homeassistant/components/motion_blinds/manifest.json +++ b/homeassistant/components/motion_blinds/manifest.json @@ -21,5 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/motion_blinds", "iot_class": "local_push", "loggers": ["motionblinds"], - "requirements": ["motionblinds==0.6.25"] + "requirements": ["motionblinds==0.6.26"] } diff --git a/homeassistant/components/motion_blinds/strings.json b/homeassistant/components/motion_blinds/strings.json index ddbf928462a..12060cd69f0 100644 --- a/homeassistant/components/motion_blinds/strings.json +++ b/homeassistant/components/motion_blinds/strings.json @@ -3,20 +3,20 @@ "flow_title": "{short_mac} ({ip_address})", "step": { "user": { - "description": "Connect to your Motion Gateway, if the IP address is not set, auto-discovery is used", + "description": "Connect to your Motionblinds gateway. 
If the IP address is not set, auto-discovery is used", "data": { "host": "[%key:common::config_flow::data::ip%]" } }, "connect": { - "description": "You will need the 16 character API Key, see https://www.home-assistant.io/integrations/motion_blinds/#retrieving-the-key for instructions", + "description": "You will need the 16 character API key, see https://www.home-assistant.io/integrations/motion_blinds/#retrieving-the-api-key for instructions", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } }, "select": { - "title": "Select the Motion Gateway that you wish to connect", - "description": "Run the setup again if you want to connect additional Motion Gateways", + "title": "Select the Motionblinds gateway that you wish to connect", + "description": "Run the setup again if you want to connect additional Motionblinds gateways", "data": { "select_ip": "[%key:common::config_flow::data::ip%]" } @@ -29,7 +29,7 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "connection_error": "[%key:common::config_flow::error::cannot_connect%]", - "not_motionblinds": "Discovered device is not a Motion gateway" + "not_motionblinds": "Discovered device is not a Motionblinds gateway" } }, "options": { diff --git a/homeassistant/components/motionblinds_ble/strings.json b/homeassistant/components/motionblinds_ble/strings.json index d6532f12386..ec1fb080854 100644 --- a/homeassistant/components/motionblinds_ble/strings.json +++ b/homeassistant/components/motionblinds_ble/strings.json @@ -1,8 +1,8 @@ { "config": { "abort": { - "no_bluetooth_adapter": "No bluetooth adapter found", - "no_devices_found": "Could not find any bluetooth devices" + "no_bluetooth_adapter": "No Bluetooth adapter found", + "no_devices_found": "Could not find any Bluetooth devices" }, "error": { "could_not_find_motor": "Could not find a motor with that MAC code", diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 6656afe2c8a..ae010bf18c9 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -13,7 +13,7 @@ import voluptuous as vol from homeassistant import config as conf_util from homeassistant.components import websocket_api from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DISCOVERY, SERVICE_RELOAD +from homeassistant.const import CONF_DISCOVERY, CONF_PLATFORM, SERVICE_RELOAD from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ( ConfigValidationError, @@ -81,6 +81,7 @@ from .const import ( ENTRY_OPTION_FIELDS, MQTT_CONNECTION_STATE, TEMPLATE_ERRORS, + Platform, ) from .models import ( DATA_MQTT, @@ -293,6 +294,21 @@ async def async_check_config_schema( ) from exc +def _platforms_in_use(hass: HomeAssistant, entry: ConfigEntry) -> set[str | Platform]: + """Return a set of platforms in use.""" + domains: set[str | Platform] = { + entry.domain + for entry in er.async_entries_for_config_entry( + er.async_get(hass), entry.entry_id + ) + } + # Update with domains from subentries + for subentry in entry.subentries.values(): + components = subentry.data["components"].values() + domains.update(component[CONF_PLATFORM] for component in components) + return domains + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the actions and websocket API for the MQTT component.""" @@ -434,12 
+450,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: mqtt_data, conf = await _setup_client() platforms_used = platforms_from_config(mqtt_data.config) - platforms_used.update( - entry.domain - for entry in er.async_entries_for_config_entry( - er.async_get(hass), entry.entry_id - ) - ) + platforms_used.update(_platforms_in_use(hass, entry)) integration = async_get_loaded_integration(hass, DOMAIN) # Preload platforms we know we are going to use so # discovery can setup each platform synchronously diff --git a/homeassistant/components/mqtt/abbreviations.py b/homeassistant/components/mqtt/abbreviations.py index 2d73cc5865c..a9037a5f247 100644 --- a/homeassistant/components/mqtt/abbreviations.py +++ b/homeassistant/components/mqtt/abbreviations.py @@ -150,6 +150,7 @@ ABBREVIATIONS = { "pl_rst_pct": "payload_reset_percentage", "pl_rst_pr_mode": "payload_reset_preset_mode", "pl_stop": "payload_stop", + "pl_stop_tilt": "payload_stop_tilt", "pl_strt": "payload_start", "pl_ret": "payload_return_to_base", "pl_toff": "payload_turn_off", diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index e985dc9b87f..f6f53599363 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -1022,8 +1022,6 @@ class MQTT: Resubscribe to all topics we were subscribed to and publish birth message. """ - # pylint: disable-next=import-outside-toplevel - if reason_code.is_failure: # 24: Continue authentication # 25: Re-authenticate diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 22568b0f2b8..7fe01e9a890 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -5,38 +5,70 @@ from __future__ import annotations import asyncio from collections import OrderedDict from collections.abc import Callable, Mapping +from copy import deepcopy +from dataclasses import dataclass +from enum import IntEnum import logging import queue from ssl import PROTOCOL_TLS_CLIENT, SSLContext, SSLError from types import MappingProxyType -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, cast +from uuid import uuid4 -from cryptography.hazmat.primitives.serialization import load_pem_private_key -from cryptography.x509 import load_pem_x509_certificate +from cryptography.hazmat.primitives.serialization import ( + Encoding, + NoEncryption, + PrivateFormat, + load_der_private_key, + load_pem_private_key, +) +from cryptography.x509 import load_der_x509_certificate, load_pem_x509_certificate import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file from homeassistant.components.hassio import AddonError, AddonManager, AddonState +from homeassistant.components.sensor import ( + CONF_STATE_CLASS, + DEVICE_CLASS_UNITS, + SensorDeviceClass, + SensorStateClass, +) +from homeassistant.components.switch import SwitchDeviceClass from homeassistant.config_entries import ( SOURCE_RECONFIGURE, ConfigEntry, ConfigFlow, ConfigFlowResult, + ConfigSubentryFlow, OptionsFlow, + SubentryFlowResult, ) from homeassistant.const import ( + ATTR_CONFIGURATION_URL, + ATTR_HW_VERSION, + ATTR_MODEL, + ATTR_MODEL_ID, + ATTR_NAME, + ATTR_SW_VERSION, CONF_CLIENT_ID, + CONF_DEVICE, + CONF_DEVICE_CLASS, CONF_DISCOVERY, CONF_HOST, + CONF_NAME, + CONF_OPTIMISTIC, CONF_PASSWORD, CONF_PAYLOAD, + CONF_PLATFORM, CONF_PORT, CONF_PROTOCOL, + CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME, + 
CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.data_entry_flow import AbortFlow -from homeassistant.helpers import config_validation as cv +from homeassistant.data_entry_flow import AbortFlow, SectionConfig, section +from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.hassio import is_hassio from homeassistant.helpers.json import json_dumps from homeassistant.helpers.selector import ( @@ -47,9 +79,12 @@ from homeassistant.helpers.selector import ( NumberSelectorConfig, NumberSelectorMode, SelectOptionDict, + Selector, SelectSelector, SelectSelectorConfig, SelectSelectorMode, + TemplateSelector, + TemplateSelectorConfig, TextSelector, TextSelectorConfig, TextSelectorType, @@ -64,13 +99,27 @@ from .const import ( ATTR_QOS, ATTR_RETAIN, ATTR_TOPIC, + CONF_AVAILABILITY_TEMPLATE, + CONF_AVAILABILITY_TOPIC, CONF_BIRTH_MESSAGE, CONF_BROKER, CONF_CERTIFICATE, CONF_CLIENT_CERT, CONF_CLIENT_KEY, + CONF_COMMAND_TEMPLATE, + CONF_COMMAND_TOPIC, CONF_DISCOVERY_PREFIX, + CONF_ENTITY_PICTURE, + CONF_EXPIRE_AFTER, CONF_KEEPALIVE, + CONF_LAST_RESET_VALUE_TEMPLATE, + CONF_OPTIONS, + CONF_PAYLOAD_AVAILABLE, + CONF_PAYLOAD_NOT_AVAILABLE, + CONF_QOS, + CONF_RETAIN, + CONF_STATE_TOPIC, + CONF_SUGGESTED_DISPLAY_PRECISION, CONF_TLS_INSECURE, CONF_TRANSPORT, CONF_WILL_MESSAGE, @@ -82,9 +131,12 @@ from .const import ( DEFAULT_DISCOVERY, DEFAULT_ENCODING, DEFAULT_KEEPALIVE, + DEFAULT_PAYLOAD_AVAILABLE, + DEFAULT_PAYLOAD_NOT_AVAILABLE, DEFAULT_PORT, DEFAULT_PREFIX, DEFAULT_PROTOCOL, + DEFAULT_QOS, DEFAULT_TRANSPORT, DEFAULT_WILL, DEFAULT_WS_PATH, @@ -92,12 +144,17 @@ from .const import ( SUPPORTED_PROTOCOLS, TRANSPORT_TCP, TRANSPORT_WEBSOCKETS, + Platform, ) +from .models import MqttAvailabilityData, MqttDeviceData, MqttSubentryData from .util import ( async_create_certificate_temp_files, get_file_path, + learn_more_url, valid_birth_will, valid_publish_topic, + valid_subscribe_topic, + valid_subscribe_topic_template, ) _LOGGER = logging.getLogger(__name__) @@ -105,6 +162,8 @@ _LOGGER = logging.getLogger(__name__) ADDON_SETUP_TIMEOUT = 5 ADDON_SETUP_TIMEOUT_ROUNDS = 5 +CONF_CLIENT_KEY_PASSWORD = "client_key_password" + MQTT_TIMEOUT = 5 ADVANCED_OPTIONS = "advanced_options" @@ -119,9 +178,8 @@ PORT_SELECTOR = vol.All( vol.Coerce(int), ) PASSWORD_SELECTOR = TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)) -QOS_SELECTOR = vol.All( - NumberSelector(NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0, max=2)), - vol.Coerce(int), +QOS_SELECTOR = NumberSelector( + NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0, max=2) ) KEEPALIVE_SELECTOR = vol.All( NumberSelector( @@ -165,12 +223,251 @@ BROKER_VERIFICATION_SELECTOR = SelectSelector( # mime configuration from https://pki-tutorial.readthedocs.io/en/latest/mime.html CA_CERT_UPLOAD_SELECTOR = FileSelector( - FileSelectorConfig(accept=".crt,application/x-x509-ca-cert") + FileSelectorConfig(accept=".pem,.crt,.cer,.der,application/x-x509-ca-cert") ) CERT_UPLOAD_SELECTOR = FileSelector( - FileSelectorConfig(accept=".crt,application/x-x509-user-cert") + FileSelectorConfig(accept=".pem,.crt,.cer,.der,application/x-x509-user-cert") ) -KEY_UPLOAD_SELECTOR = FileSelector(FileSelectorConfig(accept=".key,application/pkcs8")) +KEY_UPLOAD_SELECTOR = FileSelector( + FileSelectorConfig(accept=".pem,.key,.der,.pk8,application/pkcs8") +) + +# Subentry selectors +SUBENTRY_PLATFORMS = [Platform.NOTIFY, Platform.SENSOR, Platform.SWITCH] +SUBENTRY_PLATFORM_SELECTOR 
= SelectSelector( + SelectSelectorConfig( + options=[platform.value for platform in SUBENTRY_PLATFORMS], + mode=SelectSelectorMode.DROPDOWN, + translation_key=CONF_PLATFORM, + ) +) +TEMPLATE_SELECTOR = TemplateSelector(TemplateSelectorConfig()) + +SUBENTRY_AVAILABILITY_SCHEMA = vol.Schema( + { + vol.Optional(CONF_AVAILABILITY_TOPIC): TEXT_SELECTOR, + vol.Optional(CONF_AVAILABILITY_TEMPLATE): TEMPLATE_SELECTOR, + vol.Optional( + CONF_PAYLOAD_AVAILABLE, default=DEFAULT_PAYLOAD_AVAILABLE + ): TEXT_SELECTOR, + vol.Optional( + CONF_PAYLOAD_NOT_AVAILABLE, default=DEFAULT_PAYLOAD_NOT_AVAILABLE + ): TEXT_SELECTOR, + } +) + +# Sensor specific selectors +SENSOR_DEVICE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SensorDeviceClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key="device_class_sensor", + sort=True, + ) +) +SENSOR_STATE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SensorStateClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key=CONF_STATE_CLASS, + ) +) +OPTIONS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[], + custom_value=True, + multiple=True, + ) +) +SUGGESTED_DISPLAY_PRECISION_SELECTOR = NumberSelector( + NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0, max=9) +) +EXPIRE_AFTER_SELECTOR = NumberSelector( + NumberSelectorConfig(mode=NumberSelectorMode.BOX, min=0) +) + +# Switch specific selectors +SWITCH_DEVICE_CLASS_SELECTOR = SelectSelector( + SelectSelectorConfig( + options=[device_class.value for device_class in SwitchDeviceClass], + mode=SelectSelectorMode.DROPDOWN, + translation_key="device_class_switch", + ) +) + + +@callback +def validate_sensor_platform_config( + config: dict[str, Any], +) -> dict[str, str]: + """Validate the sensor options, state and device class config.""" + errors: dict[str, str] = {} + # Only allow `options` to be set for `enum` sensors + # to limit the possible sensor values + if config.get(CONF_OPTIONS) is not None: + if config.get(CONF_STATE_CLASS) or config.get(CONF_UNIT_OF_MEASUREMENT): + errors[CONF_OPTIONS] = "options_not_allowed_with_state_class_or_uom" + + if (device_class := config.get(CONF_DEVICE_CLASS)) != SensorDeviceClass.ENUM: + errors[CONF_DEVICE_CLASS] = "options_device_class_enum" + + if ( + (device_class := config.get(CONF_DEVICE_CLASS)) == SensorDeviceClass.ENUM + and errors is not None + and CONF_OPTIONS not in config + ): + errors[CONF_OPTIONS] = "options_with_enum_device_class" + + if ( + device_class in DEVICE_CLASS_UNITS + and (unit_of_measurement := config.get(CONF_UNIT_OF_MEASUREMENT)) is None + and errors is not None + ): + # Do not allow an empty unit of measurement in a subentry data flow + errors[CONF_UNIT_OF_MEASUREMENT] = "uom_required_for_device_class" + return errors + + if ( + device_class is not None + and device_class in DEVICE_CLASS_UNITS + and unit_of_measurement not in DEVICE_CLASS_UNITS[device_class] + ): + errors[CONF_UNIT_OF_MEASUREMENT] = "invalid_uom" + + return errors + + +@dataclass(frozen=True) +class PlatformField: + """Stores a platform config field schema, required flag and validator.""" + + selector: Selector[Any] | Callable[..., Selector[Any]] + required: bool + validator: Callable[..., Any] + error: str | None = None + default: str | int | vol.Undefined = vol.UNDEFINED + exclude_from_reconfig: bool = False + conditions: tuple[dict[str, Any], ...] 
| None = None + custom_filtering: bool = False + section: str | None = None + + +@callback +def unit_of_measurement_selector(user_data: dict[str, Any | None]) -> Selector: + """Return a context based unit of measurement selector.""" + if ( + user_data is None + or (device_class := user_data.get(CONF_DEVICE_CLASS)) is None + or device_class not in DEVICE_CLASS_UNITS + ): + return TEXT_SELECTOR + return SelectSelector( + SelectSelectorConfig( + options=[str(uom) for uom in DEVICE_CLASS_UNITS[device_class]], + sort=True, + custom_value=True, + ) + ) + + +COMMON_ENTITY_FIELDS = { + CONF_PLATFORM: PlatformField( + SUBENTRY_PLATFORM_SELECTOR, True, str, exclude_from_reconfig=True + ), + CONF_NAME: PlatformField(TEXT_SELECTOR, False, str, exclude_from_reconfig=True), + CONF_ENTITY_PICTURE: PlatformField(TEXT_SELECTOR, False, cv.url, "invalid_url"), +} + +PLATFORM_ENTITY_FIELDS = { + Platform.NOTIFY.value: {}, + Platform.SENSOR.value: { + CONF_DEVICE_CLASS: PlatformField(SENSOR_DEVICE_CLASS_SELECTOR, False, str), + CONF_STATE_CLASS: PlatformField(SENSOR_STATE_CLASS_SELECTOR, False, str), + CONF_UNIT_OF_MEASUREMENT: PlatformField( + unit_of_measurement_selector, False, str, custom_filtering=True + ), + CONF_SUGGESTED_DISPLAY_PRECISION: PlatformField( + SUGGESTED_DISPLAY_PRECISION_SELECTOR, + False, + cv.positive_int, + section="advanced_settings", + ), + CONF_OPTIONS: PlatformField( + OPTIONS_SELECTOR, + False, + cv.ensure_list, + conditions=({"device_class": "enum"},), + ), + }, + Platform.SWITCH.value: { + CONF_DEVICE_CLASS: PlatformField(SWITCH_DEVICE_CLASS_SELECTOR, False, str), + }, +} +PLATFORM_MQTT_FIELDS = { + Platform.NOTIFY.value: { + CONF_COMMAND_TOPIC: PlatformField( + TEXT_SELECTOR, True, valid_publish_topic, "invalid_publish_topic" + ), + CONF_COMMAND_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, False, cv.template, "invalid_template" + ), + CONF_RETAIN: PlatformField(BOOLEAN_SELECTOR, False, bool), + }, + Platform.SENSOR.value: { + CONF_STATE_TOPIC: PlatformField( + TEXT_SELECTOR, True, valid_subscribe_topic, "invalid_subscribe_topic" + ), + CONF_VALUE_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, False, cv.template, "invalid_template" + ), + CONF_LAST_RESET_VALUE_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, + False, + cv.template, + "invalid_template", + conditions=({CONF_STATE_CLASS: "total"},), + ), + CONF_EXPIRE_AFTER: PlatformField( + EXPIRE_AFTER_SELECTOR, False, cv.positive_int, section="advanced_settings" + ), + }, + Platform.SWITCH.value: { + CONF_COMMAND_TOPIC: PlatformField( + TEXT_SELECTOR, True, valid_publish_topic, "invalid_publish_topic" + ), + CONF_COMMAND_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, False, cv.template, "invalid_template" + ), + CONF_STATE_TOPIC: PlatformField( + TEXT_SELECTOR, False, valid_subscribe_topic, "invalid_subscribe_topic" + ), + CONF_VALUE_TEMPLATE: PlatformField( + TEMPLATE_SELECTOR, False, cv.template, "invalid_template" + ), + CONF_RETAIN: PlatformField(BOOLEAN_SELECTOR, False, bool), + CONF_OPTIMISTIC: PlatformField(BOOLEAN_SELECTOR, False, bool), + }, +} +ENTITY_CONFIG_VALIDATOR: dict[ + str, + Callable[[dict[str, Any]], dict[str, str]] | None, +] = { + Platform.NOTIFY.value: None, + Platform.SENSOR.value: validate_sensor_platform_config, + Platform.SWITCH.value: None, +} + +MQTT_DEVICE_PLATFORM_FIELDS = { + ATTR_NAME: PlatformField(TEXT_SELECTOR, False, str), + ATTR_SW_VERSION: PlatformField(TEXT_SELECTOR, False, str), + ATTR_HW_VERSION: PlatformField(TEXT_SELECTOR, False, str), + ATTR_MODEL: PlatformField(TEXT_SELECTOR, False, 
str), + ATTR_MODEL_ID: PlatformField(TEXT_SELECTOR, False, str), + ATTR_CONFIGURATION_URL: PlatformField(TEXT_SELECTOR, False, cv.url, "invalid_url"), + CONF_QOS: PlatformField( + QOS_SELECTOR, False, int, default=DEFAULT_QOS, section="mqtt_settings" + ), +} REAUTH_SCHEMA = vol.Schema( { @@ -204,6 +501,170 @@ def update_password_from_user_input( return substituted_used_data +@callback +def validate_field( + field: str, + validator: Callable[..., Any], + user_input: dict[str, Any] | None, + errors: dict[str, str], + error: str, +) -> None: + """Validate a single field.""" + if user_input is None or field not in user_input: + return + try: + validator(user_input[field]) + except (ValueError, vol.Invalid): + errors[field] = error + + +@callback +def _check_conditions( + platform_field: PlatformField, component_data: dict[str, Any] | None = None +) -> bool: + """Only include field if one of conditions match, or no conditions are set.""" + if platform_field.conditions is None or component_data is None: + return True + return any( + all(component_data.get(key) == value for key, value in condition.items()) + for condition in platform_field.conditions + ) + + +@callback +def calculate_merged_config( + merged_user_input: dict[str, Any], + data_schema_fields: dict[str, PlatformField], + component_data: dict[str, Any], +) -> dict[str, Any]: + """Calculate merged config.""" + base_schema_fields = { + key + for key, platform_field in data_schema_fields.items() + if _check_conditions(platform_field, component_data) + } - set(merged_user_input) + return { + key: value + for key, value in component_data.items() + if key not in base_schema_fields + } | merged_user_input + + +@callback +def validate_user_input( + user_input: dict[str, Any], + data_schema_fields: dict[str, PlatformField], + *, + component_data: dict[str, Any] | None = None, + config_validator: Callable[[dict[str, Any]], dict[str, str]] | None = None, +) -> tuple[dict[str, Any], dict[str, str]]: + """Validate user input.""" + errors: dict[str, str] = {} + # Merge sections + merged_user_input: dict[str, Any] = {} + for key, value in user_input.items(): + if isinstance(value, dict): + merged_user_input.update(value) + else: + merged_user_input[key] = value + + for field, value in merged_user_input.items(): + validator = data_schema_fields[field].validator + try: + validator(value) + except (ValueError, vol.Invalid): + errors[field] = data_schema_fields[field].error or "invalid_input" + + if config_validator is not None: + if TYPE_CHECKING: + assert component_data is not None + + errors |= config_validator( + calculate_merged_config( + merged_user_input, data_schema_fields, component_data + ), + ) + + return merged_user_input, errors + + +@callback +def data_schema_from_fields( + data_schema_fields: dict[str, PlatformField], + reconfig: bool, + component_data: dict[str, Any] | None = None, + user_input: dict[str, Any] | None = None, + device_data: MqttDeviceData | None = None, +) -> vol.Schema: + """Generate custom data schema from platform fields or device data.""" + if device_data is not None: + component_data_with_user_input: dict[str, Any] | None = dict(device_data) + if TYPE_CHECKING: + assert component_data_with_user_input is not None + component_data_with_user_input.update( + component_data_with_user_input.pop("mqtt_settings", {}) + ) + else: + component_data_with_user_input = deepcopy(component_data) + if component_data_with_user_input is not None and user_input is not None: + component_data_with_user_input |= user_input + + sections: 
dict[str | None, None] = { + field_details.section: None for field_details in data_schema_fields.values() + } + data_schema: dict[Any, Any] = {} + all_data_element_options: set[Any] = set() + no_reconfig_options: set[Any] = set() + for schema_section in sections: + data_schema_element = { + vol.Required(field_name, default=field_details.default) + if field_details.required + else vol.Optional( + field_name, default=field_details.default + ): field_details.selector(component_data_with_user_input) # type: ignore[operator] + if field_details.custom_filtering + else field_details.selector + for field_name, field_details in data_schema_fields.items() + if field_details.section == schema_section + and (not field_details.exclude_from_reconfig or not reconfig) + and _check_conditions(field_details, component_data_with_user_input) + } + data_element_options = set(data_schema_element) + all_data_element_options |= data_element_options + no_reconfig_options |= { + field_name + for field_name, field_details in data_schema_fields.items() + if field_details.section == schema_section + and field_details.exclude_from_reconfig + } + if schema_section is None: + data_schema.update(data_schema_element) + continue + collapsed = ( + not any( + (default := data_schema_fields[str(option)].default) is vol.UNDEFINED + or component_data_with_user_input[str(option)] != default + for option in data_element_options + if option in component_data_with_user_input + ) + if component_data_with_user_input is not None + else True + ) + data_schema[vol.Optional(schema_section)] = section( + vol.Schema(data_schema_element), SectionConfig({"collapsed": collapsed}) + ) + + # Reset all fields from the component_data not in the schema + if component_data: + filtered_fields = ( + set(data_schema_fields) - all_data_element_options - no_reconfig_options + ) + for field in filtered_fields: + if field in component_data: + del component_data[field] + return vol.Schema(data_schema) + + class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" @@ -219,6 +680,14 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self.install_task: asyncio.Task | None = None self.start_task: asyncio.Task | None = None + @classmethod + @callback + def async_get_supported_subentry_types( + cls, config_entry: ConfigEntry + ) -> dict[str, type[ConfigSubentryFlow]]: + """Return subentries supported by this handler.""" + return {CONF_DEVICE: MQTTSubentryFlowHandler} + @staticmethod @callback def async_get_options_flow( @@ -710,17 +1179,514 @@ class MQTTOptionsFlowHandler(OptionsFlow): ) -async def _get_uploaded_file(hass: HomeAssistant, id: str) -> str: - """Get file content from uploaded file.""" +class MQTTSubentryFlowHandler(ConfigSubentryFlow): + """Handle MQTT subentry flow.""" - def _proces_uploaded_file() -> str: + _subentry_data: MqttSubentryData + _component_id: str | None = None + + @callback + def update_component_fields( + self, + data_schema_fields: dict[str, PlatformField], + merged_user_input: dict[str, Any], + ) -> None: + """Update the component fields.""" + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + # Remove the fields from the component data + # if they are not in the schema and not in the user input + config = calculate_merged_config( + merged_user_input, data_schema_fields, component_data + ) + for field in ( + field + for field, platform_field in data_schema_fields.items() + if field in (set(component_data) - set(config)) + and not
platform_field.exclude_from_reconfig + ): + component_data.pop(field) + component_data.update(merged_user_input) + + @callback + def generate_names(self) -> tuple[str, str]: + """Generate the device and full entity name.""" + if TYPE_CHECKING: + assert self._component_id is not None + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + if entity_name := self._subentry_data["components"][self._component_id].get( + CONF_NAME + ): + full_entity_name: str = f"{device_name} {entity_name}" + else: + full_entity_name = device_name + return device_name, full_entity_name + + @callback + def get_suggested_values_from_component( + self, data_schema: vol.Schema + ) -> dict[str, Any]: + """Get suggestions from component data based on the data schema.""" + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + return { + field_key: self.get_suggested_values_from_component(value.schema) + if isinstance(value, section) + else component_data.get(field_key) + for field_key, value in data_schema.schema.items() + } + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Add a subentry.""" + self._subentry_data = MqttSubentryData(device=MqttDeviceData(), components={}) + return await self.async_step_device() + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Reconfigure a subentry.""" + reconfigure_subentry = self._get_reconfigure_subentry() + self._subentry_data = cast( + MqttSubentryData, deepcopy(dict(reconfigure_subentry.data)) + ) + return await self.async_step_summary_menu() + + async def async_step_device( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Add a new MQTT device.""" + errors: dict[str, Any] = {} + device_data = self._subentry_data[CONF_DEVICE] + data_schema = data_schema_from_fields( + MQTT_DEVICE_PLATFORM_FIELDS, + device_data=device_data, + reconfig=True, + ) + if user_input is not None: + _, errors = validate_user_input(user_input, MQTT_DEVICE_PLATFORM_FIELDS) + if not errors: + self._subentry_data[CONF_DEVICE] = cast(MqttDeviceData, user_input) + if self.source == SOURCE_RECONFIGURE: + return await self.async_step_summary_menu() + return await self.async_step_entity() + data_schema = self.add_suggested_values_to_schema( + data_schema, device_data if user_input is None else user_input + ) + return self.async_show_form( + step_id=CONF_DEVICE, + data_schema=data_schema, + errors=errors, + last_step=False, + ) + + async def async_step_entity( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Add or edit an mqtt entity.""" + errors: dict[str, str] = {} + data_schema_fields = COMMON_ENTITY_FIELDS + entity_name_label: str = "" + platform_label: str = "" + component_data: dict[str, Any] | None = None + if reconfig := (self._component_id is not None): + component_data = self._subentry_data["components"][self._component_id] + name: str | None = component_data.get(CONF_NAME) + platform_label = f"{self._subentry_data['components'][self._component_id][CONF_PLATFORM]} " + entity_name_label = f" ({name})" if name is not None else "" + data_schema = data_schema_from_fields(data_schema_fields, reconfig=reconfig) + if user_input is not None: + merged_user_input, errors = validate_user_input( + user_input, data_schema_fields, component_data=component_data + ) + if not errors: + if self._component_id is None: + self._component_id = 
uuid4().hex + self._subentry_data["components"].setdefault(self._component_id, {}) + self.update_component_fields(data_schema_fields, merged_user_input) + return await self.async_step_entity_platform_config() + data_schema = self.add_suggested_values_to_schema(data_schema, user_input) + elif self.source == SOURCE_RECONFIGURE and self._component_id is not None: + data_schema = self.add_suggested_values_to_schema( + data_schema, + self.get_suggested_values_from_component(data_schema), + ) + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + return self.async_show_form( + step_id="entity", + data_schema=data_schema, + description_placeholders={ + "mqtt_device": device_name, + "entity_name_label": entity_name_label, + "platform_label": platform_label, + }, + errors=errors, + last_step=False, + ) + + def _show_update_or_delete_form(self, step_id: str) -> SubentryFlowResult: + """Help selecting an entity to update or delete.""" + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + entities = [ + SelectOptionDict( + value=key, + label=f"{device_name} {component_data.get(CONF_NAME, '-')}" + f" ({component_data[CONF_PLATFORM]})", + ) + for key, component_data in self._subentry_data["components"].items() + ] + data_schema = vol.Schema( + { + vol.Required("component"): SelectSelector( + SelectSelectorConfig( + options=entities, + mode=SelectSelectorMode.LIST, + ) + ) + } + ) + return self.async_show_form( + step_id=step_id, data_schema=data_schema, last_step=False + ) + + async def async_step_update_entity( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Select the entity to update.""" + if user_input: + self._component_id = user_input["component"] + return await self.async_step_entity() + if len(self._subentry_data["components"]) == 1: + # Return first key + self._component_id = next(iter(self._subentry_data["components"])) + return await self.async_step_entity() + return self._show_update_or_delete_form("update_entity") + + async def async_step_delete_entity( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Select the entity to delete.""" + if user_input: + del self._subentry_data["components"][user_input["component"]] + return await self.async_step_summary_menu() + return self._show_update_or_delete_form("delete_entity") + + async def async_step_entity_platform_config( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Configure platform entity details.""" + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + platform = component_data[CONF_PLATFORM] + data_schema_fields = PLATFORM_ENTITY_FIELDS[platform] + errors: dict[str, str] = {} + + data_schema = data_schema_from_fields( + data_schema_fields, + reconfig=bool( + {field for field in data_schema_fields if field in component_data} + ), + component_data=component_data, + user_input=user_input, + ) + if not data_schema.schema: + return await self.async_step_mqtt_platform_config() + if user_input is not None: + # Test entity fields against the validator + merged_user_input, errors = validate_user_input( + user_input, + data_schema_fields, + component_data=component_data, + config_validator=ENTITY_CONFIG_VALIDATOR[platform], + ) + if not errors: + self.update_component_fields(data_schema_fields, merged_user_input) + return await self.async_step_mqtt_platform_config() + + data_schema = self.add_suggested_values_to_schema(data_schema, user_input) + else: + 
data_schema = self.add_suggested_values_to_schema( + data_schema, + self.get_suggested_values_from_component(data_schema), + ) + + device_name, full_entity_name = self.generate_names() + return self.async_show_form( + step_id="entity_platform_config", + data_schema=data_schema, + description_placeholders={ + "mqtt_device": device_name, + CONF_PLATFORM: platform, + "entity": full_entity_name, + "url": learn_more_url(platform), + } + | (user_input or {}), + errors=errors, + last_step=False, + ) + + async def async_step_mqtt_platform_config( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Configure entity platform MQTT details.""" + errors: dict[str, str] = {} + if TYPE_CHECKING: + assert self._component_id is not None + component_data = self._subentry_data["components"][self._component_id] + platform = component_data[CONF_PLATFORM] + data_schema_fields = PLATFORM_MQTT_FIELDS[platform] + data_schema = data_schema_from_fields( + data_schema_fields, + reconfig=bool( + {field for field in data_schema_fields if field in component_data} + ), + component_data=component_data, + ) + if user_input is not None: + # Test entity fields against the validator + merged_user_input, errors = validate_user_input( + user_input, + data_schema_fields, + component_data=component_data, + config_validator=ENTITY_CONFIG_VALIDATOR[platform], + ) + if not errors: + self.update_component_fields(data_schema_fields, merged_user_input) + self._component_id = None + if self.source == SOURCE_RECONFIGURE: + return await self.async_step_summary_menu() + return self._async_create_subentry() + + data_schema = self.add_suggested_values_to_schema(data_schema, user_input) + else: + data_schema = self.add_suggested_values_to_schema( + data_schema, + self.get_suggested_values_from_component(data_schema), + ) + device_name, full_entity_name = self.generate_names() + return self.async_show_form( + step_id="mqtt_platform_config", + data_schema=data_schema, + description_placeholders={ + "mqtt_device": device_name, + CONF_PLATFORM: platform, + "entity": full_entity_name, + "url": learn_more_url(platform), + }, + errors=errors, + last_step=False, + ) + + @callback + def _async_create_subentry( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Create a subentry for a new MQTT device.""" + device_name = self._subentry_data[CONF_DEVICE][CONF_NAME] + component_data: dict[str, Any] = next( + iter(self._subentry_data["components"].values()) + ) + platform = component_data[CONF_PLATFORM] + entity_name: str | None + if entity_name := component_data.get(CONF_NAME): + full_entity_name: str = f"{device_name} {entity_name}" + else: + full_entity_name = device_name + + return self.async_create_entry( + data=self._subentry_data, + title=self._subentry_data[CONF_DEVICE][CONF_NAME], + description_placeholders={ + "entity": full_entity_name, + CONF_PLATFORM: platform, + }, + ) + + async def async_step_availability( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Configure availability options.""" + errors: dict[str, str] = {} + validate_field( + "availability_topic", + valid_subscribe_topic, + user_input, + errors, + "invalid_subscribe_topic", + ) + validate_field( + "availability_template", + valid_subscribe_topic_template, + user_input, + errors, + "invalid_template", + ) + if not errors and user_input is not None: + self._subentry_data.setdefault("availability", MqttAvailabilityData()) + self._subentry_data["availability"] = cast(MqttAvailabilityData, 
user_input) + return await self.async_step_summary_menu() + + data_schema = SUBENTRY_AVAILABILITY_SCHEMA + data_schema = self.add_suggested_values_to_schema( + data_schema, + dict(self._subentry_data.setdefault("availability", {})) + if self.source == SOURCE_RECONFIGURE + else user_input, + ) + return self.async_show_form( + step_id="availability", + data_schema=data_schema, + errors=errors, + last_step=False, + ) + + async def async_step_summary_menu( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Show summary menu and decide to add more entities or to finish the flow.""" + self._component_id = None + mqtt_device = self._subentry_data[CONF_DEVICE][CONF_NAME] + mqtt_items = ", ".join( + f"{mqtt_device} {component_data.get(CONF_NAME, '-')} ({component_data[CONF_PLATFORM]})" + for component_data in self._subentry_data["components"].values() + ) + menu_options = [ + "entity", + "update_entity", + ] + if len(self._subentry_data["components"]) > 1: + menu_options.append("delete_entity") + menu_options.extend(["device", "availability"]) + if self._subentry_data != self._get_reconfigure_subentry().data: + menu_options.append("save_changes") + return self.async_show_menu( + step_id="summary_menu", + menu_options=menu_options, + description_placeholders={ + "mqtt_device": mqtt_device, + "mqtt_items": mqtt_items, + }, + ) + + async def async_step_save_changes( + self, user_input: dict[str, Any] | None = None + ) -> SubentryFlowResult: + """Save the changes made to the subentry.""" + entry = self._get_entry() + subentry = self._get_reconfigure_subentry() + entity_registry = er.async_get(self.hass) + + # When a component is removed from the MQTT device, + # And we save the changes to the subentry, + # we need to clean up stale entity registry entries. + # The component id is used as a part of the unique id of the entity. 
+ for unique_id, platform in [ + ( + f"{subentry.subentry_id}_{component_id}", + subentry.data["components"][component_id][CONF_PLATFORM], + ) + for component_id in subentry.data["components"] + if component_id not in self._subentry_data["components"] + ]: + if entity_id := entity_registry.async_get_entity_id( + platform, DOMAIN, unique_id + ): + entity_registry.async_remove(entity_id) + + return self.async_update_and_abort( + entry, + subentry, + data=self._subentry_data, + title=self._subentry_data[CONF_DEVICE][CONF_NAME], + ) + + +@callback +def async_is_pem_data(data: bytes) -> bool: + """Return True if data is in PEM format.""" + return ( + b"-----BEGIN CERTIFICATE-----" in data + or b"-----BEGIN PRIVATE KEY-----" in data + or b"-----BEGIN RSA PRIVATE KEY-----" in data + or b"-----BEGIN ENCRYPTED PRIVATE KEY-----" in data + ) + + +class PEMType(IntEnum): + """Type of PEM data.""" + + CERTIFICATE = 1 + PRIVATE_KEY = 2 + + +@callback +def async_convert_to_pem( + data: bytes, pem_type: PEMType, password: str | None = None +) -> str | None: + """Convert data to PEM format.""" + try: + if async_is_pem_data(data): + if not password: + # Assume unencrypted PEM encoded private key + return data.decode(DEFAULT_ENCODING) + # Return decrypted PEM encoded private key + return ( + load_pem_private_key(data, password=password.encode(DEFAULT_ENCODING)) + .private_bytes( + encoding=Encoding.PEM, + format=PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=NoEncryption(), + ) + .decode(DEFAULT_ENCODING) + ) + # Convert from DER encoding to PEM + if pem_type == PEMType.CERTIFICATE: + return ( + load_der_x509_certificate(data) + .public_bytes( + encoding=Encoding.PEM, + ) + .decode(DEFAULT_ENCODING) + ) + # Assume DER encoded private key + pem_key_data: bytes = load_der_private_key( + data, password.encode(DEFAULT_ENCODING) if password else None + ).private_bytes( + encoding=Encoding.PEM, + format=PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=NoEncryption(), + ) + return pem_key_data.decode("utf-8") + except (TypeError, ValueError, SSLError): + _LOGGER.exception("Error converting %s file data to PEM format", pem_type.name) + return None + + +async def _get_uploaded_file(hass: HomeAssistant, id: str) -> bytes: + """Get file content from uploaded certificate or key file.""" + + def _proces_uploaded_file() -> bytes: with process_uploaded_file(hass, id) as file_path: - return file_path.read_text(encoding=DEFAULT_ENCODING) + return file_path.read_bytes() return await hass.async_add_executor_job(_proces_uploaded_file) -async def async_get_broker_settings( +def _validate_pki_file( + file_id: str | None, pem_data: str | None, errors: dict[str, str], error: str +) -> bool: + """Return False if uploaded file could not be converted to PEM format.""" + if file_id and not pem_data: + errors["base"] = error + return False + return True + + +async def async_get_broker_settings( # noqa: C901 flow: ConfigFlow | OptionsFlow, fields: OrderedDict[Any, Any], entry_config: MappingProxyType[str, Any] | None, @@ -768,6 +1734,10 @@ async def async_get_broker_settings( validated_user_input.update(user_input) client_certificate_id: str | None = user_input.get(CONF_CLIENT_CERT) client_key_id: str | None = user_input.get(CONF_CLIENT_KEY) + # We do not store the private key password in the entry data + client_key_password: str | None = validated_user_input.pop( + CONF_CLIENT_KEY_PASSWORD, None + ) if (client_certificate_id and not client_key_id) or ( not client_certificate_id and client_key_id ): @@ -775,7 +1745,14 
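The certificate handling added above normalizes every upload to PEM: `async_convert_to_pem` passes PEM data through (decrypting it when a password is given) and re-encodes DER data, so only unencrypted PEM ever reaches the entry data. A minimal standalone sketch of the same conversion using only the `cryptography` package is shown below; the helper names `der_cert_to_pem` and `der_key_to_pem` are illustrative assumptions, not part of the integration.

```python
from cryptography.hazmat.primitives.serialization import (
    Encoding,
    NoEncryption,
    PrivateFormat,
    load_der_private_key,
)
from cryptography.x509 import load_der_x509_certificate


def der_cert_to_pem(data: bytes) -> str:
    """Re-encode a DER certificate as PEM text (illustrative helper)."""
    return load_der_x509_certificate(data).public_bytes(Encoding.PEM).decode("utf-8")


def der_key_to_pem(data: bytes, password: bytes | None = None) -> str:
    """Decrypt (if a password is given) and re-encode a DER private key as unencrypted PEM."""
    key = load_der_private_key(data, password=password)
    return key.private_bytes(
        encoding=Encoding.PEM,
        format=PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=NoEncryption(),
    ).decode("utf-8")
```

Because the key is stored as unencrypted PEM, the private key password entered in the flow never needs to be persisted, which is why `CONF_CLIENT_KEY_PASSWORD` is popped from `validated_user_input` above.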
@@ async def async_get_broker_settings( return False certificate_id: str | None = user_input.get(CONF_CERTIFICATE) if certificate_id: - certificate = await _get_uploaded_file(hass, certificate_id) + certificate_data_raw = await _get_uploaded_file(hass, certificate_id) + certificate = async_convert_to_pem( + certificate_data_raw, PEMType.CERTIFICATE + ) + if not _validate_pki_file( + certificate_id, certificate, errors, "bad_certificate" + ): + return False # Return to form for file upload CA cert or client cert and key if ( @@ -797,9 +1774,26 @@ async def async_get_broker_settings( return False if client_certificate_id: - client_certificate = await _get_uploaded_file(hass, client_certificate_id) + client_certificate_data = await _get_uploaded_file( + hass, client_certificate_id + ) + client_certificate = async_convert_to_pem( + client_certificate_data, PEMType.CERTIFICATE + ) + if not _validate_pki_file( + client_certificate_id, client_certificate, errors, "bad_client_cert" + ): + return False + if client_key_id: - client_key = await _get_uploaded_file(hass, client_key_id) + client_key_data = await _get_uploaded_file(hass, client_key_id) + client_key = async_convert_to_pem( + client_key_data, PEMType.PRIVATE_KEY, password=client_key_password + ) + if not _validate_pki_file( + client_key_id, client_key, errors, "client_key_error" + ): + return False certificate_data: dict[str, Any] = {} if certificate: @@ -956,6 +1950,14 @@ async def async_get_broker_settings( description={"suggested_value": user_input_basic.get(CONF_CLIENT_KEY)}, ) ] = KEY_UPLOAD_SELECTOR + fields[ + vol.Optional( + CONF_CLIENT_KEY_PASSWORD, + description={ + "suggested_value": user_input_basic.get(CONF_CLIENT_KEY_PASSWORD) + }, + ) + ] = PASSWORD_SELECTOR verification_mode = current_config.get(SET_CA_CERT) or ( "off" if current_ca_certificate is None @@ -1060,7 +2062,7 @@ def check_certicate_chain() -> str | None: with open(private_key, "rb") as client_key_file: load_pem_private_key(client_key_file.read(), password=None) except (TypeError, ValueError): - return "bad_client_key" + return "client_key_error" # Check the certificate chain context = SSLContext(PROTOCOL_TLS_CLIENT) if client_certificate and private_key: diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 007b3b7e576..b2fcd492435 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -56,20 +56,55 @@ CONF_SUPPORTED_FEATURES = "supported_features" CONF_ACTION_TEMPLATE = "action_template" CONF_ACTION_TOPIC = "action_topic" +CONF_BLUE_TEMPLATE = "blue_template" +CONF_BRIGHTNESS_COMMAND_TEMPLATE = "brightness_command_template" +CONF_BRIGHTNESS_COMMAND_TOPIC = "brightness_command_topic" +CONF_BRIGHTNESS_SCALE = "brightness_scale" +CONF_BRIGHTNESS_STATE_TOPIC = "brightness_state_topic" +CONF_BRIGHTNESS_TEMPLATE = "brightness_template" +CONF_BRIGHTNESS_VALUE_TEMPLATE = "brightness_value_template" +CONF_COLOR_MODE = "color_mode" +CONF_COLOR_MODE_STATE_TOPIC = "color_mode_state_topic" +CONF_COLOR_MODE_VALUE_TEMPLATE = "color_mode_value_template" +CONF_COLOR_TEMP_COMMAND_TEMPLATE = "color_temp_command_template" +CONF_COLOR_TEMP_COMMAND_TOPIC = "color_temp_command_topic" CONF_COLOR_TEMP_KELVIN = "color_temp_kelvin" +CONF_COLOR_TEMP_TEMPLATE = "color_temp_template" +CONF_COLOR_TEMP_STATE_TOPIC = "color_temp_state_topic" +CONF_COLOR_TEMP_VALUE_TEMPLATE = "color_temp_value_template" +CONF_COMMAND_OFF_TEMPLATE = "command_off_template" +CONF_COMMAND_ON_TEMPLATE = "command_on_template" 
CONF_CURRENT_HUMIDITY_TEMPLATE = "current_humidity_template" CONF_CURRENT_HUMIDITY_TOPIC = "current_humidity_topic" CONF_CURRENT_TEMP_TEMPLATE = "current_temperature_template" CONF_CURRENT_TEMP_TOPIC = "current_temperature_topic" CONF_ENABLED_BY_DEFAULT = "enabled_by_default" +CONF_EFFECT_COMMAND_TEMPLATE = "effect_command_template" +CONF_EFFECT_COMMAND_TOPIC = "effect_command_topic" +CONF_EFFECT_LIST = "effect_list" +CONF_EFFECT_STATE_TOPIC = "effect_state_topic" +CONF_EFFECT_TEMPLATE = "effect_template" +CONF_EFFECT_VALUE_TEMPLATE = "effect_value_template" CONF_ENTITY_PICTURE = "entity_picture" +CONF_EXPIRE_AFTER = "expire_after" +CONF_FLASH_TIME_LONG = "flash_time_long" +CONF_FLASH_TIME_SHORT = "flash_time_short" +CONF_GREEN_TEMPLATE = "green_template" +CONF_HS_COMMAND_TEMPLATE = "hs_command_template" +CONF_HS_COMMAND_TOPIC = "hs_command_topic" +CONF_HS_STATE_TOPIC = "hs_state_topic" +CONF_HS_VALUE_TEMPLATE = "hs_value_template" +CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template" CONF_MAX_KELVIN = "max_kelvin" +CONF_MAX_MIREDS = "max_mireds" CONF_MIN_KELVIN = "min_kelvin" +CONF_MIN_MIREDS = "min_mireds" CONF_MODE_COMMAND_TEMPLATE = "mode_command_template" CONF_MODE_COMMAND_TOPIC = "mode_command_topic" CONF_MODE_LIST = "modes" CONF_MODE_STATE_TEMPLATE = "mode_state_template" CONF_MODE_STATE_TOPIC = "mode_state_topic" +CONF_ON_COMMAND_TYPE = "on_command_type" CONF_PAYLOAD_CLOSE = "payload_close" CONF_PAYLOAD_OPEN = "payload_open" CONF_PAYLOAD_STOP = "payload_stop" @@ -78,10 +113,25 @@ CONF_POSITION_OPEN = "position_open" CONF_POWER_COMMAND_TOPIC = "power_command_topic" CONF_POWER_COMMAND_TEMPLATE = "power_command_template" CONF_PRECISION = "precision" +CONF_RED_TEMPLATE = "red_template" +CONF_RGB_COMMAND_TEMPLATE = "rgb_command_template" +CONF_RGB_COMMAND_TOPIC = "rgb_command_topic" +CONF_RGB_STATE_TOPIC = "rgb_state_topic" +CONF_RGB_VALUE_TEMPLATE = "rgb_value_template" +CONF_RGBW_COMMAND_TEMPLATE = "rgbw_command_template" +CONF_RGBW_COMMAND_TOPIC = "rgbw_command_topic" +CONF_RGBW_STATE_TOPIC = "rgbw_state_topic" +CONF_RGBW_VALUE_TEMPLATE = "rgbw_value_template" +CONF_RGBWW_COMMAND_TEMPLATE = "rgbww_command_template" +CONF_RGBWW_COMMAND_TOPIC = "rgbww_command_topic" +CONF_RGBWW_STATE_TOPIC = "rgbww_state_topic" +CONF_RGBWW_VALUE_TEMPLATE = "rgbww_value_template" CONF_STATE_CLOSED = "state_closed" CONF_STATE_CLOSING = "state_closing" CONF_STATE_OPEN = "state_open" CONF_STATE_OPENING = "state_opening" +CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision" +CONF_SUPPORTED_COLOR_MODES = "supported_color_modes" CONF_TEMP_COMMAND_TEMPLATE = "temperature_command_template" CONF_TEMP_COMMAND_TOPIC = "temperature_command_topic" CONF_TEMP_STATE_TEMPLATE = "temperature_state_template" @@ -89,7 +139,14 @@ CONF_TEMP_STATE_TOPIC = "temperature_state_topic" CONF_TEMP_INITIAL = "initial" CONF_TEMP_MAX = "max_temp" CONF_TEMP_MIN = "min_temp" +CONF_XY_COMMAND_TEMPLATE = "xy_command_template" +CONF_XY_COMMAND_TOPIC = "xy_command_topic" +CONF_XY_STATE_TOPIC = "xy_state_topic" +CONF_XY_VALUE_TEMPLATE = "xy_value_template" +CONF_WHITE_COMMAND_TOPIC = "white_command_topic" +CONF_WHITE_SCALE = "white_scale" +# Config flow constants CONF_CERTIFICATE = "certificate" CONF_CLIENT_KEY = "client_key" CONF_CLIENT_CERT = "client_cert" @@ -110,15 +167,23 @@ CONF_CONFIGURATION_URL = "configuration_url" CONF_OBJECT_ID = "object_id" CONF_SUPPORT_URL = "support_url" +DEFAULT_BRIGHTNESS = False +DEFAULT_BRIGHTNESS_SCALE = 255 DEFAULT_PREFIX = "homeassistant" DEFAULT_BIRTH_WILL_TOPIC = DEFAULT_PREFIX + 
"/status" DEFAULT_DISCOVERY = True +DEFAULT_EFFECT = False DEFAULT_ENCODING = "utf-8" +DEFAULT_FLASH_TIME_LONG = 10 +DEFAULT_FLASH_TIME_SHORT = 2 DEFAULT_OPTIMISTIC = False +DEFAULT_ON_COMMAND_TYPE = "last" DEFAULT_QOS = 0 DEFAULT_PAYLOAD_AVAILABLE = "online" DEFAULT_PAYLOAD_CLOSE = "CLOSE" DEFAULT_PAYLOAD_NOT_AVAILABLE = "offline" +DEFAULT_PAYLOAD_OFF = "OFF" +DEFAULT_PAYLOAD_ON = "ON" DEFAULT_PAYLOAD_OPEN = "OPEN" DEFAULT_PORT = 1883 DEFAULT_RETAIN = False @@ -127,6 +192,7 @@ DEFAULT_WS_PATH = "/" DEFAULT_POSITION_CLOSED = 0 DEFAULT_POSITION_OPEN = 100 DEFAULT_RETAIN = False +DEFAULT_WHITE_SCALE = 255 PROTOCOL_31 = "3.1" PROTOCOL_311 = "3.1.1" diff --git a/homeassistant/components/mqtt/cover.py b/homeassistant/components/mqtt/cover.py index c93fdd9c760..428c4d0e205 100644 --- a/homeassistant/components/mqtt/cover.py +++ b/homeassistant/components/mqtt/cover.py @@ -81,6 +81,7 @@ CONF_TILT_STATUS_TOPIC = "tilt_status_topic" CONF_TILT_STATUS_TEMPLATE = "tilt_status_template" CONF_STATE_STOPPED = "state_stopped" +CONF_PAYLOAD_STOP_TILT = "payload_stop_tilt" CONF_TILT_CLOSED_POSITION = "tilt_closed_value" CONF_TILT_MAX = "tilt_max" CONF_TILT_MIN = "tilt_min" @@ -203,6 +204,9 @@ _PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend( vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_GET_POSITION_TEMPLATE): cv.template, vol.Optional(CONF_TILT_COMMAND_TEMPLATE): cv.template, + vol.Optional(CONF_PAYLOAD_STOP_TILT, default=DEFAULT_PAYLOAD_STOP): vol.Any( + cv.string, None + ), } ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) @@ -592,6 +596,12 @@ class MqttCover(MqttEntity, CoverEntity): self._attr_current_cover_tilt_position = tilt_percentage self.async_write_ha_state() + async def async_stop_cover_tilt(self, **kwargs: Any) -> None: + """Stop moving the cover tilt.""" + await self.async_publish_with_config( + self._config[CONF_TILT_COMMAND_TOPIC], self._config[CONF_PAYLOAD_STOP_TILT] + ) + async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" position_percentage = kwargs[ATTR_POSITION] diff --git a/homeassistant/components/mqtt/entity.py b/homeassistant/components/mqtt/entity.py index fb047cc8d5e..8446f9041c9 100644 --- a/homeassistant/components/mqtt/entity.py +++ b/homeassistant/components/mqtt/entity.py @@ -43,7 +43,7 @@ from homeassistant.helpers.dispatcher import ( async_dispatcher_send, ) from homeassistant.helpers.entity import Entity, async_generate_entity_id -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.event import ( async_track_device_registry_updated_event, async_track_entity_registry_updated_event, @@ -111,6 +111,7 @@ from .discovery import ( from .models import ( DATA_MQTT, MessageCallbackType, + MqttSubentryData, MqttValueTemplate, MqttValueTemplateException, PublishPayloadType, @@ -122,7 +123,7 @@ from .subscription import ( async_subscribe_topics_internal, async_unsubscribe_topics, ) -from .util import mqtt_config_entry_enabled +from .util import learn_more_url, mqtt_config_entry_enabled _LOGGER = logging.getLogger(__name__) @@ -238,7 +239,7 @@ def async_setup_entity_entry_helper( entry: ConfigEntry, entity_class: type[MqttEntity] | None, domain: str, - async_add_entities: AddEntitiesCallback, + async_add_entities: AddConfigEntryEntitiesCallback, discovery_schema: VolSchemaType, platform_schema_modern: VolSchemaType, schema_class_mapping: dict[str, type[MqttEntity]] | None = None, @@ 
-282,11 +283,10 @@ def async_setup_entity_entry_helper( @callback def _async_setup_entities() -> None: - """Set up MQTT items from configuration.yaml.""" + """Set up MQTT items from subentries and configuration.yaml.""" nonlocal entity_class mqtt_data = hass.data[DATA_MQTT] - if not (config_yaml := mqtt_data.config): - return + config_yaml = mqtt_data.config yaml_configs: list[ConfigType] = [ config for config_item in config_yaml @@ -294,6 +294,45 @@ def async_setup_entity_entry_helper( for config in configs if config_domain == domain ] + # process subentry entity setup + for config_subentry_id, subentry in entry.subentries.items(): + subentry_data = cast(MqttSubentryData, subentry.data) + availability_config = subentry_data.get("availability", {}) + subentry_entities: list[Entity] = [] + device_config = subentry_data["device"].copy() + device_mqtt_options = device_config.pop("mqtt_settings", {}) + device_config["identifiers"] = config_subentry_id + for component_id, component_data in subentry_data["components"].items(): + if component_data["platform"] != domain: + continue + component_config: dict[str, Any] = component_data.copy() + component_config[CONF_UNIQUE_ID] = ( + f"{config_subentry_id}_{component_id}" + ) + component_config[CONF_DEVICE] = device_config + component_config.pop("platform") + component_config.update(availability_config) + component_config.update(device_mqtt_options) + + try: + config = platform_schema_modern(component_config) + if schema_class_mapping is not None: + entity_class = schema_class_mapping[config[CONF_SCHEMA]] + if TYPE_CHECKING: + assert entity_class is not None + subentry_entities.append(entity_class(hass, config, entry, None)) + except vol.Invalid as exc: + _LOGGER.error( + "Schema violation occurred when trying to set up " + "entity from subentry %s %s %s: %s", + config_subentry_id, + subentry.title, + subentry.data, + exc, + ) + + async_add_entities(subentry_entities, config_subentry_id=config_subentry_id) + entities: list[Entity] = [] for yaml_config in yaml_configs: try: @@ -309,9 +348,6 @@ def async_setup_entity_entry_helper( line = getattr(yaml_config, "__line__", "?") issue_id = hex(hash(frozenset(yaml_config))) yaml_config_str = yaml_dump(yaml_config) - learn_more_url = ( - f"https://www.home-assistant.io/integrations/{domain}.mqtt/" - ) async_create_issue( hass, DOMAIN, @@ -319,7 +355,7 @@ def async_setup_entity_entry_helper( issue_domain=domain, is_fixable=False, severity=IssueSeverity.ERROR, - learn_more_url=learn_more_url, + learn_more_url=learn_more_url(domain), translation_placeholders={ "domain": domain, "config_file": config_file, diff --git a/homeassistant/components/mqtt/lawn_mower.py b/homeassistant/components/mqtt/lawn_mower.py index 7727efcf04d..1917c56f209 100644 --- a/homeassistant/components/mqtt/lawn_mower.py +++ b/homeassistant/components/mqtt/lawn_mower.py @@ -10,6 +10,7 @@ import voluptuous as vol from homeassistant.components import lawn_mower from homeassistant.components.lawn_mower import ( + ENTITY_ID_FORMAT, LawnMowerActivity, LawnMowerEntity, LawnMowerEntityFeature, @@ -50,7 +51,6 @@ CONF_START_MOWING_COMMAND_TOPIC = "start_mowing_command_topic" CONF_START_MOWING_COMMAND_TEMPLATE = "start_mowing_command_template" DEFAULT_NAME = "MQTT Lawn Mower" -ENTITY_ID_FORMAT = lawn_mower.DOMAIN + ".{}" MQTT_LAWN_MOWER_ATTRIBUTES_BLOCKED: frozenset[str] = frozenset() diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index a2f424b247d..a950aced665 100644 --- 
a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -51,12 +51,58 @@ from homeassistant.util import color as color_util from .. import subscription from ..config import MQTT_RW_SCHEMA from ..const import ( + CONF_BRIGHTNESS_COMMAND_TEMPLATE, + CONF_BRIGHTNESS_COMMAND_TOPIC, + CONF_BRIGHTNESS_SCALE, + CONF_BRIGHTNESS_STATE_TOPIC, + CONF_BRIGHTNESS_VALUE_TEMPLATE, + CONF_COLOR_MODE_STATE_TOPIC, + CONF_COLOR_MODE_VALUE_TEMPLATE, + CONF_COLOR_TEMP_COMMAND_TEMPLATE, + CONF_COLOR_TEMP_COMMAND_TOPIC, CONF_COLOR_TEMP_KELVIN, + CONF_COLOR_TEMP_STATE_TOPIC, + CONF_COLOR_TEMP_VALUE_TEMPLATE, CONF_COMMAND_TOPIC, + CONF_EFFECT_COMMAND_TEMPLATE, + CONF_EFFECT_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_EFFECT_STATE_TOPIC, + CONF_EFFECT_VALUE_TEMPLATE, + CONF_HS_COMMAND_TEMPLATE, + CONF_HS_COMMAND_TOPIC, + CONF_HS_STATE_TOPIC, + CONF_HS_VALUE_TEMPLATE, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, + CONF_ON_COMMAND_TYPE, + CONF_RGB_COMMAND_TEMPLATE, + CONF_RGB_COMMAND_TOPIC, + CONF_RGB_STATE_TOPIC, + CONF_RGB_VALUE_TEMPLATE, + CONF_RGBW_COMMAND_TEMPLATE, + CONF_RGBW_COMMAND_TOPIC, + CONF_RGBW_STATE_TOPIC, + CONF_RGBW_VALUE_TEMPLATE, + CONF_RGBWW_COMMAND_TEMPLATE, + CONF_RGBWW_COMMAND_TOPIC, + CONF_RGBWW_STATE_TOPIC, + CONF_RGBWW_VALUE_TEMPLATE, CONF_STATE_TOPIC, CONF_STATE_VALUE_TEMPLATE, + CONF_WHITE_COMMAND_TOPIC, + CONF_WHITE_SCALE, + CONF_XY_COMMAND_TEMPLATE, + CONF_XY_COMMAND_TOPIC, + CONF_XY_STATE_TOPIC, + CONF_XY_VALUE_TEMPLATE, + DEFAULT_BRIGHTNESS_SCALE, + DEFAULT_ON_COMMAND_TYPE, + DEFAULT_PAYLOAD_OFF, + DEFAULT_PAYLOAD_ON, + DEFAULT_WHITE_SCALE, PAYLOAD_NONE, ) from ..entity import MqttEntity @@ -74,47 +120,7 @@ from .schema import MQTT_LIGHT_SCHEMA_SCHEMA _LOGGER = logging.getLogger(__name__) -CONF_BRIGHTNESS_COMMAND_TEMPLATE = "brightness_command_template" -CONF_BRIGHTNESS_COMMAND_TOPIC = "brightness_command_topic" -CONF_BRIGHTNESS_SCALE = "brightness_scale" -CONF_BRIGHTNESS_STATE_TOPIC = "brightness_state_topic" -CONF_BRIGHTNESS_VALUE_TEMPLATE = "brightness_value_template" -CONF_COLOR_MODE_STATE_TOPIC = "color_mode_state_topic" -CONF_COLOR_MODE_VALUE_TEMPLATE = "color_mode_value_template" -CONF_COLOR_TEMP_COMMAND_TEMPLATE = "color_temp_command_template" -CONF_COLOR_TEMP_COMMAND_TOPIC = "color_temp_command_topic" -CONF_COLOR_TEMP_STATE_TOPIC = "color_temp_state_topic" -CONF_COLOR_TEMP_VALUE_TEMPLATE = "color_temp_value_template" -CONF_EFFECT_COMMAND_TEMPLATE = "effect_command_template" -CONF_EFFECT_COMMAND_TOPIC = "effect_command_topic" -CONF_EFFECT_LIST = "effect_list" -CONF_EFFECT_STATE_TOPIC = "effect_state_topic" -CONF_EFFECT_VALUE_TEMPLATE = "effect_value_template" -CONF_HS_COMMAND_TEMPLATE = "hs_command_template" -CONF_HS_COMMAND_TOPIC = "hs_command_topic" -CONF_HS_STATE_TOPIC = "hs_state_topic" -CONF_HS_VALUE_TEMPLATE = "hs_value_template" -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" -CONF_RGB_COMMAND_TEMPLATE = "rgb_command_template" -CONF_RGB_COMMAND_TOPIC = "rgb_command_topic" -CONF_RGB_STATE_TOPIC = "rgb_state_topic" -CONF_RGB_VALUE_TEMPLATE = "rgb_value_template" -CONF_RGBW_COMMAND_TEMPLATE = "rgbw_command_template" -CONF_RGBW_COMMAND_TOPIC = "rgbw_command_topic" -CONF_RGBW_STATE_TOPIC = "rgbw_state_topic" -CONF_RGBW_VALUE_TEMPLATE = "rgbw_value_template" -CONF_RGBWW_COMMAND_TEMPLATE = "rgbww_command_template" -CONF_RGBWW_COMMAND_TOPIC = "rgbww_command_topic" -CONF_RGBWW_STATE_TOPIC = "rgbww_state_topic" -CONF_RGBWW_VALUE_TEMPLATE = "rgbww_value_template" 
-CONF_XY_COMMAND_TEMPLATE = "xy_command_template" -CONF_XY_COMMAND_TOPIC = "xy_command_topic" -CONF_XY_STATE_TOPIC = "xy_state_topic" -CONF_XY_VALUE_TEMPLATE = "xy_value_template" -CONF_WHITE_COMMAND_TOPIC = "white_command_topic" -CONF_WHITE_SCALE = "white_scale" -CONF_ON_COMMAND_TYPE = "on_command_type" +DEFAULT_NAME = "MQTT LightEntity" MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( { @@ -137,13 +143,6 @@ MQTT_LIGHT_ATTRIBUTES_BLOCKED = frozenset( } ) -DEFAULT_BRIGHTNESS_SCALE = 255 -DEFAULT_NAME = "MQTT LightEntity" -DEFAULT_PAYLOAD_OFF = "OFF" -DEFAULT_PAYLOAD_ON = "ON" -DEFAULT_WHITE_SCALE = 255 -DEFAULT_ON_COMMAND_TYPE = "last" - VALUES_ON_COMMAND_TYPE = ["first", "last", "brightness"] COMMAND_TEMPLATE_KEYS = [ diff --git a/homeassistant/components/mqtt/light/schema_json.py b/homeassistant/components/mqtt/light/schema_json.py index d18da9e917a..a1f86278cf0 100644 --- a/homeassistant/components/mqtt/light/schema_json.py +++ b/homeassistant/components/mqtt/light/schema_json.py @@ -55,13 +55,26 @@ from homeassistant.util.json import json_loads_object from .. import subscription from ..config import DEFAULT_QOS, DEFAULT_RETAIN, MQTT_RW_SCHEMA from ..const import ( + CONF_COLOR_MODE, CONF_COLOR_TEMP_KELVIN, CONF_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_FLASH_TIME_LONG, + CONF_FLASH_TIME_SHORT, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, CONF_QOS, CONF_RETAIN, CONF_STATE_TOPIC, + CONF_SUPPORTED_COLOR_MODES, + DEFAULT_BRIGHTNESS, + DEFAULT_BRIGHTNESS_SCALE, + DEFAULT_EFFECT, + DEFAULT_FLASH_TIME_LONG, + DEFAULT_FLASH_TIME_SHORT, + DEFAULT_WHITE_SCALE, ) from ..entity import MqttEntity from ..models import ReceiveMessage @@ -78,25 +91,7 @@ _LOGGER = logging.getLogger(__name__) DOMAIN = "mqtt_json" -DEFAULT_BRIGHTNESS = False -DEFAULT_EFFECT = False -DEFAULT_FLASH_TIME_LONG = 10 -DEFAULT_FLASH_TIME_SHORT = 2 DEFAULT_NAME = "MQTT JSON Light" -DEFAULT_BRIGHTNESS_SCALE = 255 -DEFAULT_WHITE_SCALE = 255 - -CONF_COLOR_MODE = "color_mode" -CONF_SUPPORTED_COLOR_MODES = "supported_color_modes" - -CONF_EFFECT_LIST = "effect_list" - -CONF_FLASH_TIME_LONG = "flash_time_long" -CONF_FLASH_TIME_SHORT = "flash_time_short" - -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" - _PLATFORM_SCHEMA_BASE = ( MQTT_RW_SCHEMA.extend( diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index 901cee6f14c..f561f15fb51 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -40,10 +40,21 @@ from homeassistant.util import color as color_util from .. 
import subscription from ..config import MQTT_RW_SCHEMA from ..const import ( + CONF_BLUE_TEMPLATE, + CONF_BRIGHTNESS_TEMPLATE, CONF_COLOR_TEMP_KELVIN, + CONF_COLOR_TEMP_TEMPLATE, + CONF_COMMAND_OFF_TEMPLATE, + CONF_COMMAND_ON_TEMPLATE, CONF_COMMAND_TOPIC, + CONF_EFFECT_LIST, + CONF_EFFECT_TEMPLATE, + CONF_GREEN_TEMPLATE, CONF_MAX_KELVIN, + CONF_MAX_MIREDS, CONF_MIN_KELVIN, + CONF_MIN_MIREDS, + CONF_RED_TEMPLATE, CONF_STATE_TOPIC, PAYLOAD_NONE, ) @@ -51,6 +62,7 @@ from ..entity import MqttEntity from ..models import ( MqttCommandTemplate, MqttValueTemplate, + PayloadSentinel, PublishPayloadType, ReceiveMessage, ) @@ -64,18 +76,6 @@ DOMAIN = "mqtt_template" DEFAULT_NAME = "MQTT Template Light" -CONF_BLUE_TEMPLATE = "blue_template" -CONF_BRIGHTNESS_TEMPLATE = "brightness_template" -CONF_COLOR_TEMP_TEMPLATE = "color_temp_template" -CONF_COMMAND_OFF_TEMPLATE = "command_off_template" -CONF_COMMAND_ON_TEMPLATE = "command_on_template" -CONF_EFFECT_LIST = "effect_list" -CONF_EFFECT_TEMPLATE = "effect_template" -CONF_GREEN_TEMPLATE = "green_template" -CONF_MAX_MIREDS = "max_mireds" -CONF_MIN_MIREDS = "min_mireds" -CONF_RED_TEMPLATE = "red_template" - COMMAND_TEMPLATES = (CONF_COMMAND_ON_TEMPLATE, CONF_COMMAND_OFF_TEMPLATE) VALUE_TEMPLATES = ( CONF_BLUE_TEMPLATE, @@ -127,7 +127,9 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): _command_templates: dict[ str, Callable[[PublishPayloadType, TemplateVarsType], PublishPayloadType] ] - _value_templates: dict[str, Callable[[ReceivePayloadType], ReceivePayloadType]] + _value_templates: dict[ + str, Callable[[ReceivePayloadType, ReceivePayloadType], ReceivePayloadType] + ] _fixed_color_mode: ColorMode | str | None _topics: dict[str, str | None] @@ -204,73 +206,133 @@ class MqttLightTemplate(MqttEntity, LightEntity, RestoreEntity): @callback def _state_received(self, msg: ReceiveMessage) -> None: """Handle new MQTT messages.""" - state = self._value_templates[CONF_STATE_TEMPLATE](msg.payload) - if state == STATE_ON: + state_value = self._value_templates[CONF_STATE_TEMPLATE]( + msg.payload, + PayloadSentinel.NONE, + ) + if not state_value: + _LOGGER.debug( + "Ignoring message from '%s' with empty state value", msg.topic + ) + elif state_value == STATE_ON: self._attr_is_on = True - elif state == STATE_OFF: + elif state_value == STATE_OFF: self._attr_is_on = False - elif state == PAYLOAD_NONE: + elif state_value == PAYLOAD_NONE: self._attr_is_on = None else: - _LOGGER.warning("Invalid state value received") + _LOGGER.warning( + "Invalid state value '%s' received from %s", + state_value, + msg.topic, + ) if CONF_BRIGHTNESS_TEMPLATE in self._config: - try: - if brightness := int( - self._value_templates[CONF_BRIGHTNESS_TEMPLATE](msg.payload) - ): - self._attr_brightness = brightness - else: - _LOGGER.debug( - "Ignoring zero brightness value for entity %s", - self.entity_id, + brightness_value = self._value_templates[CONF_BRIGHTNESS_TEMPLATE]( + msg.payload, + PayloadSentinel.NONE, + ) + if not brightness_value: + _LOGGER.debug( + "Ignoring message from '%s' with empty brightness value", + msg.topic, + ) + else: + try: + if brightness := int(brightness_value): + self._attr_brightness = brightness + else: + _LOGGER.debug( + "Ignoring zero brightness value for entity %s", + self.entity_id, + ) + except ValueError: + _LOGGER.warning( + "Invalid brightness value '%s' received from %s", + brightness_value, + msg.topic, ) - except ValueError: - _LOGGER.warning("Invalid brightness value received from %s", msg.topic) - if CONF_COLOR_TEMP_TEMPLATE in 
self._config: - try: - color_temp = self._value_templates[CONF_COLOR_TEMP_TEMPLATE]( - msg.payload + color_temp_value = self._value_templates[CONF_COLOR_TEMP_TEMPLATE]( + msg.payload, + PayloadSentinel.NONE, + ) + if not color_temp_value: + _LOGGER.debug( + "Ignoring message from '%s' with empty color temperature value", + msg.topic, ) - self._attr_color_temp_kelvin = ( - int(color_temp) - if self._color_temp_kelvin - else color_util.color_temperature_mired_to_kelvin(int(color_temp)) - if color_temp != "None" - else None - ) - except ValueError: - _LOGGER.warning("Invalid color temperature value received") + else: + try: + self._attr_color_temp_kelvin = ( + int(color_temp_value) + if self._color_temp_kelvin + else color_util.color_temperature_mired_to_kelvin( + int(color_temp_value) + ) + if color_temp_value != "None" + else None + ) + except ValueError: + _LOGGER.warning( + "Invalid color temperature value '%s' received from %s", + color_temp_value, + msg.topic, + ) if ( CONF_RED_TEMPLATE in self._config and CONF_GREEN_TEMPLATE in self._config and CONF_BLUE_TEMPLATE in self._config ): - try: - red = self._value_templates[CONF_RED_TEMPLATE](msg.payload) - green = self._value_templates[CONF_GREEN_TEMPLATE](msg.payload) - blue = self._value_templates[CONF_BLUE_TEMPLATE](msg.payload) - if red == "None" and green == "None" and blue == "None": - self._attr_hs_color = None - else: - self._attr_hs_color = color_util.color_RGB_to_hs( - int(red), int(green), int(blue) - ) + red_value = self._value_templates[CONF_RED_TEMPLATE]( + msg.payload, + PayloadSentinel.NONE, + ) + green_value = self._value_templates[CONF_GREEN_TEMPLATE]( + msg.payload, + PayloadSentinel.NONE, + ) + blue_value = self._value_templates[CONF_BLUE_TEMPLATE]( + msg.payload, + PayloadSentinel.NONE, + ) + if not red_value or not green_value or not blue_value: + _LOGGER.debug( + "Ignoring message from '%s' with empty color value", msg.topic + ) + elif red_value == "None" and green_value == "None" and blue_value == "None": + self._attr_hs_color = None self._update_color_mode() - except ValueError: - _LOGGER.warning("Invalid color value received") + else: + try: + self._attr_hs_color = color_util.color_RGB_to_hs( + int(red_value), int(green_value), int(blue_value) + ) + self._update_color_mode() + except ValueError: + _LOGGER.warning("Invalid color value received from %s", msg.topic) if CONF_EFFECT_TEMPLATE in self._config: - effect = str(self._value_templates[CONF_EFFECT_TEMPLATE](msg.payload)) - if ( - effect_list := self._config[CONF_EFFECT_LIST] - ) and effect in effect_list: - self._attr_effect = effect + effect_value = self._value_templates[CONF_EFFECT_TEMPLATE]( + msg.payload, + PayloadSentinel.NONE, + ) + if not effect_value: + _LOGGER.debug( + "Ignoring message from '%s' with empty effect value", msg.topic + ) + elif (effect_list := self._config[CONF_EFFECT_LIST]) and str( + effect_value + ) in effect_list: + self._attr_effect = str(effect_value) else: - _LOGGER.warning("Unsupported effect value received") + _LOGGER.warning( + "Unsupported effect value '%s' received from %s", + effect_value, + msg.topic, + ) @callback def _prepare_subscribe_topics(self) -> None: diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index 34c1f304944..8a42797b0f2 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -420,5 +420,41 @@ class MqttComponentConfig: discovery_payload: MQTTDiscoveryPayload +class DeviceMqttOptions(TypedDict, total=False): + 
"""Hold the shared MQTT specific options for an MQTT device.""" + + qos: int + + +class MqttDeviceData(TypedDict, total=False): + """Hold the data for an MQTT device.""" + + name: str + identifiers: str + configuration_url: str + sw_version: str + hw_version: str + model: str + model_id: str + mqtt_settings: DeviceMqttOptions + + +class MqttAvailabilityData(TypedDict, total=False): + """Hold the availability configuration for a device.""" + + availability_topic: str + availability_template: str + payload_available: str + payload_not_available: str + + +class MqttSubentryData(TypedDict, total=False): + """Hold the data for a MQTT subentry.""" + + device: MqttDeviceData + components: dict[str, dict[str, Any]] + availability: MqttAvailabilityData + + DATA_MQTT: HassKey[MqttData] = HassKey("mqtt") DATA_MQTT_AVAILABLE: HassKey[asyncio.Future[bool]] = HassKey("mqtt_client_available") diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index 3e8a4fef0fa..b27ef68368a 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.components import sensor from homeassistant.components.sensor import ( CONF_STATE_CLASS, + DEVICE_CLASS_UNITS, DEVICE_CLASSES_SCHEMA, ENTITY_ID_FORMAT, STATE_CLASSES_SCHEMA, @@ -33,13 +34,22 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util import dt as dt_util from . 
import subscription from .config import MQTT_RO_SCHEMA -from .const import CONF_OPTIONS, CONF_STATE_TOPIC, PAYLOAD_NONE +from .const import ( + CONF_EXPIRE_AFTER, + CONF_LAST_RESET_VALUE_TEMPLATE, + CONF_OPTIONS, + CONF_STATE_TOPIC, + CONF_SUGGESTED_DISPLAY_PRECISION, + DOMAIN, + PAYLOAD_NONE, +) from .entity import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper from .models import MqttValueTemplate, PayloadSentinel, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA @@ -49,10 +59,6 @@ _LOGGER = logging.getLogger(__name__) PARALLEL_UPDATES = 0 -CONF_EXPIRE_AFTER = "expire_after" -CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template" -CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision" - MQTT_SENSOR_ATTRIBUTES_BLOCKED = frozenset( { sensor.ATTR_LAST_RESET, @@ -63,6 +69,10 @@ MQTT_SENSOR_ATTRIBUTES_BLOCKED = frozenset( DEFAULT_NAME = "MQTT Sensor" DEFAULT_FORCE_UPDATE = False +URL_DOCS_SUPPORTED_SENSOR_UOM = ( + "https://www.home-assistant.io/integrations/sensor/#device-class" +) + _PLATFORM_SCHEMA_BASE = MQTT_RO_SCHEMA.extend( { vol.Optional(CONF_DEVICE_CLASS): vol.Any(DEVICE_CLASSES_SCHEMA, None), @@ -107,6 +117,23 @@ def validate_sensor_state_and_device_class_config(config: ConfigType) -> ConfigT f"got `{CONF_DEVICE_CLASS}` '{device_class}'" ) + if (device_class := config.get(CONF_DEVICE_CLASS)) is None or ( + unit_of_measurement := config.get(CONF_UNIT_OF_MEASUREMENT) + ) is None: + return config + + if ( + device_class in DEVICE_CLASS_UNITS + and unit_of_measurement not in DEVICE_CLASS_UNITS[device_class] + ): + _LOGGER.warning( + "The unit of measurement `%s` is not valid " + "together with device class `%s`. " + "this will stop working in HA Core 2025.7.0", + unit_of_measurement, + device_class, + ) + return config @@ -155,8 +182,40 @@ class MqttSensor(MqttEntity, RestoreSensor): None ) + @callback + def async_check_uom(self) -> None: + """Check if the unit of measurement is valid with the device class.""" + if ( + self._discovery_data is not None + or self.device_class is None + or self.native_unit_of_measurement is None + ): + return + if ( + self.device_class in DEVICE_CLASS_UNITS + and self.native_unit_of_measurement + not in DEVICE_CLASS_UNITS[self.device_class] + ): + async_create_issue( + self.hass, + DOMAIN, + self.entity_id, + issue_domain=sensor.DOMAIN, + is_fixable=False, + severity=IssueSeverity.WARNING, + learn_more_url=URL_DOCS_SUPPORTED_SENSOR_UOM, + translation_placeholders={ + "uom": self.native_unit_of_measurement, + "device_class": self.device_class.value, + "entity_id": self.entity_id, + }, + translation_key="invalid_unit_of_measurement", + breaks_in_ha_version="2025.7.0", + ) + async def mqtt_async_added_to_hass(self) -> None: """Restore state for entities with expire_after set.""" + self.async_check_uom() last_state: State | None last_sensor_data: SensorExtraStoredData | None if ( diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index fc316306d56..cedf120def1 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -3,6 +3,10 @@ "invalid_platform_config": { "title": "Invalid config found for mqtt {domain} item", "description": "Home Assistant detected an invalid config for a manually configured item.\n\nPlatform domain: **{domain}**\nConfiguration file: **{config_file}**\nNear line: **{line}**\nConfiguration found:\n```yaml\n{config}\n```\nError: **{error}**.\n\nMake sure the configuration is valid and 
[reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue." + }, + "invalid_unit_of_measurement": { + "title": "Sensor with invalid unit of measurement", + "description": "Manually configured sensor entity **{entity_id}** has a configured unit of measurement **{uom}** which is not valid with the configured device class **{device_class}**. Make sure a valid unit of measurement is configured or remove the device class, and [reload](/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue." + } }, "config": { @@ -26,6 +30,7 @@ "client_id": "Client ID (leave empty to randomly generated one)", "client_cert": "Upload client certificate file", "client_key": "Upload private key file", + "client_key_password": "[%key:common::config_flow::data::password%]", "keepalive": "The time between sending keep alive messages", "tls_insecure": "Ignore broker certificate validation", "protocol": "MQTT protocol", @@ -45,6 +50,7 @@ "client_id": "The unique ID to identify the Home Assistant MQTT API as MQTT client. It is recommended to leave this option blank.", "client_cert": "The client certificate to authenticate against your MQTT broker.", "client_key": "The private key file that belongs to your client certificate.", + "client_key_password": "The password for the private key file (if set).", "keepalive": "A value less than 90 seconds is advised.", "tls_insecure": "Option to ignore validation of your MQTT broker's certificate.", "protocol": "The MQTT protocol your broker operates at. For example 3.1.1.", @@ -93,8 +99,8 @@ "bad_will": "Invalid will topic", "bad_discovery_prefix": "Invalid discovery prefix", "bad_certificate": "The CA certificate is invalid", - "bad_client_cert": "Invalid client certificate, ensure a PEM coded file is supplied", - "bad_client_key": "Invalid private key, ensure a PEM coded file is supplied without password", + "bad_client_cert": "Invalid client certificate, ensure a valid file is supplied", + "client_key_error": "Invalid private key file or invalid password supplied", "bad_client_cert_key": "Client certificate and private key are not a valid pair", "bad_ws_headers": "Supply valid HTTP headers as a JSON object", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -102,6 +108,189 @@ "invalid_inclusion": "The client certificate and private key must be configured together" } }, + "config_subentries": { + "device": { + "initiate_flow": { + "user": "Add MQTT Device", + "reconfigure": "Reconfigure MQTT Device" + }, + "entry_type": "MQTT Device", + "step": { + "availability": { + "title": "Availability options", + "description": "The availability feature allows a device to report its availability.", + "data": { + "availability_topic": "Availability topic", + "availability_template": "Availability template", + "payload_available": "Payload available", + "payload_not_available": "Payload not available" + }, + "data_description": { + "availability_topic": "Topic to receive the availability payload on", + "availability_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-templates-with-the-mqtt-integration) to render the availability payload received on the availability topic", + "payload_available": "The payload that indicates the device is available (defaults to 'online')", + "payload_not_available": "The payload that indicates the device is not available (defaults to 'offline')" + } + }, + "device": { + "title": "Configure MQTT device
details", + "description": "Enter the MQTT device details:", + "data": { + "name": "[%key:common::config_flow::data::name%]", + "configuration_url": "Configuration URL", + "sw_version": "Software version", + "hw_version": "Hardware version", + "model": "Model", + "model_id": "Model ID" + }, + "data_description": { + "name": "The name of the manually added MQTT device.", + "configuration_url": "A link to the webpage that can manage the configuration of this device. Can be either a 'http://', 'https://' or an internal 'homeassistant://' URL.", + "sw_version": "The software version of the device. E.g. '2025.1.0'.", + "hw_version": "The hardware version of the device. E.g. 'v1.0 rev a'.", + "model": "E.g. 'Cleanmaster Pro'.", + "model_id": "E.g. '123NK2PRO'." + }, + "sections": { + "mqtt_settings": { + "name": "MQTT Settings", + "data": { + "qos": "QoS" + }, + "data_description": { + "qos": "The Quality of Service value the device's entities should use." + } + } + } + }, + "summary_menu": { + "title": "Reconfigure \"{mqtt_device}\"", + "description": "Entities set up:\n{mqtt_items}\n\nDecide what to do next:", + "menu_options": { + "entity": "Add another entity to \"{mqtt_device}\"", + "update_entity": "Update entity properties", + "delete_entity": "Delete an entity", + "availability": "Configure availability", + "device": "Update device properties", + "save_changes": "Save changes" + } + }, + "entity": { + "title": "Configure MQTT device \"{mqtt_device}\"", + "description": "Configure the basic {platform_label}entity settings{entity_name_label}", + "data": { + "platform": "Type of entity", + "name": "Entity name", + "entity_picture": "Entity picture" + }, + "data_description": { + "platform": "The type of the entity to configure.", + "name": "The name of the entity. Leave empty to set it to `None` to [mark it as main feature of the MQTT device](https://www.home-assistant.io/integrations/mqtt/#naming-of-mqtt-entities).", + "entity_picture": "An URL to a picture to be assigned." + } + }, + "delete_entity": { + "title": "Delete entity", + "description": "Delete an entity. The entity will be removed from the device. Removing an entity will break any automations or scripts that depend on it.", + "data": { + "component": "Entity" + }, + "data_description": { + "component": "Select the entity you want to delete. Minimal one entity is required." + } + }, + "update_entity": { + "title": "Select entity", + "description": "Select the entity you want to update", + "data": { + "component": "Entity" + }, + "data_description": { + "component": "Select the entity you want to update." + } + }, + "entity_platform_config": { + "title": "Configure MQTT device \"{mqtt_device}\"", + "description": "Please configure specific details for {platform} entity \"{entity}\":", + "data": { + "device_class": "Device class", + "state_class": "State class", + "unit_of_measurement": "Unit of measurement", + "options": "Add option" + }, + "data_description": { + "device_class": "The Device class of the {platform} entity. [Learn more.]({url}#device_class)", + "state_class": "The [State class](https://developers.home-assistant.io/docs/core/entity/sensor/#available-state-classes) of the sensor. [Learn more.]({url}#state_class)", + "unit_of_measurement": "Defines the unit of measurement of the sensor, if any.", + "options": "Options for allowed sensor state values. The sensor’s Device class must be set to Enumeration. The 'Options' setting cannot be used together with State class or Unit of measurement." 
+ }, + "sections": { + "advanced_settings": { + "name": "Advanced options", + "data": { + "suggested_display_precision": "Suggested display precision" + }, + "data_description": { + "suggested_display_precision": "The number of decimals which should be used in the {platform} entity state after rounding. [Learn more.]({url}#suggested_display_precision)" + } + } + } + }, + "mqtt_platform_config": { + "title": "Configure MQTT device \"{mqtt_device}\"", + "description": "Please configure MQTT specific details for {platform} entity \"{entity}\":", + "data": { + "command_topic": "Command topic", + "command_template": "Command template", + "state_topic": "State topic", + "value_template": "Value template", + "last_reset_value_template": "Last reset value template", + "force_update": "Force update", + "optimistic": "Optimistic", + "retain": "Retain" + }, + "data_description": { + "command_topic": "The publishing topic that will be used to control the {platform} entity. [Learn more.]({url}#command_topic)", + "command_template": "A [template](https://www.home-assistant.io/docs/configuration/templating/#using-command-templates-with-mqtt) to render the payload to be published at the command topic.", + "state_topic": "The MQTT topic subscribed to receive {platform} state values. [Learn more.]({url}#state_topic)", + "value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the {platform} entity value.", + "last_reset_value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the last reset. When Last reset template is set, the State class option must be Total. [Learn more.]({url}#last_reset_value_template)", + "force_update": "Sends update events even if the value hasn’t changed. Useful if you want to have meaningful value graphs in history. [Learn more.]({url}#force_update)", + "optimistic": "Flag that defines if the {platform} entity works in optimistic mode. [Learn more.]({url}#optimistic)", + "retain": "Select if values published by the {platform} entity should be retained at the MQTT broker." + }, + "sections": { + "advanced_settings": { + "name": "Advanced settings", + "data": { + "expire_after": "Expire after" + }, + "data_description": { + "expire_after": "If set, it defines the number of seconds after the sensor’s state expires, if it’s not updated. After expiry, the sensor’s state becomes unavailable. If not set, the sensor's state never expires. [Learn more.]({url}#expire_after)" + } + } + } + } + }, + "abort": { + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + }, + "create_entry": { + "default": "MQTT device with {platform} entity \"{entity}\" was set up successfully.\n\nNote that you can reconfigure the MQTT device at any time, e.g. to add more entities." 
+ }, + "error": { + "invalid_input": "Invalid value", + "invalid_subscribe_topic": "Invalid subscribe topic", + "invalid_template": "Invalid template", + "invalid_uom": "The unit of measurement \"{unit_of_measurement}\" is not supported by the selected device class, please either remove the device class, select a device class which supports \"{unit_of_measurement}\", or pick a supported unit of measurement from the list", + "invalid_url": "Invalid URL", + "options_not_allowed_with_state_class_or_uom": "The 'Options' setting is not allowed when state class or unit of measurement are used", + "options_device_class_enum": "The 'Options' setting must be used with the Enumeration device class. If you continue, the existing options will be reset", + "options_with_enum_device_class": "Configure options for the enumeration sensor", + "uom_required_for_device_class": "The selected device class requires a unit" + } + } + }, "device_automation": { "trigger_type": { "button_short_press": "\"{subtype}\" pressed", @@ -207,7 +396,7 @@ "bad_discovery_prefix": "[%key:component::mqtt::config::error::bad_discovery_prefix%]", "bad_certificate": "[%key:component::mqtt::config::error::bad_certificate%]", "bad_client_cert": "[%key:component::mqtt::config::error::bad_client_cert%]", - "bad_client_key": "[%key:component::mqtt::config::error::bad_client_key%]", + "client_key_error": "[%key:component::mqtt::config::error::client_key_error%]", "bad_client_cert_key": "[%key:component::mqtt::config::error::bad_client_cert_key%]", "bad_ws_headers": "[%key:component::mqtt::config::error::bad_ws_headers%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", @@ -215,12 +404,92 @@ } }, "selector": { + "device_class_sensor": { + "options": { + "apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]", + "area": "[%key:component::sensor::entity_component::area::name%]", + "aqi": "[%key:component::sensor::entity_component::aqi::name%]", + "atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]", + "battery": "[%key:component::sensor::entity_component::battery::name%]", + "blood_glucose_concentration": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]", + "carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]", + "carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]", + "conductivity": "[%key:component::sensor::entity_component::conductivity::name%]", + "current": "[%key:component::sensor::entity_component::current::name%]", + "data_rate": "[%key:component::sensor::entity_component::data_rate::name%]", + "data_size": "[%key:component::sensor::entity_component::data_size::name%]", + "date": "[%key:component::sensor::entity_component::date::name%]", + "distance": "[%key:component::sensor::entity_component::distance::name%]", + "duration": "[%key:component::sensor::entity_component::duration::name%]", + "energy": "[%key:component::sensor::entity_component::energy::name%]", + "energy_distance": "[%key:component::sensor::entity_component::energy_distance::name%]", + "energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]", + "enum": "Enumeration", + "frequency": "[%key:component::sensor::entity_component::frequency::name%]", + "gas": "[%key:component::sensor::entity_component::gas::name%]", + "humidity": "[%key:component::sensor::entity_component::humidity::name%]", + "illuminance": 
"[%key:component::sensor::entity_component::illuminance::name%]", + "irradiance": "[%key:component::sensor::entity_component::irradiance::name%]", + "moisture": "[%key:component::sensor::entity_component::moisture::name%]", + "monetary": "[%key:component::sensor::entity_component::monetary::name%]", + "nitrogen_dioxide": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]", + "nitrogen_monoxide": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]", + "nitrous_oxide": "[%key:component::sensor::entity_component::nitrous_oxide::name%]", + "ozone": "[%key:component::sensor::entity_component::ozone::name%]", + "ph": "[%key:component::sensor::entity_component::ph::name%]", + "pm1": "[%key:component::sensor::entity_component::pm1::name%]", + "pm10": "[%key:component::sensor::entity_component::pm10::name%]", + "pm25": "[%key:component::sensor::entity_component::pm25::name%]", + "power": "[%key:component::sensor::entity_component::power::name%]", + "power_factor": "[%key:component::sensor::entity_component::power_factor::name%]", + "precipitation": "[%key:component::sensor::entity_component::precipitation::name%]", + "precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]", + "pressure": "[%key:component::sensor::entity_component::pressure::name%]", + "reactive_power": "[%key:component::sensor::entity_component::reactive_power::name%]", + "signal_strength": "[%key:component::sensor::entity_component::signal_strength::name%]", + "sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]", + "speed": "[%key:component::sensor::entity_component::speed::name%]", + "sulphur_dioxide": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]", + "temperature": "[%key:component::sensor::entity_component::temperature::name%]", + "timestamp": "[%key:component::sensor::entity_component::timestamp::name%]", + "volatile_organic_compounds": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]", + "volatile_organic_compounds_parts": "[%key:component::sensor::entity_component::volatile_organic_compounds_parts::name%]", + "voltage": "[%key:component::sensor::entity_component::voltage::name%]", + "volume": "[%key:component::sensor::entity_component::volume::name%]", + "volume_flow_rate": "[%key:component::sensor::entity_component::volume_flow_rate::name%]", + "volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]", + "water": "[%key:component::sensor::entity_component::water::name%]", + "weight": "[%key:component::sensor::entity_component::weight::name%]", + "wind_direction": "[%key:component::sensor::entity_component::wind_direction::name%]", + "wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]" + } + }, + "device_class_switch": { + "options": { + "outlet": "[%key:component::switch::entity_component::outlet::name%]", + "switch": "[%key:component::switch::title%]" + } + }, + "platform": { + "options": { + "notify": "Notify", + "sensor": "Sensor", + "switch": "Switch" + } + }, "set_ca_cert": { "options": { "off": "[%key:common::state::off%]", "auto": "Auto", "custom": "Custom" } + }, + "state_class": { + "options": { + "measurement": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::measurement%]", + "total": "[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total%]", + "total_increasing": 
"[%key:component::sensor::entity_component::_::state_attributes::state_class::state::total_increasing%]" + } } }, "services": { @@ -230,7 +499,7 @@ "fields": { "evaluate_payload": { "name": "Evaluate payload", - "description": "When `payload` is a Python bytes literal, evaluate the bytes literal and publish the raw data." + "description": "If 'Payload' is a Python bytes literal, evaluate the bytes literal and publish the raw data." }, "topic": { "name": "Topic", diff --git a/homeassistant/components/mqtt/update.py b/homeassistant/components/mqtt/update.py index c4916b5010c..145f0a2562c 100644 --- a/homeassistant/components/mqtt/update.py +++ b/homeassistant/components/mqtt/update.py @@ -26,7 +26,7 @@ from . import subscription from .config import DEFAULT_RETAIN, MQTT_RO_SCHEMA from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_STATE_TOPIC, PAYLOAD_EMPTY_JSON from .entity import MqttEntity, async_setup_entity_entry_helper -from .models import MqttValueTemplate, ReceiveMessage +from .models import MqttValueTemplate, PayloadSentinel, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic @@ -136,7 +136,18 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity): @callback def _handle_state_message_received(self, msg: ReceiveMessage) -> None: """Handle receiving state message via MQTT.""" - payload = self._templates[CONF_VALUE_TEMPLATE](msg.payload) + payload = self._templates[CONF_VALUE_TEMPLATE]( + msg.payload, PayloadSentinel.DEFAULT + ) + + if payload is PayloadSentinel.DEFAULT: + _LOGGER.warning( + "Unable to process payload '%s' for topic %s, with value template '%s'", + msg.payload, + msg.topic, + self._config.get(CONF_VALUE_TEMPLATE), + ) + return if not payload or payload == PAYLOAD_EMPTY_JSON: _LOGGER.debug( diff --git a/homeassistant/components/mqtt/util.py b/homeassistant/components/mqtt/util.py index 27bdb4f2a35..e3996c80a8a 100644 --- a/homeassistant/components/mqtt/util.py +++ b/homeassistant/components/mqtt/util.py @@ -411,3 +411,9 @@ def migrate_certificate_file_to_content(file_name_or_auto: str) -> str | None: return certificate_file.read() except OSError: return None + + +@callback +def learn_more_url(platform: str) -> str: + """Return the URL for the platform specific MQTT documentation.""" + return f"https://www.home-assistant.io/integrations/{platform}.mqtt/" diff --git a/homeassistant/components/mullvad/config_flow.py b/homeassistant/components/mullvad/config_flow.py index c16f8879a7b..b179c5605ef 100644 --- a/homeassistant/components/mullvad/config_flow.py +++ b/homeassistant/components/mullvad/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Mullvad VPN integration.""" +import logging from typing import Any from mullvad_api import MullvadAPI, MullvadAPIError @@ -8,6 +9,8 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class MullvadConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Mullvad VPN.""" @@ -24,7 +27,8 @@ class MullvadConfigFlow(ConfigFlow, domain=DOMAIN): await self.hass.async_add_executor_job(MullvadAPI) except MullvadAPIError: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry(title="Mullvad VPN", data=user_input) diff --git a/homeassistant/components/music_assistant/manifest.json 
b/homeassistant/components/music_assistant/manifest.json index fb8bb9c3ac2..28e8587e90c 100644 --- a/homeassistant/components/music_assistant/manifest.json +++ b/homeassistant/components/music_assistant/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/music_assistant", "iot_class": "local_push", "loggers": ["music_assistant"], - "requirements": ["music-assistant-client==1.1.1"], + "requirements": ["music-assistant-client==1.2.0"], "zeroconf": ["_mass._tcp.local."] } diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py index 56bde7bbae7..7d26f5b3a0c 100644 --- a/homeassistant/components/music_assistant/media_player.py +++ b/homeassistant/components/music_assistant/media_player.py @@ -94,6 +94,12 @@ SUPPORTED_FEATURES_BASE = ( | MediaPlayerEntityFeature.MEDIA_ENQUEUE | MediaPlayerEntityFeature.MEDIA_ANNOUNCE | MediaPlayerEntityFeature.SEEK + # we always add pause support, + # regardless if the underlying player actually natively supports pause + # because the MA behavior is to internally handle pause with stop + # (and a resume position) and we'd like to keep the UX consistent + # background info: https://github.com/home-assistant/core/issues/140118 + | MediaPlayerEntityFeature.PAUSE ) QUEUE_OPTION_MAP = { @@ -697,8 +703,6 @@ class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): supported_features = SUPPORTED_FEATURES_BASE if PlayerFeature.SET_MEMBERS in self.player.supported_features: supported_features |= MediaPlayerEntityFeature.GROUPING - if PlayerFeature.PAUSE in self.player.supported_features: - supported_features |= MediaPlayerEntityFeature.PAUSE if self.player.mute_control != PLAYER_CONTROL_NONE: supported_features |= MediaPlayerEntityFeature.VOLUME_MUTE if self.player.volume_control != PLAYER_CONTROL_NONE: diff --git a/homeassistant/components/music_assistant/strings.json b/homeassistant/components/music_assistant/strings.json index 7338af7cb65..371ecdc3a86 100644 --- a/homeassistant/components/music_assistant/strings.json +++ b/homeassistant/components/music_assistant/strings.json @@ -139,8 +139,8 @@ } }, "get_library": { - "name": "Get Library items", - "description": "Get items from a Music Assistant library.", + "name": "Get library items", + "description": "Retrieves items from a Music Assistant library.", "fields": { "config_entry_id": { "name": "[%key:component::music_assistant::services::search::fields::config_entry_id::name%]", @@ -167,7 +167,7 @@ "description": "Offset to start the list from." }, "order_by": { - "name": "Order By", + "name": "Order by", "description": "Sort the list by this field." }, "album_type": { @@ -176,7 +176,7 @@ }, "album_artists_only": { "name": "Enable album artists filter (only for artist library)", - "description": "Only return Album Artists when listing the Artists library items." + "description": "Only return album artists when listing the artists library items." 
} } } diff --git a/homeassistant/components/mutesync/config_flow.py b/homeassistant/components/mutesync/config_flow.py index ef03df39968..a2aacfc927e 100644 --- a/homeassistant/components/mutesync/config_flow.py +++ b/homeassistant/components/mutesync/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +import logging from typing import Any import aiohttp @@ -16,6 +17,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required("host"): str}) @@ -60,7 +63,8 @@ class MuteSyncConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_create_entry( diff --git a/homeassistant/components/mysensors/__init__.py b/homeassistant/components/mysensors/__init__.py index 19dcce78446..e2aca8b9f01 100644 --- a/homeassistant/components/mysensors/__init__.py +++ b/homeassistant/components/mysensors/__init__.py @@ -17,7 +17,6 @@ from .const import ( DOMAIN, MYSENSORS_DISCOVERED_NODES, MYSENSORS_GATEWAYS, - MYSENSORS_ON_UNLOAD, PLATFORMS, DevId, DiscoveryInfo, @@ -62,13 +61,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if not unload_ok: return False - key = MYSENSORS_ON_UNLOAD.format(entry.entry_id) - if key in hass.data[DOMAIN]: - for fnct in hass.data[DOMAIN][key]: - fnct() - - hass.data[DOMAIN].pop(key) - del hass.data[DOMAIN][MYSENSORS_GATEWAYS][entry.entry_id] hass.data[DOMAIN].pop(MYSENSORS_DISCOVERED_NODES.format(entry.entry_id), None) diff --git a/homeassistant/components/mysensors/binary_sensor.py b/homeassistant/components/mysensors/binary_sensor.py index d42b2194315..e950f083b5b 100644 --- a/homeassistant/components/mysensors/binary_sensor.py +++ b/homeassistant/components/mysensors/binary_sensor.py @@ -20,7 +20,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo from .entity import MySensorsChildEntity -from .helpers import on_unload @dataclass(frozen=True) @@ -86,9 +85,7 @@ async def async_setup_entry( async_add_entities=async_add_entities, ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.BINARY_SENSOR), diff --git a/homeassistant/components/mysensors/climate.py b/homeassistant/components/mysensors/climate.py index d1504f3afab..eb54a76b8a8 100644 --- a/homeassistant/components/mysensors/climate.py +++ b/homeassistant/components/mysensors/climate.py @@ -21,7 +21,6 @@ from homeassistant.util.unit_system import METRIC_SYSTEM from . 
import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo from .entity import MySensorsChildEntity -from .helpers import on_unload DICT_HA_TO_MYS = { HVACMode.AUTO: "AutoChangeOver", @@ -57,9 +56,7 @@ async def async_setup_entry( async_add_entities=async_add_entities, ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.CLIMATE), @@ -85,7 +82,10 @@ class MySensorsHVAC(MySensorsChildEntity, ClimateEntity): and set_req.V_HVAC_SETPOINT_HEAT in self._values ): features = features | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - else: + elif ( + set_req.V_HVAC_SETPOINT_COOL in self._values + or set_req.V_HVAC_SETPOINT_HEAT in self._values + ): features = features | ClimateEntityFeature.TARGET_TEMPERATURE return features @@ -111,13 +111,11 @@ class MySensorsHVAC(MySensorsChildEntity, ClimateEntity): @property def target_temperature(self) -> float | None: - """Return the temperature we try to reach.""" + """Return the temperature we try to reach. + + Either V_HVAC_SETPOINT_COOL or V_HVAC_SETPOINT_HEAT may be used. + """ set_req = self.gateway.const.SetReq - if ( - set_req.V_HVAC_SETPOINT_COOL in self._values - and set_req.V_HVAC_SETPOINT_HEAT in self._values - ): - return None temp = self._values.get(set_req.V_HVAC_SETPOINT_COOL) if temp is None: temp = self._values.get(set_req.V_HVAC_SETPOINT_HEAT) @@ -127,21 +125,13 @@ class MySensorsHVAC(MySensorsChildEntity, ClimateEntity): def target_temperature_high(self) -> float | None: """Return the highbound target temperature we try to reach.""" set_req = self.gateway.const.SetReq - if set_req.V_HVAC_SETPOINT_HEAT in self._values: - temp = self._values.get(set_req.V_HVAC_SETPOINT_COOL) - return float(temp) if temp is not None else None - - return None + return float(self._values[set_req.V_HVAC_SETPOINT_COOL]) @property def target_temperature_low(self) -> float | None: """Return the lowbound target temperature we try to reach.""" set_req = self.gateway.const.SetReq - if set_req.V_HVAC_SETPOINT_COOL in self._values: - temp = self._values.get(set_req.V_HVAC_SETPOINT_HEAT) - return float(temp) if temp is not None else None - - return None + return float(self._values[set_req.V_HVAC_SETPOINT_HEAT]) @property def hvac_mode(self) -> HVACMode: @@ -185,10 +175,6 @@ class MySensorsHVAC(MySensorsChildEntity, ClimateEntity): self.gateway.set_child_value( self.node_id, self.child_id, value_type, value, ack=1 ) - if self.assumed_state: - # Optimistically assume that device has changed state - self._values[value_type] = value - self.async_write_ha_state() async def async_set_fan_mode(self, fan_mode: str) -> None: """Set new target temperature.""" @@ -196,10 +182,6 @@ class MySensorsHVAC(MySensorsChildEntity, ClimateEntity): self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_HVAC_SPEED, fan_mode, ack=1 ) - if self.assumed_state: - # Optimistically assume that device has changed state - self._values[set_req.V_HVAC_SPEED] = fan_mode - self.async_write_ha_state() async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target temperature.""" @@ -210,10 +192,6 @@ class MySensorsHVAC(MySensorsChildEntity, ClimateEntity): DICT_HA_TO_MYS[hvac_mode], ack=1, ) - if self.assumed_state: - # Optimistically assume that device has changed state - self._values[self.value_type] = hvac_mode - self.async_write_ha_state() @callback def _async_update(self) -> None: diff --git 
a/homeassistant/components/mysensors/const.py b/homeassistant/components/mysensors/const.py index a65b46616d3..a87b78b549e 100644 --- a/homeassistant/components/mysensors/const.py +++ b/homeassistant/components/mysensors/const.py @@ -34,7 +34,6 @@ CHILD_CALLBACK: str = "mysensors_child_callback_{}_{}_{}_{}" NODE_CALLBACK: str = "mysensors_node_callback_{}_{}" MYSENSORS_DISCOVERY: str = "mysensors_discovery_{}_{}" MYSENSORS_NODE_DISCOVERY: str = "mysensors_node_discovery" -MYSENSORS_ON_UNLOAD: str = "mysensors_on_unload_{}" TYPE: Final = "type" UPDATE_DELAY: float = 0.1 diff --git a/homeassistant/components/mysensors/cover.py b/homeassistant/components/mysensors/cover.py index 14e6ff6dc15..84346a5d10a 100644 --- a/homeassistant/components/mysensors/cover.py +++ b/homeassistant/components/mysensors/cover.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.components.cover import ATTR_POSITION, CoverEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.const import STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -15,7 +15,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo from .entity import MySensorsChildEntity -from .helpers import on_unload @unique @@ -45,9 +44,7 @@ async def async_setup_entry( async_add_entities=async_add_entities, ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.COVER), @@ -113,13 +110,6 @@ class MySensorsCover(MySensorsChildEntity, CoverEntity): self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_UP, 1, ack=1 ) - if self.assumed_state: - # Optimistically assume that cover has changed state. - if set_req.V_DIMMER in self._values: - self._values[set_req.V_DIMMER] = 100 - else: - self._values[set_req.V_LIGHT] = STATE_ON - self.async_write_ha_state() async def async_close_cover(self, **kwargs: Any) -> None: """Move the cover down.""" @@ -127,13 +117,6 @@ class MySensorsCover(MySensorsChildEntity, CoverEntity): self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DOWN, 1, ack=1 ) - if self.assumed_state: - # Optimistically assume that cover has changed state. - if set_req.V_DIMMER in self._values: - self._values[set_req.V_DIMMER] = 0 - else: - self._values[set_req.V_LIGHT] = STATE_OFF - self.async_write_ha_state() async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" @@ -142,10 +125,6 @@ class MySensorsCover(MySensorsChildEntity, CoverEntity): self.gateway.set_child_value( self.node_id, self.child_id, set_req.V_DIMMER, position, ack=1 ) - if self.assumed_state: - # Optimistically assume that cover has changed state. 
- self._values[set_req.V_DIMMER] = position - self.async_write_ha_state() async def async_stop_cover(self, **kwargs: Any) -> None: """Stop the device.""" diff --git a/homeassistant/components/mysensors/device_tracker.py b/homeassistant/components/mysensors/device_tracker.py index 56d8b2f5923..e6368b0b81d 100644 --- a/homeassistant/components/mysensors/device_tracker.py +++ b/homeassistant/components/mysensors/device_tracker.py @@ -12,7 +12,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo from .entity import MySensorsChildEntity -from .helpers import on_unload async def async_setup_entry( @@ -33,9 +32,7 @@ async def async_setup_entry( async_add_entities=async_add_entities, ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.DEVICE_TRACKER), diff --git a/homeassistant/components/mysensors/gateway.py b/homeassistant/components/mysensors/gateway.py index bdc83f30b21..91453ea3306 100644 --- a/homeassistant/components/mysensors/gateway.py +++ b/homeassistant/components/mysensors/gateway.py @@ -47,7 +47,6 @@ from .handler import HANDLERS from .helpers import ( discover_mysensors_node, discover_mysensors_platform, - on_unload, validate_child, validate_node, ) @@ -293,9 +292,7 @@ async def _gw_start( """Stop the gateway.""" await gw_stop(hass, entry, gateway) - on_unload( - hass, - entry.entry_id, + entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_this_gw), ) diff --git a/homeassistant/components/mysensors/helpers.py b/homeassistant/components/mysensors/helpers.py index c96ad6cea8e..9ed41dfe4e9 100644 --- a/homeassistant/components/mysensors/helpers.py +++ b/homeassistant/components/mysensors/helpers.py @@ -27,7 +27,6 @@ from .const import ( MYSENSORS_DISCOVERED_NODES, MYSENSORS_DISCOVERY, MYSENSORS_NODE_DISCOVERY, - MYSENSORS_ON_UNLOAD, TYPE_TO_PLATFORMS, DevId, GatewayId, @@ -41,18 +40,6 @@ SCHEMAS: Registry[ ] = Registry() -@callback -def on_unload(hass: HomeAssistant, gateway_id: GatewayId, fnct: Callable) -> None: - """Register a callback to be called when entry is unloaded. - - This function is used by platforms to cleanup after themselves. - """ - key = MYSENSORS_ON_UNLOAD.format(gateway_id) - if key not in hass.data[DOMAIN]: - hass.data[DOMAIN][key] = [] - hass.data[DOMAIN][key].append(fnct) - - @callback def discover_mysensors_platform( hass: HomeAssistant, gateway_id: GatewayId, platform: str, new_devices: list[DevId] diff --git a/homeassistant/components/mysensors/light.py b/homeassistant/components/mysensors/light.py index 9e4054ca3d0..fa5e625c72b 100644 --- a/homeassistant/components/mysensors/light.py +++ b/homeassistant/components/mysensors/light.py @@ -12,7 +12,7 @@ from homeassistant.components.light import ( LightEntity, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.const import STATE_ON, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -21,7 +21,6 @@ from homeassistant.util.color import rgb_hex_to_rgb_list from . 
import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo, SensorType from .entity import MySensorsChildEntity -from .helpers import on_unload async def async_setup_entry( @@ -46,9 +45,7 @@ async def async_setup_entry( async_add_entities=async_add_entities, ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.LIGHT), @@ -80,11 +77,6 @@ class MySensorsLight(MySensorsChildEntity, LightEntity): self.node_id, self.child_id, set_req.V_LIGHT, 1, ack=1 ) - if self.assumed_state: - # optimistically assume that light has changed state - self._state = True - self._values[set_req.V_LIGHT] = STATE_ON - def _turn_on_dimmer(self, **kwargs: Any) -> None: """Turn on dimmer child device.""" set_req = self.gateway.const.SetReq @@ -101,20 +93,10 @@ class MySensorsLight(MySensorsChildEntity, LightEntity): self.node_id, self.child_id, set_req.V_DIMMER, percent, ack=1 ) - if self.assumed_state: - # optimistically assume that light has changed state - self._attr_brightness = brightness - self._values[set_req.V_DIMMER] = percent - async def async_turn_off(self, **kwargs: Any) -> None: """Turn the device off.""" value_type = self.gateway.const.SetReq.V_LIGHT self.gateway.set_child_value(self.node_id, self.child_id, value_type, 0, ack=1) - if self.assumed_state: - # optimistically assume that light has changed state - self._state = False - self._values[value_type] = STATE_OFF - self.async_write_ha_state() @callback def _async_update_light(self) -> None: @@ -142,8 +124,6 @@ class MySensorsLightDimmer(MySensorsLight): """Turn the device on.""" self._turn_on_light() self._turn_on_dimmer(**kwargs) - if self.assumed_state: - self.async_write_ha_state() @callback def _async_update(self) -> None: @@ -164,8 +144,6 @@ class MySensorsLightRGB(MySensorsLight): self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgb(**kwargs) - if self.assumed_state: - self.async_write_ha_state() def _turn_on_rgb(self, **kwargs: Any) -> None: """Turn on RGB child device.""" @@ -179,11 +157,6 @@ class MySensorsLightRGB(MySensorsLight): self.node_id, self.child_id, self.value_type, hex_color, ack=1 ) - if self.assumed_state: - # optimistically assume that light has changed state - self._attr_rgb_color = new_rgb - self._values[self.value_type] = hex_color - @callback def _async_update(self) -> None: """Update the controller with the latest value from a sensor.""" @@ -212,8 +185,6 @@ class MySensorsLightRGBW(MySensorsLightRGB): self._turn_on_light() self._turn_on_dimmer(**kwargs) self._turn_on_rgbw(**kwargs) - if self.assumed_state: - self.async_write_ha_state() def _turn_on_rgbw(self, **kwargs: Any) -> None: """Turn on RGBW child device.""" @@ -227,11 +198,6 @@ class MySensorsLightRGBW(MySensorsLightRGB): self.node_id, self.child_id, self.value_type, hex_color, ack=1 ) - if self.assumed_state: - # optimistically assume that light has changed state - self._attr_rgbw_color = new_rgbw - self._values[self.value_type] = hex_color - @callback def _async_update_rgb_or_w(self) -> None: """Update the controller with values from RGBW child.""" diff --git a/homeassistant/components/mysensors/remote.py b/homeassistant/components/mysensors/remote.py index ada801f92ab..ccb67f78eba 100644 --- a/homeassistant/components/mysensors/remote.py +++ b/homeassistant/components/mysensors/remote.py @@ -19,7 +19,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo from .entity import MySensorsChildEntity -from .helpers import on_unload async def async_setup_entry( @@ -40,9 +39,7 @@ async def async_setup_entry( async_add_entities=async_add_entities, ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.REMOTE), diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index 759cf7b010f..3793bed8af2 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -50,7 +50,6 @@ from .const import ( NodeDiscoveryInfo, ) from .entity import MySensorNodeEntity, MySensorsChildEntity -from .helpers import on_unload SENSORS: dict[str, SensorEntityDescription] = { "V_TEMP": SensorEntityDescription( @@ -103,6 +102,7 @@ SENSORS: dict[str, SensorEntityDescription] = { native_unit_of_measurement=DEGREE, icon="mdi:compass", device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), "V_WEIGHT": SensorEntityDescription( key="V_WEIGHT", @@ -233,9 +233,7 @@ async def async_setup_entry( gateway: BaseAsyncGateway = hass.data[DOMAIN][MYSENSORS_GATEWAYS][gateway_id] async_add_entities([MyBatterySensor(gateway_id, gateway, node_id)]) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.SENSOR), @@ -243,9 +241,7 @@ async def async_setup_entry( ), ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_NODE_DISCOVERY, diff --git a/homeassistant/components/mysensors/strings.json b/homeassistant/components/mysensors/strings.json index 30fe5f46d6b..1636cb076cc 100644 --- a/homeassistant/components/mysensors/strings.json +++ b/homeassistant/components/mysensors/strings.json @@ -21,16 +21,16 @@ "device": "IP address of the gateway", "tcp_port": "[%key:common::config_flow::data::port%]", "version": "MySensors version", - "persistence_file": "persistence file (leave empty to auto-generate)" + "persistence_file": "Persistence file (leave empty to auto-generate)" } }, "gw_serial": { "description": "Serial gateway setup", "data": { "device": "Serial port", - "baud_rate": "baud rate", + "baud_rate": "Baud rate", "version": "[%key:component::mysensors::config::step::gw_tcp::data::version%]", - "persistence_file": "Persistence file (leave empty to auto-generate)" + "persistence_file": "[%key:component::mysensors::config::step::gw_tcp::data::persistence_file%]" } }, "gw_mqtt": { @@ -40,7 +40,7 @@ "topic_in_prefix": "Prefix for input topics (topic_in_prefix)", "topic_out_prefix": "Prefix for output topics (topic_out_prefix)", "version": "[%key:component::mysensors::config::step::gw_tcp::data::version%]", - "persistence_file": "[%key:component::mysensors::config::step::gw_serial::data::persistence_file%]" + "persistence_file": "[%key:component::mysensors::config::step::gw_tcp::data::persistence_file%]" } } }, diff --git a/homeassistant/components/mysensors/switch.py b/homeassistant/components/mysensors/switch.py index 52207c21f77..9b57102a94c 100644 --- a/homeassistant/components/mysensors/switch.py +++ b/homeassistant/components/mysensors/switch.py @@ -6,7 +6,7 @@ from typing import Any from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries 
import ConfigEntry -from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.const import STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -14,7 +14,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo, SensorType from .entity import MySensorsChildEntity -from .helpers import on_unload async def async_setup_entry( @@ -48,9 +47,7 @@ async def async_setup_entry( async_add_entities=async_add_entities, ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.SWITCH), @@ -72,17 +69,9 @@ class MySensorsSwitch(MySensorsChildEntity, SwitchEntity): self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, 1, ack=1 ) - if self.assumed_state: - # Optimistically assume that switch has changed state - self._values[self.value_type] = STATE_ON - self.async_write_ha_state() async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" self.gateway.set_child_value( self.node_id, self.child_id, self.value_type, 0, ack=1 ) - if self.assumed_state: - # Optimistically assume that switch has changed state - self._values[self.value_type] = STATE_OFF - self.async_write_ha_state() diff --git a/homeassistant/components/mysensors/text.py b/homeassistant/components/mysensors/text.py index 8eff7a255e7..9fdd9da5345 100644 --- a/homeassistant/components/mysensors/text.py +++ b/homeassistant/components/mysensors/text.py @@ -12,7 +12,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import setup_mysensors_platform from .const import MYSENSORS_DISCOVERY, DiscoveryInfo from .entity import MySensorsChildEntity -from .helpers import on_unload async def async_setup_entry( @@ -33,9 +32,7 @@ async def async_setup_entry( async_add_entities=async_add_entities, ) - on_unload( - hass, - config_entry.entry_id, + config_entry.async_on_unload( async_dispatcher_connect( hass, MYSENSORS_DISCOVERY.format(config_entry.entry_id, Platform.TEXT), diff --git a/homeassistant/components/nam/__init__.py b/homeassistant/components/nam/__init__.py index 6b4ca6ff324..d297443c059 100644 --- a/homeassistant/components/nam/__init__.py +++ b/homeassistant/components/nam/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from aiohttp.client_exceptions import ClientConnectorError, ClientError +from aiohttp.client_exceptions import ClientError from nettigo_air_monitor import ( ApiError, AuthFailedError, @@ -38,15 +38,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: NAMConfigEntry) -> bool: options = ConnectionOptions(host=host, username=username, password=password) try: nam = await NettigoAirMonitor.create(websession, options) - except (ApiError, ClientError, ClientConnectorError, TimeoutError) as err: - raise ConfigEntryNotReady from err + except (ApiError, ClientError) as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err try: await nam.async_check_credentials() - except ApiError as err: - raise ConfigEntryNotReady from err + except (ApiError, ClientError) as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err except AuthFailedError as err: - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"device": entry.title}, + ) from err coordinator = NAMDataUpdateCoordinator(hass, entry, nam) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/nam/button.py b/homeassistant/components/nam/button.py index 60145e4fe27..791a5fdc27c 100644 --- a/homeassistant/components/nam/button.py +++ b/homeassistant/components/nam/button.py @@ -4,6 +4,9 @@ from __future__ import annotations import logging +from aiohttp.client_exceptions import ClientError +from nettigo_air_monitor import ApiError, AuthFailedError + from homeassistant.components.button import ( ButtonDeviceClass, ButtonEntity, @@ -11,9 +14,11 @@ from homeassistant.components.button import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from .const import DOMAIN from .coordinator import NAMConfigEntry, NAMDataUpdateCoordinator PARALLEL_UPDATES = 1 @@ -59,4 +64,16 @@ class NAMButton(CoordinatorEntity[NAMDataUpdateCoordinator], ButtonEntity): async def async_press(self) -> None: """Triggers the restart.""" - await self.coordinator.nam.async_restart() + try: + await self.coordinator.nam.async_restart() + except (ApiError, ClientError) as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + 
"entity": self.entity_id, + "device": self.coordinator.config_entry.title, + }, + ) from err + except AuthFailedError: + self.coordinator.config_entry.async_start_reauth(self.hass) diff --git a/homeassistant/components/nam/const.py b/homeassistant/components/nam/const.py index 4b7b50b309a..2dedcf3c68a 100644 --- a/homeassistant/components/nam/const.py +++ b/homeassistant/components/nam/const.py @@ -11,6 +11,7 @@ SUFFIX_P1: Final = "_p1" SUFFIX_P2: Final = "_p2" SUFFIX_P4: Final = "_p4" +ATTR_BH1750_ILLUMINANCE: Final = "bh1750_illuminance" ATTR_BME280_HUMIDITY: Final = "bme280_humidity" ATTR_BME280_PRESSURE: Final = "bme280_pressure" ATTR_BME280_TEMPERATURE: Final = "bme280_temperature" diff --git a/homeassistant/components/nam/coordinator.py b/homeassistant/components/nam/coordinator.py index 3e2c9c24474..8a898dee378 100644 --- a/homeassistant/components/nam/coordinator.py +++ b/homeassistant/components/nam/coordinator.py @@ -64,6 +64,10 @@ class NAMDataUpdateCoordinator(DataUpdateCoordinator[NAMSensors]): # We do not need to catch AuthFailed exception here because sensor data is # always available without authorization. except (ApiError, InvalidSensorDataError, RetryError) as error: - raise UpdateFailed(error) from error + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.config_entry.title}, + ) from error return data diff --git a/homeassistant/components/nam/manifest.json b/homeassistant/components/nam/manifest.json index c3a559de50b..1c3b9db7a86 100644 --- a/homeassistant/components/nam/manifest.json +++ b/homeassistant/components/nam/manifest.json @@ -7,7 +7,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["nettigo_air_monitor"], - "requirements": ["nettigo-air-monitor==4.0.0"], + "requirements": ["nettigo-air-monitor==4.1.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/nam/sensor.py b/homeassistant/components/nam/sensor.py index 4478507dc59..45cfd313e8f 100644 --- a/homeassistant/components/nam/sensor.py +++ b/homeassistant/components/nam/sensor.py @@ -19,6 +19,7 @@ from homeassistant.components.sensor import ( from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_PARTS_PER_MILLION, + LIGHT_LUX, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -33,6 +34,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow from .const import ( + ATTR_BH1750_ILLUMINANCE, ATTR_BME280_HUMIDITY, ATTR_BME280_PRESSURE, ATTR_BME280_TEMPERATURE, @@ -83,6 +85,15 @@ class NAMSensorEntityDescription(SensorEntityDescription): SENSORS: tuple[NAMSensorEntityDescription, ...] 
= ( + NAMSensorEntityDescription( + key=ATTR_BH1750_ILLUMINANCE, + translation_key="bh1750_illuminance", + suggested_display_precision=0, + native_unit_of_measurement=LIGHT_LUX, + device_class=SensorDeviceClass.ILLUMINANCE, + state_class=SensorStateClass.MEASUREMENT, + value=lambda sensors: sensors.bh1750_illuminance, + ), NAMSensorEntityDescription( key=ATTR_BME280_HUMIDITY, translation_key="bme280_humidity", diff --git a/homeassistant/components/nam/strings.json b/homeassistant/components/nam/strings.json index 2caa4d8bd97..000dfe74112 100644 --- a/homeassistant/components/nam/strings.json +++ b/homeassistant/components/nam/strings.json @@ -54,6 +54,9 @@ }, "entity": { "sensor": { + "bh1750_illuminance": { + "name": "BH1750 illuminance" + }, "bme280_humidity": { "name": "BME280 humidity" }, @@ -98,6 +101,17 @@ "medium": "Medium", "high": "High", "very_high": "Very high" + }, + "state_attributes": { + "options": { + "state": { + "very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]", + "low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]", + "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", + "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", + "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + } + } } }, "pmsx003_pm1": { @@ -120,6 +134,17 @@ "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + }, + "state_attributes": { + "options": { + "state": { + "very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]", + "low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]", + "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", + "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", + "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + } + } } }, "sds011_pm10": { @@ -145,6 +170,17 @@ "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + }, + "state_attributes": { + "options": { + "state": { + "very_low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_low%]", + "low": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::low%]", + "medium": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::medium%]", + "high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::high%]", + "very_high": "[%key:component::nam::entity::sensor::pmsx003_caqi_level::state::very_high%]" + } + } } }, "sps30_pm1": { @@ -169,5 +205,19 @@ "name": "Last restart" } } + }, + "exceptions": { + "auth_error": { + "message": "Authentication failed for {device}, please update your credentials" + }, + "device_communication_error": { + "message": "An error occurred while communicating with {device}" + }, + "device_communication_action_error": { + "message": "An error occurred while calling action for {entity} for {device}" + }, + "update_error": { + "message": "An error occurred while retrieving data from {device}" + } } } diff 
--git a/homeassistant/components/nasweb/config_flow.py b/homeassistant/components/nasweb/config_flow.py index 3a9ad3f7d49..298210903dc 100644 --- a/homeassistant/components/nasweb/config_flow.py +++ b/homeassistant/components/nasweb/config_flow.py @@ -103,7 +103,7 @@ class NASwebConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): errors["base"] = "missing_status" except AbortFlow: raise - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/ness_alarm/services.yaml b/homeassistant/components/ness_alarm/services.yaml index b02d5e36805..aed1e1836bd 100644 --- a/homeassistant/components/ness_alarm/services.yaml +++ b/homeassistant/components/ness_alarm/services.yaml @@ -7,7 +7,7 @@ aux: selector: number: min: 1 - max: 4 + max: 8 state: default: true selector: diff --git a/homeassistant/components/nest/media_source.py b/homeassistant/components/nest/media_source.py index 146b6f2479e..a3d2901e911 100644 --- a/homeassistant/components/nest/media_source.py +++ b/homeassistant/components/nest/media_source.py @@ -20,8 +20,10 @@ from __future__ import annotations from collections.abc import Mapping from dataclasses import dataclass +import datetime import logging import os +import pathlib from typing import Any from google_nest_sdm.camera_traits import CameraClipPreviewTrait, CameraEventImageTrait @@ -46,6 +48,7 @@ from homeassistant.components.media_source import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.storage import Store from homeassistant.helpers.template import DATE_STR_FORMAT from homeassistant.util import dt as dt_util @@ -72,6 +75,9 @@ MEDIA_PATH = f"{DOMAIN}/event_media" # Size of small in-memory disk cache to avoid excessive disk reads DISK_READ_LRU_MAX_SIZE = 32 +# Remove orphaned media files that are older than this age +ORPHANED_MEDIA_AGE_CUTOFF = datetime.timedelta(days=7) + async def async_get_media_event_store( hass: HomeAssistant, subscriber: GoogleNestSubscriber @@ -123,6 +129,12 @@ class NestEventMediaStore(EventMediaStore): self._media_path = media_path self._data: dict[str, Any] | None = None self._devices: Mapping[str, str] | None = {} + # Invoke garbage collection for orphaned files once per day + async_track_time_interval( + hass, + self.async_remove_orphaned_media, + datetime.timedelta(days=1), + ) async def async_load(self) -> dict | None: """Load data.""" @@ -249,6 +261,68 @@ class NestEventMediaStore(EventMediaStore): devices[device.name] = device_entry.id return devices + async def async_remove_orphaned_media(self, now: datetime.datetime) -> None: + """Remove any media files that are orphaned and not referenced by the active event data. + + The event media store handles garbage collection, but there may be cases where files are + left around or unable to be removed. This is a scheduled event that will also check for + old orphaned files and remove them when the events are not referenced in the active list + of event data. + + Event media files are stored with the format <timestamp>-<event_type>.suffix. We extract + the list of valid timestamps from the event data and remove any files that are not in that list + or are older than the cutoff time. 
+ """ + _LOGGER.debug("Checking for orphaned media at %s", now) + + def _cleanup(event_timestamps: dict[str, set[int]]) -> None: + time_cutoff = (now - ORPHANED_MEDIA_AGE_CUTOFF).timestamp() + media_path = pathlib.Path(self._media_path) + for device_id, valid_timestamps in event_timestamps.items(): + media_files = list(media_path.glob(f"{device_id}/*")) + _LOGGER.debug("Found %d files (device=%s)", len(media_files), device_id) + for media_file in media_files: + if "-" not in media_file.name: + continue + try: + timestamp = int(media_file.name.split("-")[0]) + except ValueError: + continue + if timestamp in valid_timestamps or timestamp > time_cutoff: + continue + _LOGGER.debug("Removing orphaned media file: %s", media_file) + try: + os.remove(media_file) + except OSError as err: + _LOGGER.error( + "Unable to remove orphaned media file: %s %s", + media_file, + err, + ) + + # Nest device id mapped to home assistant device id + event_timestamps = await self._get_valid_event_timestamps() + await self._hass.async_add_executor_job(_cleanup, event_timestamps) + + async def _get_valid_event_timestamps(self) -> dict[str, set[int]]: + """Return a mapping of home assistant device id to valid timestamps.""" + device_map = await self._get_devices() + event_data = await self.async_load() or {} + valid_device_timestamps = {} + for nest_device_id, device_id in device_map.items(): + if (device_events := event_data.get(nest_device_id, {})) is None: + continue + valid_device_timestamps[device_id] = { + int( + datetime.datetime.fromisoformat( + camera_event["timestamp"] + ).timestamp() + ) + for events in device_events + for camera_event in events["events"].values() + } + return valid_device_timestamps + async def async_get_media_source(hass: HomeAssistant) -> MediaSource: """Set up Nest media source.""" diff --git a/homeassistant/components/netatmo/config_flow.py b/homeassistant/components/netatmo/config_flow.py index d853694ffea..02d9c2fa3a6 100644 --- a/homeassistant/components/netatmo/config_flow.py +++ b/homeassistant/components/netatmo/config_flow.py @@ -135,7 +135,7 @@ class NetatmoOptionsFlowHandler(OptionsFlow): vol.Optional( CONF_WEATHER_AREAS, default=weather_areas, - ): cv.multi_select({wa: None for wa in weather_areas}), + ): cv.multi_select(dict.fromkeys(weather_areas)), vol.Optional(CONF_NEW_AREA): str, } ) diff --git a/homeassistant/components/netatmo/sensor.py b/homeassistant/components/netatmo/sensor.py index 5f8084d542c..56b8233912f 100644 --- a/homeassistant/components/netatmo/sensor.py +++ b/homeassistant/components/netatmo/sensor.py @@ -213,7 +213,8 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] = ( netatmo_name="wind_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), NetatmoSensorEntityDescription( key="windstrength", @@ -235,7 +236,8 @@ SENSOR_TYPES: tuple[NetatmoSensorEntityDescription, ...] 
= ( netatmo_name="gust_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), NetatmoSensorEntityDescription( key="guststrength", @@ -345,7 +347,8 @@ PUBLIC_WEATHER_STATION_TYPES: tuple[ key="windangle_value", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, value_fn=lambda area: area.get_latest_wind_angles(), ), NetatmoPublicWeatherSensorEntityDescription( @@ -360,7 +363,8 @@ PUBLIC_WEATHER_STATION_TYPES: tuple[ translation_key="gust_angle", entity_registry_enabled_default=False, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, value_fn=lambda area: area.get_latest_gust_angles(), ), NetatmoPublicWeatherSensorEntityDescription( diff --git a/homeassistant/components/nexia/manifest.json b/homeassistant/components/nexia/manifest.json index 09b79d37c55..e7ab63d4712 100644 --- a/homeassistant/components/nexia/manifest.json +++ b/homeassistant/components/nexia/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/nexia", "iot_class": "cloud_polling", "loggers": ["nexia"], - "requirements": ["nexia==2.2.2"] + "requirements": ["nexia==2.4.0"] } diff --git a/homeassistant/components/nexia/strings.json b/homeassistant/components/nexia/strings.json index 05d86d3a495..43da2cf05c7 100644 --- a/homeassistant/components/nexia/strings.json +++ b/homeassistant/components/nexia/strings.json @@ -86,8 +86,8 @@ } }, "set_hvac_run_mode": { - "name": "Set hvac run mode", - "description": "Sets the HVAC operation mode.", + "name": "Set HVAC run mode", + "description": "Sets the run and/or operation mode of the HVAC system.", "fields": { "run_mode": { "name": "Run mode", diff --git a/homeassistant/components/nextcloud/strings.json b/homeassistant/components/nextcloud/strings.json index 9b22a6924bc..ef4e3de0f62 100644 --- a/homeassistant/components/nextcloud/strings.json +++ b/homeassistant/components/nextcloud/strings.json @@ -88,7 +88,7 @@ "name": "Cache start time" }, "nextcloud_cache_ttl": { - "name": "Cache ttl" + "name": "Cache TTL" }, "nextcloud_database_size": { "name": "Database size" @@ -268,13 +268,13 @@ "name": "Updates available" }, "nextcloud_system_cpuload_1": { - "name": "CPU Load last 1 minute" + "name": "CPU load last 1 minute" }, "nextcloud_system_cpuload_15": { - "name": "CPU Load last 15 minutes" + "name": "CPU load last 15 minutes" }, "nextcloud_system_cpuload_5": { - "name": "CPU Load last 5 minutes" + "name": "CPU load last 5 minutes" }, "nextcloud_system_freespace": { "name": "Free space" diff --git a/homeassistant/components/nextdns/__init__.py b/homeassistant/components/nextdns/__init__.py index 478ff215c30..eb8bd26cb9b 100644 --- a/homeassistant/components/nextdns/__init__.py +++ b/homeassistant/components/nextdns/__init__.py @@ -36,6 +36,7 @@ from .const import ( ATTR_SETTINGS, ATTR_STATUS, CONF_PROFILE_ID, + DOMAIN, UPDATE_INTERVAL_ANALYTICS, UPDATE_INTERVAL_CONNECTION, UPDATE_INTERVAL_SETTINGS, @@ -88,9 +89,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: NextDnsConfigEntry) -> b try: nextdns = await NextDns.create(websession, api_key) except (ApiError, 
ClientConnectorError, RetryError, TimeoutError) as err: - raise ConfigEntryNotReady from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "entry": entry.title, + "error": repr(err), + }, + ) from err except InvalidApiKeyError as err: - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"entry": entry.title}, + ) from err tasks = [] coordinators = {} diff --git a/homeassistant/components/nextdns/button.py b/homeassistant/components/nextdns/button.py index b36c243a463..2adccaa304f 100644 --- a/homeassistant/components/nextdns/button.py +++ b/homeassistant/components/nextdns/button.py @@ -2,15 +2,19 @@ from __future__ import annotations -from nextdns import AnalyticsStatus +from aiohttp import ClientError +from aiohttp.client_exceptions import ClientConnectorError +from nextdns import AnalyticsStatus, ApiError, InvalidApiKeyError from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import NextDnsConfigEntry +from .const import DOMAIN from .coordinator import NextDnsUpdateCoordinator PARALLEL_UPDATES = 1 @@ -53,4 +57,21 @@ class NextDnsButton( async def async_press(self) -> None: """Trigger cleaning logs.""" - await self.coordinator.nextdns.clear_logs(self.coordinator.profile_id) + try: + await self.coordinator.nextdns.clear_logs(self.coordinator.profile_id) + except ( + ApiError, + ClientConnectorError, + TimeoutError, + ClientError, + ) as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="method_error", + translation_placeholders={ + "entity": self.entity_id, + "error": repr(err), + }, + ) from err + except InvalidApiKeyError: + self.coordinator.config_entry.async_start_reauth(self.hass) diff --git a/homeassistant/components/nextdns/config_flow.py b/homeassistant/components/nextdns/config_flow.py index d3327c4c08b..d36064d8fb0 100644 --- a/homeassistant/components/nextdns/config_flow.py +++ b/homeassistant/components/nextdns/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from aiohttp.client_exceptions import ClientConnectorError @@ -19,6 +20,8 @@ from .const import CONF_PROFILE_ID, DOMAIN AUTH_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) +_LOGGER = logging.getLogger(__name__) + async def async_init_nextdns(hass: HomeAssistant, api_key: str) -> NextDns: """Check if credentials are valid.""" @@ -51,7 +54,8 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_api_key" except (ApiError, ClientConnectorError, RetryError, TimeoutError): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return await self.async_step_profiles() @@ -111,7 +115,8 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_api_key" except (ApiError, ClientConnectorError, RetryError, TimeoutError): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + 
_LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: return self.async_update_reload_and_abort( diff --git a/homeassistant/components/nextdns/coordinator.py b/homeassistant/components/nextdns/coordinator.py index 850702e4488..41f6ff43a2a 100644 --- a/homeassistant/components/nextdns/coordinator.py +++ b/homeassistant/components/nextdns/coordinator.py @@ -79,9 +79,20 @@ class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]): ClientConnectorError, RetryError, ) as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={ + "entry": self.config_entry.title, + "error": repr(err), + }, + ) from err except InvalidApiKeyError as err: - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"entry": self.config_entry.title}, + ) from err async def _async_update_data_internal(self) -> CoordinatorDataT: """Update data via library.""" diff --git a/homeassistant/components/nextdns/strings.json b/homeassistant/components/nextdns/strings.json index f2a5fa2816d..38944a0711e 100644 --- a/homeassistant/components/nextdns/strings.json +++ b/homeassistant/components/nextdns/strings.json @@ -359,5 +359,19 @@ "name": "Force YouTube restricted mode" } } + }, + "exceptions": { + "auth_error": { + "message": "Authentication failed for {entry}, please update your API key" + }, + "cannot_connect": { + "message": "An error occurred while connecting to the NextDNS API for {entry}: {error}" + }, + "method_error": { + "message": "An error occurred while calling the NextDNS API method for {entity}: {error}" + }, + "update_error": { + "message": "An error occurred while retrieving data from the NextDNS API for {entry}: {error}" + } } } diff --git a/homeassistant/components/nextdns/switch.py b/homeassistant/components/nextdns/switch.py index b7c77bd9dbd..8bdca76b955 100644 --- a/homeassistant/components/nextdns/switch.py +++ b/homeassistant/components/nextdns/switch.py @@ -8,7 +8,7 @@ from typing import Any from aiohttp import ClientError from aiohttp.client_exceptions import ClientConnectorError -from nextdns import ApiError, Settings +from nextdns import ApiError, InvalidApiKeyError, Settings from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory @@ -18,6 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import NextDnsConfigEntry +from .const import DOMAIN from .coordinator import NextDnsUpdateCoordinator PARALLEL_UPDATES = 1 @@ -582,9 +583,16 @@ class NextDnsSwitch( ClientError, ) as err: raise HomeAssistantError( - "NextDNS API returned an error calling set_setting for" - f" {self.entity_id}: {err}" + translation_domain=DOMAIN, + translation_key="method_error", + translation_placeholders={ + "entity": self.entity_id, + "error": repr(err), + }, ) from err + except InvalidApiKeyError: + self.coordinator.config_entry.async_start_reauth(self.hass) + return if result: self._attr_is_on = new_state diff --git a/homeassistant/components/nibe_heatpump/manifest.json b/homeassistant/components/nibe_heatpump/manifest.json index 049ba905f04..a8441fb90d8 100644 --- a/homeassistant/components/nibe_heatpump/manifest.json +++ b/homeassistant/components/nibe_heatpump/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump", "iot_class": "local_polling", - "requirements": ["nibe==2.14.0"] + "requirements": ["nibe==2.17.0"] } diff --git a/homeassistant/components/nibe_heatpump/sensor.py b/homeassistant/components/nibe_heatpump/sensor.py index ac4f9eba308..54cd0f7ea34 100644 --- a/homeassistant/components/nibe_heatpump/sensor.py +++ b/homeassistant/components/nibe_heatpump/sensor.py @@ -13,14 +13,17 @@ from homeassistant.components.sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + PERCENTAGE, EntityCategory, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfPressure, UnitOfTemperature, UnitOfTime, + UnitOfVolumeFlowRate, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -114,6 +117,20 @@ UNIT_DESCRIPTIONS = { state_class=SensorStateClass.TOTAL_INCREASING, native_unit_of_measurement=UnitOfTime.HOURS, ), + "min": SensorEntityDescription( + key="min", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfTime.MINUTES, + ), + "s": SensorEntityDescription( + key="s", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL_INCREASING, + native_unit_of_measurement=UnitOfTime.SECONDS, + ), "Hz": SensorEntityDescription( key="Hz", entity_category=EntityCategory.DIAGNOSTIC, @@ -121,6 +138,48 @@ UNIT_DESCRIPTIONS = { state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfFrequency.HERTZ, ), + "Pa": SensorEntityDescription( + key="Pa", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.PA, + ), + "kPa": SensorEntityDescription( + key="kPa", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.KPA, + ), + "bar": SensorEntityDescription( + key="bar", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.BAR, + ), + "l/m": SensorEntityDescription( + key="l/m", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + 
native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + ), + "m³/h": SensorEntityDescription( + key="m³/h", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + ), + "%RH": SensorEntityDescription( + key="%RH", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + ), } diff --git a/homeassistant/components/nibe_heatpump/strings.json b/homeassistant/components/nibe_heatpump/strings.json index 6fa421e0855..c65a76d3364 100644 --- a/homeassistant/components/nibe_heatpump/strings.json +++ b/homeassistant/components/nibe_heatpump/strings.json @@ -10,13 +10,13 @@ }, "modbus": { "data": { - "model": "Model of Heat Pump", + "model": "Model of heat pump", "modbus_url": "Modbus URL", - "modbus_unit": "Modbus Unit Identifier" + "modbus_unit": "Modbus unit identifier" }, "data_description": { - "modbus_url": "Modbus URL that describes the connection to your Heat Pump or MODBUS40 unit. It should be on the form:\n - `tcp://[HOST]:[PORT]` for Modbus TCP connection\n - `serial://[LOCAL DEVICE]` for a local Modbus RTU connection\n - `rfc2217://[HOST]:[PORT]` for a remote telnet based Modbus RTU connection.", - "modbus_unit": "Unit identification for your Heat Pump. Can usually be left at 0." + "modbus_url": "Modbus URL that describes the connection to your heat pump or MODBUS40 unit. It should be in the form:\n - `tcp://[HOST]:[PORT]` for Modbus TCP connection\n - `serial://[LOCAL DEVICE]` for a local Modbus RTU connection\n - `rfc2217://[HOST]:[PORT]` for a remote Telnet-based Modbus RTU connection.", + "modbus_unit": "Unit identification for your heat pump. Can usually be left at 0." 
} }, "nibegw": { diff --git a/homeassistant/components/niko_home_control/config_flow.py b/homeassistant/components/niko_home_control/config_flow.py index f37e5e9248a..76e71bc1690 100644 --- a/homeassistant/components/niko_home_control/config_flow.py +++ b/homeassistant/components/niko_home_control/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +import logging from typing import Any from nhc.controller import NHCController @@ -12,6 +13,8 @@ from homeassistant.const import CONF_HOST from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): str, @@ -25,7 +28,8 @@ async def test_connection(host: str) -> str | None: controller = NHCController(host, 8000) try: await controller.connect() - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return "cannot_connect" return None diff --git a/homeassistant/components/nmbs/__init__.py b/homeassistant/components/nmbs/__init__.py index 7d06baf37b6..4a2783143ca 100644 --- a/homeassistant/components/nmbs/__init__.py +++ b/homeassistant/components/nmbs/__init__.py @@ -8,6 +8,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from .const import DOMAIN @@ -22,13 +23,13 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the NMBS component.""" - api_client = iRail() + api_client = iRail(session=async_get_clientsession(hass)) hass.data.setdefault(DOMAIN, {}) - station_response = await hass.async_add_executor_job(api_client.get_stations) - if station_response == -1: + station_response = await api_client.get_stations() + if station_response is None: return False - hass.data[DOMAIN] = station_response["station"] + hass.data[DOMAIN] = station_response.stations return True diff --git a/homeassistant/components/nmbs/config_flow.py b/homeassistant/components/nmbs/config_flow.py index e45b2d9adeb..60ab015e22b 100644 --- a/homeassistant/components/nmbs/config_flow.py +++ b/homeassistant/components/nmbs/config_flow.py @@ -3,11 +3,13 @@ from typing import Any from pyrail import iRail +from pyrail.models import StationDetails import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import Platform from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import ( BooleanSelector, SelectOptionDict, @@ -31,17 +33,15 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize.""" - self.api_client = iRail() - self.stations: list[dict[str, Any]] = [] + self.stations: list[StationDetails] = [] - async def _fetch_stations(self) -> list[dict[str, Any]]: + async def _fetch_stations(self) -> list[StationDetails]: """Fetch the stations.""" - stations_response = await self.hass.async_add_executor_job( - self.api_client.get_stations - ) - if stations_response == -1: + api_client = iRail(session=async_get_clientsession(self.hass)) + stations_response = await api_client.get_stations() + if stations_response is None: raise CannotConnect("The API is currently unavailable.") - return stations_response["station"] + 
return stations_response.stations async def _fetch_stations_choices(self) -> list[SelectOptionDict]: """Fetch the stations options.""" @@ -50,7 +50,7 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN): self.stations = await self._fetch_stations() return [ - SelectOptionDict(value=station["id"], label=station["standardname"]) + SelectOptionDict(value=station.id, label=station.standard_name) for station in self.stations ] @@ -72,12 +72,12 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN): [station_from] = [ station for station in self.stations - if station["id"] == user_input[CONF_STATION_FROM] + if station.id == user_input[CONF_STATION_FROM] ] [station_to] = [ station for station in self.stations - if station["id"] == user_input[CONF_STATION_TO] + if station.id == user_input[CONF_STATION_TO] ] vias = "_excl_vias" if user_input.get(CONF_EXCLUDE_VIAS) else "" await self.async_set_unique_id( @@ -85,7 +85,7 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN): ) self._abort_if_unique_id_configured() - config_entry_name = f"Train from {station_from['standardname']} to {station_to['standardname']}" + config_entry_name = f"Train from {station_from.standard_name} to {station_to.standard_name}" return self.async_create_entry( title=config_entry_name, data=user_input, @@ -127,18 +127,18 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN): station_live = None for station in self.stations: if user_input[CONF_STATION_FROM] in ( - station["standardname"], - station["name"], + station.standard_name, + station.name, ): station_from = station if user_input[CONF_STATION_TO] in ( - station["standardname"], - station["name"], + station.standard_name, + station.name, ): station_to = station if CONF_STATION_LIVE in user_input and user_input[CONF_STATION_LIVE] in ( - station["standardname"], - station["name"], + station.standard_name, + station.name, ): station_live = station @@ -148,29 +148,29 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="same_station") # config flow uses id and not the standard name - user_input[CONF_STATION_FROM] = station_from["id"] - user_input[CONF_STATION_TO] = station_to["id"] + user_input[CONF_STATION_FROM] = station_from.id + user_input[CONF_STATION_TO] = station_to.id if station_live: - user_input[CONF_STATION_LIVE] = station_live["id"] + user_input[CONF_STATION_LIVE] = station_live.id entity_registry = er.async_get(self.hass) prefix = "live" vias = "_excl_vias" if user_input.get(CONF_EXCLUDE_VIAS, False) else "" if entity_id := entity_registry.async_get_entity_id( Platform.SENSOR, DOMAIN, - f"{prefix}_{station_live['standardname']}_{station_from['standardname']}_{station_to['standardname']}", + f"{prefix}_{station_live.standard_name}_{station_from.standard_name}_{station_to.standard_name}", ): - new_unique_id = f"{DOMAIN}_{prefix}_{station_live['id']}_{station_from['id']}_{station_to['id']}{vias}" + new_unique_id = f"{DOMAIN}_{prefix}_{station_live.id}_{station_from.id}_{station_to.id}{vias}" entity_registry.async_update_entity( entity_id, new_unique_id=new_unique_id ) if entity_id := entity_registry.async_get_entity_id( Platform.SENSOR, DOMAIN, - f"{prefix}_{station_live['name']}_{station_from['name']}_{station_to['name']}", + f"{prefix}_{station_live.name}_{station_from.name}_{station_to.name}", ): - new_unique_id = f"{DOMAIN}_{prefix}_{station_live['id']}_{station_from['id']}_{station_to['id']}{vias}" + new_unique_id = f"{DOMAIN}_{prefix}_{station_live.id}_{station_from.id}_{station_to.id}{vias}" entity_registry.async_update_entity( entity_id, 
new_unique_id=new_unique_id ) diff --git a/homeassistant/components/nmbs/const.py b/homeassistant/components/nmbs/const.py index fddb7365501..04c8beb327d 100644 --- a/homeassistant/components/nmbs/const.py +++ b/homeassistant/components/nmbs/const.py @@ -19,11 +19,7 @@ CONF_SHOW_ON_MAP = "show_on_map" def find_station_by_name(hass: HomeAssistant, station_name: str): """Find given station_name in the station list.""" return next( - ( - s - for s in hass.data[DOMAIN] - if station_name in (s["standardname"], s["name"]) - ), + (s for s in hass.data[DOMAIN] if station_name in (s.standard_name, s.name)), None, ) @@ -31,6 +27,6 @@ def find_station_by_name(hass: HomeAssistant, station_name: str): def find_station(hass: HomeAssistant, station_name: str): """Find given station_id in the station list.""" return next( - (s for s in hass.data[DOMAIN] if station_name in s["id"]), + (s for s in hass.data[DOMAIN] if station_name in s.id), None, ) diff --git a/homeassistant/components/nmbs/manifest.json b/homeassistant/components/nmbs/manifest.json index 9016eff11f8..37ff9429a54 100644 --- a/homeassistant/components/nmbs/manifest.json +++ b/homeassistant/components/nmbs/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["pyrail"], "quality_scale": "legacy", - "requirements": ["pyrail==0.0.3"] + "requirements": ["pyrail==0.4.1"] } diff --git a/homeassistant/components/nmbs/sensor.py b/homeassistant/components/nmbs/sensor.py index c6dea2d0843..3552ac3c26d 100644 --- a/homeassistant/components/nmbs/sensor.py +++ b/homeassistant/components/nmbs/sensor.py @@ -2,10 +2,12 @@ from __future__ import annotations +from datetime import datetime import logging from typing import Any from pyrail import iRail +from pyrail.models import ConnectionDetails, LiveboardDeparture, StationDetails import voluptuous as vol from homeassistant.components.sensor import ( @@ -23,6 +25,7 @@ from homeassistant.const import ( ) from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.entity_platform import ( AddConfigEntryEntitiesCallback, AddEntitiesCallback, @@ -44,8 +47,6 @@ from .const import ( # noqa: F401 _LOGGER = logging.getLogger(__name__) -API_FAILURE = -1 - DEFAULT_NAME = "NMBS" DEFAULT_ICON = "mdi:train" @@ -63,12 +64,12 @@ PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( ) -def get_time_until(departure_time=None): +def get_time_until(departure_time: datetime | None = None): """Calculate the time between now and a train's departure time.""" if departure_time is None: return 0 - delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now() + delta = dt_util.as_utc(departure_time) - dt_util.utcnow() return round(delta.total_seconds() / 60) @@ -77,11 +78,9 @@ def get_delay_in_minutes(delay=0): return round(int(delay) / 60) -def get_ride_duration(departure_time, arrival_time, delay=0): +def get_ride_duration(departure_time: datetime, arrival_time: datetime, delay=0): """Calculate the total travel time in minutes.""" - duration = dt_util.utc_from_timestamp( - int(arrival_time) - ) - dt_util.utc_from_timestamp(int(departure_time)) + duration = arrival_time - departure_time duration_time = int(round(duration.total_seconds() / 60)) return duration_time + get_delay_in_minutes(delay) @@ -157,7 +156,7 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up NMBS sensor entities based 
on a config entry.""" - api_client = iRail() + api_client = iRail(session=async_get_clientsession(hass)) name = config_entry.data.get(CONF_NAME, None) show_on_map = config_entry.data.get(CONF_SHOW_ON_MAP, False) @@ -189,9 +188,9 @@ class NMBSLiveBoard(SensorEntity): def __init__( self, api_client: iRail, - live_station: dict[str, Any], - station_from: dict[str, Any], - station_to: dict[str, Any], + live_station: StationDetails, + station_from: StationDetails, + station_to: StationDetails, excl_vias: bool, ) -> None: """Initialize the sensor for getting liveboard data.""" @@ -201,7 +200,8 @@ class NMBSLiveBoard(SensorEntity): self._station_to = station_to self._excl_vias = excl_vias - self._attrs: dict[str, Any] | None = {} + self._attrs: LiveboardDeparture | None = None + self._state: str | None = None self.entity_registry_enabled_default = False @@ -209,22 +209,20 @@ class NMBSLiveBoard(SensorEntity): @property def name(self) -> str: """Return the sensor default name.""" - return f"Trains in {self._station['standardname']}" + return f"Trains in {self._station.standard_name}" @property def unique_id(self) -> str: """Return the unique ID.""" - unique_id = ( - f"{self._station['id']}_{self._station_from['id']}_{self._station_to['id']}" - ) + unique_id = f"{self._station.id}_{self._station_from.id}_{self._station_to.id}" vias = "_excl_vias" if self._excl_vias else "" return f"nmbs_live_{unique_id}{vias}" @property def icon(self) -> str: """Return the default icon or an alert icon if delays.""" - if self._attrs and int(self._attrs["delay"]) > 0: + if self._attrs and int(self._attrs.delay) > 0: return DEFAULT_ICON_ALERT return DEFAULT_ICON @@ -240,15 +238,15 @@ class NMBSLiveBoard(SensorEntity): if self._state is None or not self._attrs: return None - delay = get_delay_in_minutes(self._attrs["delay"]) - departure = get_time_until(self._attrs["time"]) + delay = get_delay_in_minutes(self._attrs.delay) + departure = get_time_until(self._attrs.time) attrs = { "departure": f"In {departure} minutes", "departure_minutes": departure, - "extra_train": int(self._attrs["isExtra"]) > 0, - "vehicle_id": self._attrs["vehicle"], - "monitored_station": self._station["standardname"], + "extra_train": self._attrs.is_extra, + "vehicle_id": self._attrs.vehicle, + "monitored_station": self._station.standard_name, } if delay > 0: @@ -257,28 +255,26 @@ class NMBSLiveBoard(SensorEntity): return attrs - def update(self) -> None: + async def async_update(self, **kwargs: Any) -> None: """Set the state equal to the next departure.""" - liveboard = self._api_client.get_liveboard(self._station["id"]) + liveboard = await self._api_client.get_liveboard(self._station.id) - if liveboard == API_FAILURE: + if liveboard is None: _LOGGER.warning("API failed in NMBSLiveBoard") return - if not (departures := liveboard.get("departures")): + if not (departures := liveboard.departures): _LOGGER.warning("API returned invalid departures: %r", liveboard) return _LOGGER.debug("API returned departures: %r", departures) - if departures["number"] == "0": + if len(departures) == 0: # No trains are scheduled return - next_departure = departures["departure"][0] + next_departure = departures[0] self._attrs = next_departure - self._state = ( - f"Track {next_departure['platform']} - {next_departure['station']}" - ) + self._state = f"Track {next_departure.platform} - {next_departure.station}" class NMBSSensor(SensorEntity): @@ -292,8 +288,8 @@ class NMBSSensor(SensorEntity): api_client: iRail, name: str, show_on_map: bool, - station_from: dict[str, 
Any], - station_to: dict[str, Any], + station_from: StationDetails, + station_to: StationDetails, excl_vias: bool, ) -> None: """Initialize the NMBS connection sensor.""" @@ -304,13 +300,13 @@ class NMBSSensor(SensorEntity): self._station_to = station_to self._excl_vias = excl_vias - self._attrs: dict[str, Any] | None = {} + self._attrs: ConnectionDetails | None = None self._state = None @property def unique_id(self) -> str: """Return the unique ID.""" - unique_id = f"{self._station_from['id']}_{self._station_to['id']}" + unique_id = f"{self._station_from.id}_{self._station_to.id}" vias = "_excl_vias" if self._excl_vias else "" return f"nmbs_connection_{unique_id}{vias}" @@ -319,14 +315,14 @@ class NMBSSensor(SensorEntity): def name(self) -> str: """Return the name of the sensor.""" if self._name is None: - return f"Train from {self._station_from['standardname']} to {self._station_to['standardname']}" + return f"Train from {self._station_from.standard_name} to {self._station_to.standard_name}" return self._name @property def icon(self) -> str: """Return the sensor default icon or an alert icon if any delay.""" if self._attrs: - delay = get_delay_in_minutes(self._attrs["departure"]["delay"]) + delay = get_delay_in_minutes(self._attrs.departure.delay) if delay > 0: return "mdi:alert-octagon" @@ -338,19 +334,19 @@ class NMBSSensor(SensorEntity): if self._state is None or not self._attrs: return None - delay = get_delay_in_minutes(self._attrs["departure"]["delay"]) - departure = get_time_until(self._attrs["departure"]["time"]) - canceled = int(self._attrs["departure"]["canceled"]) + delay = get_delay_in_minutes(self._attrs.departure.delay) + departure = get_time_until(self._attrs.departure.time) + canceled = self._attrs.departure.canceled attrs = { - "destination": self._attrs["departure"]["station"], - "direction": self._attrs["departure"]["direction"]["name"], - "platform_arriving": self._attrs["arrival"]["platform"], - "platform_departing": self._attrs["departure"]["platform"], - "vehicle_id": self._attrs["departure"]["vehicle"], + "destination": self._attrs.departure.station, + "direction": self._attrs.departure.direction.name, + "platform_arriving": self._attrs.arrival.platform, + "platform_departing": self._attrs.departure.platform, + "vehicle_id": self._attrs.departure.vehicle, } - if canceled != 1: + if not canceled: attrs["departure"] = f"In {departure} minutes" attrs["departure_minutes"] = departure attrs["canceled"] = False @@ -364,14 +360,14 @@ class NMBSSensor(SensorEntity): attrs[ATTR_LONGITUDE] = self.station_coordinates[1] if self.is_via_connection and not self._excl_vias: - via = self._attrs["vias"]["via"][0] + via = self._attrs.vias[0] - attrs["via"] = via["station"] - attrs["via_arrival_platform"] = via["arrival"]["platform"] - attrs["via_transfer_platform"] = via["departure"]["platform"] + attrs["via"] = via.station + attrs["via_arrival_platform"] = via.arrival.platform + attrs["via_transfer_platform"] = via.departure.platform attrs["via_transfer_time"] = get_delay_in_minutes( - via["timebetween"] - ) + get_delay_in_minutes(via["departure"]["delay"]) + via.timebetween + ) + get_delay_in_minutes(via.departure.delay) if delay > 0: attrs["delay"] = f"{delay} minutes" @@ -390,8 +386,8 @@ class NMBSSensor(SensorEntity): if self._state is None or not self._attrs: return [] - latitude = float(self._attrs["departure"]["stationinfo"]["locationY"]) - longitude = float(self._attrs["departure"]["stationinfo"]["locationX"]) + latitude = float(self._attrs.departure.station_info.latitude) 
+ longitude = float(self._attrs.departure.station_info.longitude) return [latitude, longitude] @property @@ -400,24 +396,24 @@ class NMBSSensor(SensorEntity): if not self._attrs: return False - return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0 + return self._attrs.vias is not None and len(self._attrs.vias) > 0 - def update(self) -> None: + async def async_update(self, **kwargs: Any) -> None: """Set the state to the duration of a connection.""" - connections = self._api_client.get_connections( - self._station_from["id"], self._station_to["id"] + connections = await self._api_client.get_connections( + self._station_from.id, self._station_to.id ) - if connections == API_FAILURE: + if connections is None: _LOGGER.warning("API failed in NMBSSensor") return - if not (connection := connections.get("connection")): + if not (connection := connections.connections): _LOGGER.warning("API returned invalid connection: %r", connections) return _LOGGER.debug("API returned connection: %r", connection) - if int(connection[0]["departure"]["left"]) > 0: + if connection[0].departure.left: next_connection = connection[1] else: next_connection = connection[0] @@ -431,9 +427,9 @@ class NMBSSensor(SensorEntity): return duration = get_ride_duration( - next_connection["departure"]["time"], - next_connection["arrival"]["time"], - next_connection["departure"]["delay"], + next_connection.departure.time, + next_connection.arrival.time, + next_connection.departure.delay, ) self._state = duration diff --git a/homeassistant/components/nmbs/strings.json b/homeassistant/components/nmbs/strings.json index 3e7aa8d05bd..ac11026577a 100644 --- a/homeassistant/components/nmbs/strings.json +++ b/homeassistant/components/nmbs/strings.json @@ -29,7 +29,7 @@ "issues": { "deprecated_yaml_import_issue_station_not_found": { "title": "The {integration_title} YAML configuration import failed", - "description": "Configuring {integration_title} using YAML is being removed but there was an problem importing your YAML configuration.\n\nThe used station \"{station_name}\" could not be found. Fix it or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + "description": "Configuring {integration_title} using YAML is being removed but there was a problem importing your YAML configuration.\n\nThe used station \"{station_name}\" could not be found. Fix it or remove the {integration_title} YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
} } } diff --git a/homeassistant/components/nobo_hub/strings.json b/homeassistant/components/nobo_hub/strings.json index 28be01862e9..1059934e896 100644 --- a/homeassistant/components/nobo_hub/strings.json +++ b/homeassistant/components/nobo_hub/strings.json @@ -44,7 +44,7 @@ "entity": { "select": { "global_override": { - "name": "global override", + "name": "Global override", "state": { "away": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::away%]", "comfort": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]", @@ -53,7 +53,7 @@ } }, "week_profile": { - "name": "week profile" + "name": "Week profile" } } } diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py index c6993826239..4bde12afc3c 100644 --- a/homeassistant/components/nordpool/sensor.py +++ b/homeassistant/components/nordpool/sensor.py @@ -34,7 +34,7 @@ def validate_prices( index: int, ) -> float | None: """Validate and return.""" - if result := func(entity)[area][index]: + if (result := func(entity)[area][index]) is not None: return result / 1000 return None diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json index cc10a1a0640..7b33f032de1 100644 --- a/homeassistant/components/nordpool/strings.json +++ b/homeassistant/components/nordpool/strings.json @@ -15,7 +15,7 @@ }, "data_description": { "currency": "Select currency to display prices in, EUR is the base currency.", - "areas": "Areas to display prices for according to Nordpool market areas." + "areas": "Areas to display prices for according to Nord Pool market areas." } }, "reconfigure": { @@ -95,11 +95,11 @@ "services": { "get_prices_for_date": { "name": "Get prices for date", - "description": "Retrieve the prices for a specific date.", + "description": "Retrieves the prices for a specific date.", "fields": { "config_entry": { - "name": "Select Nord Pool configuration entry", - "description": "Choose the configuration entry." + "name": "Config entry", + "description": "The Nord Pool configuration entry for this action." }, "date": { "name": "Date", diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 07c849278d4..f44a510b1c0 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -159,7 +159,7 @@ class NumberDeviceClass(StrEnum): DURATION = "duration" """Fixed duration. 
- Unit of measurement: `d`, `h`, `min`, `s`, `ms` + Unit of measurement: `d`, `h`, `min`, `s`, `ms`, `µs` """ ENERGY = "energy" @@ -462,6 +462,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { UnitOfTime.MINUTES, UnitOfTime.SECONDS, UnitOfTime.MILLISECONDS, + UnitOfTime.MICROSECONDS, }, NumberDeviceClass.ENERGY: set(UnitOfEnergy), NumberDeviceClass.ENERGY_DISTANCE: set(UnitOfEnergyDistance), diff --git a/homeassistant/components/nut/__init__.py b/homeassistant/components/nut/__init__.py index 169dbbbff5d..5b188868819 100644 --- a/homeassistant/components/nut/__init__.py +++ b/homeassistant/components/nut/__init__.py @@ -103,7 +103,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool: ) status = coordinator.data - _LOGGER.debug("NUT Sensors Available: %s", status) + _LOGGER.debug("NUT Sensors Available: %s", status if status else None) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) unique_id = _unique_id_from_status(status) @@ -111,14 +111,34 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool: unique_id = entry.entry_id if username is not None and password is not None: + # Dynamically add outlet integration commands + additional_integration_commands = set() + if (num_outlets := status.get("outlet.count")) is not None: + for outlet_num in range(1, int(num_outlets) + 1): + outlet_num_str: str = str(outlet_num) + additional_integration_commands |= { + f"outlet.{outlet_num_str}.load.cycle", + f"outlet.{outlet_num_str}.load.on", + f"outlet.{outlet_num_str}.load.off", + } + + valid_integration_commands = ( + INTEGRATION_SUPPORTED_COMMANDS | additional_integration_commands + ) + user_available_commands = { - device_supported_command - for device_supported_command in await data.async_list_commands() or {} - if device_supported_command in INTEGRATION_SUPPORTED_COMMANDS + device_command + for device_command in await data.async_list_commands() or {} + if device_command in valid_integration_commands } else: user_available_commands = set() + _LOGGER.debug( + "NUT Commands Available: %s", + user_available_commands if user_available_commands else None, + ) + entry.runtime_data = NutRuntimeData( coordinator, data, unique_id, user_available_commands ) @@ -240,6 +260,7 @@ class PyNUTData: self._client = AIONUTClient(self._host, port, username, password, 5, persistent) self.ups_list: dict[str, str] | None = None + self.device_name: str | None = None self._status: dict[str, str] | None = None self._device_info: NUTDeviceInfo | None = None @@ -250,7 +271,7 @@ class PyNUTData: @property def name(self) -> str: - """Return the name of the ups.""" + """Return the name of the NUT device.""" return self._alias or f"Nut-{self._host}" @property @@ -294,6 +315,8 @@ class PyNUTData: self._status = await self._async_get_status() if self._device_info is None: self._device_info = self._get_device_info() + if self.device_name is None: + self.device_name = self.name.title() return self._status async def async_run_command(self, command_name: str) -> None: diff --git a/homeassistant/components/nut/button.py b/homeassistant/components/nut/button.py new file mode 100644 index 00000000000..0708056b2e3 --- /dev/null +++ b/homeassistant/components/nut/button.py @@ -0,0 +1,67 @@ +"""Provides a switch for switchable NUT outlets.""" + +from __future__ import annotations + +import logging + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from 
homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import NutConfigEntry +from .entity import NUTBaseEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NutConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the NUT buttons.""" + pynut_data = config_entry.runtime_data + coordinator = pynut_data.coordinator + status = coordinator.data + + # Dynamically add outlet button types + if (num_outlets := status.get("outlet.count")) is None: + return + + data = pynut_data.data + unique_id = pynut_data.unique_id + valid_button_types: dict[str, ButtonEntityDescription] = {} + for outlet_num in range(1, int(num_outlets) + 1): + outlet_num_str = str(outlet_num) + outlet_name: str = status.get(f"outlet.{outlet_num_str}.name") or outlet_num_str + valid_button_types |= { + f"outlet.{outlet_num_str}.load.cycle": ButtonEntityDescription( + key=f"outlet.{outlet_num_str}.load.cycle", + translation_key="outlet_number_load_cycle", + translation_placeholders={"outlet_name": outlet_name}, + device_class=ButtonDeviceClass.RESTART, + entity_registry_enabled_default=True, + ), + } + + async_add_entities( + NUTButton(coordinator, description, data, unique_id) + for button_id, description in valid_button_types.items() + if button_id in pynut_data.user_available_commands + ) + + +class NUTButton(NUTBaseEntity, ButtonEntity): + """Representation of a button entity for NUT.""" + + async def async_press(self) -> None: + """Press the button.""" + name_list = self.entity_description.key.split(".") + command_name = f"{name_list[0]}.{name_list[1]}.load.cycle" + await self.pynut_data.async_run_command(command_name) diff --git a/homeassistant/components/nut/const.py b/homeassistant/components/nut/const.py index 6db40a910a0..d741d8e95f9 100644 --- a/homeassistant/components/nut/const.py +++ b/homeassistant/components/nut/const.py @@ -6,7 +6,11 @@ from homeassistant.const import Platform DOMAIN = "nut" -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [ + Platform.BUTTON, + Platform.SENSOR, + Platform.SWITCH, +] DEFAULT_NAME = "NUT UPS" DEFAULT_HOST = "localhost" diff --git a/homeassistant/components/nut/entity.py b/homeassistant/components/nut/entity.py new file mode 100644 index 00000000000..e6536d8aad6 --- /dev/null +++ b/homeassistant/components/nut/entity.py @@ -0,0 +1,67 @@ +"""Base entity for the NUT integration.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import cast + +from homeassistant.const import ( + ATTR_MANUFACTURER, + ATTR_MODEL, + ATTR_SERIAL_NUMBER, + ATTR_SW_VERSION, +) +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) + +from . 
import PyNUTData +from .const import DOMAIN + +NUT_DEV_INFO_TO_DEV_INFO: dict[str, str] = { + "manufacturer": ATTR_MANUFACTURER, + "model": ATTR_MODEL, + "firmware": ATTR_SW_VERSION, + "serial": ATTR_SERIAL_NUMBER, +} + + +class NUTBaseEntity(CoordinatorEntity[DataUpdateCoordinator]): + """NUT base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: DataUpdateCoordinator, + entity_description: EntityDescription, + data: PyNUTData, + unique_id: str, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + self._attr_unique_id = f"{unique_id}_{entity_description.key}" + + self.pynut_data = data + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + name=self.pynut_data.device_name, + ) + self._attr_device_info.update(_get_nut_device_info(data)) + + +def _get_nut_device_info(data: PyNUTData) -> DeviceInfo: + """Return a DeviceInfo object filled with NUT device info.""" + nut_dev_infos = asdict(data.device_info) + nut_infos = { + info_key: nut_dev_infos[nut_key] + for nut_key, info_key in NUT_DEV_INFO_TO_DEV_INFO.items() + if nut_dev_infos[nut_key] is not None + } + + return cast(DeviceInfo, nut_infos) diff --git a/homeassistant/components/nut/icons.json b/homeassistant/components/nut/icons.json index 91df9d10553..a795368005c 100644 --- a/homeassistant/components/nut/icons.json +++ b/homeassistant/components/nut/icons.json @@ -1,5 +1,10 @@ { "entity": { + "button": { + "outlet_number_load_cycle": { + "default": "mdi:restart" + } + }, "sensor": { "ambient_humidity_status": { "default": "mdi:information-outline" @@ -37,17 +42,38 @@ "battery_packs_bad": { "default": "mdi:information-outline" }, + "battery_runtime": { + "default": "mdi:clock-outline" + }, + "battery_runtime_low": { + "default": "mdi:clock-alert-outline" + }, + "battery_runtime_restart": { + "default": "mdi:clock-start" + }, "battery_type": { "default": "mdi:information-outline" }, + "battery_voltage_high": { + "default": "mdi:battery-high" + }, + "battery_voltage_low": { + "default": "mdi:battery-low" + }, "input_bypass_phases": { + "default": "mdi:sine-wave" + }, + "input_current_status": { "default": "mdi:information-outline" }, "input_frequency_status": { "default": "mdi:information-outline" }, + "input_load": { + "default": "mdi:percent-box-outline" + }, "input_phases": { - "default": "mdi:information-outline" + "default": "mdi:sine-wave" }, "input_sensitivity": { "default": "mdi:information-outline" @@ -55,17 +81,26 @@ "input_transfer_reason": { "default": "mdi:information-outline" }, + "input_voltage_status": { + "default": "mdi:information-outline" + }, + "outlet_number_current_status": { + "default": "mdi:information-outline" + }, + "outlet_number_desc": { + "default": "mdi:information-outline" + }, "output_l1_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_l2_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_l3_power_percent": { - "default": "mdi:gauge" + "default": "mdi:percent-circle-outline" }, "output_phases": { - "default": "mdi:information-outline" + "default": "mdi:sine-wave" }, "ups_alarm": { "default": "mdi:alarm" @@ -76,20 +111,29 @@ "ups_contacts": { "default": "mdi:information-outline" }, + "ups_delay_reboot": { + "default": "mdi:timelapse" + }, + "ups_delay_shutdown": { + "default": "mdi:timelapse" + }, + "ups_delay_start": { + "default": "mdi:timelapse" + }, "ups_display_language": { 
"default": "mdi:information-outline" }, "ups_efficiency": { - "default": "mdi:gauge" + "default": "mdi:percent-outline" }, "ups_id": { "default": "mdi:information-outline" }, "ups_load": { - "default": "mdi:gauge" + "default": "mdi:percent-box-outline" }, "ups_load_high": { - "default": "mdi:gauge" + "default": "mdi:percent-box-outline" }, "ups_shutdown": { "default": "mdi:information-outline" @@ -112,15 +156,32 @@ "ups_test_date": { "default": "mdi:calendar" }, + "ups_test_interval": { + "default": "mdi:timelapse" + }, "ups_test_result": { "default": "mdi:information-outline" }, + "ups_timer_reboot": { + "default": "mdi:timer-refresh-outline" + }, + "ups_timer_shutdown": { + "default": "mdi:timer-stop-outline" + }, + "ups_timer_start": { + "default": "mdi:timer-play-outline" + }, "ups_type": { "default": "mdi:information-outline" }, "ups_watchdog_status": { "default": "mdi:information-outline" } + }, + "switch": { + "outlet_number_load_poweronoff": { + "default": "mdi:power" + } } } } diff --git a/homeassistant/components/nut/sensor.py b/homeassistant/components/nut/sensor.py index 2f574ec4842..5bf7958e39e 100644 --- a/homeassistant/components/nut/sensor.py +++ b/homeassistant/components/nut/sensor.py @@ -1,10 +1,9 @@ -"""Provides a sensor to track various status aspects of a UPS.""" +"""Provides a sensor to track various status aspects of a NUT device.""" from __future__ import annotations -from dataclasses import asdict import logging -from typing import Final, cast +from typing import Final from homeassistant.components.sensor import ( SensorDeviceClass, @@ -13,10 +12,6 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( - ATTR_MANUFACTURER, - ATTR_MODEL, - ATTR_SERIAL_NUMBER, - ATTR_SW_VERSION, PERCENTAGE, STATE_UNKNOWN, EntityCategory, @@ -29,23 +24,22 @@ from homeassistant.const import ( UnitOfTime, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) -from . import NutConfigEntry, PyNUTData -from .const import DOMAIN, KEY_STATUS, KEY_STATUS_DISPLAY, STATE_TYPES +from . 
import NutConfigEntry +from .const import KEY_STATUS, KEY_STATUS_DISPLAY, STATE_TYPES +from .entity import NUTBaseEntity -NUT_DEV_INFO_TO_DEV_INFO: dict[str, str] = { - "manufacturer": ATTR_MANUFACTURER, - "model": ATTR_MODEL, - "firmware": ATTR_SW_VERSION, - "serial": ATTR_SERIAL_NUMBER, +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + +AMBIENT_PRESENT = "ambient.present" +AMBIENT_SENSORS = { + "ambient.humidity", + "ambient.humidity.status", + "ambient.temperature", + "ambient.temperature.status", } - AMBIENT_THRESHOLD_STATUS_OPTIONS = [ "good", "warning-low", @@ -58,51 +52,752 @@ _LOGGER = logging.getLogger(__name__) SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { - "ups.status.display": SensorEntityDescription( - key="ups.status.display", - translation_key="ups_status_display", + "ambient.humidity": SensorEntityDescription( + key="ambient.humidity", + translation_key="ambient_humidity", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, ), - "ups.status": SensorEntityDescription( - key="ups.status", - translation_key="ups_status", + "ambient.humidity.status": SensorEntityDescription( + key="ambient.humidity.status", + translation_key="ambient_humidity_status", + device_class=SensorDeviceClass.ENUM, + options=AMBIENT_THRESHOLD_STATUS_OPTIONS, + entity_category=EntityCategory.DIAGNOSTIC, ), - "ups.alarm": SensorEntityDescription( - key="ups.alarm", - translation_key="ups_alarm", + "ambient.temperature": SensorEntityDescription( + key="ambient.temperature", + translation_key="ambient_temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, ), - "ups.temperature": SensorEntityDescription( - key="ups.temperature", - translation_key="ups_temperature", + "ambient.temperature.status": SensorEntityDescription( + key="ambient.temperature.status", + translation_key="ambient_temperature_status", + device_class=SensorDeviceClass.ENUM, + options=AMBIENT_THRESHOLD_STATUS_OPTIONS, + entity_category=EntityCategory.DIAGNOSTIC, + ), + "battery.alarm.threshold": SensorEntityDescription( + key="battery.alarm.threshold", + translation_key="battery_alarm_threshold", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.capacity": SensorEntityDescription( + key="battery.capacity", + translation_key="battery_capacity", + native_unit_of_measurement="Ah", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.charge": SensorEntityDescription( + key="battery.charge", + translation_key="battery_charge", + native_unit_of_measurement=PERCENTAGE, + device_class=SensorDeviceClass.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + ), + "battery.charge.low": SensorEntityDescription( + key="battery.charge.low", + translation_key="battery_charge_low", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.charge.restart": SensorEntityDescription( + key="battery.charge.restart", + translation_key="battery_charge_restart", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.charge.warning": SensorEntityDescription( + key="battery.charge.warning", 
+ translation_key="battery_charge_warning", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.charger.status": SensorEntityDescription( + key="battery.charger.status", + translation_key="battery_charger_status", + ), + "battery.current": SensorEntityDescription( + key="battery.current", + translation_key="battery_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.current.total": SensorEntityDescription( + key="battery.current.total", + translation_key="battery_current_total", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.date": SensorEntityDescription( + key="battery.date", + translation_key="battery_date", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.mfr.date": SensorEntityDescription( + key="battery.mfr.date", + translation_key="battery_mfr_date", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.packs": SensorEntityDescription( + key="battery.packs", + translation_key="battery_packs", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.packs.bad": SensorEntityDescription( + key="battery.packs.bad", + translation_key="battery_packs_bad", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.runtime": SensorEntityDescription( + key="battery.runtime", + translation_key="battery_runtime", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=SensorDeviceClass.DURATION, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.runtime.low": SensorEntityDescription( + key="battery.runtime.low", + translation_key="battery_runtime_low", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=SensorDeviceClass.DURATION, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.runtime.restart": SensorEntityDescription( + key="battery.runtime.restart", + translation_key="battery_runtime_restart", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=SensorDeviceClass.DURATION, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.temperature": SensorEntityDescription( + key="battery.temperature", + translation_key="battery_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.load": SensorEntityDescription( - key="ups.load", - translation_key="ups_load", + "battery.type": SensorEntityDescription( + key="battery.type", + translation_key="battery_type", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.voltage": SensorEntityDescription( + key="battery.voltage", + translation_key="battery_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + 
entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.voltage.high": SensorEntityDescription( + key="battery.voltage.high", + translation_key="battery_voltage_high", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.voltage.low": SensorEntityDescription( + key="battery.voltage.low", + translation_key="battery_voltage_low", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "battery.voltage.nominal": SensorEntityDescription( + key="battery.voltage.nominal", + translation_key="battery_voltage_nominal", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.current": SensorEntityDescription( + key="input.bypass.current", + translation_key="input_bypass_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.frequency": SensorEntityDescription( + key="input.bypass.frequency", + translation_key="input_bypass_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L1.current": SensorEntityDescription( + key="input.bypass.L1.current", + translation_key="input_bypass_l1_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L1-N.voltage": SensorEntityDescription( + key="input.bypass.L1-N.voltage", + translation_key="input_bypass_l1_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L1.realpower": SensorEntityDescription( + key="input.bypass.L1.realpower", + translation_key="input_bypass_l1_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L2.current": SensorEntityDescription( + key="input.bypass.L2.current", + translation_key="input_bypass_l2_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L2-N.voltage": SensorEntityDescription( + key="input.bypass.L2-N.voltage", + translation_key="input_bypass_l2_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + 
entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L2.realpower": SensorEntityDescription( + key="input.bypass.L2.realpower", + translation_key="input_bypass_l2_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L3.current": SensorEntityDescription( + key="input.bypass.L3.current", + translation_key="input_bypass_l3_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L3-N.voltage": SensorEntityDescription( + key="input.bypass.L3-N.voltage", + translation_key="input_bypass_l3_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.L3.realpower": SensorEntityDescription( + key="input.bypass.L3.realpower", + translation_key="input_bypass_l3_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.phases": SensorEntityDescription( + key="input.bypass.phases", + translation_key="input_bypass_phases", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.realpower": SensorEntityDescription( + key="input.bypass.realpower", + translation_key="input_bypass_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.bypass.voltage": SensorEntityDescription( + key="input.bypass.voltage", + translation_key="input_bypass_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.current": SensorEntityDescription( + key="input.current", + translation_key="input_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + "input.current.status": SensorEntityDescription( + key="input.current.status", + translation_key="input_current_status", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.frequency": SensorEntityDescription( + key="input.frequency", + translation_key="input_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.frequency.nominal": SensorEntityDescription( + key="input.frequency.nominal", + translation_key="input_frequency_nominal", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + 
entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.frequency.status": SensorEntityDescription( + key="input.frequency.status", + translation_key="input_frequency_status", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L1.current": SensorEntityDescription( + key="input.L1.current", + translation_key="input_l1_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L1.frequency": SensorEntityDescription( + key="input.L1.frequency", + translation_key="input_l1_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L1-N.voltage": SensorEntityDescription( + key="input.L1-N.voltage", + translation_key="input_l1_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L1.realpower": SensorEntityDescription( + key="input.L1.realpower", + translation_key="input_l1_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L2.current": SensorEntityDescription( + key="input.L2.current", + translation_key="input_l2_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L2.frequency": SensorEntityDescription( + key="input.L2.frequency", + translation_key="input_l2_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L2-N.voltage": SensorEntityDescription( + key="input.L2-N.voltage", + translation_key="input_l2_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L2.realpower": SensorEntityDescription( + key="input.L2.realpower", + translation_key="input_l2_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L3.current": SensorEntityDescription( + key="input.L3.current", + translation_key="input_l3_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L3.frequency": SensorEntityDescription( + key="input.L3.frequency", + translation_key="input_l3_frequency", + 
native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L3-N.voltage": SensorEntityDescription( + key="input.L3-N.voltage", + translation_key="input_l3_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.L3.realpower": SensorEntityDescription( + key="input.L3.realpower", + translation_key="input_l3_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.load": SensorEntityDescription( + key="input.load", + translation_key="input_load", native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, ), - "ups.load.high": SensorEntityDescription( - key="ups.load.high", - translation_key="ups_load_high", + "input.phases": SensorEntityDescription( + key="input.phases", + translation_key="input_phases", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.power": SensorEntityDescription( + key="input.power", + translation_key="input_power", + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + device_class=SensorDeviceClass.APPARENT_POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.realpower": SensorEntityDescription( + key="input.realpower", + translation_key="input_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.sensitivity": SensorEntityDescription( + key="input.sensitivity", + translation_key="input_sensitivity", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.transfer.high": SensorEntityDescription( + key="input.transfer.high", + translation_key="input_transfer_high", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.transfer.low": SensorEntityDescription( + key="input.transfer.low", + translation_key="input_transfer_low", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.transfer.reason": SensorEntityDescription( + key="input.transfer.reason", + translation_key="input_transfer_reason", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.voltage": SensorEntityDescription( + key="input.voltage", + translation_key="input_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + "input.voltage.nominal": SensorEntityDescription( + key="input.voltage.nominal", + translation_key="input_voltage_nominal", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + 
device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "input.voltage.status": SensorEntityDescription( + key="input.voltage.status", + translation_key="input_voltage_status", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "outlet.voltage": SensorEntityDescription( + key="outlet.voltage", + translation_key="outlet_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + "output.current": SensorEntityDescription( + key="output.current", + translation_key="output_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.current.nominal": SensorEntityDescription( + key="output.current.nominal", + translation_key="output_current_nominal", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.frequency": SensorEntityDescription( + key="output.frequency", + translation_key="output_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.frequency.nominal": SensorEntityDescription( + key="output.frequency.nominal", + translation_key="output_frequency_nominal", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + device_class=SensorDeviceClass.FREQUENCY, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L1.current": SensorEntityDescription( + key="output.L1.current", + translation_key="output_l1_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L1-N.voltage": SensorEntityDescription( + key="output.L1-N.voltage", + translation_key="output_l1_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L1.power.percent": SensorEntityDescription( + key="output.L1.power.percent", + translation_key="output_l1_power_percent", native_unit_of_measurement=PERCENTAGE, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.id": SensorEntityDescription( - key="ups.id", - translation_key="ups_id", + "output.L1.realpower": SensorEntityDescription( + key="output.L1.realpower", + translation_key="output_l1_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.delay.start": SensorEntityDescription( - key="ups.delay.start", - translation_key="ups_delay_start", - native_unit_of_measurement=UnitOfTime.SECONDS, - device_class=SensorDeviceClass.DURATION, + "output.L2.current": SensorEntityDescription( + 
key="output.L2.current", + translation_key="output_l2_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L2-N.voltage": SensorEntityDescription( + key="output.L2-N.voltage", + translation_key="output_l2_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L2.power.percent": SensorEntityDescription( + key="output.L2.power.percent", + translation_key="output_l2_power_percent", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L2.realpower": SensorEntityDescription( + key="output.L2.realpower", + translation_key="output_l2_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L3.current": SensorEntityDescription( + key="output.L3.current", + translation_key="output_l3_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L3-N.voltage": SensorEntityDescription( + key="output.L3-N.voltage", + translation_key="output_l3_n_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L3.power.percent": SensorEntityDescription( + key="output.L3.power.percent", + translation_key="output_l3_power_percent", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.L3.realpower": SensorEntityDescription( + key="output.L3.realpower", + translation_key="output_l3_realpower", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.phases": SensorEntityDescription( + key="output.phases", + translation_key="output_phases", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.power": SensorEntityDescription( + key="output.power", + translation_key="output_power", + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + device_class=SensorDeviceClass.APPARENT_POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.power.nominal": SensorEntityDescription( + key="output.power.nominal", + translation_key="output_power_nominal", + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + device_class=SensorDeviceClass.APPARENT_POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.realpower": SensorEntityDescription( + key="output.realpower", + translation_key="output_realpower", + 
native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.realpower.nominal": SensorEntityDescription( + key="output.realpower.nominal", + translation_key="output_realpower_nominal", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "output.voltage": SensorEntityDescription( + key="output.voltage", + translation_key="output_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + "output.voltage.nominal": SensorEntityDescription( + key="output.voltage.nominal", + translation_key="output_voltage_nominal", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.alarm": SensorEntityDescription( + key="ups.alarm", + translation_key="ups_alarm", + ), + "ups.beeper.status": SensorEntityDescription( + key="ups.beeper.status", + translation_key="ups_beeper_status", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.contacts": SensorEntityDescription( + key="ups.contacts", + translation_key="ups_contacts", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -122,62 +817,20 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.timer.start": SensorEntityDescription( - key="ups.timer.start", - translation_key="ups_timer_start", + "ups.delay.start": SensorEntityDescription( + key="ups.delay.start", + translation_key="ups_delay_start", native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.timer.reboot": SensorEntityDescription( - key="ups.timer.reboot", - translation_key="ups_timer_reboot", - native_unit_of_measurement=UnitOfTime.SECONDS, - device_class=SensorDeviceClass.DURATION, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.timer.shutdown": SensorEntityDescription( - key="ups.timer.shutdown", - translation_key="ups_timer_shutdown", - native_unit_of_measurement=UnitOfTime.SECONDS, - device_class=SensorDeviceClass.DURATION, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.test.interval": SensorEntityDescription( - key="ups.test.interval", - translation_key="ups_test_interval", - native_unit_of_measurement=UnitOfTime.SECONDS, - device_class=SensorDeviceClass.DURATION, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.test.result": SensorEntityDescription( - key="ups.test.result", - translation_key="ups_test_result", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.test.date": SensorEntityDescription( - key="ups.test.date", - translation_key="ups_test_date", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), "ups.display.language": SensorEntityDescription( key="ups.display.language", translation_key="ups_display_language", 
entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.contacts": SensorEntityDescription( - key="ups.contacts", - translation_key="ups_contacts", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), "ups.efficiency": SensorEntityDescription( key="ups.efficiency", translation_key="ups_efficiency", @@ -186,6 +839,25 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), + "ups.id": SensorEntityDescription( + key="ups.id", + translation_key="ups_id", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.load": SensorEntityDescription( + key="ups.load", + translation_key="ups_load", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + "ups.load.high": SensorEntityDescription( + key="ups.load.high", + translation_key="ups_load_high", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), "ups.power": SensorEntityDescription( key="ups.power", translation_key="ups_power", @@ -220,21 +892,9 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.beeper.status": SensorEntityDescription( - key="ups.beeper.status", - translation_key="ups_beeper_status", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.type": SensorEntityDescription( - key="ups.type", - translation_key="ups_type", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ups.watchdog.status": SensorEntityDescription( - key="ups.watchdog.status", - translation_key="ups_watchdog_status", + "ups.shutdown": SensorEntityDescription( + key="ups.shutdown", + translation_key="ups_shutdown", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), @@ -256,739 +916,89 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "ups.shutdown": SensorEntityDescription( - key="ups.shutdown", - translation_key="ups_shutdown", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, + "ups.status": SensorEntityDescription( + key="ups.status", + translation_key="ups_status", ), - "battery.charge": SensorEntityDescription( - key="battery.charge", - translation_key="battery_charge", - native_unit_of_measurement=PERCENTAGE, - device_class=SensorDeviceClass.BATTERY, - state_class=SensorStateClass.MEASUREMENT, + "ups.status.display": SensorEntityDescription( + key="ups.status.display", + translation_key="ups_status_display", ), - "battery.charge.low": SensorEntityDescription( - key="battery.charge.low", - translation_key="battery_charge_low", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.charge.restart": SensorEntityDescription( - key="battery.charge.restart", - translation_key="battery_charge_restart", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.charge.warning": SensorEntityDescription( - key="battery.charge.warning", - translation_key="battery_charge_warning", - native_unit_of_measurement=PERCENTAGE, - 
entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.charger.status": SensorEntityDescription( - key="battery.charger.status", - translation_key="battery_charger_status", - ), - "battery.voltage": SensorEntityDescription( - key="battery.voltage", - translation_key="battery_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.voltage.nominal": SensorEntityDescription( - key="battery.voltage.nominal", - translation_key="battery_voltage_nominal", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.voltage.low": SensorEntityDescription( - key="battery.voltage.low", - translation_key="battery_voltage_low", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.voltage.high": SensorEntityDescription( - key="battery.voltage.high", - translation_key="battery_voltage_high", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.capacity": SensorEntityDescription( - key="battery.capacity", - translation_key="battery_capacity", - native_unit_of_measurement="Ah", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.current": SensorEntityDescription( - key="battery.current", - translation_key="battery_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.current.total": SensorEntityDescription( - key="battery.current.total", - translation_key="battery_current_total", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.temperature": SensorEntityDescription( - key="battery.temperature", - translation_key="battery_temperature", + "ups.temperature": SensorEntityDescription( + key="ups.temperature", + translation_key="ups_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.runtime": SensorEntityDescription( - key="battery.runtime", - translation_key="battery_runtime", + "ups.test.date": SensorEntityDescription( + key="ups.test.date", + translation_key="ups_test_date", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.test.interval": SensorEntityDescription( + key="ups.test.interval", + translation_key="ups_test_interval", native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.runtime.low": SensorEntityDescription( - key="battery.runtime.low", - 
translation_key="battery_runtime_low", + "ups.test.result": SensorEntityDescription( + key="ups.test.result", + translation_key="ups_test_result", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + "ups.timer.reboot": SensorEntityDescription( + key="ups.timer.reboot", + translation_key="ups_timer_reboot", native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.runtime.restart": SensorEntityDescription( - key="battery.runtime.restart", - translation_key="battery_runtime_restart", + "ups.timer.shutdown": SensorEntityDescription( + key="ups.timer.shutdown", + translation_key="ups_timer_shutdown", native_unit_of_measurement=UnitOfTime.SECONDS, device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.alarm.threshold": SensorEntityDescription( - key="battery.alarm.threshold", - translation_key="battery_alarm_threshold", + "ups.timer.start": SensorEntityDescription( + key="ups.timer.start", + translation_key="ups_timer_start", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=SensorDeviceClass.DURATION, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.date": SensorEntityDescription( - key="battery.date", - translation_key="battery_date", + "ups.type": SensorEntityDescription( + key="ups.type", + translation_key="ups_type", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.mfr.date": SensorEntityDescription( - key="battery.mfr.date", - translation_key="battery_mfr_date", + "ups.watchdog.status": SensorEntityDescription( + key="ups.watchdog.status", + translation_key="ups_watchdog_status", entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), - "battery.packs": SensorEntityDescription( - key="battery.packs", - translation_key="battery_packs", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.packs.bad": SensorEntityDescription( - key="battery.packs.bad", - translation_key="battery_packs_bad", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "battery.type": SensorEntityDescription( - key="battery.type", - translation_key="battery_type", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.sensitivity": SensorEntityDescription( - key="input.sensitivity", - translation_key="input_sensitivity", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.transfer.low": SensorEntityDescription( - key="input.transfer.low", - translation_key="input_transfer_low", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.transfer.high": SensorEntityDescription( - key="input.transfer.high", - translation_key="input_transfer_high", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.transfer.reason": SensorEntityDescription( - key="input.transfer.reason", - translation_key="input_transfer_reason", - entity_category=EntityCategory.DIAGNOSTIC, - 
entity_registry_enabled_default=False, - ), - "input.voltage": SensorEntityDescription( - key="input.voltage", - translation_key="input_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - ), - "input.voltage.nominal": SensorEntityDescription( - key="input.voltage.nominal", - translation_key="input_voltage_nominal", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L1-N.voltage": SensorEntityDescription( - key="input.L1-N.voltage", - translation_key="input_l1_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L2-N.voltage": SensorEntityDescription( - key="input.L2-N.voltage", - translation_key="input_l2_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L3-N.voltage": SensorEntityDescription( - key="input.L3-N.voltage", - translation_key="input_l3_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.frequency": SensorEntityDescription( - key="input.frequency", - translation_key="input_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.frequency.nominal": SensorEntityDescription( - key="input.frequency.nominal", - translation_key="input_frequency_nominal", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.frequency.status": SensorEntityDescription( - key="input.frequency.status", - translation_key="input_frequency_status", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L1.frequency": SensorEntityDescription( - key="input.L1.frequency", - translation_key="input_l1_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L2.frequency": SensorEntityDescription( - key="input.L2.frequency", - translation_key="input_l2_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L3.frequency": SensorEntityDescription( - key="input.L3.frequency", - translation_key="input_l3_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - 
entity_registry_enabled_default=False, - ), - "input.bypass.current": SensorEntityDescription( - key="input.bypass.current", - translation_key="input_bypass_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L1.current": SensorEntityDescription( - key="input.bypass.L1.current", - translation_key="input_bypass_l1_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L2.current": SensorEntityDescription( - key="input.bypass.L2.current", - translation_key="input_bypass_l2_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L3.current": SensorEntityDescription( - key="input.bypass.L3.current", - translation_key="input_bypass_l3_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.frequency": SensorEntityDescription( - key="input.bypass.frequency", - translation_key="input_bypass_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.phases": SensorEntityDescription( - key="input.bypass.phases", - translation_key="input_bypass_phases", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.realpower": SensorEntityDescription( - key="input.bypass.realpower", - translation_key="input_bypass_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L1.realpower": SensorEntityDescription( - key="input.bypass.L1.realpower", - translation_key="input_bypass_l1_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L2.realpower": SensorEntityDescription( - key="input.bypass.L2.realpower", - translation_key="input_bypass_l2_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L3.realpower": SensorEntityDescription( - key="input.bypass.L3.realpower", - translation_key="input_bypass_l3_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.voltage": 
SensorEntityDescription( - key="input.bypass.voltage", - translation_key="input_bypass_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L1-N.voltage": SensorEntityDescription( - key="input.bypass.L1-N.voltage", - translation_key="input_bypass_l1_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L2-N.voltage": SensorEntityDescription( - key="input.bypass.L2-N.voltage", - translation_key="input_bypass_l2_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.bypass.L3-N.voltage": SensorEntityDescription( - key="input.bypass.L3-N.voltage", - translation_key="input_bypass_l3_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.current": SensorEntityDescription( - key="input.current", - translation_key="input_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_registry_enabled_default=False, - ), - "input.L1.current": SensorEntityDescription( - key="input.L1.current", - translation_key="input_l1_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L2.current": SensorEntityDescription( - key="input.L2.current", - translation_key="input_l2_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L3.current": SensorEntityDescription( - key="input.L3.current", - translation_key="input_l3_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.phases": SensorEntityDescription( - key="input.phases", - translation_key="input_phases", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.realpower": SensorEntityDescription( - key="input.realpower", - translation_key="input_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L1.realpower": SensorEntityDescription( - key="input.L1.realpower", - translation_key="input_l1_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - 
state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L2.realpower": SensorEntityDescription( - key="input.L2.realpower", - translation_key="input_l2_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "input.L3.realpower": SensorEntityDescription( - key="input.L3.realpower", - translation_key="input_l3_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.power.nominal": SensorEntityDescription( - key="output.power.nominal", - translation_key="output_power_nominal", - native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, - device_class=SensorDeviceClass.APPARENT_POWER, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L1.power.percent": SensorEntityDescription( - key="output.L1.power.percent", - translation_key="output_l1_power_percent", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L2.power.percent": SensorEntityDescription( - key="output.L2.power.percent", - translation_key="output_l2_power_percent", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L3.power.percent": SensorEntityDescription( - key="output.L3.power.percent", - translation_key="output_l3_power_percent", - native_unit_of_measurement=PERCENTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.current": SensorEntityDescription( - key="output.current", - translation_key="output_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.current.nominal": SensorEntityDescription( - key="output.current.nominal", - translation_key="output_current_nominal", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L1.current": SensorEntityDescription( - key="output.L1.current", - translation_key="output_l1_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L2.current": SensorEntityDescription( - key="output.L2.current", - translation_key="output_l2_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L3.current": SensorEntityDescription( - key="output.L3.current", - translation_key="output_l3_current", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - state_class=SensorStateClass.MEASUREMENT, - 
entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.voltage": SensorEntityDescription( - key="output.voltage", - translation_key="output_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - ), - "output.voltage.nominal": SensorEntityDescription( - key="output.voltage.nominal", - translation_key="output_voltage_nominal", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L1-N.voltage": SensorEntityDescription( - key="output.L1-N.voltage", - translation_key="output_l1_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L2-N.voltage": SensorEntityDescription( - key="output.L2-N.voltage", - translation_key="output_l2_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L3-N.voltage": SensorEntityDescription( - key="output.L3-N.voltage", - translation_key="output_l3_n_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.frequency": SensorEntityDescription( - key="output.frequency", - translation_key="output_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.frequency.nominal": SensorEntityDescription( - key="output.frequency.nominal", - translation_key="output_frequency_nominal", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.phases": SensorEntityDescription( - key="output.phases", - translation_key="output_phases", - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.power": SensorEntityDescription( - key="output.power", - translation_key="output_power", - native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, - device_class=SensorDeviceClass.APPARENT_POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.realpower": SensorEntityDescription( - key="output.realpower", - translation_key="output_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.realpower.nominal": SensorEntityDescription( - key="output.realpower.nominal", - translation_key="output_realpower_nominal", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - entity_category=EntityCategory.DIAGNOSTIC, - 
entity_registry_enabled_default=False, - ), - "output.L1.realpower": SensorEntityDescription( - key="output.L1.realpower", - translation_key="output_l1_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L2.realpower": SensorEntityDescription( - key="output.L2.realpower", - translation_key="output_l2_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "output.L3.realpower": SensorEntityDescription( - key="output.L3.realpower", - translation_key="output_l3_realpower", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - entity_registry_enabled_default=False, - ), - "ambient.humidity": SensorEntityDescription( - key="ambient.humidity", - translation_key="ambient_humidity", - native_unit_of_measurement=PERCENTAGE, - device_class=SensorDeviceClass.HUMIDITY, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - ), - "ambient.humidity.status": SensorEntityDescription( - key="ambient.humidity.status", - translation_key="ambient_humidity_status", - device_class=SensorDeviceClass.ENUM, - options=AMBIENT_THRESHOLD_STATUS_OPTIONS, - entity_category=EntityCategory.DIAGNOSTIC, - ), - "ambient.temperature": SensorEntityDescription( - key="ambient.temperature", - translation_key="ambient_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - entity_category=EntityCategory.DIAGNOSTIC, - ), - "ambient.temperature.status": SensorEntityDescription( - key="ambient.temperature.status", - translation_key="ambient_temperature_status", - device_class=SensorDeviceClass.ENUM, - options=AMBIENT_THRESHOLD_STATUS_OPTIONS, - entity_category=EntityCategory.DIAGNOSTIC, - ), - "watts": SensorEntityDescription( - key="watts", - translation_key="watts", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - ), } -def _get_nut_device_info(data: PyNUTData) -> DeviceInfo: - """Return a DeviceInfo object filled with NUT device info.""" - nut_dev_infos = asdict(data.device_info) - nut_infos = { - info_key: nut_dev_infos[nut_key] - for nut_key, info_key in NUT_DEV_INFO_TO_DEV_INFO.items() - if nut_dev_infos[nut_key] is not None - } - - return cast(DeviceInfo, nut_infos) - - async def async_setup_entry( hass: HomeAssistant, config_entry: NutConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the NUT sensors.""" + valid_sensor_types: dict[str, SensorEntityDescription] pynut_data = config_entry.runtime_data coordinator = pynut_data.coordinator @@ -996,16 +1006,75 @@ async def async_setup_entry( unique_id = pynut_data.unique_id status = coordinator.data - resources = [sensor_id for sensor_id in SENSOR_TYPES if sensor_id in status] + # Dynamically add outlet sensors to valid sensors dictionary + if (num_outlets := status.get("outlet.count")) is not None: + additional_sensor_types: dict[str, SensorEntityDescription] = {} + for outlet_num in range(1, int(num_outlets) + 1): + outlet_num_str: str 
= str(outlet_num) + outlet_name: str = ( + status.get(f"outlet.{outlet_num_str}.name") or outlet_num_str + ) + additional_sensor_types |= { + f"outlet.{outlet_num_str}.current": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.current", + translation_key="outlet_number_current", + translation_placeholders={"outlet_name": outlet_name}, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + ), + f"outlet.{outlet_num_str}.current_status": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.current_status", + translation_key="outlet_number_current_status", + translation_placeholders={"outlet_name": outlet_name}, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + f"outlet.{outlet_num_str}.desc": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.desc", + translation_key="outlet_number_desc", + translation_placeholders={"outlet_name": outlet_name}, + ), + f"outlet.{outlet_num_str}.power": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.power", + translation_key="outlet_number_power", + translation_placeholders={"outlet_name": outlet_name}, + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + device_class=SensorDeviceClass.APPARENT_POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + f"outlet.{outlet_num_str}.realpower": SensorEntityDescription( + key=f"outlet.{outlet_num_str}.realpower", + translation_key="outlet_number_realpower", + translation_placeholders={"outlet_name": outlet_name}, + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + } + + valid_sensor_types = {**SENSOR_TYPES, **additional_sensor_types} + else: + valid_sensor_types = SENSOR_TYPES + + # If device reports ambient sensors are not present, then remove + has_ambient_sensors: bool = status.get(AMBIENT_PRESENT) != "no" + resources = [ + sensor_id + for sensor_id in valid_sensor_types + if sensor_id in status + and (has_ambient_sensors or sensor_id not in AMBIENT_SENSORS) + ] + # Display status is a special case that falls back to the status value # of the UPS instead. 
- if KEY_STATUS in resources: + if KEY_STATUS in status: resources.append(KEY_STATUS_DISPLAY) async_add_entities( NUTSensor( coordinator, - SENSOR_TYPES[sensor_type], + valid_sensor_types[sensor_type], data, unique_id, ) @@ -1013,33 +1082,12 @@ async def async_setup_entry( ) -class NUTSensor(CoordinatorEntity[DataUpdateCoordinator[dict[str, str]]], SensorEntity): +class NUTSensor(NUTBaseEntity, SensorEntity): """Representation of a sensor entity for NUT status values.""" - _attr_has_entity_name = True - - def __init__( - self, - coordinator: DataUpdateCoordinator[dict[str, str]], - sensor_description: SensorEntityDescription, - data: PyNUTData, - unique_id: str, - ) -> None: - """Initialize the sensor.""" - super().__init__(coordinator) - self.entity_description = sensor_description - - device_name = data.name.title() - self._attr_unique_id = f"{unique_id}_{sensor_description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, unique_id)}, - name=device_name, - ) - self._attr_device_info.update(_get_nut_device_info(data)) - @property def native_value(self) -> str | None: - """Return entity state from ups.""" + """Return entity state from NUT device.""" status = self.coordinator.data if self.entity_description.key == KEY_STATUS_DISPLAY: return _format_display_state(status) diff --git a/homeassistant/components/nut/strings.json b/homeassistant/components/nut/strings.json index b9485a320fb..4d8ffd45475 100644 --- a/homeassistant/components/nut/strings.json +++ b/homeassistant/components/nut/strings.json @@ -29,8 +29,8 @@ }, "error": { "cannot_connect": "Connection error: {error}", - "unknown": "[%key:common::config_flow::error::unknown%]", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", @@ -78,6 +78,9 @@ } }, "entity": { + "button": { + "outlet_number_load_cycle": { "name": "Power cycle outlet {outlet_name}" } + }, "sensor": { "ambient_humidity": { "name": "Ambient humidity" }, "ambient_humidity_status": { "name": "Ambient humidity status" }, @@ -106,72 +109,78 @@ "battery_voltage_low": { "name": "Low battery voltage" }, "battery_voltage_nominal": { "name": "Nominal battery voltage" }, "input_bypass_current": { "name": "Input bypass current" }, - "input_bypass_l1_current": { "name": "Input bypass L1 current" }, - "input_bypass_l2_current": { "name": "Input bypass L2 current" }, - "input_bypass_l3_current": { "name": "Input bypass L3 current" }, - "input_bypass_voltage": { "name": "Input bypass voltage" }, - "input_bypass_l1_n_voltage": { "name": "Input bypass L1-N voltage" }, - "input_bypass_l2_n_voltage": { "name": "Input bypass L2-N voltage" }, - "input_bypass_l3_n_voltage": { "name": "Input bypass L3-N voltage" }, "input_bypass_frequency": { "name": "Input bypass frequency" }, + "input_bypass_l1_current": { "name": "Input bypass L1 current" }, + "input_bypass_l1_n_voltage": { "name": "Input bypass L1-N voltage" }, + "input_bypass_l1_realpower": { "name": "Input bypass L1 real power" }, + "input_bypass_l2_current": { "name": "Input bypass L2 current" }, + "input_bypass_l2_n_voltage": { "name": "Input bypass L2-N voltage" }, + "input_bypass_l2_realpower": { "name": "Input bypass L2 real power" }, + "input_bypass_l3_current": { "name": "Input bypass L3 current" }, + "input_bypass_l3_n_voltage": { "name": "Input bypass L3-N 
voltage" }, + "input_bypass_l3_realpower": { "name": "Input bypass L3 real power" }, "input_bypass_phases": { "name": "Input bypass phases" }, "input_bypass_realpower": { "name": "Input bypass real power" }, - "input_bypass_l1_realpower": { - "name": "Input bypass L1 real power" - }, - "input_bypass_l2_realpower": { - "name": "Input bypass L2 real power" - }, - "input_bypass_l3_realpower": { - "name": "Input bypass L3 real power" - }, + "input_bypass_voltage": { "name": "Input bypass voltage" }, "input_current": { "name": "Input current" }, - "input_l1_current": { "name": "Input L1 current" }, - "input_l2_current": { "name": "Input L2 current" }, - "input_l3_current": { "name": "Input L3 current" }, + "input_current_status": { "name": "Input current status" }, "input_frequency": { "name": "Input frequency" }, "input_frequency_nominal": { "name": "Input nominal frequency" }, "input_frequency_status": { "name": "Input frequency status" }, + "input_l1_current": { "name": "Input L1 current" }, "input_l1_frequency": { "name": "Input L1 line frequency" }, - "input_l2_frequency": { "name": "Input L2 line frequency" }, - "input_l3_frequency": { "name": "Input L3 line frequency" }, - "input_phases": { "name": "Input phases" }, - "input_realpower": { "name": "Input real power" }, + "input_l1_n_voltage": { "name": "Input L1 voltage" }, "input_l1_realpower": { "name": "Input L1 real power" }, + "input_l2_current": { "name": "Input L2 current" }, + "input_l2_frequency": { "name": "Input L2 line frequency" }, + "input_l2_n_voltage": { "name": "Input L2 voltage" }, "input_l2_realpower": { "name": "Input L2 real power" }, + "input_l3_current": { "name": "Input L3 current" }, + "input_l3_frequency": { "name": "Input L3 line frequency" }, + "input_l3_n_voltage": { "name": "Input L3 voltage" }, "input_l3_realpower": { "name": "Input L3 real power" }, + "input_load": { "name": "Input load" }, + "input_phases": { "name": "Input phases" }, + "input_power": { "name": "Input power" }, + "input_realpower": { "name": "Input real power" }, "input_sensitivity": { "name": "Input power sensitivity" }, "input_transfer_high": { "name": "High voltage transfer" }, "input_transfer_low": { "name": "Low voltage transfer" }, "input_transfer_reason": { "name": "Voltage transfer reason" }, "input_voltage": { "name": "Input voltage" }, "input_voltage_nominal": { "name": "Nominal input voltage" }, - "input_l1_n_voltage": { "name": "Input L1 voltage" }, - "input_l2_n_voltage": { "name": "Input L2 voltage" }, - "input_l3_n_voltage": { "name": "Input L3 voltage" }, + "input_voltage_status": { "name": "Input voltage status" }, + "outlet_number_current": { "name": "Outlet {outlet_name} current" }, + "outlet_number_current_status": { + "name": "Outlet {outlet_name} current status" + }, + "outlet_number_desc": { "name": "Outlet {outlet_name} description" }, + "outlet_number_power": { "name": "Outlet {outlet_name} power" }, + "outlet_number_realpower": { "name": "Outlet {outlet_name} real power" }, + "outlet_voltage": { "name": "Outlet voltage" }, "output_current": { "name": "Output current" }, "output_current_nominal": { "name": "Nominal output current" }, - "output_l1_current": { "name": "Output L1 current" }, - "output_l2_current": { "name": "Output L2 current" }, - "output_l3_current": { "name": "Output L3 current" }, "output_frequency": { "name": "Output frequency" }, "output_frequency_nominal": { "name": "Nominal output frequency" }, + "output_l1_current": { "name": "Output L1 current" }, + "output_l1_n_voltage": { "name": "Output 
L1-N voltage" }, + "output_l1_power_percent": { "name": "Output L1 power usage" }, + "output_l1_realpower": { "name": "Output L1 real power" }, + "output_l2_current": { "name": "Output L2 current" }, + "output_l2_n_voltage": { "name": "Output L2-N voltage" }, + "output_l2_power_percent": { "name": "Output L2 power usage" }, + "output_l2_realpower": { "name": "Output L2 real power" }, + "output_l3_current": { "name": "Output L3 current" }, + "output_l3_n_voltage": { "name": "Output L3-N voltage" }, + "output_l3_power_percent": { "name": "Output L3 power usage" }, + "output_l3_realpower": { "name": "Output L3 real power" }, "output_phases": { "name": "Output phases" }, "output_power": { "name": "Output apparent power" }, - "output_l2_power_percent": { "name": "Output L2 power usage" }, - "output_l1_power_percent": { "name": "Output L1 power usage" }, - "output_l3_power_percent": { "name": "Output L3 power usage" }, "output_power_nominal": { "name": "Nominal output power" }, "output_realpower": { "name": "Output real power" }, "output_realpower_nominal": { "name": "Nominal output real power" }, - "output_l1_realpower": { "name": "Output L1 real power" }, - "output_l2_realpower": { "name": "Output L2 real power" }, - "output_l3_realpower": { "name": "Output L3 real power" }, "output_voltage": { "name": "Output voltage" }, "output_voltage_nominal": { "name": "Nominal output voltage" }, - "output_l1_n_voltage": { "name": "Output L1-N voltage" }, - "output_l2_n_voltage": { "name": "Output L2-N voltage" }, - "output_l3_n_voltage": { "name": "Output L3-N voltage" }, "ups_alarm": { "name": "Alarms" }, "ups_beeper_status": { "name": "Beeper status" }, "ups_contacts": { "name": "External contacts" }, @@ -203,8 +212,10 @@ "ups_timer_shutdown": { "name": "Load shutdown timer" }, "ups_timer_start": { "name": "Load start timer" }, "ups_type": { "name": "UPS type" }, - "ups_watchdog_status": { "name": "Watchdog status" }, - "watts": { "name": "Watts" } + "ups_watchdog_status": { "name": "Watchdog status" } + }, + "switch": { + "outlet_number_load_poweronoff": { "name": "Power outlet {outlet_name}" } } } } diff --git a/homeassistant/components/nut/switch.py b/homeassistant/components/nut/switch.py new file mode 100644 index 00000000000..924a596cc8e --- /dev/null +++ b/homeassistant/components/nut/switch.py @@ -0,0 +1,90 @@ +"""Provides a switch for switchable NUT outlets.""" + +from __future__ import annotations + +import logging +from typing import Any + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import NutConfigEntry +from .entity import NUTBaseEntity + +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NutConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the NUT switches.""" + pynut_data = config_entry.runtime_data + coordinator = pynut_data.coordinator + status = coordinator.data + + # Dynamically add outlet switch types + if (num_outlets := status.get("outlet.count")) is None: + return + + data = pynut_data.data + unique_id = pynut_data.unique_id + user_available_commands = pynut_data.user_available_commands + switch_descriptions = [ + SwitchEntityDescription( + key=f"outlet.{outlet_num!s}.load.poweronoff", + translation_key="outlet_number_load_poweronoff", + translation_placeholders={ + "outlet_name": status.get(f"outlet.{outlet_num!s}.name") + or str(outlet_num) + }, + device_class=SwitchDeviceClass.OUTLET, + entity_registry_enabled_default=True, + ) + for outlet_num in range(1, int(num_outlets) + 1) + if ( + status.get(f"outlet.{outlet_num!s}.switchable") == "yes" + and f"outlet.{outlet_num!s}.load.on" in user_available_commands + and f"outlet.{outlet_num!s}.load.off" in user_available_commands + ) + ] + + async_add_entities( + NUTSwitch(coordinator, description, data, unique_id) + for description in switch_descriptions + ) + + +class NUTSwitch(NUTBaseEntity, SwitchEntity): + """Representation of a switch entity for NUT status values.""" + + @property + def is_on(self) -> bool | None: + """Return the state of the switch.""" + status = self.coordinator.data + outlet, outlet_num_str = self.entity_description.key.split(".", 2)[:2] + if (state := status.get(f"{outlet}.{outlet_num_str}.status")) is None: + return None + return bool(state == "on") + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the device.""" + + outlet, outlet_num_str = self.entity_description.key.split(".", 2)[:2] + command_name = f"{outlet}.{outlet_num_str}.load.on" + await self.pynut_data.async_run_command(command_name) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the device.""" + + outlet, outlet_num_str = self.entity_description.key.split(".", 2)[:2] + command_name = f"{outlet}.{outlet_num_str}.load.off" + await self.pynut_data.async_run_command(command_name) diff --git a/homeassistant/components/nws/sensor.py b/homeassistant/components/nws/sensor.py index 4cfb3b85e0f..8a7631d8381 100644 --- a/homeassistant/components/nws/sensor.py +++ b/homeassistant/components/nws/sensor.py @@ -115,6 +115,7 @@ SENSOR_TYPES: tuple[NWSSensorEntityDescription, ...] 
= ( native_unit_of_measurement=DEGREE, unit_convert=DEGREE, device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, ), NWSSensorEntityDescription( key="barometricPressure", diff --git a/homeassistant/components/octoprint/config_flow.py b/homeassistant/components/octoprint/config_flow.py index 010b45e5a1c..e20eea0a61f 100644 --- a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -85,7 +85,8 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): raise err from None except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" if errors: diff --git a/homeassistant/components/ogemray/__init__.py b/homeassistant/components/ogemray/__init__.py new file mode 100644 index 00000000000..94e19234a6b --- /dev/null +++ b/homeassistant/components/ogemray/__init__.py @@ -0,0 +1 @@ +"""Ogemray virtual integration.""" diff --git a/homeassistant/components/ogemray/manifest.json b/homeassistant/components/ogemray/manifest.json new file mode 100644 index 00000000000..6a8eb315c7a --- /dev/null +++ b/homeassistant/components/ogemray/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "ogemray", + "name": "Ogemray", + "integration_type": "virtual", + "supported_by": "shelly" +} diff --git a/homeassistant/components/ohme/button.py b/homeassistant/components/ohme/button.py index 6e942215c0f..41782ea4a2d 100644 --- a/homeassistant/components/ohme/button.py +++ b/homeassistant/components/ohme/button.py @@ -2,8 +2,9 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from dataclasses import dataclass +from typing import Any from ohme import ApiException, ChargerStatus, OhmeApiClient @@ -23,7 +24,7 @@ PARALLEL_UPDATES = 1 class OhmeButtonDescription(OhmeEntityDescription, ButtonEntityDescription): """Class describing Ohme button entities.""" - press_fn: Callable[[OhmeApiClient], Awaitable[None]] + press_fn: Callable[[OhmeApiClient], Coroutine[Any, Any, bool]] BUTTON_DESCRIPTIONS = [ diff --git a/homeassistant/components/ohme/config_flow.py b/homeassistant/components/ohme/config_flow.py index 748ea558983..1037c3a7c8b 100644 --- a/homeassistant/components/ohme/config_flow.py +++ b/homeassistant/components/ohme/config_flow.py @@ -99,6 +99,29 @@ class OhmeConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-configuration.""" + errors: dict[str, str] = {} + reconfigure_entry = self._get_reconfigure_entry() + if user_input: + errors = await self._validate_account( + reconfigure_entry.data[CONF_EMAIL], + user_input[CONF_PASSWORD], + ) + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, + data_updates=user_input, + ) + return self.async_show_form( + step_id="reconfigure", + data_schema=REAUTH_SCHEMA, + description_placeholders={"email": reconfigure_entry.data[CONF_EMAIL]}, + errors=errors, + ) + async def _validate_account(self, email: str, password: str) -> dict[str, str]: """Validate Ohme account and return dict of errors.""" errors: dict[str, str] = {} diff --git a/homeassistant/components/ohme/diagnostics.py b/homeassistant/components/ohme/diagnostics.py new file mode 100644 index 00000000000..a955b3b76e2 --- /dev/null +++ b/homeassistant/components/ohme/diagnostics.py @@ 
-0,0 +1,24 @@ +"""Provides diagnostics for Ohme.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from .coordinator import OhmeConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: OhmeConfigEntry +) -> dict[str, Any]: + """Return diagnostics for Ohme.""" + coordinators = config_entry.runtime_data + client = coordinators.charge_session_coordinator.client + + return { + "device_info": client.device_info, + "vehicles": client.vehicles, + "ct_connected": client.ct_connected, + "cap_available": client.cap_available, + } diff --git a/homeassistant/components/ohme/icons.json b/homeassistant/components/ohme/icons.json index 9771b0bf5c2..8613f2542c4 100644 --- a/homeassistant/components/ohme/icons.json +++ b/homeassistant/components/ohme/icons.json @@ -16,6 +16,9 @@ "select": { "charge_mode": { "default": "mdi:play-box" + }, + "vehicle": { + "default": "mdi:car" } }, "sensor": { @@ -51,6 +54,9 @@ "state": { "off": "mdi:sleep-off" } + }, + "price_cap": { + "default": "mdi:car-speed-limiter" } }, "time": { @@ -62,6 +68,9 @@ "services": { "list_charge_slots": { "service": "mdi:clock-start" + }, + "set_price_cap": { + "service": "mdi:car-speed-limiter" } } } diff --git a/homeassistant/components/ohme/manifest.json b/homeassistant/components/ohme/manifest.json index fb11fa0dd06..30a55360ce2 100644 --- a/homeassistant/components/ohme/manifest.json +++ b/homeassistant/components/ohme/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "quality_scale": "silver", - "requirements": ["ohme==1.3.2"] + "requirements": ["ohme==1.5.1"] } diff --git a/homeassistant/components/ohme/number.py b/homeassistant/components/ohme/number.py index 0c71bab009f..f412c658085 100644 --- a/homeassistant/components/ohme/number.py +++ b/homeassistant/components/ohme/number.py @@ -1,7 +1,8 @@ """Platform for number.""" -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from dataclasses import dataclass +from typing import Any from ohme import ApiException, OhmeApiClient @@ -22,7 +23,7 @@ PARALLEL_UPDATES = 1 class OhmeNumberDescription(OhmeEntityDescription, NumberEntityDescription): """Class describing Ohme number entities.""" - set_fn: Callable[[OhmeApiClient, float], Awaitable[None]] + set_fn: Callable[[OhmeApiClient, float], Coroutine[Any, Any, bool]] value_fn: Callable[[OhmeApiClient], float] @@ -31,7 +32,7 @@ NUMBER_DESCRIPTION = [ key="target_percentage", translation_key="target_percentage", value_fn=lambda client: client.target_soc, - set_fn=lambda client, value: client.async_set_target(target_percent=value), + set_fn=lambda client, value: client.async_set_target(target_percent=int(value)), native_min_value=0, native_max_value=100, native_step=1, @@ -42,7 +43,7 @@ NUMBER_DESCRIPTION = [ translation_key="preconditioning_duration", value_fn=lambda client: client.preconditioning, set_fn=lambda client, value: client.async_set_target( - pre_condition_length=value + pre_condition_length=int(value) ), native_min_value=0, native_max_value=60, diff --git a/homeassistant/components/ohme/quality_scale.yaml b/homeassistant/components/ohme/quality_scale.yaml index 497d5ad32e5..f748cf339b4 100644 --- a/homeassistant/components/ohme/quality_scale.yaml +++ b/homeassistant/components/ohme/quality_scale.yaml @@ -39,7 +39,7 @@ rules: # Gold devices: done - diagnostics: todo + diagnostics: done discovery: status: exempt comment: | @@ -62,7 +62,7 
@@ rules: entity-translations: done exception-translations: done icon-translations: done - reconfiguration-flow: todo + reconfiguration-flow: done repair-issues: status: exempt comment: | diff --git a/homeassistant/components/ohme/select.py b/homeassistant/components/ohme/select.py index 17cc7c67e9a..d8d9c52c3b6 100644 --- a/homeassistant/components/ohme/select.py +++ b/homeassistant/components/ohme/select.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any, Final @@ -24,11 +24,13 @@ PARALLEL_UPDATES = 1 class OhmeSelectDescription(OhmeEntityDescription, SelectEntityDescription): """Class to describe an Ohme select entity.""" - select_fn: Callable[[OhmeApiClient, Any], Awaitable[None]] + select_fn: Callable[[OhmeApiClient, Any], Coroutine[Any, Any, bool | None]] + options: list[str] | None = None + options_fn: Callable[[OhmeApiClient], list[str]] | None = None current_option_fn: Callable[[OhmeApiClient], str | None] -SELECT_DESCRIPTION: Final[OhmeSelectDescription] = OhmeSelectDescription( +MODE_SELECT_DESCRIPTION: Final[OhmeSelectDescription] = OhmeSelectDescription( key="charge_mode", translation_key="charge_mode", select_fn=lambda client, mode: client.async_set_mode(mode), @@ -37,6 +39,14 @@ SELECT_DESCRIPTION: Final[OhmeSelectDescription] = OhmeSelectDescription( available_fn=lambda client: client.mode is not None, ) +VEHICLE_SELECT_DESCRIPTION: Final[OhmeSelectDescription] = OhmeSelectDescription( + key="vehicle", + translation_key="vehicle", + select_fn=lambda client, selection: client.async_set_vehicle(selection), + options_fn=lambda client: client.vehicles, + current_option_fn=lambda client: client.current_vehicle or None, +) + async def async_setup_entry( hass: HomeAssistant, @@ -44,9 +54,15 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up Ohme selects.""" - coordinator = config_entry.runtime_data.charge_session_coordinator + charge_sessions_coordinator = config_entry.runtime_data.charge_session_coordinator + device_info_coordinator = config_entry.runtime_data.device_info_coordinator - async_add_entities([OhmeSelect(coordinator, SELECT_DESCRIPTION)]) + async_add_entities( + [ + OhmeSelect(charge_sessions_coordinator, MODE_SELECT_DESCRIPTION), + OhmeSelect(device_info_coordinator, VEHICLE_SELECT_DESCRIPTION), + ] + ) class OhmeSelect(OhmeEntity, SelectEntity): @@ -64,6 +80,14 @@ class OhmeSelect(OhmeEntity, SelectEntity): ) from e await self.coordinator.async_request_refresh() + @property + def options(self) -> list[str]: + """Return a set of selectable options.""" + if self.entity_description.options_fn: + return self.entity_description.options_fn(self.coordinator.client) + assert self.entity_description.options + return self.entity_description.options + @property def current_option(self) -> str | None: """Return the current selected option.""" diff --git a/homeassistant/components/ohme/sensor.py b/homeassistant/components/ohme/sensor.py index d0425040b53..daee8fff13e 100644 --- a/homeassistant/components/ohme/sensor.py +++ b/homeassistant/components/ohme/sensor.py @@ -34,7 +34,7 @@ PARALLEL_UPDATES = 0 class OhmeSensorDescription(OhmeEntityDescription, SensorEntityDescription): """Class describing Ohme sensor entities.""" - value_fn: Callable[[OhmeApiClient], str | int | float] + value_fn: Callable[[OhmeApiClient], str | int | float | None] SENSOR_CHARGE_SESSION = [ @@ 
-129,6 +129,6 @@ class OhmeSensor(OhmeEntity, SensorEntity): entity_description: OhmeSensorDescription @property - def native_value(self) -> str | int | float: + def native_value(self) -> str | int | float | None: """Return the state of the sensor.""" return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/ohme/services.py b/homeassistant/components/ohme/services.py index 7d06b909d88..249fb1abdab 100644 --- a/homeassistant/components/ohme/services.py +++ b/homeassistant/components/ohme/services.py @@ -17,9 +17,11 @@ from homeassistant.helpers import selector from .const import DOMAIN -SERVICE_LIST_CHARGE_SLOTS = "list_charge_slots" ATTR_CONFIG_ENTRY: Final = "config_entry" -SERVICE_SCHEMA: Final = vol.Schema( +ATTR_PRICE_CAP: Final = "price_cap" + +SERVICE_LIST_CHARGE_SLOTS = "list_charge_slots" +SERVICE_LIST_CHARGE_SLOTS_SCHEMA: Final = vol.Schema( { vol.Required(ATTR_CONFIG_ENTRY): selector.ConfigEntrySelector( { @@ -29,6 +31,18 @@ SERVICE_SCHEMA: Final = vol.Schema( } ) +SERVICE_SET_PRICE_CAP = "set_price_cap" +SERVICE_SET_PRICE_CAP_SCHEMA: Final = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): selector.ConfigEntrySelector( + { + "integration": DOMAIN, + } + ), + vol.Required(ATTR_PRICE_CAP): vol.Coerce(float), + } +) + def __get_client(call: ServiceCall) -> OhmeApiClient: """Get the client from the config entry.""" @@ -64,12 +78,28 @@ def async_setup_services(hass: HomeAssistant) -> None: """List of charge slots.""" client = __get_client(service_call) - return {"slots": client.slots} + return {"slots": [slot.to_dict() for slot in client.slots]} + + async def set_price_cap( + service_call: ServiceCall, + ) -> None: + """Set the price cap.""" + client = __get_client(service_call) + price_cap = service_call.data[ATTR_PRICE_CAP] + await client.async_change_price_cap(cap=price_cap) hass.services.async_register( DOMAIN, SERVICE_LIST_CHARGE_SLOTS, list_charge_slots, - schema=SERVICE_SCHEMA, + schema=SERVICE_LIST_CHARGE_SLOTS_SCHEMA, supports_response=SupportsResponse.ONLY, ) + + hass.services.async_register( + DOMAIN, + SERVICE_SET_PRICE_CAP, + set_price_cap, + schema=SERVICE_SET_PRICE_CAP_SCHEMA, + supports_response=SupportsResponse.NONE, + ) diff --git a/homeassistant/components/ohme/services.yaml b/homeassistant/components/ohme/services.yaml index c5c8ee18138..a45bc131511 100644 --- a/homeassistant/components/ohme/services.yaml +++ b/homeassistant/components/ohme/services.yaml @@ -5,3 +5,16 @@ list_charge_slots: selector: config_entry: integration: ohme +set_price_cap: + fields: + config_entry: + required: true + selector: + config_entry: + integration: ohme + price_cap: + required: true + selector: + number: + min: 0 + mode: box diff --git a/homeassistant/components/ohme/strings.json b/homeassistant/components/ohme/strings.json index 4c845daa8f0..4a2170babeb 100644 --- a/homeassistant/components/ohme/strings.json +++ b/homeassistant/components/ohme/strings.json @@ -21,6 +21,16 @@ "data_description": { "password": "Enter the password for your Ohme account" } + }, + "reconfigure": { + "description": "Update your password for {email}", + "title": "Reconfigure Ohme Account", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Enter the password for your Ohme account" + } } }, "error": { @@ -29,7 +39,8 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reauth_successful":
"[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "services": { @@ -42,6 +53,20 @@ "description": "The Ohme config entry for which to return charge slots." } } + }, + "set_price_cap": { + "name": "Set price cap", + "description": "Prevents charging when the electricity price exceeds a defined threshold.", + "fields": { + "config_entry": { + "name": "Ohme account", + "description": "The Ohme config entry for which to return charge slots." + }, + "price_cap": { + "name": "Price cap", + "description": "Threshold in 1/100ths of your local currency." + } + } } }, "entity": { @@ -66,6 +91,9 @@ "max_charge": "Max charge", "paused": "Paused" } + }, + "vehicle": { + "name": "Vehicle" } }, "sensor": { @@ -99,6 +127,9 @@ }, "sleep_when_inactive": { "name": "Sleep when inactive" + }, + "price_cap": { + "name": "Price cap" } }, "time": { diff --git a/homeassistant/components/ohme/switch.py b/homeassistant/components/ohme/switch.py index c4465ec7e97..47e3bf8a99d 100644 --- a/homeassistant/components/ohme/switch.py +++ b/homeassistant/components/ohme/switch.py @@ -1,9 +1,10 @@ """Platform for switch.""" +from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from ohme import ApiException +from ohme import ApiException, OhmeApiClient from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory @@ -19,28 +20,37 @@ PARALLEL_UPDATES = 1 @dataclass(frozen=True, kw_only=True) -class OhmeSwitchDescription(OhmeEntityDescription, SwitchEntityDescription): - """Class describing Ohme switch entities.""" +class OhmeConfigSwitchDescription(OhmeEntityDescription, SwitchEntityDescription): + """Class describing Ohme configuration switch entities.""" configuration_key: str -SWITCH_DEVICE_INFO = [ - OhmeSwitchDescription( +@dataclass(frozen=True, kw_only=True) +class OhmeSwitchDescription(OhmeEntityDescription, SwitchEntityDescription): + """Class describing basic Ohme switch entities.""" + + is_on_fn: Callable[[OhmeApiClient], bool] + off_fn: Callable[[OhmeApiClient], Awaitable] + on_fn: Callable[[OhmeApiClient], Awaitable] + + +SWITCH_CONFIG = [ + OhmeConfigSwitchDescription( key="lock_buttons", translation_key="lock_buttons", entity_category=EntityCategory.CONFIG, is_supported_fn=lambda client: client.is_capable("buttonsLockable"), configuration_key="buttonsLocked", ), - OhmeSwitchDescription( + OhmeConfigSwitchDescription( key="require_approval", translation_key="require_approval", entity_category=EntityCategory.CONFIG, is_supported_fn=lambda client: client.is_capable("pluginsRequireApprovalMode"), configuration_key="pluginsRequireApproval", ), - OhmeSwitchDescription( + OhmeConfigSwitchDescription( key="sleep_when_inactive", translation_key="sleep_when_inactive", entity_category=EntityCategory.CONFIG, @@ -49,6 +59,17 @@ SWITCH_DEVICE_INFO = [ ), ] +SWITCH_DESCRIPTION = [ + OhmeSwitchDescription( + key="price_cap", + translation_key="price_cap", + is_supported_fn=lambda client: client.cap_available, + is_on_fn=lambda client: client.cap_enabled, + on_fn=lambda client: client.async_change_price_cap(True), + off_fn=lambda client: client.async_change_price_cap(False), + ), +] + async def async_setup_entry( hass: HomeAssistant, @@ -56,15 +77,17 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) 
-> None: """Set up switches.""" - coordinators = config_entry.runtime_data - coordinator_map = [ - (SWITCH_DEVICE_INFO, coordinators.device_info_coordinator), - ] + coordinator = config_entry.runtime_data.device_info_coordinator + + async_add_entities( + OhmeConfigSwitch(coordinator, description) + for description in SWITCH_CONFIG + if description.is_supported_fn(coordinator.client) + ) async_add_entities( OhmeSwitch(coordinator, description) - for entities, coordinator in coordinator_map - for description in entities + for description in SWITCH_DESCRIPTION if description.is_supported_fn(coordinator.client) ) @@ -74,6 +97,27 @@ class OhmeSwitch(OhmeEntity, SwitchEntity): entity_description: OhmeSwitchDescription + @property + def is_on(self) -> bool: + """Return True if entity is on.""" + return self.entity_description.is_on_fn(self.coordinator.client) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.entity_description.off_fn(self.coordinator.client) + await self.coordinator.async_request_refresh() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.entity_description.on_fn(self.coordinator.client) + await self.coordinator.async_request_refresh() + + +class OhmeConfigSwitch(OhmeEntity, SwitchEntity): + """Configuration switch for Ohme.""" + + entity_description: OhmeConfigSwitchDescription + @property def is_on(self) -> bool: """Return the entity value to represent the entity state.""" diff --git a/homeassistant/components/ohme/time.py b/homeassistant/components/ohme/time.py index 264b2afd41a..a0b1edb594a 100644 --- a/homeassistant/components/ohme/time.py +++ b/homeassistant/components/ohme/time.py @@ -1,8 +1,9 @@ """Platform for time.""" -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from dataclasses import dataclass from datetime import time +from typing import Any from ohme import ApiException, OhmeApiClient @@ -22,7 +23,7 @@ PARALLEL_UPDATES = 1 class OhmeTimeDescription(OhmeEntityDescription, TimeEntityDescription): """Class describing Ohme time entities.""" - set_fn: Callable[[OhmeApiClient, time], Awaitable[None]] + set_fn: Callable[[OhmeApiClient, time], Coroutine[Any, Any, bool]] value_fn: Callable[[OhmeApiClient], time] diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index 90e81544f66..ab9e05b5fbe 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -15,7 +15,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import chat_session, intent, llm +from homeassistant.helpers import intent, llm from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .const import ( @@ -206,18 +206,6 @@ class OllamaConversationEntity( """Return a list of supported languages.""" return MATCH_ALL - async def async_process( - self, user_input: conversation.ConversationInput - ) -> conversation.ConversationResult: - """Process a sentence.""" - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - conversation.async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - return await self._async_handle_message(user_input, chat_log) - async def 
_async_handle_message( self, user_input: conversation.ConversationInput, @@ -292,7 +280,9 @@ class OllamaConversationEntity( ) intent_response.async_set_speech(chat_log.content[-1].content or "") return conversation.ConversationResult( - response=intent_response, conversation_id=chat_log.conversation_id + response=intent_response, + conversation_id=chat_log.conversation_id, + continue_conversation=chat_log.continue_conversation, ) def _trim_history(self, message_history: MessageHistory, max_messages: int) -> None: diff --git a/homeassistant/components/omnilogic/strings.json b/homeassistant/components/omnilogic/strings.json index 5b193b7f5ba..6f207337789 100644 --- a/homeassistant/components/omnilogic/strings.json +++ b/homeassistant/components/omnilogic/strings.json @@ -34,7 +34,7 @@ "fields": { "speed": { "name": "Speed", - "description": "Speed for the VSP between min and max speed." + "description": "Speed for the pump between min and max speed." } } } diff --git a/homeassistant/components/onboarding/views.py b/homeassistant/components/onboarding/views.py index a590588c009..a84aabe9b48 100644 --- a/homeassistant/components/onboarding/views.py +++ b/homeassistant/components/onboarding/views.py @@ -31,7 +31,7 @@ from homeassistant.helpers import area_registry as ar from homeassistant.helpers.backup import async_get_manager as async_get_backup_manager from homeassistant.helpers.system_info import async_get_system_info from homeassistant.helpers.translation import async_get_translations -from homeassistant.setup import async_setup_component +from homeassistant.setup import SetupPhases, async_pause_setup, async_setup_component if TYPE_CHECKING: from . import OnboardingData, OnboardingStorage, OnboardingStoreData @@ -60,6 +60,7 @@ async def async_setup( hass.http.register_view(BackupInfoView(data)) hass.http.register_view(RestoreBackupView(data)) hass.http.register_view(UploadBackupView(data)) + await setup_cloud_views(hass, data) class OnboardingView(HomeAssistantView): @@ -367,7 +368,7 @@ class BackupInfoView(BackupOnboardingView): { "backups": list(backups.values()), "state": manager.state, - "last_non_idle_event": manager.last_non_idle_event, + "last_action_event": manager.last_action_event, } ) @@ -429,6 +430,125 @@ class UploadBackupView(BackupOnboardingView, backup_http.UploadBackupView): return await self._post(request) +async def setup_cloud_views(hass: HomeAssistant, data: OnboardingStoreData) -> None: + """Set up the cloud views.""" + + with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PACKAGES): + # Import the cloud integration in an executor to avoid blocking the + # event loop. + def import_cloud() -> None: + """Import the cloud integration.""" + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.cloud import http_api # noqa: F401 + + await hass.async_add_import_executor_job(import_cloud) + + # The cloud integration is imported locally to avoid cloud being imported by + # bootstrap.py and to avoid circular imports. 
+ + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.cloud import http_api as cloud_http + + # pylint: disable-next=import-outside-toplevel,hass-component-root-import + from homeassistant.components.cloud.const import DATA_CLOUD + + class CloudOnboardingView(HomeAssistantView): + """Cloud onboarding view.""" + + requires_auth = False + + def __init__(self, data: OnboardingStoreData) -> None: + """Initialize the view.""" + self._data = data + + def with_cloud[_ViewT: CloudOnboardingView, **_P]( + func: Callable[ + Concatenate[_ViewT, web.Request, _P], + Coroutine[Any, Any, web.Response], + ], + ) -> Callable[ + Concatenate[_ViewT, web.Request, _P], Coroutine[Any, Any, web.Response] + ]: + """Home Assistant API decorator to check onboarding and cloud.""" + + @wraps(func) + async def _with_cloud( + self: _ViewT, + request: web.Request, + *args: _P.args, + **kwargs: _P.kwargs, + ) -> web.Response: + """Check onboarding status, cloud and call function.""" + if self._data["done"]: + # If at least one onboarding step is done, we don't allow accessing + # the cloud onboarding views. + raise HTTPUnauthorized + + hass = request.app[KEY_HASS] + if DATA_CLOUD not in hass.data: + return self.json( + {"code": "cloud_disabled"}, + status_code=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + + return await func(self, request, *args, **kwargs) + + return _with_cloud + + class CloudForgotPasswordView( + CloudOnboardingView, cloud_http.CloudForgotPasswordView + ): + """View to start Forgot Password flow.""" + + url = "/api/onboarding/cloud/forgot_password" + name = "api:onboarding:cloud:forgot_password" + + @with_cloud + async def post(self, request: web.Request) -> web.Response: + """Handle forgot password request.""" + return await super()._post(request) + + class CloudLoginView(CloudOnboardingView, cloud_http.CloudLoginView): + """Login to Home Assistant Cloud.""" + + url = "/api/onboarding/cloud/login" + name = "api:onboarding:cloud:login" + + @with_cloud + async def post(self, request: web.Request) -> web.Response: + """Handle login request.""" + return await super()._post(request) + + class CloudLogoutView(CloudOnboardingView, cloud_http.CloudLogoutView): + """Log out of the Home Assistant cloud.""" + + url = "/api/onboarding/cloud/logout" + name = "api:onboarding:cloud:logout" + + @with_cloud + async def post(self, request: web.Request) -> web.Response: + """Handle logout request.""" + return await super()._post(request) + + class CloudStatusView(CloudOnboardingView): + """Get cloud status view.""" + + url = "/api/onboarding/cloud/status" + name = "api:onboarding:cloud:status" + + @with_cloud + async def get(self, request: web.Request) -> web.Response: + """Return cloud status.""" + hass = request.app[KEY_HASS] + cloud = hass.data[DATA_CLOUD] + return self.json({"logged_in": cloud.is_logged_in}) + + hass.http.register_view(CloudForgotPasswordView(data)) + hass.http.register_view(CloudLoginView(data)) + hass.http.register_view(CloudLogoutView(data)) + hass.http.register_view(CloudStatusView(data)) + + @callback def _async_get_hass_provider(hass: HomeAssistant) -> HassAuthProvider: """Get the Home Assistant auth provider.""" diff --git a/homeassistant/components/ondilo_ico/__init__.py b/homeassistant/components/ondilo_ico/__init__.py index ddcd7ab8831..93aadb5b6ea 100644 --- a/homeassistant/components/ondilo_ico/__init__.py +++ b/homeassistant/components/ondilo_ico/__init__.py @@ -1,27 +1,37 @@ """The Ondilo ICO integration.""" +from homeassistant.components.application_credentials 
import ( + ClientCredential, + async_import_client_credential, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv +from homeassistant.helpers.typing import ConfigType from .api import OndiloClient -from .config_flow import OndiloIcoOAuth2FlowHandler -from .const import DOMAIN +from .const import DOMAIN, OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET from .coordinator import OndiloIcoPoolsCoordinator -from .oauth_impl import OndiloOauth2Implementation +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [Platform.SENSOR] +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Ondilo ICO integration.""" + # Import the default client credential. + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential(OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET, name="Ondilo ICO"), + ) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Ondilo ICO from a config entry.""" - - OndiloIcoOAuth2FlowHandler.async_register_implementation( - hass, - OndiloOauth2Implementation(hass), - ) - implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( hass, entry diff --git a/homeassistant/components/ondilo_ico/api.py b/homeassistant/components/ondilo_ico/api.py index f6ab0baa576..696acf1b2d6 100644 --- a/homeassistant/components/ondilo_ico/api.py +++ b/homeassistant/components/ondilo_ico/api.py @@ -1,15 +1,12 @@ """API for Ondilo ICO bound to Home Assistant OAuth.""" from asyncio import run_coroutine_threadsafe -import logging from ondilo import Ondilo from homeassistant import config_entries, core from homeassistant.helpers import config_entry_oauth2_flow -_LOGGER = logging.getLogger(__name__) - class OndiloClient(Ondilo): """Provide Ondilo ICO authentication tied to an OAuth2 based config entry.""" diff --git a/homeassistant/components/ondilo_ico/application_credentials.py b/homeassistant/components/ondilo_ico/application_credentials.py new file mode 100644 index 00000000000..5481a88bc1b --- /dev/null +++ b/homeassistant/components/ondilo_ico/application_credentials.py @@ -0,0 +1,14 @@ +"""Application credentials platform for Ondilo ICO.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + """Return authorization server.""" + return AuthorizationServer( + authorize_url=OAUTH2_AUTHORIZE, + token_url=OAUTH2_TOKEN, + ) diff --git a/homeassistant/components/ondilo_ico/config_flow.py b/homeassistant/components/ondilo_ico/config_flow.py index fe0b89e7258..6839d2089bf 100644 --- a/homeassistant/components/ondilo_ico/config_flow.py +++ b/homeassistant/components/ondilo_ico/config_flow.py @@ -3,11 +3,14 @@ import logging from typing import Any +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) from homeassistant.config_entries import ConfigFlowResult from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler -from .const import DOMAIN -from .oauth_impl import OndiloOauth2Implementation +from .const import DOMAIN, OAUTH2_CLIENT_ID, 
OAUTH2_CLIENT_SECRET class OndiloIcoOAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): @@ -18,14 +21,13 @@ class OndiloIcoOAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle a flow initialized by the user.""" - await self.async_set_unique_id(DOMAIN) - - self.async_register_implementation( + """Handle a flow start.""" + # Import the default client credential. + await async_import_client_credential( self.hass, - OndiloOauth2Implementation(self.hass), + DOMAIN, + ClientCredential(OAUTH2_CLIENT_ID, OAUTH2_CLIENT_SECRET, name="Ondilo ICO"), ) - return await super().async_step_user(user_input) @property diff --git a/homeassistant/components/ondilo_ico/const.py b/homeassistant/components/ondilo_ico/const.py index 3c947776857..8dec6072556 100644 --- a/homeassistant/components/ondilo_ico/const.py +++ b/homeassistant/components/ondilo_ico/const.py @@ -4,5 +4,5 @@ DOMAIN = "ondilo_ico" OAUTH2_AUTHORIZE = "https://interop.ondilo.com/oauth2/authorize" OAUTH2_TOKEN = "https://interop.ondilo.com/oauth2/token" -OAUTH2_CLIENTID = "customer_api" -OAUTH2_CLIENTSECRET = "" +OAUTH2_CLIENT_ID = "customer_api" +OAUTH2_CLIENT_SECRET = "" diff --git a/homeassistant/components/ondilo_ico/manifest.json b/homeassistant/components/ondilo_ico/manifest.json index 84862a89fbb..3553797b9cd 100644 --- a/homeassistant/components/ondilo_ico/manifest.json +++ b/homeassistant/components/ondilo_ico/manifest.json @@ -3,7 +3,7 @@ "name": "Ondilo ICO", "codeowners": ["@JeromeHXP"], "config_flow": true, - "dependencies": ["auth"], + "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/ondilo_ico", "integration_type": "hub", "iot_class": "cloud_polling", diff --git a/homeassistant/components/ondilo_ico/oauth_impl.py b/homeassistant/components/ondilo_ico/oauth_impl.py deleted file mode 100644 index e1c6e6fdb90..00000000000 --- a/homeassistant/components/ondilo_ico/oauth_impl.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Local implementation of OAuth2 specific to Ondilo to hard code client id and secret and return a proper name.""" - -from homeassistant.core import HomeAssistant -from homeassistant.helpers.config_entry_oauth2_flow import LocalOAuth2Implementation - -from .const import ( - DOMAIN, - OAUTH2_AUTHORIZE, - OAUTH2_CLIENTID, - OAUTH2_CLIENTSECRET, - OAUTH2_TOKEN, -) - - -class OndiloOauth2Implementation(LocalOAuth2Implementation): - """Local implementation of OAuth2 specific to Ondilo to hard code client id and secret and return a proper name.""" - - def __init__(self, hass: HomeAssistant) -> None: - """Just init default class with default values.""" - super().__init__( - hass, - DOMAIN, - OAUTH2_CLIENTID, - OAUTH2_CLIENTSECRET, - OAUTH2_AUTHORIZE, - OAUTH2_TOKEN, - ) - - @property - def name(self) -> str: - """Name of the implementation.""" - return "Ondilo" diff --git a/homeassistant/components/ondilo_ico/sensor.py b/homeassistant/components/ondilo_ico/sensor.py index ddc4a94853f..da5ccae11a5 100644 --- a/homeassistant/components/ondilo_ico/sensor.py +++ b/homeassistant/components/ondilo_ico/sensor.py @@ -12,6 +12,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, + EntityCategory, UnitOfElectricPotential, UnitOfTemperature, ) @@ -56,12 +57,14 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( key="battery", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="rssi", translation_key="rssi", native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( diff --git a/homeassistant/components/onedrive/__init__.py b/homeassistant/components/onedrive/__init__.py index eea18bb2f7e..f5d841683d5 100644 --- a/homeassistant/components/onedrive/__init__.py +++ b/homeassistant/components/onedrive/__init__.py @@ -19,12 +19,14 @@ from onedrive_personal_sdk.models.items import Item, ItemUpdate from homeassistant.const import CONF_ACCESS_TOKEN, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import ( OAuth2Session, async_get_config_entry_implementation, ) from homeassistant.helpers.instance_id import async_get as async_get_instance_id +from homeassistant.helpers.typing import ConfigType from .const import CONF_FOLDER_ID, CONF_FOLDER_NAME, DATA_BACKUP_AGENT_LISTENERS, DOMAIN from .coordinator import ( @@ -32,13 +34,20 @@ from .coordinator import ( OneDriveRuntimeData, OneDriveUpdateCoordinator, ) +from .services import async_register_services +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [Platform.SENSOR] - _LOGGER = logging.getLogger(__name__) +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the OneDrive integration.""" + async_register_services(hass) + return True + + async def async_setup_entry(hass: HomeAssistant, entry: OneDriveConfigEntry) -> bool: """Set up OneDrive from a config entry.""" client, get_access_token = await _get_onedrive_client(hass, entry) diff --git a/homeassistant/components/onedrive/backup.py b/homeassistant/components/onedrive/backup.py index 9c7371bee4b..41a244506ea 100644 --- a/homeassistant/components/onedrive/backup.py +++ b/homeassistant/components/onedrive/backup.py @@ -138,7 +138,7 @@ class OneDriveBackupAgent(BackupAgent): """Download a backup file.""" backups = await self._list_cached_backups() if backup_id not in backups: - raise BackupNotFound("Backup not found") + raise BackupNotFound(f"Backup {backup_id} not found") stream = await self._client.download_drive_item( backups[backup_id].backup_file_id, timeout=TIMEOUT @@ -201,7 +201,7 @@ class OneDriveBackupAgent(BackupAgent): """Delete a backup file.""" backups = await self._list_cached_backups() if backup_id not in backups: - return + raise BackupNotFound(f"Backup {backup_id} not found") backup = backups[backup_id] @@ -221,12 +221,12 @@ class OneDriveBackupAgent(BackupAgent): ] @handle_backup_errors - async def async_get_backup( - self, backup_id: str, **kwargs: Any - ) -> AgentBackup | None: + async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup: """Return a backup.""" backups = await self._list_cached_backups() - return backups[backup_id].backup if backup_id in backups else None + if backup_id not in backups: + raise BackupNotFound(f"Backup {backup_id} not found") + return backups[backup_id].backup async def _list_cached_backups(self) -> dict[str, OneDriveBackup]: """List backups with a cache.""" diff --git 
a/homeassistant/components/onedrive/coordinator.py b/homeassistant/components/onedrive/coordinator.py index 7b2dbaab87a..3eb7d762712 100644 --- a/homeassistant/components/onedrive/coordinator.py +++ b/homeassistant/components/onedrive/coordinator.py @@ -88,8 +88,8 @@ class OneDriveUpdateCoordinator(DataUpdateCoordinator[Drive]): ), translation_key=key, translation_placeholders={ - "total": str(drive.quota.total), - "used": str(drive.quota.used), + "total": f"{drive.quota.total / (1024**3):.2f}", + "used": f"{drive.quota.used / (1024**3):.2f}", }, ) return drive diff --git a/homeassistant/components/onedrive/icons.json b/homeassistant/components/onedrive/icons.json index b693f69934e..2ac4921439c 100644 --- a/homeassistant/components/onedrive/icons.json +++ b/homeassistant/components/onedrive/icons.json @@ -20,5 +20,10 @@ } } } + }, + "services": { + "upload": { + "service": "mdi:cloud-upload" + } } } diff --git a/homeassistant/components/onedrive/quality_scale.yaml b/homeassistant/components/onedrive/quality_scale.yaml index 023410d89b2..1632c2670e0 100644 --- a/homeassistant/components/onedrive/quality_scale.yaml +++ b/homeassistant/components/onedrive/quality_scale.yaml @@ -1,18 +1,13 @@ rules: # Bronze - action-setup: - status: exempt - comment: Integration does not register custom actions. + action-setup: done appropriate-polling: done brands: done common-modules: done config-flow-test-coverage: done config-flow: done dependency-transparency: done - docs-actions: - status: exempt - comment: | - This integration does not have any custom actions. + docs-actions: done docs-high-level-description: done docs-installation-instructions: done docs-removal-instructions: done diff --git a/homeassistant/components/onedrive/services.py b/homeassistant/components/onedrive/services.py new file mode 100644 index 00000000000..1f1afe1507c --- /dev/null +++ b/homeassistant/components/onedrive/services.py @@ -0,0 +1,131 @@ +"""OneDrive services.""" + +from __future__ import annotations + +import asyncio +from dataclasses import asdict +from pathlib import Path +from typing import cast + +from onedrive_personal_sdk.exceptions import OneDriveException +import voluptuous as vol + +from homeassistant.const import CONF_FILENAME +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv + +from .const import DOMAIN +from .coordinator import OneDriveConfigEntry + +CONF_CONFIG_ENTRY_ID = "config_entry_id" +CONF_DESTINATION_FOLDER = "destination_folder" + +UPLOAD_SERVICE = "upload" +UPLOAD_SERVICE_SCHEMA = vol.Schema( + { + vol.Required(CONF_CONFIG_ENTRY_ID): cv.string, + vol.Required(CONF_FILENAME): vol.All(cv.ensure_list, [cv.string]), + vol.Required(CONF_DESTINATION_FOLDER): cv.string, + } +) +CONTENT_SIZE_LIMIT = 250 * 1024 * 1024 + + +def _read_file_contents( + hass: HomeAssistant, filenames: list[str] +) -> list[tuple[str, bytes]]: + """Return the mime types and file contents for each file.""" + results = [] + for filename in filenames: + if not hass.config.is_allowed_path(filename): + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="no_access_to_path", + translation_placeholders={"filename": filename}, + ) + filename_path = Path(filename) + if not filename_path.exists(): + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="filename_does_not_exist", + 
translation_placeholders={"filename": filename}, + ) + if filename_path.stat().st_size > CONTENT_SIZE_LIMIT: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="file_too_large", + translation_placeholders={ + "filename": filename, + "size": str(filename_path.stat().st_size), + "limit": str(CONTENT_SIZE_LIMIT), + }, + ) + results.append((filename_path.name, filename_path.read_bytes())) + return results + + +def async_register_services(hass: HomeAssistant) -> None: + """Register OneDrive services.""" + + async def async_handle_upload(call: ServiceCall) -> ServiceResponse: + """Generate content from text and optionally images.""" + config_entry: OneDriveConfigEntry | None = hass.config_entries.async_get_entry( + call.data[CONF_CONFIG_ENTRY_ID] + ) + if not config_entry: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="integration_not_found", + translation_placeholders={"target": DOMAIN}, + ) + client = config_entry.runtime_data.client + upload_tasks = [] + file_results = await hass.async_add_executor_job( + _read_file_contents, hass, call.data[CONF_FILENAME] + ) + + # make sure the destination folder exists + try: + folder_id = (await client.get_approot()).id + for folder in ( + cast(str, call.data[CONF_DESTINATION_FOLDER]).strip("/").split("/") + ): + folder_id = (await client.create_folder(folder_id, folder)).id + except OneDriveException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="create_folder_error", + translation_placeholders={"message": str(err)}, + ) from err + + upload_tasks = [ + client.upload_file(folder_id, file_name, content) + for file_name, content in file_results + ] + try: + upload_results = await asyncio.gather(*upload_tasks) + except OneDriveException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="upload_error", + translation_placeholders={"message": str(err)}, + ) from err + + if call.return_response: + return {"files": [asdict(item_result) for item_result in upload_results]} + return None + + if not hass.services.has_service(DOMAIN, UPLOAD_SERVICE): + hass.services.async_register( + DOMAIN, + UPLOAD_SERVICE, + async_handle_upload, + schema=UPLOAD_SERVICE_SCHEMA, + supports_response=SupportsResponse.OPTIONAL, + ) diff --git a/homeassistant/components/onedrive/services.yaml b/homeassistant/components/onedrive/services.yaml new file mode 100644 index 00000000000..0cf0faf6b60 --- /dev/null +++ b/homeassistant/components/onedrive/services.yaml @@ -0,0 +1,15 @@ +upload: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: onedrive + filename: + required: false + selector: + object: + destination_folder: + required: true + selector: + text: diff --git a/homeassistant/components/onedrive/strings.json b/homeassistant/components/onedrive/strings.json index 37e19eb68ca..90fa4efc3ec 100644 --- a/homeassistant/components/onedrive/strings.json +++ b/homeassistant/components/onedrive/strings.json @@ -90,6 +90,24 @@ }, "update_failed": { "message": "Failed to update drive state" + }, + "integration_not_found": { + "message": "Integration \"{target}\" not found in registry." 
+ }, + "no_access_to_path": { + "message": "Cannot read {filename}, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`" + }, + "filename_does_not_exist": { + "message": "`{filename}` does not exist" + }, + "file_too_large": { + "message": "`{filename}` is too large ({size} > {limit})" + }, + "upload_error": { + "message": "Failed to upload content: {message}" + }, + "create_folder_error": { + "message": "Failed to create folder: {message}" } }, "entity": { @@ -113,5 +131,27 @@ } } } + }, + "services": { + "upload": { + "name": "Upload file", + "description": "Uploads files to OneDrive.", + "fields": { + "config_entry_id": { + "name": "Config entry ID", + "description": "The config entry representing the OneDrive you want to upload to." + }, + "filename": { + "name": "Filename", + "description": "Path to the file to upload.", + "example": "/config/www/image.jpg" + }, + "destination_folder": { + "name": "Destination folder", + "description": "Folder inside the Home Assistant app folder (Apps/Home Assistant) you want to upload the file to. Will be created if it does not exist.", + "example": "photos/snapshots" + } + } + } } } diff --git a/homeassistant/components/onewire/config_flow.py b/homeassistant/components/onewire/config_flow.py index 8a5623772f7..2099d9aabb5 100644 --- a/homeassistant/components/onewire/config_flow.py +++ b/homeassistant/components/onewire/config_flow.py @@ -234,12 +234,7 @@ class OnewireOptionsFlowHandler(OptionsFlow): INPUT_ENTRY_DEVICE_SELECTION, default=self._get_current_configured_sensors(), description="Multiselect with list of devices to choose from", - ): cv.multi_select( - { - friendly_name: False - for friendly_name in self.configurable_devices - } - ), + ): cv.multi_select(dict.fromkeys(self.configurable_devices, False)), } ), errors=errors, diff --git a/homeassistant/components/onewire/strings.json b/homeassistant/components/onewire/strings.json index 46f41503d97..5e7719673b1 100644 --- a/homeassistant/components/onewire/strings.json +++ b/homeassistant/components/onewire/strings.json @@ -140,14 +140,14 @@ "device_selection": "[%key:component::onewire::options::error::device_not_selected%]" }, "description": "Select what configuration steps to process", - "title": "OneWire Device Options" + "title": "1-Wire device options" }, "configure_device": { "data": { - "precision": "Sensor Precision" + "precision": "Sensor precision" }, "description": "Select sensor precision for {sensor_id}", - "title": "OneWire Sensor Precision" + "title": "1-Wire sensor precision" } } } diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 8f9587bc426..f7fe83c57a3 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -588,7 +588,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self._attr_volume_level = min(1, volume_level) elif command in ["muting", "audio-muting"]: self._attr_is_volume_muted = bool(value == "on") - elif command in ["selector", "input-selector"]: + elif command in ["selector", "input-selector"] and value != "N/A": self._parse_source(value) self._query_av_info_delayed() elif command == "hdmi-output-selector": diff --git a/homeassistant/components/onvif/__init__.py b/homeassistant/components/onvif/__init__.py index 02e7e28ea18..09a4aba52bf 100644 --- a/homeassistant/components/onvif/__init__.py +++ b/homeassistant/components/onvif/__init__.py @@ -19,8 +19,9 @@ from homeassistant.const import ( 
HTTP_DIGEST_AUTHENTICATION, Platform, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import entity_registry as er from .const import ( CONF_ENABLE_WEBHOOKS, @@ -99,6 +100,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: if device.capabilities.imaging: device.platforms += [Platform.SWITCH] + _async_migrate_camera_entities_unique_ids(hass, entry, device) + await hass.config_entries.async_forward_entry_setups(entry, device.platforms) entry.async_on_unload( @@ -155,3 +158,58 @@ async def async_populate_options(hass: HomeAssistant, entry: ConfigEntry) -> Non } hass.config_entries.async_update_entry(entry, options=options) + + +@callback +def _async_migrate_camera_entities_unique_ids( + hass: HomeAssistant, config_entry: ConfigEntry, device: ONVIFDevice +) -> None: + """Migrate unique ids of camera entities from profile index to profile token.""" + entity_reg = er.async_get(hass) + entities: list[er.RegistryEntry] = er.async_entries_for_config_entry( + entity_reg, config_entry.entry_id + ) + + mac_or_serial = device.info.mac or device.info.serial_number + old_uid_start = f"{mac_or_serial}_" + new_uid_start = f"{mac_or_serial}#" + + for entity in entities: + if entity.domain != Platform.CAMERA: + continue + + if ( + not entity.unique_id.startswith(old_uid_start) + and entity.unique_id != mac_or_serial + ): + continue + + index = 0 + if entity.unique_id.startswith(old_uid_start): + try: + index = int(entity.unique_id[len(old_uid_start) :]) + except ValueError: + LOGGER.error( + "Failed to migrate unique id for '%s' as the ONVIF profile index could not be parsed from unique id '%s'", + entity.entity_id, + entity.unique_id, + ) + continue + try: + token = device.profiles[index].token + except IndexError: + LOGGER.error( + "Failed to migrate unique id for '%s' as the ONVIF profile index '%d' parsed from unique id '%s' could not be found", + entity.entity_id, + index, + entity.unique_id, + ) + continue + new_uid = f"{new_uid_start}{token}" + LOGGER.debug( + "Migrating unique id for '%s' from '%s' to '%s'", + entity.entity_id, + entity.unique_id, + new_uid, + ) + entity_reg.async_update_entity(entity.entity_id, new_unique_id=new_uid) diff --git a/homeassistant/components/onvif/camera.py b/homeassistant/components/onvif/camera.py index da99e170ff6..fc17e912fcc 100644 --- a/homeassistant/components/onvif/camera.py +++ b/homeassistant/components/onvif/camera.py @@ -117,10 +117,7 @@ class ONVIFCameraEntity(ONVIFBaseEntity, Camera): self._attr_entity_registry_enabled_default = ( device.max_resolution == profile.video.resolution.width ) - if profile.index: - self._attr_unique_id = f"{self.mac_or_serial}_{profile.index}" - else: - self._attr_unique_id = self.mac_or_serial + self._attr_unique_id = f"{self.mac_or_serial}#{profile.token}" self._attr_name = f"{device.name} {profile.name}" @property diff --git a/homeassistant/components/onvif/event.py b/homeassistant/components/onvif/event.py index b7b34f7be9f..d1b93304ccc 100644 --- a/homeassistant/components/onvif/event.py +++ b/homeassistant/components/onvif/event.py @@ -174,11 +174,20 @@ class EventManager: UNHANDLED_TOPICS.add(topic) continue - event = await parser(unique_id, msg) + try: + event = await parser(unique_id, msg) + error = None + except (AttributeError, KeyError) as e: + event = None + error = e if not event: LOGGER.warning( - "%s: Unable to parse 
event from %s: %s", self.name, unique_id, msg + "%s: Unable to parse event from %s: %s: %s", + self.name, + unique_id, + error, + msg, ) return diff --git a/homeassistant/components/onvif/parsers.py b/homeassistant/components/onvif/parsers.py index 6eb1d001796..e5a731c73f6 100644 --- a/homeassistant/components/onvif/parsers.py +++ b/homeassistant/components/onvif/parsers.py @@ -49,24 +49,22 @@ def local_datetime_or_none(value: str) -> datetime.datetime | None: @PARSERS.register("tns1:VideoSource/MotionAlarm") +@PARSERS.register("tns1:Device/Trigger/tnshik:AlarmIn") async def async_parse_motion_alarm(uid: str, msg) -> Event | None: """Handle parsing event message. Topic: tns1:VideoSource/MotionAlarm """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Motion Alarm", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Motion Alarm", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:VideoSource/ImageTooBlurry/AnalyticsService") @@ -77,20 +75,17 @@ async def async_parse_image_too_blurry(uid: str, msg) -> Event | None: Topic: tns1:VideoSource/ImageTooBlurry/* """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Image Too Blurry", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Image Too Blurry", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:VideoSource/ImageTooDark/AnalyticsService") @@ -101,20 +96,17 @@ async def async_parse_image_too_dark(uid: str, msg) -> Event | None: Topic: tns1:VideoSource/ImageTooDark/* """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Image Too Dark", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Image Too Dark", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:VideoSource/ImageTooBright/AnalyticsService") @@ -125,20 +117,17 @@ async def async_parse_image_too_bright(uid: str, msg) -> Event | None: Topic: tns1:VideoSource/ImageTooBright/* """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Image Too Bright", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Image Too Bright", + 
"binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:VideoSource/GlobalSceneChange/AnalyticsService") @@ -149,19 +138,16 @@ async def async_parse_scene_change(uid: str, msg) -> Event | None: Topic: tns1:VideoSource/GlobalSceneChange/* """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Global Scene Change", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Global Scene Change", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:AudioAnalytics/Audio/DetectedSound") @@ -170,29 +156,26 @@ async def async_parse_detected_sound(uid: str, msg) -> Event | None: Topic: tns1:AudioAnalytics/Audio/DetectedSound """ - try: - audio_source = "" - audio_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "AudioSourceConfigurationToken": - audio_source = source.Value - if source.Name == "AudioAnalyticsConfigurationToken": - audio_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + audio_source = "" + audio_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "AudioSourceConfigurationToken": + audio_source = source.Value + if source.Name == "AudioAnalyticsConfigurationToken": + audio_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{audio_source}_{audio_analytics}_{rule}", - "Detected Sound", - "binary_sensor", - "sound", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{audio_source}_{audio_analytics}_{rule}", + "Detected Sound", + "binary_sensor", + "sound", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/FieldDetector/ObjectsInside") @@ -201,30 +184,26 @@ async def async_parse_field_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/FieldDetector/ObjectsInside """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - evt = Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Field Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None - return evt + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Field 
Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/CellMotionDetector/Motion") @@ -233,29 +212,26 @@ async def async_parse_cell_motion_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/CellMotionDetector/Motion """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Cell Motion Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Cell Motion Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/MotionRegionDetector/Motion") @@ -264,29 +240,26 @@ async def async_parse_motion_region_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MotionRegionDetector/Motion """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Motion Region Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value in ["1", "true"], - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Motion Region Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value in ["1", "true"], + ) @PARSERS.register("tns1:RuleEngine/TamperDetector/Tamper") @@ -295,30 +268,27 @@ async def async_parse_tamper_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/TamperDetector/Tamper """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = 
source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Tamper Detection", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Tamper Detection", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:RuleEngine/MyRuleDetector/DogCatDetect") @@ -327,23 +297,20 @@ async def async_parse_dog_cat_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/DogCatDetect """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Pet Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Pet Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/MyRuleDetector/VehicleDetect") @@ -352,23 +319,20 @@ async def async_parse_vehicle_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/VehicleDetect """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Vehicle Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Vehicle Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) _TAPO_EVENT_TEMPLATES: dict[str, Event] = { @@ -420,32 +384,28 @@ async def async_parse_tplink_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/PeopleDetector/People Topic: tns1:RuleEngine/TPSmartEventDetector/TPSmartEvent """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + 
for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - for item in payload.Data.SimpleItem: - event_template = _TAPO_EVENT_TEMPLATES.get(item.Name, None) - if event_template is None: - continue + for item in payload.Data.SimpleItem: + event_template = _TAPO_EVENT_TEMPLATES.get(item.Name, None) + if event_template is None: + continue - return dataclasses.replace( - event_template, - uid=f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - value=item.Value == "true", - ) - - except (AttributeError, KeyError): - return None + return dataclasses.replace( + event_template, + uid=f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + value=item.Value == "true", + ) return None @@ -456,23 +416,20 @@ async def async_parse_person_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/PeopleDetect """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Person Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Person Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/MyRuleDetector/FaceDetect") @@ -481,23 +438,20 @@ async def async_parse_face_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/FaceDetect """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Face Detection", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Face Detection", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:RuleEngine/MyRuleDetector/Visitor") @@ -506,23 +460,42 @@ async def async_parse_visitor_detector(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/MyRuleDetector/Visitor """ - try: - video_source = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "Source": - video_source = _normalize_video_source(source.Value) + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) - return Event( - f"{uid}_{topic}_{video_source}", - "Visitor Detection", - "binary_sensor", - "occupancy", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except 
(AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Visitor Detection", + "binary_sensor", + "occupancy", + None, + payload.Data.SimpleItem[0].Value == "true", + ) + + +@PARSERS.register("tns1:RuleEngine/MyRuleDetector/Package") +async def async_parse_package_detector(uid: str, msg) -> Event | None: + """Handle parsing event message. + + Topic: tns1:RuleEngine/MyRuleDetector/Package + """ + video_source = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "Source": + video_source = _normalize_video_source(source.Value) + + return Event( + f"{uid}_{topic}_{video_source}", + "Package Detection", + "binary_sensor", + "occupancy", + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:Device/Trigger/DigitalInput") @@ -531,19 +504,16 @@ async def async_parse_digital_input(uid: str, msg) -> Event | None: Topic: tns1:Device/Trigger/DigitalInput """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Digital Input", - "binary_sensor", - None, - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Digital Input", + "binary_sensor", + None, + None, + payload.Data.SimpleItem[0].Value == "true", + ) @PARSERS.register("tns1:Device/Trigger/Relay") @@ -552,19 +522,16 @@ async def async_parse_relay(uid: str, msg) -> Event | None: Topic: tns1:Device/Trigger/Relay """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Relay Triggered", - "binary_sensor", - None, - None, - payload.Data.SimpleItem[0].Value == "active", - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Relay Triggered", + "binary_sensor", + None, + None, + payload.Data.SimpleItem[0].Value == "active", + ) @PARSERS.register("tns1:Device/HardwareFailure/StorageFailure") @@ -573,20 +540,17 @@ async def async_parse_storage_failure(uid: str, msg) -> Event | None: Topic: tns1:Device/HardwareFailure/StorageFailure """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Storage Failure", - "binary_sensor", - "problem", - None, - payload.Data.SimpleItem[0].Value == "true", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Storage Failure", + "binary_sensor", + "problem", + None, + payload.Data.SimpleItem[0].Value == "true", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:Monitoring/ProcessorUsage") @@ -595,23 +559,20 @@ async def async_parse_processor_usage(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/ProcessorUsage """ - try: - topic, payload = extract_message(msg) - usage = float(payload.Data.SimpleItem[0].Value) - if usage <= 1: - usage *= 100 + topic, payload = extract_message(msg) + usage = float(payload.Data.SimpleItem[0].Value) + if usage <= 1: + usage *= 100 - return Event( - f"{uid}_{topic}", - "Processor Usage", - "sensor", - None, - "percent", - 
int(usage), - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}", + "Processor Usage", + "sensor", + None, + "percent", + int(usage), + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:Monitoring/OperatingTime/LastReboot") @@ -620,20 +581,17 @@ async def async_parse_last_reboot(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/OperatingTime/LastReboot """ - try: - topic, payload = extract_message(msg) - date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) - return Event( - f"{uid}_{topic}", - "Last Reboot", - "sensor", - "timestamp", - None, - date_time, - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) + return Event( + f"{uid}_{topic}", + "Last Reboot", + "sensor", + "timestamp", + None, + date_time, + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:Monitoring/OperatingTime/LastReset") @@ -642,21 +600,18 @@ async def async_parse_last_reset(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/OperatingTime/LastReset """ - try: - topic, payload = extract_message(msg) - date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) - return Event( - f"{uid}_{topic}", - "Last Reset", - "sensor", - "timestamp", - None, - date_time, - EntityCategory.DIAGNOSTIC, - entity_enabled=False, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) + return Event( + f"{uid}_{topic}", + "Last Reset", + "sensor", + "timestamp", + None, + date_time, + EntityCategory.DIAGNOSTIC, + entity_enabled=False, + ) @PARSERS.register("tns1:Monitoring/Backup/Last") @@ -665,22 +620,18 @@ async def async_parse_backup_last(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/Backup/Last """ - - try: - topic, payload = extract_message(msg) - date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) - return Event( - f"{uid}_{topic}", - "Last Backup", - "sensor", - "timestamp", - None, - date_time, - EntityCategory.DIAGNOSTIC, - entity_enabled=False, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) + return Event( + f"{uid}_{topic}", + "Last Backup", + "sensor", + "timestamp", + None, + date_time, + EntityCategory.DIAGNOSTIC, + entity_enabled=False, + ) @PARSERS.register("tns1:Monitoring/OperatingTime/LastClockSynchronization") @@ -689,21 +640,18 @@ async def async_parse_last_clock_sync(uid: str, msg) -> Event | None: Topic: tns1:Monitoring/OperatingTime/LastClockSynchronization """ - try: - topic, payload = extract_message(msg) - date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) - return Event( - f"{uid}_{topic}", - "Last Clock Synchronization", - "sensor", - "timestamp", - None, - date_time, - EntityCategory.DIAGNOSTIC, - entity_enabled=False, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + date_time = local_datetime_or_none(payload.Data.SimpleItem[0].Value) + return Event( + f"{uid}_{topic}", + "Last Clock Synchronization", + "sensor", + "timestamp", + None, + date_time, + EntityCategory.DIAGNOSTIC, + entity_enabled=False, + ) @PARSERS.register("tns1:RecordingConfig/JobState") @@ -713,20 +661,17 @@ async def async_parse_jobstate(uid: str, msg) -> Event | None: Topic: 
tns1:RecordingConfig/JobState """ - try: - topic, payload = extract_message(msg) - source = payload.Source.SimpleItem[0].Value - return Event( - f"{uid}_{topic}_{source}", - "Recording Job State", - "binary_sensor", - None, - None, - payload.Data.SimpleItem[0].Value == "Active", - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + topic, payload = extract_message(msg) + source = payload.Source.SimpleItem[0].Value + return Event( + f"{uid}_{topic}_{source}", + "Recording Job State", + "binary_sensor", + None, + None, + payload.Data.SimpleItem[0].Value == "Active", + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:RuleEngine/LineDetector/Crossed") @@ -735,30 +680,27 @@ async def async_parse_linedetector_crossed(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/LineDetector/Crossed """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = source.Value - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = source.Value + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Line Detector Crossed", - "sensor", - None, - None, - payload.Data.SimpleItem[0].Value, - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Line Detector Crossed", + "sensor", + None, + None, + payload.Data.SimpleItem[0].Value, + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:RuleEngine/CountAggregation/Counter") @@ -767,30 +709,27 @@ async def async_parse_count_aggregation_counter(uid: str, msg) -> Event | None: Topic: tns1:RuleEngine/CountAggregation/Counter """ - try: - video_source = "" - video_analytics = "" - rule = "" - topic, payload = extract_message(msg) - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - if source.Name == "VideoAnalyticsConfigurationToken": - video_analytics = source.Value - if source.Name == "Rule": - rule = source.Value + video_source = "" + video_analytics = "" + rule = "" + topic, payload = extract_message(msg) + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + if source.Name == "VideoAnalyticsConfigurationToken": + video_analytics = source.Value + if source.Name == "Rule": + rule = source.Value - return Event( - f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", - "Count Aggregation Counter", - "sensor", - None, - None, - payload.Data.SimpleItem[0].Value, - EntityCategory.DIAGNOSTIC, - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}_{video_analytics}_{rule}", + "Count Aggregation Counter", + "sensor", + None, + None, + payload.Data.SimpleItem[0].Value, + EntityCategory.DIAGNOSTIC, + ) @PARSERS.register("tns1:UserAlarm/IVA/HumanShapeDetect") @@ -799,21 +738,18 @@ async 
def async_parse_human_shape_detect(uid: str, msg) -> Event | None: Topic: tns1:UserAlarm/IVA/HumanShapeDetect """ - try: - topic, payload = extract_message(msg) - video_source = "" - for source in payload.Source.SimpleItem: - if source.Name == "VideoSourceConfigurationToken": - video_source = _normalize_video_source(source.Value) - break + topic, payload = extract_message(msg) + video_source = "" + for source in payload.Source.SimpleItem: + if source.Name == "VideoSourceConfigurationToken": + video_source = _normalize_video_source(source.Value) + break - return Event( - f"{uid}_{topic}_{video_source}", - "Human Shape Detect", - "binary_sensor", - "motion", - None, - payload.Data.SimpleItem[0].Value == "true", - ) - except (AttributeError, KeyError): - return None + return Event( + f"{uid}_{topic}_{video_source}", + "Human Shape Detect", + "binary_sensor", + "motion", + None, + payload.Data.SimpleItem[0].Value == "true", + ) diff --git a/homeassistant/components/onvif/strings.json b/homeassistant/components/onvif/strings.json index 0afb5e59e8e..7988c50b1ac 100644 --- a/homeassistant/components/onvif/strings.json +++ b/homeassistant/components/onvif/strings.json @@ -62,12 +62,12 @@ "step": { "onvif_devices": { "data": { - "extra_arguments": "Extra FFMPEG arguments", + "extra_arguments": "Extra FFmpeg arguments", "rtsp_transport": "RTSP transport mechanism", "use_wallclock_as_timestamps": "Use wall clock as timestamps", - "enable_webhooks": "Enable Webhooks" + "enable_webhooks": "Enable webhooks" }, - "title": "ONVIF Device Options" + "title": "ONVIF device options" } } }, diff --git a/homeassistant/components/openai_conversation/__init__.py b/homeassistant/components/openai_conversation/__init__.py index 0fbda9b7f4a..fcf6ab298dc 100644 --- a/homeassistant/components/openai_conversation/__init__.py +++ b/homeassistant/components/openai_conversation/__init__.py @@ -2,7 +2,20 @@ from __future__ import annotations +import base64 +from mimetypes import guess_file_type +from pathlib import Path + import openai +from openai.types.images_response import ImagesResponse +from openai.types.responses import ( + EasyInputMessageParam, + Response, + ResponseInputImageParam, + ResponseInputMessageContentListParam, + ResponseInputParam, + ResponseInputTextParam, +) import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -22,15 +35,41 @@ from homeassistant.helpers import config_validation as cv, selector from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN, LOGGER +from .const import ( + CONF_CHAT_MODEL, + CONF_FILENAMES, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_REASONING_EFFORT, + CONF_TEMPERATURE, + CONF_TOP_P, + DOMAIN, + LOGGER, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, + RECOMMENDED_REASONING_EFFORT, + RECOMMENDED_TEMPERATURE, + RECOMMENDED_TOP_P, +) SERVICE_GENERATE_IMAGE = "generate_image" +SERVICE_GENERATE_CONTENT = "generate_content" + PLATFORMS = (Platform.CONVERSATION,) CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) type OpenAIConfigEntry = ConfigEntry[openai.AsyncClient] +def encode_file(file_path: str) -> tuple[str, str]: + """Return base64 version of file contents.""" + mime_type, _ = guess_file_type(file_path) + if mime_type is None: + mime_type = "application/octet-stream" + with open(file_path, "rb") as image_file: + return (mime_type, base64.b64encode(image_file.read()).decode("utf-8")) + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> 
bool: """Set up OpenAI Conversation.""" @@ -49,9 +88,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: client: openai.AsyncClient = entry.runtime_data try: - response = await client.images.generate( + response: ImagesResponse = await client.images.generate( model="dall-e-3", - prompt=call.data["prompt"], + prompt=call.data[CONF_PROMPT], size=call.data["size"], quality=call.data["quality"], style=call.data["style"], @@ -63,6 +102,108 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return response.data[0].model_dump(exclude={"b64_json"}) + async def send_prompt(call: ServiceCall) -> ServiceResponse: + """Send a prompt to ChatGPT and return the response.""" + entry_id = call.data["config_entry"] + entry = hass.config_entries.async_get_entry(entry_id) + + if entry is None or entry.domain != DOMAIN: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_config_entry", + translation_placeholders={"config_entry": entry_id}, + ) + + model: str = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) + client: openai.AsyncClient = entry.runtime_data + + content: ResponseInputMessageContentListParam = [ + ResponseInputTextParam(type="input_text", text=call.data[CONF_PROMPT]) + ] + + def append_files_to_content() -> None: + for filename in call.data[CONF_FILENAMES]: + if not hass.config.is_allowed_path(filename): + raise HomeAssistantError( + f"Cannot read `{filename}`, no access to path; " + "`allowlist_external_dirs` may need to be adjusted in " + "`configuration.yaml`" + ) + if not Path(filename).exists(): + raise HomeAssistantError(f"`{filename}` does not exist") + mime_type, base64_file = encode_file(filename) + if "image/" not in mime_type: + raise HomeAssistantError( + "Only images are supported by the OpenAI API," + f"`{filename}` is not an image file" + ) + content.append( + ResponseInputImageParam( + type="input_image", + file_id=filename, + image_url=f"data:{mime_type};base64,{base64_file}", + detail="auto", + ) + ) + + if CONF_FILENAMES in call.data: + await hass.async_add_executor_job(append_files_to_content) + + messages: ResponseInputParam = [ + EasyInputMessageParam(type="message", role="user", content=content) + ] + + try: + model_args = { + "model": model, + "input": messages, + "max_output_tokens": entry.options.get( + CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS + ), + "top_p": entry.options.get(CONF_TOP_P, RECOMMENDED_TOP_P), + "temperature": entry.options.get( + CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE + ), + "user": call.context.user_id, + "store": False, + } + + if model.startswith("o"): + model_args["reasoning"] = { + "effort": entry.options.get( + CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT + ) + } + + response: Response = await client.responses.create(**model_args) + + except openai.OpenAIError as err: + raise HomeAssistantError(f"Error generating content: {err}") from err + except FileNotFoundError as err: + raise HomeAssistantError(f"Error generating content: {err}") from err + + return {"text": response.output_text} + + hass.services.async_register( + DOMAIN, + SERVICE_GENERATE_CONTENT, + send_prompt, + schema=vol.Schema( + { + vol.Required("config_entry"): selector.ConfigEntrySelector( + { + "integration": DOMAIN, + } + ), + vol.Required(CONF_PROMPT): cv.string, + vol.Optional(CONF_FILENAMES, default=[]): vol.All( + cv.ensure_list, [cv.string] + ), + } + ), + supports_response=SupportsResponse.ONLY, + ) + hass.services.async_register( DOMAIN, SERVICE_GENERATE_IMAGE, @@ -74,7 
+215,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: "integration": DOMAIN, } ), - vol.Required("prompt"): cv.string, + vol.Required(CONF_PROMPT): cv.string, vol.Optional("size", default="1024x1024"): vol.In( ("1024x1024", "1024x1792", "1792x1024") ), @@ -84,6 +225,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ), supports_response=SupportsResponse.ONLY, ) + return True diff --git a/homeassistant/components/openai_conversation/config_flow.py b/homeassistant/components/openai_conversation/config_flow.py index c631884ea0b..7304eb52da3 100644 --- a/homeassistant/components/openai_conversation/config_flow.py +++ b/homeassistant/components/openai_conversation/config_flow.py @@ -2,22 +2,31 @@ from __future__ import annotations +import json import logging from types import MappingProxyType from typing import Any import openai import voluptuous as vol +from voluptuous_openapi import convert +from homeassistant.components.zone import ENTITY_ID_HOME from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API +from homeassistant.const import ( + ATTR_LATITUDE, + ATTR_LONGITUDE, + CONF_API_KEY, + CONF_LLM_HASS_API, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import llm +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.selector import ( NumberSelector, NumberSelectorConfig, @@ -37,12 +46,22 @@ from .const import ( CONF_RECOMMENDED, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, RECOMMENDED_REASONING_EFFORT, RECOMMENDED_TEMPERATURE, RECOMMENDED_TOP_P, + RECOMMENDED_WEB_SEARCH, + RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, + RECOMMENDED_WEB_SEARCH_USER_LOCATION, UNSUPPORTED_MODELS, ) @@ -66,7 +85,9 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None: Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. 
""" - client = openai.AsyncOpenAI(api_key=data[CONF_API_KEY]) + client = openai.AsyncOpenAI( + api_key=data[CONF_API_KEY], http_client=get_async_client(hass) + ) await hass.async_add_executor_job(client.with_options(timeout=10.0).models.list) @@ -137,7 +158,16 @@ class OpenAIOptionsFlow(OptionsFlow): if user_input.get(CONF_CHAT_MODEL) in UNSUPPORTED_MODELS: errors[CONF_CHAT_MODEL] = "model_not_supported" - else: + + if user_input.get(CONF_WEB_SEARCH): + if not user_input.get( + CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL + ).startswith("gpt-4o"): + errors[CONF_WEB_SEARCH] = "web_search_not_supported" + elif user_input.get(CONF_WEB_SEARCH_USER_LOCATION): + user_input.update(await self.get_location_data()) + + if not errors: return self.async_create_entry(title="", data=user_input) else: # Re-render the options again, now with the recommended options shown/hidden @@ -156,6 +186,59 @@ class OpenAIOptionsFlow(OptionsFlow): errors=errors, ) + async def get_location_data(self) -> dict[str, str]: + """Get approximate location data of the user.""" + location_data: dict[str, str] = {} + zone_home = self.hass.states.get(ENTITY_ID_HOME) + if zone_home is not None: + client = openai.AsyncOpenAI( + api_key=self.config_entry.data[CONF_API_KEY], + http_client=get_async_client(self.hass), + ) + location_schema = vol.Schema( + { + vol.Optional( + CONF_WEB_SEARCH_CITY, + description="Free text input for the city, e.g. `San Francisco`", + ): str, + vol.Optional( + CONF_WEB_SEARCH_REGION, + description="Free text input for the region, e.g. `California`", + ): str, + } + ) + response = await client.responses.create( + model=RECOMMENDED_CHAT_MODEL, + input=[ + { + "role": "system", + "content": "Where are the following coordinates located: " + f"({zone_home.attributes[ATTR_LATITUDE]}," + f" {zone_home.attributes[ATTR_LONGITUDE]})?", + } + ], + text={ + "format": { + "type": "json_schema", + "name": "approximate_location", + "description": "Approximate location data of the user " + "for refined web search results", + "schema": convert(location_schema), + "strict": False, + } + }, + store=False, + ) + location_data = location_schema(json.loads(response.output_text) or {}) + + if self.hass.config.country: + location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country + location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone + + _LOGGER.debug("Location data: %s", location_data) + + return location_data + def openai_config_option_schema( hass: HomeAssistant, @@ -227,10 +310,35 @@ def openai_config_option_schema( ): SelectSelector( SelectSelectorConfig( options=["low", "medium", "high"], - translation_key="reasoning_effort", + translation_key=CONF_REASONING_EFFORT, mode=SelectSelectorMode.DROPDOWN, ) ), + vol.Optional( + CONF_WEB_SEARCH, + description={"suggested_value": options.get(CONF_WEB_SEARCH)}, + default=RECOMMENDED_WEB_SEARCH, + ): bool, + vol.Optional( + CONF_WEB_SEARCH_CONTEXT_SIZE, + description={ + "suggested_value": options.get(CONF_WEB_SEARCH_CONTEXT_SIZE) + }, + default=RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, + ): SelectSelector( + SelectSelectorConfig( + options=["low", "medium", "high"], + translation_key=CONF_WEB_SEARCH_CONTEXT_SIZE, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + vol.Optional( + CONF_WEB_SEARCH_USER_LOCATION, + description={ + "suggested_value": options.get(CONF_WEB_SEARCH_USER_LOCATION) + }, + default=RECOMMENDED_WEB_SEARCH_USER_LOCATION, + ): bool, } ) return schema diff --git a/homeassistant/components/openai_conversation/const.py 
b/homeassistant/components/openai_conversation/const.py index 793e021e332..41abc504219 100644 --- a/homeassistant/components/openai_conversation/const.py +++ b/homeassistant/components/openai_conversation/const.py @@ -3,22 +3,34 @@ import logging DOMAIN = "openai_conversation" -LOGGER = logging.getLogger(__package__) +LOGGER: logging.Logger = logging.getLogger(__package__) -CONF_RECOMMENDED = "recommended" -CONF_PROMPT = "prompt" CONF_CHAT_MODEL = "chat_model" -RECOMMENDED_CHAT_MODEL = "gpt-4o-mini" +CONF_FILENAMES = "filenames" CONF_MAX_TOKENS = "max_tokens" -RECOMMENDED_MAX_TOKENS = 150 -CONF_TOP_P = "top_p" -RECOMMENDED_TOP_P = 1.0 -CONF_TEMPERATURE = "temperature" -RECOMMENDED_TEMPERATURE = 1.0 +CONF_PROMPT = "prompt" +CONF_PROMPT = "prompt" CONF_REASONING_EFFORT = "reasoning_effort" +CONF_RECOMMENDED = "recommended" +CONF_TEMPERATURE = "temperature" +CONF_TOP_P = "top_p" +CONF_WEB_SEARCH = "web_search" +CONF_WEB_SEARCH_USER_LOCATION = "user_location" +CONF_WEB_SEARCH_CONTEXT_SIZE = "search_context_size" +CONF_WEB_SEARCH_CITY = "city" +CONF_WEB_SEARCH_REGION = "region" +CONF_WEB_SEARCH_COUNTRY = "country" +CONF_WEB_SEARCH_TIMEZONE = "timezone" +RECOMMENDED_CHAT_MODEL = "gpt-4o-mini" +RECOMMENDED_MAX_TOKENS = 150 RECOMMENDED_REASONING_EFFORT = "low" +RECOMMENDED_TEMPERATURE = 1.0 +RECOMMENDED_TOP_P = 1.0 +RECOMMENDED_WEB_SEARCH = False +RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE = "medium" +RECOMMENDED_WEB_SEARCH_USER_LOCATION = False -UNSUPPORTED_MODELS = [ +UNSUPPORTED_MODELS: list[str] = [ "o1-mini", "o1-mini-2024-09-12", "o1-preview", diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index cc09ec77c0e..026e18f3ce1 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -2,21 +2,31 @@ from collections.abc import AsyncGenerator, Callable import json -from typing import Any, Literal, cast +from typing import Any, Literal import openai from openai._streaming import AsyncStream -from openai._types import NOT_GIVEN -from openai.types.chat import ( - ChatCompletionAssistantMessageParam, - ChatCompletionChunk, - ChatCompletionMessageParam, - ChatCompletionMessageToolCallParam, - ChatCompletionToolMessageParam, - ChatCompletionToolParam, +from openai.types.responses import ( + EasyInputMessageParam, + FunctionToolParam, + ResponseCompletedEvent, + ResponseErrorEvent, + ResponseFailedEvent, + ResponseFunctionCallArgumentsDeltaEvent, + ResponseFunctionCallArgumentsDoneEvent, + ResponseFunctionToolCall, + ResponseFunctionToolCallParam, + ResponseIncompleteEvent, + ResponseInputParam, + ResponseOutputItemAddedEvent, + ResponseOutputMessage, + ResponseStreamEvent, + ResponseTextDeltaEvent, + ToolParam, + WebSearchToolParam, ) -from openai.types.chat.chat_completion_message_tool_call_param import Function -from openai.types.shared_params import FunctionDefinition +from openai.types.responses.response_input_param import FunctionCallOutput +from openai.types.responses.web_search_tool_param import UserLocation from voluptuous_openapi import convert from homeassistant.components import assist_pipeline, conversation @@ -24,7 +34,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import chat_session, device_registry as dr, intent, llm +from 
homeassistant.helpers import device_registry as dr, intent, llm from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import OpenAIConfigEntry @@ -35,6 +45,13 @@ from .const import ( CONF_REASONING_EFFORT, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL, @@ -42,6 +59,7 @@ from .const import ( RECOMMENDED_REASONING_EFFORT, RECOMMENDED_TEMPERATURE, RECOMMENDED_TOP_P, + RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE, ) # Max number of back and forth with the LLM to generate a response @@ -60,122 +78,131 @@ async def async_setup_entry( def _format_tool( tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None -) -> ChatCompletionToolParam: +) -> FunctionToolParam: """Format tool specification.""" - tool_spec = FunctionDefinition( + return FunctionToolParam( + type="function", name=tool.name, parameters=convert(tool.parameters, custom_serializer=custom_serializer), + description=tool.description, + strict=False, ) - if tool.description: - tool_spec["description"] = tool.description - return ChatCompletionToolParam(type="function", function=tool_spec) def _convert_content_to_param( content: conversation.Content, -) -> ChatCompletionMessageParam: +) -> ResponseInputParam: """Convert any native chat message for this agent to the native format.""" - if content.role == "tool_result": - assert type(content) is conversation.ToolResultContent - return ChatCompletionToolMessageParam( - role="tool", - tool_call_id=content.tool_call_id, - content=json.dumps(content.tool_result), - ) - if content.role != "assistant" or not content.tool_calls: # type: ignore[union-attr] - role = content.role + messages: ResponseInputParam = [] + if isinstance(content, conversation.ToolResultContent): + return [ + FunctionCallOutput( + type="function_call_output", + call_id=content.tool_call_id, + output=json.dumps(content.tool_result), + ) + ] + + if content.content: + role: Literal["user", "assistant", "system", "developer"] = content.role if role == "system": role = "developer" - return cast( - ChatCompletionMessageParam, - {"role": content.role, "content": content.content}, # type: ignore[union-attr] + messages.append( + EasyInputMessageParam(type="message", role=role, content=content.content) ) - # Handle the Assistant content including tool calls. 
- assert type(content) is conversation.AssistantContent - return ChatCompletionAssistantMessageParam( - role="assistant", - content=content.content, - tool_calls=[ - ChatCompletionMessageToolCallParam( - id=tool_call.id, - function=Function( - arguments=json.dumps(tool_call.tool_args), - name=tool_call.tool_name, - ), - type="function", + if isinstance(content, conversation.AssistantContent) and content.tool_calls: + messages.extend( + ResponseFunctionToolCallParam( + type="function_call", + name=tool_call.tool_name, + arguments=json.dumps(tool_call.tool_args), + call_id=tool_call.id, ) for tool_call in content.tool_calls - ], - ) + ) + return messages async def _transform_stream( - result: AsyncStream[ChatCompletionChunk], + chat_log: conversation.ChatLog, + result: AsyncStream[ResponseStreamEvent], ) -> AsyncGenerator[conversation.AssistantContentDeltaDict]: """Transform an OpenAI delta stream into HA format.""" - current_tool_call: dict | None = None + async for event in result: + LOGGER.debug("Received event: %s", event) - async for chunk in result: - LOGGER.debug("Received chunk: %s", chunk) - choice = chunk.choices[0] - - if choice.finish_reason: - if current_tool_call: - yield { - "tool_calls": [ - llm.ToolInput( - id=current_tool_call["id"], - tool_name=current_tool_call["tool_name"], - tool_args=json.loads(current_tool_call["tool_args"]), - ) - ] - } - - break - - delta = chunk.choices[0].delta - - # We can yield delta messages not continuing or starting tool calls - if current_tool_call is None and not delta.tool_calls: - yield { # type: ignore[misc] - key: value - for key in ("role", "content") - if (value := getattr(delta, key)) is not None - } - continue - - # When doing tool calls, we should always have a tool call - # object or we have gotten stopped above with a finish_reason set. 
- if ( - not delta.tool_calls - or not (delta_tool_call := delta.tool_calls[0]) - or not delta_tool_call.function - ): - raise ValueError("Expected delta with tool call") - - if current_tool_call and delta_tool_call.index == current_tool_call["index"]: - current_tool_call["tool_args"] += delta_tool_call.function.arguments or "" - continue - - # We got tool call with new index, so we need to yield the previous - if current_tool_call: + if isinstance(event, ResponseOutputItemAddedEvent): + if isinstance(event.item, ResponseOutputMessage): + yield {"role": event.item.role} + elif isinstance(event.item, ResponseFunctionToolCall): + current_tool_call = event.item + elif isinstance(event, ResponseTextDeltaEvent): + yield {"content": event.delta} + elif isinstance(event, ResponseFunctionCallArgumentsDeltaEvent): + current_tool_call.arguments += event.delta + elif isinstance(event, ResponseFunctionCallArgumentsDoneEvent): + current_tool_call.status = "completed" yield { "tool_calls": [ llm.ToolInput( - id=current_tool_call["id"], - tool_name=current_tool_call["tool_name"], - tool_args=json.loads(current_tool_call["tool_args"]), + id=current_tool_call.call_id, + tool_name=current_tool_call.name, + tool_args=json.loads(current_tool_call.arguments), ) ] } + elif isinstance(event, ResponseCompletedEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } + } + ) + elif isinstance(event, ResponseIncompleteEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } + } + ) - current_tool_call = { - "index": delta_tool_call.index, - "id": delta_tool_call.id, - "tool_name": delta_tool_call.function.name, - "tool_args": delta_tool_call.function.arguments or "", - } + if ( + event.response.incomplete_details + and event.response.incomplete_details.reason + ): + reason: str = event.response.incomplete_details.reason + else: + reason = "unknown reason" + + if reason == "max_output_tokens": + reason = "max output tokens reached" + elif reason == "content_filter": + reason = "content filter triggered" + + raise HomeAssistantError(f"OpenAI response incomplete: {reason}") + elif isinstance(event, ResponseFailedEvent): + if event.response.usage is not None: + chat_log.async_trace( + { + "stats": { + "input_tokens": event.response.usage.input_tokens, + "output_tokens": event.response.usage.output_tokens, + } + } + ) + reason = "unknown reason" + if event.response.error is not None: + reason = event.response.error.message + raise HomeAssistantError(f"OpenAI response failed: {reason}") + elif isinstance(event, ResponseErrorEvent): + raise HomeAssistantError(f"OpenAI response error: {event.message}") class OpenAIConversationEntity( @@ -223,18 +250,6 @@ class OpenAIConversationEntity( conversation.async_unset_agent(self.hass, self.entry) await super().async_will_remove_from_hass() - async def async_process( - self, user_input: conversation.ConversationInput - ) -> conversation.ConversationResult: - """Process a sentence.""" - with ( - chat_session.async_get_chat_session( - self.hass, user_input.conversation_id - ) as session, - conversation.async_get_chat_log(self.hass, session, user_input) as chat_log, - ): - return await self._async_handle_message(user_input, chat_log) - async def _async_handle_message( self, user_input: 
conversation.ConversationInput, @@ -253,15 +268,38 @@ class OpenAIConversationEntity( except conversation.ConverseError as err: return err.as_conversation_result() - tools: list[ChatCompletionToolParam] | None = None + tools: list[ToolParam] | None = None if chat_log.llm_api: tools = [ _format_tool(tool, chat_log.llm_api.custom_serializer) for tool in chat_log.llm_api.tools ] + if options.get(CONF_WEB_SEARCH): + web_search = WebSearchToolParam( + type="web_search_preview", + search_context_size=options.get( + CONF_WEB_SEARCH_CONTEXT_SIZE, RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE + ), + ) + if options.get(CONF_WEB_SEARCH_USER_LOCATION): + web_search["user_location"] = UserLocation( + type="approximate", + city=options.get(CONF_WEB_SEARCH_CITY, ""), + region=options.get(CONF_WEB_SEARCH_REGION, ""), + country=options.get(CONF_WEB_SEARCH_COUNTRY, ""), + timezone=options.get(CONF_WEB_SEARCH_TIMEZONE, ""), + ) + if tools is None: + tools = [] + tools.append(web_search) + model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL) - messages = [_convert_content_to_param(content) for content in chat_log.content] + messages = [ + m + for content in chat_log.content + for m in _convert_content_to_param(content) + ] client = self.entry.runtime_data @@ -269,24 +307,28 @@ class OpenAIConversationEntity( for _iteration in range(MAX_TOOL_ITERATIONS): model_args = { "model": model, - "messages": messages, - "tools": tools or NOT_GIVEN, - "max_completion_tokens": options.get( + "input": messages, + "max_output_tokens": options.get( CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS ), "top_p": options.get(CONF_TOP_P, RECOMMENDED_TOP_P), "temperature": options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE), "user": chat_log.conversation_id, + "store": False, "stream": True, } + if tools: + model_args["tools"] = tools if model.startswith("o"): - model_args["reasoning_effort"] = options.get( - CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT - ) + model_args["reasoning"] = { + "effort": options.get( + CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT + ) + } try: - result = await client.chat.completions.create(**model_args) + result = await client.responses.create(**model_args) except openai.RateLimitError as err: LOGGER.error("Rate limited by OpenAI: %s", err) raise HomeAssistantError("Rate limited or insufficient funds") from err @@ -294,14 +336,10 @@ class OpenAIConversationEntity( LOGGER.error("Error talking to OpenAI: %s", err) raise HomeAssistantError("Error talking to OpenAI") from err - messages.extend( - [ - _convert_content_to_param(content) - async for content in chat_log.async_add_delta_content_stream( - user_input.agent_id, _transform_stream(result) - ) - ] - ) + async for content in chat_log.async_add_delta_content_stream( + user_input.agent_id, _transform_stream(chat_log, result) + ): + messages.extend(_convert_content_to_param(content)) if not chat_log.unresponded_tool_results: break @@ -310,7 +348,9 @@ class OpenAIConversationEntity( assert type(chat_log.content[-1]) is conversation.AssistantContent intent_response.async_set_speech(chat_log.content[-1].content or "") return conversation.ConversationResult( - response=intent_response, conversation_id=chat_log.conversation_id + response=intent_response, + conversation_id=chat_log.conversation_id, + continue_conversation=chat_log.continue_conversation, ) async def _async_entry_update_listener( diff --git a/homeassistant/components/openai_conversation/icons.json b/homeassistant/components/openai_conversation/icons.json index 3abecd640d1..f0ece31c304 100644 
--- a/homeassistant/components/openai_conversation/icons.json +++ b/homeassistant/components/openai_conversation/icons.json @@ -2,6 +2,9 @@ "services": { "generate_image": { "service": "mdi:image-sync" + }, + "generate_content": { + "service": "mdi:receipt-text" } } } diff --git a/homeassistant/components/openai_conversation/manifest.json b/homeassistant/components/openai_conversation/manifest.json index a7aa7884dc4..988dd2321d5 100644 --- a/homeassistant/components/openai_conversation/manifest.json +++ b/homeassistant/components/openai_conversation/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/openai_conversation", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["openai==1.61.0"] + "requirements": ["openai==1.68.2"] } diff --git a/homeassistant/components/openai_conversation/services.yaml b/homeassistant/components/openai_conversation/services.yaml index 3db71cae383..75fa097f25d 100644 --- a/homeassistant/components/openai_conversation/services.yaml +++ b/homeassistant/components/openai_conversation/services.yaml @@ -38,3 +38,23 @@ generate_image: options: - "vivid" - "natural" +generate_content: + fields: + config_entry: + required: true + selector: + config_entry: + integration: openai_conversation + prompt: + required: true + selector: + text: + multiline: true + example: "Hello, how can I help you?" + filenames: + selector: + text: + multiline: true + example: | + - /path/to/file1.txt + - /path/to/file2.txt diff --git a/homeassistant/components/openai_conversation/strings.json b/homeassistant/components/openai_conversation/strings.json index b8768f8abbe..a373ec448d7 100644 --- a/homeassistant/components/openai_conversation/strings.json +++ b/homeassistant/components/openai_conversation/strings.json @@ -24,16 +24,23 @@ "top_p": "Top P", "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", "recommended": "Recommended model settings", - "reasoning_effort": "Reasoning effort" + "reasoning_effort": "Reasoning effort", + "web_search": "Enable web search", + "search_context_size": "Search context size", + "user_location": "Include home location" }, "data_description": { "prompt": "Instruct how the LLM should respond. 
This can be a template.", - "reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt (for certain reasoning models)" + "reasoning_effort": "How many reasoning tokens the model should generate before creating a response to the prompt (for certain reasoning models)", + "web_search": "Allow the model to search the web for the latest information before generating a response", + "search_context_size": "High level guidance for the amount of context window space to use for the search", + "user_location": "Refine search results based on geography" } } }, "error": { - "model_not_supported": "This model is not supported, please select a different model" + "model_not_supported": "This model is not supported, please select a different model", + "web_search_not_supported": "Web search is only supported for gpt-4o and gpt-4o-mini models" } }, "selector": { @@ -43,15 +50,22 @@ "medium": "Medium", "high": "High" } + }, + "search_context_size": { + "options": { + "low": "Low", + "medium": "Medium", + "high": "High" + } } }, "services": { "generate_image": { "name": "Generate image", - "description": "Turn a prompt into an image", + "description": "Turns a prompt into an image", "fields": { "config_entry": { - "name": "Config Entry", + "name": "Config entry", "description": "The config entry to use for this action" }, "prompt": { @@ -72,6 +86,24 @@ "description": "The style of the generated image" } } + }, + "generate_content": { + "name": "Generate content", + "description": "Sends a conversational query to ChatGPT including any attached image files", + "fields": { + "config_entry": { + "name": "Config entry", + "description": "The config entry to use for this action" + }, + "prompt": { + "name": "Prompt", + "description": "The prompt to send" + }, + "filenames": { + "name": "Files", + "description": "List of files to upload" + } + } } }, "exceptions": { diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index 8c92c70ab49..87da159872d 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -9,8 +9,7 @@ import pyotgw.vars as gw_vars from serial import SerialException import voluptuous as vol -from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_DATE, ATTR_ID, @@ -21,21 +20,12 @@ from homeassistant.const import ( CONF_ID, CONF_NAME, EVENT_HOMEASSISTANT_STOP, - PRECISION_HALVES, - PRECISION_TENTHS, - PRECISION_WHOLE, Platform, ) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - device_registry as dr, - entity_registry as er, - issue_registry as ir, -) +from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.typing import ConfigType from .const import ( ATTR_CH_OVRD, @@ -44,9 +34,6 @@ from .const import ( ATTR_LEVEL, ATTR_TRANSP_ARG, ATTR_TRANSP_CMD, - CONF_CLIMATE, - CONF_FLOOR_TEMP, - CONF_PRECISION, CONF_TEMPORARY_OVRD_MODE, CONNECTION_TIMEOUT, DATA_GATEWAYS, @@ -70,29 +57,6 @@ from .const import ( _LOGGER = logging.getLogger(__name__) -# *_SCHEMA required for deprecated import from configuration.yaml, 
can be removed in 2025.4.0 -CLIMATE_SCHEMA = vol.Schema( - { - vol.Optional(CONF_PRECISION): vol.In( - [PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE] - ), - vol.Optional(CONF_FLOOR_TEMP, default=False): cv.boolean, - } -) - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: cv.schema_with_slug_keys( - { - vol.Required(CONF_DEVICE): cv.string, - vol.Optional(CONF_CLIMATE, default={}): CLIMATE_SCHEMA, - vol.Optional(CONF_NAME): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, @@ -118,35 +82,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b gateway = OpenThermGatewayHub(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway - # Migration can be removed in 2025.4.0 - dev_reg = dr.async_get(hass) - if ( - migrate_device := dev_reg.async_get_device( - {(DOMAIN, config_entry.data[CONF_ID])} - ) - ) is not None: - dev_reg.async_update_device( - migrate_device.id, - new_identifiers={ - ( - DOMAIN, - f"{config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}", - ) - }, - ) - - # Migration can be removed in 2025.4.0 - ent_reg = er.async_get(hass) - if ( - entity_id := ent_reg.async_get_entity_id( - CLIMATE_DOMAIN, DOMAIN, config_entry.data[CONF_ID] - ) - ) is not None: - ent_reg.async_update_entity( - entity_id, - new_unique_id=f"{config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity", - ) - config_entry.add_update_listener(options_updated) try: @@ -164,33 +99,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b return True -# Deprecated import from configuration.yaml, can be removed in 2025.4.0 -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the OpenTherm Gateway component.""" - if DOMAIN in config: - ir.async_create_issue( - hass, - DOMAIN, - "deprecated_import_from_configuration_yaml", - breaks_in_ha_version="2025.4.0", - is_fixable=False, - is_persistent=False, - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_import_from_configuration_yaml", - ) - if not hass.config_entries.async_entries(DOMAIN) and DOMAIN in config: - conf = config[DOMAIN] - for device_id, device_config in conf.items(): - device_config[CONF_ID] = device_id - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=device_config - ) - ) - return True - - def register_services(hass: HomeAssistant) -> None: """Register services for the component.""" service_reset_schema = vol.Schema( diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index bcbf279f3f7..a100dcb730f 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -95,19 +95,6 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): """Handle manual initiation of the config flow.""" return await self.async_step_init(user_input) - # Deprecated import from configuration.yaml, can be removed in 2025.4.0 - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import an OpenTherm Gateway device as a config entry. - - This flow is triggered by `async_setup` for configured devices. 
- """ - formatted_config = { - CONF_NAME: import_data.get(CONF_NAME, import_data[CONF_ID]), - CONF_DEVICE: import_data[CONF_DEVICE], - CONF_ID: import_data[CONF_ID], - } - return await self.async_step_init(info=formatted_config) - def _show_form(self, errors: dict[str, str] | None = None) -> ConfigFlowResult: """Show the config flow form with possible errors.""" return self.async_show_form( diff --git a/homeassistant/components/opentherm_gw/strings.json b/homeassistant/components/opentherm_gw/strings.json index b49dea4a267..cc57a7d9e0c 100644 --- a/homeassistant/components/opentherm_gw/strings.json +++ b/homeassistant/components/opentherm_gw/strings.json @@ -354,12 +354,6 @@ } } }, - "issues": { - "deprecated_import_from_configuration_yaml": { - "title": "Deprecated configuration", - "description": "Configuration of the OpenTherm Gateway integration through configuration.yaml is deprecated. Your configuration has been migrated to config entries. Please remove any OpenTherm Gateway configuration from your configuration.yaml." - } - }, "options": { "step": { "init": { diff --git a/homeassistant/components/openweathermap/__init__.py b/homeassistant/components/openweathermap/__init__.py index fa51b91dc6d..40ddf0ff37e 100644 --- a/homeassistant/components/openweathermap/__init__.py +++ b/homeassistant/components/openweathermap/__init__.py @@ -11,7 +11,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_LANGUAGE, CONF_MODE, CONF_NAME from homeassistant.core import HomeAssistant -from .const import CONFIG_FLOW_VERSION, OWM_MODE_V25, PLATFORMS +from .const import CONFIG_FLOW_VERSION, DEFAULT_OWM_MODE, OWM_MODES, PLATFORMS from .coordinator import WeatherUpdateCoordinator from .repairs import async_create_issue, async_delete_issue from .utils import build_data_and_options @@ -39,7 +39,7 @@ async def async_setup_entry( language = entry.options[CONF_LANGUAGE] mode = entry.options[CONF_MODE] - if mode == OWM_MODE_V25: + if mode not in OWM_MODES: async_create_issue(hass, entry.entry_id) else: async_delete_issue(hass, entry.entry_id) @@ -70,7 +70,7 @@ async def async_migrate_entry( _LOGGER.debug("Migrating OpenWeatherMap entry from version %s", version) if version < 5: - combined_data = {**data, **options, CONF_MODE: OWM_MODE_V25} + combined_data = {**data, **options, CONF_MODE: DEFAULT_OWM_MODE} new_data, new_options = build_data_and_options(combined_data) config_entries.async_update_entry( entry, diff --git a/homeassistant/components/openweathermap/const.py b/homeassistant/components/openweathermap/const.py index de317709f5b..fbd2cb1aee2 100644 --- a/homeassistant/components/openweathermap/const.py +++ b/homeassistant/components/openweathermap/const.py @@ -62,10 +62,8 @@ FORECAST_MODE_ONECALL_DAILY = "onecall_daily" OWM_MODE_FREE_CURRENT = "current" OWM_MODE_FREE_FORECAST = "forecast" OWM_MODE_V30 = "v3.0" -OWM_MODE_V25 = "v2.5" OWM_MODES = [ OWM_MODE_V30, - OWM_MODE_V25, OWM_MODE_FREE_CURRENT, OWM_MODE_FREE_FORECAST, ] diff --git a/homeassistant/components/openweathermap/manifest.json b/homeassistant/components/openweathermap/manifest.json index 14313a5a77e..88510aaae8c 100644 --- a/homeassistant/components/openweathermap/manifest.json +++ b/homeassistant/components/openweathermap/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/openweathermap", "iot_class": "cloud_polling", "loggers": ["pyopenweathermap"], - "requirements": ["pyopenweathermap==0.2.1"] + "requirements": ["pyopenweathermap==0.2.2"] } 
diff --git a/homeassistant/components/openweathermap/sensor.py b/homeassistant/components/openweathermap/sensor.py index 0afab69b638..a595652d90b 100644 --- a/homeassistant/components/openweathermap/sensor.py +++ b/homeassistant/components/openweathermap/sensor.py @@ -89,7 +89,8 @@ WEATHER_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( key=ATTR_API_WIND_BEARING, name="Wind bearing", native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), SensorEntityDescription( key=ATTR_API_HUMIDITY, diff --git a/homeassistant/components/openweathermap/weather.py b/homeassistant/components/openweathermap/weather.py index a6ad163e1c8..12d883c871a 100644 --- a/homeassistant/components/openweathermap/weather.py +++ b/homeassistant/components/openweathermap/weather.py @@ -42,7 +42,6 @@ from .const import ( DOMAIN, MANUFACTURER, OWM_MODE_FREE_FORECAST, - OWM_MODE_V25, OWM_MODE_V30, ) from .coordinator import WeatherUpdateCoordinator @@ -106,7 +105,7 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina ) self.mode = mode - if mode in (OWM_MODE_V30, OWM_MODE_V25): + if mode == OWM_MODE_V30: self._attr_supported_features = ( WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index aed89ccf46e..e8b6dbf9718 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -16,7 +16,11 @@ from opower import ( from opower.exceptions import ApiException, CannotConnect, InvalidAuth from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -201,7 +205,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): f"{account.meter_type.name.lower()} {account.utility_account_id}" ) cost_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} cost", source=DOMAIN, @@ -209,7 +213,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): unit_of_measurement=None, ) consumption_metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{name_prefix} consumption", source=DOMAIN, diff --git a/homeassistant/components/opower/strings.json b/homeassistant/components/opower/strings.json index 362e6cd7596..749545743fe 100644 --- a/homeassistant/components/opower/strings.json +++ b/homeassistant/components/opower/strings.json @@ -11,7 +11,7 @@ "mfa": { "description": "The TOTP secret below is not one of the 6 digit time-based numeric codes. It is a string of around 16 characters containing the shared secret that enables your authenticator app to generate the correct time-based code at the appropriate time. 
See the documentation.", "data": { - "totp_secret": "TOTP Secret" + "totp_secret": "TOTP secret" } }, "reauth_confirm": { @@ -19,7 +19,7 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "totp_secret": "TOTP Secret" + "totp_secret": "[%key:component::opower::config::step::mfa::data::totp_secret%]" } } }, diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index c25accd87f3..937b4ccb937 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -13,7 +13,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.16.0"], + "requirements": ["pyoverkiz==1.16.5"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/homeassistant/components/overkiz/sensor.py b/homeassistant/components/overkiz/sensor.py index 9214398a37b..cec0d0d2571 100644 --- a/homeassistant/components/overkiz/sensor.py +++ b/homeassistant/components/overkiz/sensor.py @@ -70,6 +70,15 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [ options=["full", "normal", "medium", "low", "verylow"], translation_key="battery", ), + OverkizSensorDescription( + key=OverkizState.CORE_BATTERY_DISCRETE_LEVEL, + name="Battery", + entity_category=EntityCategory.DIAGNOSTIC, + icon="mdi:battery", + device_class=SensorDeviceClass.ENUM, + options=["good", "medium", "low", "critical"], + translation_key="battery", + ), OverkizSensorDescription( key=OverkizState.CORE_RSSI_LEVEL, name="RSSI level", diff --git a/homeassistant/components/overkiz/strings.json b/homeassistant/components/overkiz/strings.json index 0c564a003d6..05b5eac4b21 100644 --- a/homeassistant/components/overkiz/strings.json +++ b/homeassistant/components/overkiz/strings.json @@ -123,7 +123,9 @@ "low": "Low", "normal": "Normal", "medium": "Medium", - "verylow": "Very low" + "verylow": "Very low", + "good": "Good", + "critical": "Critical" } }, "discrete_rssi_level": { diff --git a/homeassistant/components/overkiz/water_heater/__init__.py b/homeassistant/components/overkiz/water_heater/__init__.py index 9895ea84c2c..2960cefe10c 100644 --- a/homeassistant/components/overkiz/water_heater/__init__.py +++ b/homeassistant/components/overkiz/water_heater/__init__.py @@ -13,6 +13,9 @@ from ..entity import OverkizEntity from .atlantic_domestic_hot_water_production_mlb_component import ( AtlanticDomesticHotWaterProductionMBLComponent, ) +from .atlantic_domestic_hot_water_production_v2_io_component import ( + AtlanticDomesticHotWaterProductionV2IOComponent, +) from .atlantic_pass_apc_dhw import AtlanticPassAPCDHW from .domestic_hot_water_production import DomesticHotWaterProduction from .hitachi_dhw import HitachiDHW @@ -52,4 +55,5 @@ WIDGET_TO_WATER_HEATER_ENTITY = { CONTROLLABLE_NAME_TO_WATER_HEATER_ENTITY = { "modbuslink:AtlanticDomesticHotWaterProductionMBLComponent": AtlanticDomesticHotWaterProductionMBLComponent, + "io:AtlanticDomesticHotWaterProductionV2_CV4E_IOComponent": AtlanticDomesticHotWaterProductionV2IOComponent, } diff --git a/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py new file mode 100644 index 00000000000..7e7db07f847 --- /dev/null +++ 
b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_v2_io_component.py @@ -0,0 +1,332 @@ +"""Support for AtlanticDomesticHotWaterProductionV2IOComponent.""" + +from typing import Any, cast + +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState + +from homeassistant.components.water_heater import ( + STATE_ECO, + STATE_ELECTRIC, + STATE_HEAT_PUMP, + STATE_PERFORMANCE, + WaterHeaterEntity, + WaterHeaterEntityFeature, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature + +from ..entity import OverkizEntity + +DEFAULT_MIN_TEMP: float = 50.0 +DEFAULT_MAX_TEMP: float = 62.0 +MAX_BOOST_MODE_DURATION: int = 7 + +DHWP_AWAY_MODES = [ + OverkizCommandParam.ABSENCE, + OverkizCommandParam.AWAY, + OverkizCommandParam.FROSTPROTECTION, +] + + +class AtlanticDomesticHotWaterProductionV2IOComponent(OverkizEntity, WaterHeaterEntity): + """Representation of AtlanticDomesticHotWaterProductionV2IOComponent (io).""" + + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_supported_features = ( + WaterHeaterEntityFeature.TARGET_TEMPERATURE + | WaterHeaterEntityFeature.OPERATION_MODE + | WaterHeaterEntityFeature.AWAY_MODE + | WaterHeaterEntityFeature.ON_OFF + ) + _attr_operation_list = [ + STATE_ECO, + STATE_PERFORMANCE, + STATE_HEAT_PUMP, + STATE_ELECTRIC, + ] + + @property + def min_temp(self) -> float: + """Return the minimum temperature.""" + + min_temp = self.device.states[OverkizState.CORE_MINIMAL_TEMPERATURE_MANUAL_MODE] + if min_temp: + return cast(float, min_temp.value_as_float) + return DEFAULT_MIN_TEMP + + @property + def max_temp(self) -> float: + """Return the maximum temperature.""" + + max_temp = self.device.states[OverkizState.CORE_MAXIMAL_TEMPERATURE_MANUAL_MODE] + if max_temp: + return cast(float, max_temp.value_as_float) + return DEFAULT_MAX_TEMP + + @property + def current_temperature(self) -> float: + """Return the current temperature.""" + + return cast( + float, + self.executor.select_state( + OverkizState.IO_MIDDLE_WATER_TEMPERATURE, + ), + ) + + @property + def target_temperature(self) -> float: + """Return the temperature corresponding to the PRESET.""" + + return cast( + float, + self.executor.select_state(OverkizState.CORE_TARGET_TEMPERATURE), + ) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new temperature.""" + + temperature = kwargs.get(ATTR_TEMPERATURE) + await self.executor.async_execute_command( + OverkizCommand.SET_TARGET_TEMPERATURE, temperature, refresh_afterwards=False + ) + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + await self.coordinator.async_refresh() + + @property + def is_state_eco(self) -> bool: + """Return true if eco mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.MANUAL_ECO_ACTIVE + ) + + @property + def is_state_performance(self) -> bool: + """Return true if performance mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.AUTO_MODE + ) + + @property + def is_state_heat_pump(self) -> bool: + """Return true if heat pump mode is on.""" + + return ( + self.executor.select_state(OverkizState.IO_DHW_MODE) + == OverkizCommandParam.MANUAL_ECO_INACTIVE + ) + + @property + def is_away_mode_on(self) -> bool: + """Return true if away mode is on.""" + + away_mode_duration = cast( + str, self.executor.select_state(OverkizState.IO_AWAY_MODE_DURATION) + ) + # away_mode_duration 
can be either a Literal["always"] + if away_mode_duration == OverkizCommandParam.ALWAYS: + return True + + # Or an int of 0 to 7 days. But it still is a string. + if away_mode_duration.isdecimal() and int(away_mode_duration) > 0: + return True + + return False + + @property + def current_operation(self) -> str | None: + """Return current operation.""" + + # The Away Mode leaves the current operation unchanged + if self.is_boost_mode_on: + return STATE_ELECTRIC + + if self.is_state_eco: + return STATE_ECO + + if self.is_state_performance: + return STATE_PERFORMANCE + + if self.is_state_heat_pump: + return STATE_HEAT_PUMP + + return None + + @property + def is_boost_mode_on(self) -> bool: + """Return true if boost mode is on.""" + + return ( + cast( + int, + self.executor.select_state(OverkizState.CORE_BOOST_MODE_DURATION), + ) + > 0 + ) + + async def async_set_operation_mode(self, operation_mode: str) -> None: + """Set new operation mode.""" + + if operation_mode == STATE_ECO: + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.MANUAL_ECO_ACTIVE, + refresh_afterwards=False, + ) + # ECO changes the target temperature so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + await self.coordinator.async_refresh() + + elif operation_mode == STATE_PERFORMANCE: + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.AUTO_MODE, + refresh_afterwards=False, + ) + + await self.coordinator.async_refresh() + + elif operation_mode == STATE_HEAT_PUMP: + refresh_target_temp = False + if self.is_state_performance: + # Switching from STATE_PERFORMANCE to STATE_HEAT_PUMP + # changes the target temperature and requires a target temperature refresh + refresh_target_temp = True + + if self.is_boost_mode_on: + await self.async_turn_boost_mode_off(refresh_afterwards=False) + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + + await self.executor.async_execute_command( + OverkizCommand.SET_DHW_MODE, + OverkizCommandParam.MANUAL_ECO_INACTIVE, + refresh_afterwards=False, + ) + + if refresh_target_temp: + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, + refresh_afterwards=False, + ) + + await self.coordinator.async_refresh() + + elif operation_mode == STATE_ELECTRIC: + if self.is_away_mode_on: + await self.async_turn_away_mode_off(refresh_afterwards=False) + if not self.is_boost_mode_on: + await self.async_turn_boost_mode_on(refresh_afterwards=False) + await self.coordinator.async_refresh() + + async def async_turn_away_mode_on(self, refresh_afterwards: bool = True) -> None: + """Turn away mode on.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.ON, + }, + refresh_afterwards=False, + ) + # Toggling the AWAY mode changes away mode duration so we have to refresh it + await self.executor.async_execute_command( + 
OverkizCommand.REFRESH_AWAY_MODE_DURATION, + refresh_afterwards=False, + ) + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_away_mode_off(self, refresh_afterwards: bool = True) -> None: + """Turn away mode off.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + # Toggling the AWAY mode changes away mode duration so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_AWAY_MODE_DURATION, + refresh_afterwards=False, + ) + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_boost_mode_on(self, refresh_afterwards: bool = True) -> None: + """Turn boost mode on.""" + + refresh_target_temp = False + if self.is_state_performance: + # Switching from STATE_PERFORMANCE to BOOST requires a target temperature refresh + refresh_target_temp = True + + await self.executor.async_execute_command( + OverkizCommand.SET_BOOST_MODE_DURATION, + MAX_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.ON, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + + await self.executor.async_execute_command( + OverkizCommand.REFRESH_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + if refresh_target_temp: + await self.executor.async_execute_command( + OverkizCommand.REFRESH_TARGET_TEMPERATURE, refresh_afterwards=False + ) + + if refresh_afterwards: + await self.coordinator.async_refresh() + + async def async_turn_boost_mode_off(self, refresh_afterwards: bool = True) -> None: + """Turn boost mode off.""" + + await self.executor.async_execute_command( + OverkizCommand.SET_CURRENT_OPERATING_MODE, + { + OverkizCommandParam.RELAUNCH: OverkizCommandParam.OFF, + OverkizCommandParam.ABSENCE: OverkizCommandParam.OFF, + }, + refresh_afterwards=False, + ) + # Toggling the BOOST mode changes boost mode duration so we have to refresh it + await self.executor.async_execute_command( + OverkizCommand.REFRESH_BOOST_MODE_DURATION, + refresh_afterwards=False, + ) + + if refresh_afterwards: + await self.coordinator.async_refresh() diff --git a/homeassistant/components/overseerr/strings.json b/homeassistant/components/overseerr/strings.json index 14650fd5c25..ce8b9fe9fec 100644 --- a/homeassistant/components/overseerr/strings.json +++ b/homeassistant/components/overseerr/strings.json @@ -90,7 +90,7 @@ "services": { "get_requests": { "name": "Get requests", - "description": "Get media requests from Overseerr.", + "description": "Retrieves a list of media requests from Overseerr.", "fields": { "config_entry_id": { "name": "Overseerr instance", @@ -106,7 +106,7 @@ }, "requested_by": { "name": "Requested by", - "description": "Filter the requests by the user id that requested them." + "description": "Filter the requests by the user ID that requested them." 
} } } diff --git a/homeassistant/components/person/manifest.json b/homeassistant/components/person/manifest.json index 7f370be6fbe..0c1792e9277 100644 --- a/homeassistant/components/person/manifest.json +++ b/homeassistant/components/person/manifest.json @@ -1,7 +1,6 @@ { "domain": "person", "name": "Person", - "after_dependencies": ["device_tracker"], "codeowners": [], "dependencies": ["image_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/person", diff --git a/homeassistant/components/pglab/__init__.py b/homeassistant/components/pglab/__init__.py index 7307ac2f801..8bce7be26e8 100644 --- a/homeassistant/components/pglab/__init__.py +++ b/homeassistant/components/pglab/__init__.py @@ -23,12 +23,14 @@ from homeassistant.helpers import config_validation as cv from .const import DOMAIN, LOGGER from .discovery import PGLabDiscovery -type PGLABConfigEntry = ConfigEntry[PGLabDiscovery] +type PGLabConfigEntry = ConfigEntry[PGLabDiscovery] CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) -async def async_setup_entry(hass: HomeAssistant, entry: PGLABConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: PGLabConfigEntry +) -> bool: """Set up PG LAB Electronics integration from a config entry.""" async def mqtt_publish(topic: str, payload: str, qos: int, retain: bool) -> None: @@ -67,19 +69,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: PGLABConfigEntry) -> boo pglab_mqtt = PyPGLabMqttClient(mqtt_publish, mqtt_subscribe, mqtt_unsubscribe) # Setup PGLab device discovery. - entry.runtime_data = PGLabDiscovery() + config_entry.runtime_data = PGLabDiscovery() # Start to discovery PG Lab devices. - await entry.runtime_data.start(hass, pglab_mqtt, entry) + await config_entry.runtime_data.start(hass, pglab_mqtt, config_entry) return True -async def async_unload_entry(hass: HomeAssistant, entry: PGLABConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: PGLabConfigEntry +) -> bool: """Unload a config entry.""" # Stop PGLab device discovery. - pglab_discovery = entry.runtime_data - await pglab_discovery.stop(hass, entry) + pglab_discovery = config_entry.runtime_data + await pglab_discovery.stop(hass, config_entry) return True diff --git a/homeassistant/components/pglab/coordinator.py b/homeassistant/components/pglab/coordinator.py new file mode 100644 index 00000000000..53c5dbc3b58 --- /dev/null +++ b/homeassistant/components/pglab/coordinator.py @@ -0,0 +1,78 @@ +"""Coordinator for PG LAB Electronics.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import TYPE_CHECKING, Any + +from pypglab.const import SENSOR_REBOOT_TIME, SENSOR_TEMPERATURE, SENSOR_VOLTAGE +from pypglab.device import Device as PyPGLabDevice +from pypglab.sensor import Sensor as PyPGLabSensors + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.util.dt import utcnow + +from .const import DOMAIN, LOGGER + +if TYPE_CHECKING: + from . 
import PGLabConfigEntry + + +class PGLabSensorsCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Class to update Sensor Entities when receiving new data.""" + + def __init__( + self, + hass: HomeAssistant, + config_entry: PGLabConfigEntry, + pglab_device: PyPGLabDevice, + ) -> None: + """Initialize.""" + + # get a reference to the PG Lab device internal sensors state + self._sensors: PyPGLabSensors = pglab_device.sensors + + super().__init__( + hass, + LOGGER, + config_entry=config_entry, + name=DOMAIN, + ) + + @callback + def _new_sensors_data(self, payload: str) -> None: + """Handle new sensor data.""" + + # notify all listeners that new sensor values are available + self.async_set_updated_data(self._sensors.state) + + async def subscribe_topics(self) -> None: + """Subscribe the sensors state to be notified of MQTT update messages.""" + + # subscribe to the pypglab sensors to receive updates from the mqtt broker + # when new sensor values are available + await self._sensors.subscribe_topics() + + # set the callback to be called when new sensor values are available + self._sensors.set_on_state_callback(self._new_sensors_data) + + def get_sensor_value(self, sensor_key: str) -> float | datetime | None: + """Return the value of a sensor.""" + + if self.data: + value = self.data[sensor_key] + + if (sensor_key == SENSOR_REBOOT_TIME) and value: + # convert the reboot time to a datetime object + return utcnow() - timedelta(seconds=value) + + if (sensor_key == SENSOR_TEMPERATURE) and value: + # convert the temperature value to a float + return float(value) + + if (sensor_key == SENSOR_VOLTAGE) and value: + # convert the voltage value to a float + return float(value) + + return None diff --git a/homeassistant/components/pglab/cover.py b/homeassistant/components/pglab/cover.py new file mode 100644 index 00000000000..8385fd95ffa --- /dev/null +++ b/homeassistant/components/pglab/cover.py @@ -0,0 +1,107 @@ +"""PG LAB Electronics Cover.""" + +from __future__ import annotations + +from typing import Any + +from pypglab.device import Device as PyPGLabDevice +from pypglab.shutter import Shutter as PyPGLabShutter + +from homeassistant.components.cover import ( + CoverDeviceClass, + CoverEntity, + CoverEntityFeature, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .discovery import PGLabDiscovery +from .entity import PGLabEntity + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up covers for device.""" + + @callback + def async_discover( + pglab_device: PyPGLabDevice, pglab_shutter: PyPGLabShutter + ) -> None: + """Discover and add a PG LAB Cover.""" + pglab_discovery = config_entry.runtime_data + pglab_cover = PGLabCover(pglab_discovery, pglab_device, pglab_shutter) + async_add_entities([pglab_cover]) + + # Register the callback to create the cover entity when discovered. 
+ pglab_discovery = config_entry.runtime_data + await pglab_discovery.register_platform(hass, Platform.COVER, async_discover) + + +class PGLabCover(PGLabEntity, CoverEntity): + """A PGLab Cover.""" + + _attr_translation_key = "shutter" + + def __init__( + self, + pglab_discovery: PGLabDiscovery, + pglab_device: PyPGLabDevice, + pglab_shutter: PyPGLabShutter, + ) -> None: + """Initialize the Cover class.""" + + super().__init__( + pglab_discovery, + pglab_device, + pglab_shutter, + ) + + self._attr_unique_id = f"{pglab_device.id}_shutter{pglab_shutter.id}" + self._attr_translation_placeholders = {"shutter_id": pglab_shutter.id} + + self._shutter = pglab_shutter + + self._attr_device_class = CoverDeviceClass.SHUTTER + self._attr_supported_features = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE | CoverEntityFeature.STOP + ) + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the cover.""" + await self._shutter.open() + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close cover.""" + await self._shutter.close() + + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self._shutter.stop() + + @property + def is_closed(self) -> bool | None: + """Return if cover is closed.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_CLOSED + + @property + def is_closing(self) -> bool | None: + """Return if the cover is closing.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_CLOSING + + @property + def is_opening(self) -> bool | None: + """Return if the cover is opening.""" + if not self._shutter.state: + return None + return self._shutter.state == PyPGLabShutter.STATE_OPENING diff --git a/homeassistant/components/pglab/discovery.py b/homeassistant/components/pglab/discovery.py index af6bedc9bf4..c1d8653c17b 100644 --- a/homeassistant/components/pglab/discovery.py +++ b/homeassistant/components/pglab/discovery.py @@ -25,20 +25,24 @@ from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) -from homeassistant.helpers.entity import Entity from .const import DISCOVERY_TOPIC, DOMAIN, LOGGER +from .coordinator import PGLabSensorsCoordinator if TYPE_CHECKING: - from . import PGLABConfigEntry + from . import PGLabConfigEntry # Supported platforms. PLATFORMS = [ + Platform.COVER, + Platform.SENSOR, Platform.SWITCH, ] # Used to create a new component entity. CREATE_NEW_ENTITY = { + Platform.COVER: "pglab_create_new_entity_cover", + Platform.SENSOR: "pglab_create_new_entity_sensor", Platform.SWITCH: "pglab_create_new_entity_switch", } @@ -66,7 +70,12 @@ def get_device_id_from_discovery_topic(topic: str) -> str | None: class DiscoverDeviceInfo: """Keeps information of the PGLab discovered device.""" - def __init__(self, pglab_device: PyPGLabDevice) -> None: + def __init__( + self, + hass: HomeAssistant, + config_entry: PGLabConfigEntry, + pglab_device: PyPGLabDevice, + ) -> None: """Initialize the device discovery info.""" # Hash string represents the devices actual configuration, @@ -74,14 +83,15 @@ class DiscoverDeviceInfo: # When the hash string changes the devices entities must be rebuilt. 
self._hash = pglab_device.hash self._entities: list[tuple[str, str]] = [] + self.coordinator = PGLabSensorsCoordinator(hass, config_entry, pglab_device) - def add_entity(self, entity: Entity) -> None: + def add_entity(self, platform_domain: str, entity_unique_id: str | None) -> None: """Add an entity.""" # PGLabEntity always have unique IDs if TYPE_CHECKING: - assert entity.unique_id is not None - self._entities.append((entity.platform.domain, entity.unique_id)) + assert entity_unique_id is not None + self._entities.append((platform_domain, entity_unique_id)) @property def hash(self) -> int: @@ -94,6 +104,17 @@ class DiscoverDeviceInfo: return self._entities +async def create_discover_device_info( + hass: HomeAssistant, config_entry: PGLabConfigEntry, pglab_device: PyPGLabDevice +) -> DiscoverDeviceInfo: + """Create a new DiscoverDeviceInfo instance.""" + discovery_info = DiscoverDeviceInfo(hass, config_entry, pglab_device) + + # Subscribe to sensor state changes. + await discovery_info.coordinator.subscribe_topics() + return discovery_info + + @dataclass class PGLabDiscovery: """Discovery a PGLab device with the following MQTT topic format pglab/discovery/[device]/config.""" @@ -166,7 +187,10 @@ class PGLabDiscovery: del self._discovered[device_id] async def start( - self, hass: HomeAssistant, mqtt: PyPGLabMqttClient, entry: PGLABConfigEntry + self, + hass: HomeAssistant, + mqtt: PyPGLabMqttClient, + config_entry: PGLabConfigEntry, ) -> None: """Start discovering a PGLab devices.""" @@ -192,7 +216,7 @@ class PGLabDiscovery: # Create a new device. device_registry = dr.async_get(hass) device_registry.async_get_or_create( - config_entry_id=entry.entry_id, + config_entry_id=config_entry.entry_id, configuration_url=f"http://{pglab_device.ip}/", connections={(CONNECTION_NETWORK_MAC, pglab_device.mac)}, identifiers={(DOMAIN, pglab_device.id)}, @@ -223,9 +247,18 @@ class PGLabDiscovery: self.__clean_discovered_device(hass, pglab_device.id) # Add a new device. - discovery_info = DiscoverDeviceInfo(pglab_device) + discovery_info = await create_discover_device_info( + hass, config_entry, pglab_device + ) self._discovered[pglab_device.id] = discovery_info + # Create all new cover entities. + for s in pglab_device.shutters: + # the HA entity is not yet created, send a message to create it + async_dispatcher_send( + hass, CREATE_NEW_ENTITY[Platform.COVER], pglab_device, s + ) + # Create all new relay entities. for r in pglab_device.relays: # The HA entity is not yet created, send a message to create it. @@ -233,6 +266,14 @@ class PGLabDiscovery: hass, CREATE_NEW_ENTITY[Platform.SWITCH], pglab_device, r ) + # Create all new sensor entities. + async_dispatcher_send( + hass, + CREATE_NEW_ENTITY[Platform.SENSOR], + pglab_device, + discovery_info.coordinator, + ) + topics = { "discovery_topic": { "topic": f"{self._discovery_topic}/#", @@ -241,7 +282,7 @@ class PGLabDiscovery: } # Forward setup all HA supported platforms. 
- await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) self._mqtt_client = mqtt self._substate = async_prepare_subscribe_topics(hass, self._substate, topics) @@ -256,9 +297,9 @@ class PGLabDiscovery: ) self._disconnect_platform.append(disconnect_callback) - async def stop(self, hass: HomeAssistant, entry: PGLABConfigEntry) -> None: + async def stop(self, hass: HomeAssistant, config_entry: PGLabConfigEntry) -> None: """Stop to discovery PG LAB devices.""" - await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) # Disconnect all registered platforms. for disconnect_callback in self._disconnect_platform: @@ -266,7 +307,9 @@ class PGLabDiscovery: async_unsubscribe_topics(hass, self._substate) - async def add_entity(self, entity: Entity, device_id: str): + async def add_entity( + self, platform_domain: str, entity_unique_id: str | None, device_id: str + ): """Save a new PG LAB device entity.""" # Be sure that the device is been discovered. @@ -274,4 +317,4 @@ class PGLabDiscovery: raise PGLabDiscoveryError("Unknown device, device_id not discovered") discovery_info = self._discovered[device_id] - discovery_info.add_entity(entity) + discovery_info.add_entity(platform_domain, entity_unique_id) diff --git a/homeassistant/components/pglab/entity.py b/homeassistant/components/pglab/entity.py index 1b8975a3bbe..59a4e28de89 100644 --- a/homeassistant/components/pglab/entity.py +++ b/homeassistant/components/pglab/entity.py @@ -8,59 +8,86 @@ from pypglab.entity import Entity as PyPGLabEntity from homeassistant.core import callback from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN +from .coordinator import PGLabSensorsCoordinator from .discovery import PGLabDiscovery -class PGLabEntity(Entity): - """Representation of a PGLab entity in Home Assistant.""" +class PGLabBaseEntity(Entity): + """Base class of a PGLab entity in Home Assistant.""" _attr_has_entity_name = True def __init__( self, - discovery: PGLabDiscovery, - device: PyPGLabDevice, - entity: PyPGLabEntity, + pglab_discovery: PGLabDiscovery, + pglab_device: PyPGLabDevice, ) -> None: """Initialize the class.""" - self._id = entity.id - self._device_id = device.id - self._entity = entity - self._discovery = discovery + self._device_id = pglab_device.id + self._discovery = pglab_discovery # Information about the device that is partially visible in the UI. 
self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, device.id)}, - name=device.name, - sw_version=device.firmware_version, - hw_version=device.hardware_version, - model=device.type, - manufacturer=device.manufactor, - configuration_url=f"http://{device.ip}/", - connections={(CONNECTION_NETWORK_MAC, device.mac)}, + identifiers={(DOMAIN, pglab_device.id)}, + name=pglab_device.name, + sw_version=pglab_device.firmware_version, + hw_version=pglab_device.hardware_version, + model=pglab_device.type, + manufacturer=pglab_device.manufactor, + configuration_url=f"http://{pglab_device.ip}/", + connections={(CONNECTION_NETWORK_MAC, pglab_device.mac)}, ) async def async_added_to_hass(self) -> None: """Update the device discovery info.""" - self._entity.set_on_state_callback(self.state_updated) - await self._entity.subscribe_topics() - - await super().async_added_to_hass() - # Inform PGLab discovery instance that a new entity is available. # This is important to know in case the device needs to be reconfigured # and the entity can be potentially destroyed. - await self._discovery.add_entity(self, self._device_id) + await self._discovery.add_entity( + self.platform.domain, + self.unique_id, + self._device_id, + ) + + # propagate the async_added_to_hass to the super class + await super().async_added_to_hass() + + +class PGLabEntity(PGLabBaseEntity): + """Representation of a PGLab entity in Home Assistant.""" + + def __init__( + self, + pglab_discovery: PGLabDiscovery, + pglab_device: PyPGLabDevice, + pglab_entity: PyPGLabEntity, + ) -> None: + """Initialize the class.""" + + super().__init__(pglab_discovery, pglab_device) + + self._id = pglab_entity.id + self._entity: PyPGLabEntity = pglab_entity + + async def async_added_to_hass(self) -> None: + """Subscribe pypglab entity to be updated from mqtt when pypglab entity internal state change.""" + + # set the callback to be called when pypglab entity state is changed + self._entity.set_on_state_callback(self.state_updated) + + # subscribe to the pypglab entity to receive updates from the mqtt broker + await self._entity.subscribe_topics() + await super().async_added_to_hass() async def async_will_remove_from_hass(self) -> None: """Unsubscribe when removed.""" await super().async_will_remove_from_hass() - await self._entity.unsubscribe_topics() self._entity.set_on_state_callback(None) @@ -68,3 +95,18 @@ class PGLabEntity(Entity): def state_updated(self, payload: str) -> None: """Handle state updates.""" self.async_write_ha_state() + + +class PGLabSensorEntity(PGLabBaseEntity, CoordinatorEntity[PGLabSensorsCoordinator]): + """Representation of a PGLab sensor entity in Home Assistant.""" + + def __init__( + self, + pglab_discovery: PGLabDiscovery, + pglab_device: PyPGLabDevice, + pglab_coordinator: PGLabSensorsCoordinator, + ) -> None: + """Initialize the class.""" + + PGLabBaseEntity.__init__(self, pglab_discovery, pglab_device) + CoordinatorEntity.__init__(self, pglab_coordinator) diff --git a/homeassistant/components/pglab/sensor.py b/homeassistant/components/pglab/sensor.py new file mode 100644 index 00000000000..ce19ec3a21a --- /dev/null +++ b/homeassistant/components/pglab/sensor.py @@ -0,0 +1,109 @@ +"""Sensor for PG LAB Electronics.""" + +from __future__ import annotations + +from pypglab.const import SENSOR_REBOOT_TIME, SENSOR_TEMPERATURE, SENSOR_VOLTAGE +from pypglab.device import Device as PyPGLabDevice + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) 
+from homeassistant.const import Platform, UnitOfElectricPotential, UnitOfTemperature +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import PGLabConfigEntry +from .coordinator import PGLabSensorsCoordinator +from .discovery import PGLabDiscovery +from .entity import PGLabSensorEntity + +PARALLEL_UPDATES = 0 + +SENSOR_INFO: list[SensorEntityDescription] = [ + SensorEntityDescription( + key=SENSOR_TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + SensorEntityDescription( + key=SENSOR_VOLTAGE, + translation_key="mpu_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + SensorEntityDescription( + key=SENSOR_REBOOT_TIME, + translation_key="runtime", + device_class=SensorDeviceClass.TIMESTAMP, + icon="mdi:progress-clock", + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: PGLabConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up sensor for device.""" + + @callback + def async_discover( + pglab_device: PyPGLabDevice, + pglab_coordinator: PGLabSensorsCoordinator, + ) -> None: + """Discover and add a PG LAB Sensor.""" + pglab_discovery = config_entry.runtime_data + + sensors: list[PGLabSensor] = [ + PGLabSensor( + description, + pglab_discovery, + pglab_device, + pglab_coordinator, + ) + for description in SENSOR_INFO + ] + + async_add_entities(sensors) + + # Register the callback to create the sensor entity when discovered. + pglab_discovery = config_entry.runtime_data + await pglab_discovery.register_platform(hass, Platform.SENSOR, async_discover) + + +class PGLabSensor(PGLabSensorEntity, SensorEntity): + """A PGLab sensor.""" + + def __init__( + self, + description: SensorEntityDescription, + pglab_discovery: PGLabDiscovery, + pglab_device: PyPGLabDevice, + pglab_coordinator: PGLabSensorsCoordinator, + ) -> None: + """Initialize the Sensor class.""" + + super().__init__(pglab_discovery, pglab_device, pglab_coordinator) + + self._attr_unique_id = f"{pglab_device.id}_{description.key}" + self.entity_description = description + + @callback + def _handle_coordinator_update(self) -> None: + """Update attributes when the coordinator updates.""" + + self._attr_native_value = self.coordinator.get_sensor_value( + self.entity_description.key + ) + super()._handle_coordinator_update() + + @property + def available(self) -> bool: + """Return PG LAB sensor availability.""" + return super().available and self.native_value is not None diff --git a/homeassistant/components/pglab/strings.json b/homeassistant/components/pglab/strings.json index 8f9021cdcca..c6f80d12f09 100644 --- a/homeassistant/components/pglab/strings.json +++ b/homeassistant/components/pglab/strings.json @@ -15,10 +15,26 @@ } }, "entity": { + "cover": { + "shutter": { + "name": "Shutter {shutter_id}" + } + }, "switch": { "relay": { "name": "Relay {relay_id}" } + }, + "sensor": { + "temperature": { + "name": "Temperature" + }, + "runtime": { + "name": "Run time" + }, + "mpu_voltage": { + "name": "MPU voltage" + } } } } diff --git a/homeassistant/components/pglab/switch.py b/homeassistant/components/pglab/switch.py index 554b5cf80ca..76b177e84c4 100644 --- a/homeassistant/components/pglab/switch.py +++ b/homeassistant/components/pglab/switch.py @@ -12,7 +12,7 
@@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from . import PGLABConfigEntry +from . import PGLabConfigEntry from .discovery import PGLabDiscovery from .entity import PGLabEntity @@ -21,7 +21,7 @@ PARALLEL_UPDATES = 0 async def async_setup_entry( hass: HomeAssistant, - config_entry: PGLABConfigEntry, + config_entry: PGLabConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up switches for device.""" @@ -52,9 +52,9 @@ class PGLabSwitch(PGLabEntity, SwitchEntity): """Initialize the Switch class.""" super().__init__( - discovery=pglab_discovery, - device=pglab_device, - entity=pglab_relay, + pglab_discovery, + pglab_device, + pglab_relay, ) self._attr_unique_id = f"{pglab_device.id}_relay{pglab_relay.id}" diff --git a/homeassistant/components/philips_js/light.py b/homeassistant/components/philips_js/light.py index bf15292335e..87e3323a30c 100644 --- a/homeassistant/components/philips_js/light.py +++ b/homeassistant/components/philips_js/light.py @@ -3,7 +3,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any +from typing import Any, cast from haphilipsjs import PhilipsTV from haphilipsjs.typing import AmbilightCurrentConfiguration @@ -328,7 +328,7 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity): """Turn the bulb on.""" brightness = kwargs.get(ATTR_BRIGHTNESS, self.brightness) hs_color = kwargs.get(ATTR_HS_COLOR, self.hs_color) - attr_effect = kwargs.get(ATTR_EFFECT, self.effect) + attr_effect = cast(str, kwargs.get(ATTR_EFFECT, self.effect)) if not self._tv.on: raise HomeAssistantError("TV is not available") diff --git a/homeassistant/components/picnic/manifest.json b/homeassistant/components/picnic/manifest.json index 09f28da39a4..251964c15d0 100644 --- a/homeassistant/components/picnic/manifest.json +++ b/homeassistant/components/picnic/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/picnic", "iot_class": "cloud_polling", "loggers": ["python_picnic_api2"], - "requirements": ["python-picnic-api2==1.2.2"] + "requirements": ["python-picnic-api2==1.2.4"] } diff --git a/homeassistant/components/ping/strings.json b/homeassistant/components/ping/strings.json index ef9f74b4207..c301a1b277d 100644 --- a/homeassistant/components/ping/strings.json +++ b/homeassistant/components/ping/strings.json @@ -2,16 +2,16 @@ "entity": { "sensor": { "round_trip_time_avg": { - "name": "Round Trip Time Average" + "name": "Round-trip time average" }, "round_trip_time_max": { - "name": "Round Trip Time Maximum" + "name": "Round-trip time maximum" }, "round_trip_time_mdev": { - "name": "Round Trip Time Mean Deviation" + "name": "Round-trip time mean deviation" }, "round_trip_time_min": { - "name": "Round Trip Time Minimum" + "name": "Round-trip time minimum" } } }, diff --git a/homeassistant/components/point/config_flow.py b/homeassistant/components/point/config_flow.py index a0a51c7b9e6..b26ade8b725 100644 --- a/homeassistant/components/point/config_flow.py +++ b/homeassistant/components/point/config_flow.py @@ -11,6 +11,8 @@ from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHan from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): """Config flow to handle Minut Point OAuth2 authentication.""" @@ -56,7 +58,7 @@ class 
OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): if reauth_entry.unique_id is not None: self._abort_if_unique_id_mismatch(reason="wrong_account") - logging.debug("user_id: %s", user_id) + _LOGGER.debug("user_id: %s", user_id) return self.async_update_reload_and_abort( reauth_entry, data_updates=data, unique_id=user_id ) diff --git a/homeassistant/components/private_ble_device/manifest.json b/homeassistant/components/private_ble_device/manifest.json index 445affbcd57..df24f536527 100644 --- a/homeassistant/components/private_ble_device/manifest.json +++ b/homeassistant/components/private_ble_device/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/private_ble_device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.23.4"] + "requirements": ["bluetooth-data-tools==1.26.5"] } diff --git a/homeassistant/components/private_ble_device/strings.json b/homeassistant/components/private_ble_device/strings.json index c35775a4843..845a5d92bae 100644 --- a/homeassistant/components/private_ble_device/strings.json +++ b/homeassistant/components/private_ble_device/strings.json @@ -14,7 +14,7 @@ "irk_not_valid": "The key does not look like a valid IRK." }, "abort": { - "bluetooth_not_available": "At least one Bluetooth adapter or remote bluetooth proxy must be configured to track Private BLE Devices." + "bluetooth_not_available": "At least one Bluetooth adapter or remote Bluetooth proxy must be configured to track Private BLE Devices." } }, "entity": { diff --git a/homeassistant/components/progettihwsw/config_flow.py b/homeassistant/components/progettihwsw/config_flow.py index 2e5ea221dca..8818eff2d81 100644 --- a/homeassistant/components/progettihwsw/config_flow.py +++ b/homeassistant/components/progettihwsw/config_flow.py @@ -1,5 +1,6 @@ """Config flow for ProgettiHWSW Automation integration.""" +import logging from typing import TYPE_CHECKING, Any from ProgettiHWSW.ProgettiHWSWAPI import ProgettiHWSWAPI @@ -11,6 +12,8 @@ from homeassistant.exceptions import HomeAssistantError from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( {vol.Required("host"): str, vol.Required("port", default=80): int} ) @@ -86,7 +89,8 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): info = await validate_input(self.hass, user_input) except CannotConnect: errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: user_input.update(info) diff --git a/homeassistant/components/proxy/camera.py b/homeassistant/components/proxy/camera.py index f6e909f13d1..47fa9454deb 100644 --- a/homeassistant/components/proxy/camera.py +++ b/homeassistant/components/proxy/camera.py @@ -104,6 +104,15 @@ def _resize_image(image, opts): new_width = opts.max_width (old_width, old_height) = img.size old_size = len(image) + + # If no max_width specified, only apply quality changes if requested + if new_width is None: + if opts.quality is None: + return image + imgbuf = io.BytesIO() + img.save(imgbuf, "JPEG", optimize=True, quality=quality) + return imgbuf.getvalue() + if old_width <= new_width: if opts.quality is None: _LOGGER.debug("Image is smaller-than/equal-to requested width") diff --git a/homeassistant/components/pterodactyl/__init__.py b/homeassistant/components/pterodactyl/__init__.py new file mode 100644 index 00000000000..33b3cc7576f --- /dev/null +++ 
b/homeassistant/components/pterodactyl/__init__.py @@ -0,0 +1,27 @@ +"""The Pterodactyl integration.""" + +from __future__ import annotations + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import PterodactylConfigEntry, PterodactylCoordinator + +_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR] + + +async def async_setup_entry(hass: HomeAssistant, entry: PterodactylConfigEntry) -> bool: + """Set up Pterodactyl from a config entry.""" + coordinator = PterodactylCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: PterodactylConfigEntry +) -> bool: + """Unload a Pterodactyl config entry.""" + return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS) diff --git a/homeassistant/components/pterodactyl/api.py b/homeassistant/components/pterodactyl/api.py new file mode 100644 index 00000000000..38cb9809652 --- /dev/null +++ b/homeassistant/components/pterodactyl/api.py @@ -0,0 +1,120 @@ +"""API module of the Pterodactyl integration.""" + +from dataclasses import dataclass +import logging + +from pydactyl import PterodactylClient +from pydactyl.exceptions import ( + BadRequestError, + ClientConfigError, + PterodactylApiError, + PydactylError, +) + +from homeassistant.core import HomeAssistant + +_LOGGER = logging.getLogger(__name__) + + +class PterodactylConfigurationError(Exception): + """Raised when the configuration is invalid.""" + + +class PterodactylConnectionError(Exception): + """Raised when no data can be fetched from the server.""" + + +@dataclass +class PterodactylData: + """Data for the Pterodactyl server.""" + + name: str + uuid: str + identifier: str + state: str + memory_utilization: int + cpu_utilization: float + disk_utilization: int + network_rx_utilization: int + network_tx_utilization: int + uptime: int + + +class PterodactylAPI: + """Wrapper for Pterodactyl's API.""" + + pterodactyl: PterodactylClient | None + identifiers: list[str] + + def __init__(self, hass: HomeAssistant, host: str, api_key: str) -> None: + """Initialize the Pterodactyl API.""" + self.hass = hass + self.host = host + self.api_key = api_key + self.pterodactyl = None + self.identifiers = [] + + async def async_init(self): + """Initialize the Pterodactyl API.""" + self.pterodactyl = PterodactylClient(self.host, self.api_key) + + try: + paginated_response = await self.hass.async_add_executor_job( + self.pterodactyl.client.servers.list_servers + ) + except ClientConfigError as error: + raise PterodactylConfigurationError(error) from error + except ( + PydactylError, + BadRequestError, + PterodactylApiError, + ) as error: + raise PterodactylConnectionError(error) from error + else: + game_servers = paginated_response.collect() + for game_server in game_servers: + self.identifiers.append(game_server["attributes"]["identifier"]) + + _LOGGER.debug("Identifiers of Pterodactyl servers: %s", self.identifiers) + + def get_server_data(self, identifier: str) -> tuple[dict, dict]: + """Get all data from the Pterodactyl server.""" + server = self.pterodactyl.client.servers.get_server(identifier) # type: ignore[union-attr] + utilization = self.pterodactyl.client.servers.get_server_utilization( # type: ignore[union-attr] + identifier + ) + + return server, utilization + + async def async_get_data(self) -> dict[str, 
PterodactylData]: + """Update the data from all Pterodactyl servers.""" + data = {} + + for identifier in self.identifiers: + try: + server, utilization = await self.hass.async_add_executor_job( + self.get_server_data, identifier + ) + except ( + PydactylError, + BadRequestError, + PterodactylApiError, + ) as error: + raise PterodactylConnectionError(error) from error + else: + data[identifier] = PterodactylData( + name=server["name"], + uuid=server["uuid"], + identifier=identifier, + state=utilization["current_state"], + cpu_utilization=utilization["resources"]["cpu_absolute"], + memory_utilization=utilization["resources"]["memory_bytes"], + disk_utilization=utilization["resources"]["disk_bytes"], + network_rx_utilization=utilization["resources"]["network_rx_bytes"], + network_tx_utilization=utilization["resources"]["network_tx_bytes"], + uptime=utilization["resources"]["uptime"], + ) + + _LOGGER.debug("%s", data[identifier]) + + return data diff --git a/homeassistant/components/pterodactyl/binary_sensor.py b/homeassistant/components/pterodactyl/binary_sensor.py new file mode 100644 index 00000000000..e3615c47499 --- /dev/null +++ b/homeassistant/components/pterodactyl/binary_sensor.py @@ -0,0 +1,64 @@ +"""Binary sensor platform of the Pterodactyl integration.""" + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import PterodactylConfigEntry, PterodactylCoordinator +from .entity import PterodactylEntity + +KEY_STATUS = "status" + + +BINARY_SENSOR_DESCRIPTIONS = [ + BinarySensorEntityDescription( + key=KEY_STATUS, + translation_key=KEY_STATUS, + device_class=BinarySensorDeviceClass.RUNNING, + ), +] + +# Coordinator is used to centralize the data updates. 
+PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: PterodactylConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Pterodactyl binary sensor platform.""" + coordinator = config_entry.runtime_data + + async_add_entities( + PterodactylBinarySensorEntity( + coordinator, identifier, description, config_entry + ) + for identifier in coordinator.api.identifiers + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class PterodactylBinarySensorEntity(PterodactylEntity, BinarySensorEntity): + """Representation of a Pterodactyl binary sensor base entity.""" + + def __init__( + self, + coordinator: PterodactylCoordinator, + identifier: str, + description: BinarySensorEntityDescription, + config_entry: PterodactylConfigEntry, + ) -> None: + """Initialize binary sensor base entity.""" + super().__init__(coordinator, identifier, config_entry) + self.entity_description = description + self._attr_unique_id = f"{self.game_server_data.uuid}_{description.key}" + + @property + def is_on(self) -> bool: + """Return binary sensor state.""" + return self.game_server_data.state == "running" diff --git a/homeassistant/components/pterodactyl/config_flow.py b/homeassistant/components/pterodactyl/config_flow.py new file mode 100644 index 00000000000..a36069d2bb9 --- /dev/null +++ b/homeassistant/components/pterodactyl/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for the Pterodactyl integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +import voluptuous as vol +from yarl import URL + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_API_KEY, CONF_URL + +from .api import ( + PterodactylAPI, + PterodactylConfigurationError, + PterodactylConnectionError, +) +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_URL = "http://localhost:8080" + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_URL, default=DEFAULT_URL): str, + vol.Required(CONF_API_KEY): str, + } +) + + +class PterodactylConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Pterodactyl.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + url = URL(user_input[CONF_URL]).human_repr() + api_key = user_input[CONF_API_KEY] + + self._async_abort_entries_match({CONF_URL: url}) + api = PterodactylAPI(self.hass, url, api_key) + + try: + await api.async_init() + except (PterodactylConfigurationError, PterodactylConnectionError): + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception occurred during config flow") + errors["base"] = "unknown" + else: + return self.async_create_entry(title=url, data=user_input) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/pterodactyl/const.py b/homeassistant/components/pterodactyl/const.py new file mode 100644 index 00000000000..8cf4d0c3963 --- /dev/null +++ b/homeassistant/components/pterodactyl/const.py @@ -0,0 +1,3 @@ +"""Constants for the Pterodactyl integration.""" + +DOMAIN = "pterodactyl" diff --git a/homeassistant/components/pterodactyl/coordinator.py b/homeassistant/components/pterodactyl/coordinator.py new file mode 100644 index 00000000000..36456ade630 --- /dev/null +++ 
b/homeassistant/components/pterodactyl/coordinator.py @@ -0,0 +1,66 @@ +"""Data update coordinator of the Pterodactyl integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .api import ( + PterodactylAPI, + PterodactylConfigurationError, + PterodactylConnectionError, + PterodactylData, +) + +SCAN_INTERVAL = timedelta(seconds=60) + +_LOGGER = logging.getLogger(__name__) + +type PterodactylConfigEntry = ConfigEntry[PterodactylCoordinator] + + +class PterodactylCoordinator(DataUpdateCoordinator[dict[str, PterodactylData]]): + """Pterodactyl data update coordinator.""" + + config_entry: PterodactylConfigEntry + api: PterodactylAPI + + def __init__( + self, + hass: HomeAssistant, + config_entry: PterodactylConfigEntry, + ) -> None: + """Initialize coordinator instance.""" + + super().__init__( + hass=hass, + name=config_entry.data[CONF_URL], + config_entry=config_entry, + logger=_LOGGER, + update_interval=SCAN_INTERVAL, + ) + + async def _async_setup(self) -> None: + """Set up the Pterodactyl data coordinator.""" + self.api = PterodactylAPI( + hass=self.hass, + host=self.config_entry.data[CONF_URL], + api_key=self.config_entry.data[CONF_API_KEY], + ) + + try: + await self.api.async_init() + except PterodactylConfigurationError as error: + raise UpdateFailed(error) from error + + async def _async_update_data(self) -> dict[str, PterodactylData]: + """Get updated data from the Pterodactyl server.""" + try: + return await self.api.async_get_data() + except PterodactylConnectionError as error: + raise UpdateFailed(error) from error diff --git a/homeassistant/components/pterodactyl/entity.py b/homeassistant/components/pterodactyl/entity.py new file mode 100644 index 00000000000..49fd65af476 --- /dev/null +++ b/homeassistant/components/pterodactyl/entity.py @@ -0,0 +1,47 @@ +"""Base entity for the Pterodactyl integration.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_URL +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .api import PterodactylData +from .const import DOMAIN +from .coordinator import PterodactylCoordinator + +MANUFACTURER = "Pterodactyl" + + +class PterodactylEntity(CoordinatorEntity[PterodactylCoordinator]): + """Representation of a Pterodactyl base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: PterodactylCoordinator, + identifier: str, + config_entry: ConfigEntry, + ) -> None: + """Initialize base entity.""" + super().__init__(coordinator) + + self.identifier = identifier + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, identifier)}, + manufacturer=MANUFACTURER, + name=self.game_server_data.name, + model=self.game_server_data.name, + model_id=self.game_server_data.uuid, + configuration_url=f"{config_entry.data[CONF_URL]}/server/{identifier}", + ) + + @property + def available(self) -> bool: + """Return binary sensor availability.""" + return super().available and self.identifier in self.coordinator.data + + @property + def game_server_data(self) -> PterodactylData: + """Return game server data.""" + return self.coordinator.data[self.identifier] diff --git 
a/homeassistant/components/pterodactyl/manifest.json b/homeassistant/components/pterodactyl/manifest.json new file mode 100644 index 00000000000..8ffa21dd186 --- /dev/null +++ b/homeassistant/components/pterodactyl/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "pterodactyl", + "name": "Pterodactyl", + "codeowners": ["@elmurato"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/pterodactyl", + "iot_class": "local_polling", + "quality_scale": "bronze", + "requirements": ["py-dactyl==2.0.4"] +} diff --git a/homeassistant/components/pterodactyl/quality_scale.yaml b/homeassistant/components/pterodactyl/quality_scale.yaml new file mode 100644 index 00000000000..dae3b9fa11a --- /dev/null +++ b/homeassistant/components/pterodactyl/quality_scale.yaml @@ -0,0 +1,93 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: Integration doesn't provide any service actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow: done + config-flow-test-coverage: done + dependency-transparency: done + docs-actions: + status: exempt + comment: Integration doesn't provide any service actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: done + comment: Handled by coordinator. + entity-unique-id: + status: done + comment: Using config entry ID, as the dependency pydactyl doesn't provide unique information. + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: + status: done + comment: | + Raising ConfigEntryNotReady if the initialization isn't successful. + unique-config-entry: + status: done + comment: | + As there is no unique information available from the dependency pydactyl, + the server host is used to identify that the same service is already configured. + + # Silver + action-exceptions: + status: exempt + comment: Integration doesn't provide any service actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: Integration doesn't support any configuration parameters. + docs-installation-parameters: todo + entity-unavailable: + status: done + comment: Handled by coordinator. + integration-owner: done + log-when-unavailable: + status: done + comment: Handled by coordinator. + parallel-updates: done + reauthentication-flow: todo + test-coverage: todo + + # Gold + devices: done + diagnostics: todo + discovery: + status: exempt + comment: No discovery possible. + discovery-update-info: + status: exempt + comment: | + No discovery possible. Users can use the (local or public) hostname instead of an IP address, + if static IP addresses cannot be configured. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: No repair use-cases for this integration. + stale-devices: todo + + # Platinum + async-dependency: todo + inject-websession: + status: exempt + comment: Integration isn't making any HTTP requests.
+ strict-typing: todo diff --git a/homeassistant/components/pterodactyl/strings.json b/homeassistant/components/pterodactyl/strings.json new file mode 100644 index 00000000000..a875c72ccd8 --- /dev/null +++ b/homeassistant/components/pterodactyl/strings.json @@ -0,0 +1,30 @@ +{ + "config": { + "step": { + "user": { + "data": { + "url": "[%key:common::config_flow::data::url%]", + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "url": "The URL of your Pterodactyl server, including the protocol (http:// or https://) and optionally the port number.", + "api_key": "The account API key for accessing your Pterodactyl server." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + } + }, + "entity": { + "binary_sensor": { + "status": { + "name": "Status" + } + } + } +} diff --git a/homeassistant/components/purpleair/__init__.py b/homeassistant/components/purpleair/__init__.py index 2d4022946b2..78986b34351 100644 --- a/homeassistant/components/purpleair/__init__.py +++ b/homeassistant/components/purpleair/__init__.py @@ -2,37 +2,34 @@ from __future__ import annotations -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import PurpleAirDataUpdateCoordinator - -PLATFORMS = [Platform.SENSOR] +from .const import PLATFORMS +from .coordinator import PurpleAirConfigEntry, PurpleAirDataUpdateCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up PurpleAir from a config entry.""" - coordinator = PurpleAirDataUpdateCoordinator(hass, entry) +async def async_setup_entry(hass: HomeAssistant, entry: PurpleAirConfigEntry) -> bool: + """Set up PurpleAir config entry.""" + coordinator = PurpleAirDataUpdateCoordinator( + hass, + entry, + ) + entry.runtime_data = coordinator + await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload(entry.add_update_listener(async_handle_entry_update)) + entry.async_on_unload(entry.add_update_listener(async_reload_entry)) return True -async def async_handle_entry_update(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Handle an options update.""" +async def async_reload_entry(hass: HomeAssistant, entry: PurpleAirConfigEntry) -> None: + """Reload config entry.""" await hass.config_entries.async_reload(entry.entry_id) -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok +async def async_unload_entry(hass: HomeAssistant, entry: PurpleAirConfigEntry) -> bool: + """Unload config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/purpleair/const.py b/homeassistant/components/purpleair/const.py index 5f1ec84d469..fcb928bd4f3 100644 --- a/homeassistant/components/purpleair/const.py +++ b/homeassistant/components/purpleair/const.py @@ -1,10 +1,13 @@ """Constants for the PurpleAir integration.""" import logging +from typing import 
Final -DOMAIN = "purpleair" +from homeassistant.const import Platform -LOGGER = logging.getLogger(__package__) +LOGGER: Final = logging.getLogger(__package__) +PLATFORMS: Final = [Platform.SENSOR] -CONF_READ_KEY = "read_key" -CONF_SENSOR_INDICES = "sensor_indices" +DOMAIN: Final[str] = "purpleair" + +CONF_SENSOR_INDICES: Final[str] = "sensor_indices" diff --git a/homeassistant/components/purpleair/coordinator.py b/homeassistant/components/purpleair/coordinator.py index f1511733cfa..4ed0c0340c6 100644 --- a/homeassistant/components/purpleair/coordinator.py +++ b/homeassistant/components/purpleair/coordinator.py @@ -46,12 +46,15 @@ SENSOR_FIELDS_TO_RETRIEVE = [ UPDATE_INTERVAL = timedelta(minutes=2) +type PurpleAirConfigEntry = ConfigEntry[PurpleAirDataUpdateCoordinator] + + class PurpleAirDataUpdateCoordinator(DataUpdateCoordinator[GetSensorsResponse]): """Define a PurpleAir-specific coordinator.""" - config_entry: ConfigEntry + config_entry: PurpleAirConfigEntry - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + def __init__(self, hass: HomeAssistant, entry: PurpleAirConfigEntry) -> None: """Initialize.""" self._api = API( entry.data[CONF_API_KEY], diff --git a/homeassistant/components/purpleair/diagnostics.py b/homeassistant/components/purpleair/diagnostics.py index f7c44b7e9b2..71b83e277d3 100644 --- a/homeassistant/components/purpleair/diagnostics.py +++ b/homeassistant/components/purpleair/diagnostics.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -14,8 +13,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import PurpleAirDataUpdateCoordinator +from .coordinator import PurpleAirConfigEntry CONF_TITLE = "title" @@ -30,14 +28,13 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: PurpleAirConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: PurpleAirDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] return async_redact_data( { "entry": entry.as_dict(), - "data": coordinator.data.model_dump(), + "data": entry.runtime_data.data.model_dump(), }, TO_REDACT, ) diff --git a/homeassistant/components/purpleair/entity.py b/homeassistant/components/purpleair/entity.py index 4f7be1874ed..410fdd9b942 100644 --- a/homeassistant/components/purpleair/entity.py +++ b/homeassistant/components/purpleair/entity.py @@ -7,13 +7,12 @@ from typing import Any from aiopurpleair.models.sensors import SensorModel -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN -from .coordinator import PurpleAirDataUpdateCoordinator +from .coordinator import PurpleAirConfigEntry, PurpleAirDataUpdateCoordinator class PurpleAirEntity(CoordinatorEntity[PurpleAirDataUpdateCoordinator]): @@ -23,12 +22,11 @@ class PurpleAirEntity(CoordinatorEntity[PurpleAirDataUpdateCoordinator]): def __init__( self, - coordinator: PurpleAirDataUpdateCoordinator, - entry: ConfigEntry, + entry: PurpleAirConfigEntry, sensor_index: int, ) -> None: """Initialize.""" - 
super().__init__(coordinator) + super().__init__(entry.runtime_data) self._sensor_index = sensor_index diff --git a/homeassistant/components/purpleair/sensor.py b/homeassistant/components/purpleair/sensor.py index bed1d878557..a85a23b6144 100644 --- a/homeassistant/components/purpleair/sensor.py +++ b/homeassistant/components/purpleair/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, PERCENTAGE, @@ -27,8 +26,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from .const import CONF_SENSOR_INDICES, DOMAIN -from .coordinator import PurpleAirDataUpdateCoordinator +from .const import CONF_SENSOR_INDICES +from .coordinator import PurpleAirConfigEntry from .entity import PurpleAirEntity CONCENTRATION_PARTICLES_PER_100_MILLILITERS = f"particles/100{UnitOfVolume.MILLILITERS}" @@ -165,13 +164,12 @@ SENSOR_DESCRIPTIONS = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: PurpleAirConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up PurpleAir sensors based on a config entry.""" - coordinator: PurpleAirDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] async_add_entities( - PurpleAirSensorEntity(coordinator, entry, sensor_index, description) + PurpleAirSensorEntity(entry, sensor_index, description) for sensor_index in entry.options[CONF_SENSOR_INDICES] for description in SENSOR_DESCRIPTIONS ) @@ -184,13 +182,12 @@ class PurpleAirSensorEntity(PurpleAirEntity, SensorEntity): def __init__( self, - coordinator: PurpleAirDataUpdateCoordinator, - entry: ConfigEntry, + entry: PurpleAirConfigEntry, sensor_index: int, description: PurpleAirSensorEntityDescription, ) -> None: """Initialize.""" - super().__init__(coordinator, entry, sensor_index) + super().__init__(entry, sensor_index) self._attr_unique_id = f"{self._sensor_index}-{description.key}" self.entity_description = description diff --git a/homeassistant/components/pvoutput/manifest.json b/homeassistant/components/pvoutput/manifest.json index 9dbdad53bcb..dee5f9cda6e 100644 --- a/homeassistant/components/pvoutput/manifest.json +++ b/homeassistant/components/pvoutput/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/pvoutput", "integration_type": "device", "iot_class": "cloud_polling", - "requirements": ["pvo==2.2.0"] + "requirements": ["pvo==2.2.1"] } diff --git a/homeassistant/components/pvoutput/strings.json b/homeassistant/components/pvoutput/strings.json index 06d98971053..651bb55a2b4 100644 --- a/homeassistant/components/pvoutput/strings.json +++ b/homeassistant/components/pvoutput/strings.json @@ -27,19 +27,19 @@ "entity": { "sensor": { "energy_consumption": { - "name": "Energy consumed" + "name": "Energy consumption" }, "energy_generation": { - "name": "Energy generated" + "name": "Energy generation" }, "efficiency": { "name": "Efficiency" }, "power_consumption": { - "name": "Power consumed" + "name": "Power consumption" }, "power_generation": { - "name": "Power generated" + "name": "Power generation" } } } diff --git a/homeassistant/components/pyload/__init__.py b/homeassistant/components/pyload/__init__.py index cf8e922d70e..ca7bbb0c1dc 100644 --- a/homeassistant/components/pyload/__init__.py +++ 
b/homeassistant/components/pyload/__init__.py @@ -2,14 +2,18 @@ from __future__ import annotations +import logging + from aiohttp import CookieJar from pyloadapi import PyLoadAPI +from yarl import URL from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_SSL, + CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL, Platform, @@ -19,17 +23,14 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession from .coordinator import PyLoadConfigEntry, PyLoadCoordinator +_LOGGER = logging.getLogger(__name__) + PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.SENSOR, Platform.SWITCH] async def async_setup_entry(hass: HomeAssistant, entry: PyLoadConfigEntry) -> bool: """Set up pyLoad from a config entry.""" - url = ( - f"{'https' if entry.data[CONF_SSL] else 'http'}://" - f"{entry.data[CONF_HOST]}:{entry.data[CONF_PORT]}/" - ) - session = async_create_clientsession( hass, verify_ssl=entry.data[CONF_VERIFY_SSL], @@ -37,7 +38,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: PyLoadConfigEntry) -> bo ) pyloadapi = PyLoadAPI( session, - api_url=url, + api_url=URL(entry.data[CONF_URL]), username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], ) @@ -55,3 +56,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: PyLoadConfigEntry) -> bo async def async_unload_entry(hass: HomeAssistant, entry: PyLoadConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, entry: PyLoadConfigEntry) -> bool: + """Migrate config entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", entry.version, entry.minor_version + ) + + if entry.version == 1 and entry.minor_version == 0: + url = URL.build( + scheme="https" if entry.data[CONF_SSL] else "http", + host=entry.data[CONF_HOST], + port=entry.data[CONF_PORT], + ).human_repr() + hass.config_entries.async_update_entry( + entry, data={**entry.data, CONF_URL: url}, minor_version=1, version=1 + ) + + _LOGGER.debug( + "Migration to configuration version %s.%s successful", + entry.version, + entry.minor_version, + ) + return True diff --git a/homeassistant/components/pyload/config_flow.py b/homeassistant/components/pyload/config_flow.py index bc3bbc6cb34..50d354d345d 100644 --- a/homeassistant/components/pyload/config_flow.py +++ b/homeassistant/components/pyload/config_flow.py @@ -9,19 +9,17 @@ from typing import Any from aiohttp import CookieJar from pyloadapi import CannotConnect, InvalidAuth, ParserError, PyLoadAPI import voluptuous as vol +from yarl import URL from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( - CONF_HOST, CONF_NAME, CONF_PASSWORD, - CONF_PORT, - CONF_SSL, + CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_create_clientsession from homeassistant.helpers.selector import ( TextSelector, @@ -29,15 +27,18 @@ from homeassistant.helpers.selector import ( TextSelectorType, ) -from .const import DEFAULT_NAME, DEFAULT_PORT, DOMAIN +from .const import DEFAULT_NAME, DOMAIN _LOGGER = logging.getLogger(__name__) STEP_USER_DATA_SCHEMA = vol.Schema( { - vol.Required(CONF_HOST): str, - vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port, - vol.Required(CONF_SSL, default=False): cv.boolean, + vol.Required(CONF_URL): TextSelector( + TextSelectorConfig( + 
type=TextSelectorType.URL, + autocomplete="url", + ), + ), vol.Required(CONF_VERIFY_SSL, default=True): bool, vol.Required(CONF_USERNAME): TextSelector( TextSelectorConfig( @@ -80,14 +81,9 @@ async def validate_input(hass: HomeAssistant, user_input: dict[str, Any]) -> Non user_input[CONF_VERIFY_SSL], cookie_jar=CookieJar(unsafe=True), ) - - url = ( - f"{'https' if user_input[CONF_SSL] else 'http'}://" - f"{user_input[CONF_HOST]}:{user_input[CONF_PORT]}/" - ) pyload = PyLoadAPI( session, - api_url=url, + api_url=URL(user_input[CONF_URL]), username=user_input[CONF_USERNAME], password=user_input[CONF_PASSWORD], ) @@ -99,6 +95,7 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for pyLoad.""" VERSION = 1 + MINOR_VERSION = 1 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -106,9 +103,8 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors: dict[str, str] = {} if user_input is not None: - self._async_abort_entries_match( - {CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]} - ) + url = URL(user_input[CONF_URL]).human_repr() + self._async_abort_entries_match({CONF_URL: url}) try: await validate_input(self.hass, user_input) except (CannotConnect, ParserError): @@ -120,7 +116,14 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown" else: title = DEFAULT_NAME - return self.async_create_entry(title=title, data=user_input) + + return self.async_create_entry( + title=title, + data={ + **user_input, + CONF_URL: url, + }, + ) return self.async_show_form( step_id="user", @@ -144,9 +147,8 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): reauth_entry = self._get_reauth_entry() if user_input is not None: - new_input = reauth_entry.data | user_input try: - await validate_input(self.hass, new_input) + await validate_input(self.hass, {**reauth_entry.data, **user_input}) except (CannotConnect, ParserError): errors["base"] = "cannot_connect" except InvalidAuth: @@ -155,7 +157,9 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - return self.async_update_reload_and_abort(reauth_entry, data=new_input) + return self.async_update_reload_and_abort( + reauth_entry, data_updates=user_input + ) return self.async_show_form( step_id="reauth_confirm", @@ -191,15 +195,18 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): else: return self.async_update_reload_and_abort( reconfig_entry, - data=user_input, + data={ + **user_input, + CONF_URL: URL(user_input[CONF_URL]).human_repr(), + }, reload_even_if_entry_is_unchanged=False, ) - + suggested_values = user_input if user_input else reconfig_entry.data return self.async_show_form( step_id="reconfigure", data_schema=self.add_suggested_values_to_schema( STEP_USER_DATA_SCHEMA, - user_input or reconfig_entry.data, + suggested_values, ), description_placeholders={CONF_NAME: reconfig_entry.data[CONF_USERNAME]}, errors=errors, diff --git a/homeassistant/components/pyload/coordinator.py b/homeassistant/components/pyload/coordinator.py index c57dfa7720d..7bb2b870520 100644 --- a/homeassistant/components/pyload/coordinator.py +++ b/homeassistant/components/pyload/coordinator.py @@ -31,6 +31,7 @@ class PyLoadData: download: bool reconnect: bool captcha: bool | None = None + proxy: bool | None = None free_space: int diff --git a/homeassistant/components/pyload/diagnostics.py b/homeassistant/components/pyload/diagnostics.py index 105a9a953e2..98fab38da1d 100644 --- 
a/homeassistant/components/pyload/diagnostics.py +++ b/homeassistant/components/pyload/diagnostics.py @@ -5,13 +5,15 @@ from __future__ import annotations from dataclasses import asdict from typing import Any -from homeassistant.components.diagnostics import async_redact_data -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from yarl import URL + +from homeassistant.components.diagnostics import REDACTED, async_redact_data +from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import HomeAssistant from .coordinator import PyLoadConfigEntry, PyLoadData -TO_REDACT = {CONF_USERNAME, CONF_PASSWORD, CONF_HOST} +TO_REDACT = {CONF_USERNAME, CONF_PASSWORD, CONF_URL} async def async_get_config_entry_diagnostics( @@ -21,6 +23,9 @@ async def async_get_config_entry_diagnostics( pyload_data: PyLoadData = config_entry.runtime_data.data return { - "config_entry_data": async_redact_data(dict(config_entry.data), TO_REDACT), + "config_entry_data": { + **async_redact_data(dict(config_entry.data), TO_REDACT), + CONF_URL: URL(config_entry.data[CONF_URL]).with_host(REDACTED).human_repr(), + }, "pyload_data": asdict(pyload_data), } diff --git a/homeassistant/components/pyload/manifest.json b/homeassistant/components/pyload/manifest.json index 134865b9d93..feaa23af7de 100644 --- a/homeassistant/components/pyload/manifest.json +++ b/homeassistant/components/pyload/manifest.json @@ -7,5 +7,6 @@ "integration_type": "service", "iot_class": "local_polling", "loggers": ["pyloadapi"], + "quality_scale": "platinum", "requirements": ["PyLoadAPI==1.4.2"] } diff --git a/homeassistant/components/pyload/quality_scale.yaml b/homeassistant/components/pyload/quality_scale.yaml new file mode 100644 index 00000000000..a9ce552961b --- /dev/null +++ b/homeassistant/components/pyload/quality_scale.yaml @@ -0,0 +1,82 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: The integration registers no actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: The integration registers no actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: The integration registers no events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: The integration registers no actions. + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: Integration has no configuration parameters + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: The integration is a web service, there are no discoverable devices. + discovery: + status: exempt + comment: The integration is a web service, there are no discoverable devices. + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: + status: exempt + comment: The integration is a web service, there are no devices. 
+ docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: The integration is a web service, there are no devices. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: The integration has no repairs. + stale-devices: + status: exempt + comment: The integration is a web service, there are no devices. + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/pyload/strings.json b/homeassistant/components/pyload/strings.json index ed15a438c28..9414f7f7bb8 100644 --- a/homeassistant/components/pyload/strings.json +++ b/homeassistant/components/pyload/strings.json @@ -3,38 +3,30 @@ "step": { "user": { "data": { - "host": "[%key:common::config_flow::data::host%]", + "url": "[%key:common::config_flow::data::url%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "ssl": "[%key:common::config_flow::data::ssl%]", - "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]", - "port": "[%key:common::config_flow::data::port%]" + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "host": "The hostname or IP address of the device running your pyLoad instance.", + "url": "Specify the full URL of your pyLoad web interface, including the protocol (HTTP or HTTPS), hostname or IP address, port (pyLoad uses 8000 by default), and any path prefix if applicable.\nExample: `https://example.com:8000/path`", "username": "The username used to access the pyLoad instance.", "password": "The password associated with the pyLoad account.", - "port": "pyLoad uses port 8000 by default.", - "ssl": "If enabled, the connection to the pyLoad instance will use HTTPS.", "verify_ssl": "If checked, the SSL certificate will be validated to ensure a secure connection." 
} }, "reconfigure": { "data": { - "host": "[%key:common::config_flow::data::host%]", + "url": "[%key:common::config_flow::data::url%]", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "ssl": "[%key:common::config_flow::data::ssl%]", - "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]", - "port": "[%key:common::config_flow::data::port%]" + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "host": "[%key:component::pyload::config::step::user::data_description::host%]", - "verify_ssl": "[%key:component::pyload::config::step::user::data_description::verify_ssl%]", + "url": "[%key:component::pyload::config::step::user::data_description::url%]", "username": "[%key:component::pyload::config::step::user::data_description::username%]", "password": "[%key:component::pyload::config::step::user::data_description::password%]", - "port": "[%key:component::pyload::config::step::user::data_description::port%]", - "ssl": "[%key:component::pyload::config::step::user::data_description::ssl%]" + "verify_ssl": "[%key:component::pyload::config::step::user::data_description::verify_ssl%]" } }, "reauth_confirm": { diff --git a/homeassistant/components/qbus/climate.py b/homeassistant/components/qbus/climate.py new file mode 100644 index 00000000000..57d97c046b7 --- /dev/null +++ b/homeassistant/components/qbus/climate.py @@ -0,0 +1,172 @@ +"""Support for Qbus thermostat.""" + +import logging +from typing import Any + +from qbusmqttapi.const import KEY_PROPERTIES_REGIME, KEY_PROPERTIES_SET_TEMPERATURE +from qbusmqttapi.discovery import QbusMqttOutput +from qbusmqttapi.state import QbusMqttThermoState, StateType + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACAction, + HVACMode, +) +from homeassistant.components.mqtt import ReceiveMessage, client as mqtt +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.debounce import Debouncer +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import DOMAIN +from .coordinator import QbusConfigEntry +from .entity import QbusEntity, add_new_outputs + +PARALLEL_UPDATES = 0 + +STATE_REQUEST_DELAY = 2 + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: QbusConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up climate entities.""" + + coordinator = entry.runtime_data + added_outputs: list[QbusMqttOutput] = [] + + def _check_outputs() -> None: + add_new_outputs( + coordinator, + added_outputs, + lambda output: output.type == "thermo", + QbusClimate, + async_add_entities, + ) + + _check_outputs() + entry.async_on_unload(coordinator.async_add_listener(_check_outputs)) + + +class QbusClimate(QbusEntity, ClimateEntity): + """Representation of a Qbus climate entity.""" + + _attr_hvac_modes = [HVACMode.HEAT] + _attr_supported_features = ( + ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.TARGET_TEMPERATURE + ) + _attr_temperature_unit = UnitOfTemperature.CELSIUS + + def __init__(self, mqtt_output: QbusMqttOutput) -> None: + """Initialize climate entity.""" + + super().__init__(mqtt_output) + + self._attr_hvac_action = HVACAction.IDLE + self._attr_hvac_mode = HVACMode.HEAT + + set_temp: dict[str, Any] = mqtt_output.properties.get( + 
KEY_PROPERTIES_SET_TEMPERATURE, {} + ) + current_regime: dict[str, Any] = mqtt_output.properties.get( + KEY_PROPERTIES_REGIME, {} + ) + + self._attr_min_temp: float = set_temp.get("min", 0) + self._attr_max_temp: float = set_temp.get("max", 35) + self._attr_target_temperature_step: float = set_temp.get("step", 0.5) + self._attr_preset_modes: list[str] = current_regime.get("enumValues", []) + self._attr_preset_mode: str = ( + self._attr_preset_modes[0] if len(self._attr_preset_modes) > 0 else "" + ) + + self._request_state_debouncer: Debouncer | None = None + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + self._request_state_debouncer = Debouncer( + self.hass, + _LOGGER, + cooldown=STATE_REQUEST_DELAY, + immediate=False, + function=self._async_request_state, + ) + await super().async_added_to_hass() + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set new target preset mode.""" + + if preset_mode not in self._attr_preset_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_preset", + translation_placeholders={ + "preset": preset_mode, + "options": ", ".join(self._attr_preset_modes), + }, + ) + + state = QbusMqttThermoState(id=self._mqtt_output.id, type=StateType.STATE) + state.write_regime(preset_mode) + + await self._async_publish_output_state(state) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + temperature = kwargs.get(ATTR_TEMPERATURE) + + if temperature is not None and isinstance(temperature, float): + state = QbusMqttThermoState(id=self._mqtt_output.id, type=StateType.STATE) + state.write_set_temperature(temperature) + + await self._async_publish_output_state(state) + + async def _state_received(self, msg: ReceiveMessage) -> None: + state = self._message_factory.parse_output_state( + QbusMqttThermoState, msg.payload + ) + + if state is None: + return + + if preset_mode := state.read_regime(): + self._attr_preset_mode = preset_mode + + if current_temperature := state.read_current_temperature(): + self._attr_current_temperature = current_temperature + + if target_temperature := state.read_set_temperature(): + self._attr_target_temperature = target_temperature + + self._set_hvac_action() + + # When the state type is "event", the payload only contains the changed + # property. Request the state to get the full payload. However, changing + # temperature step by step could cause a flood of state requests, so we're + # holding off a few seconds before requesting the full state. 
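The comment above is the reason for the Debouncer: rapid temperature steps would otherwise trigger one full-state request per MQTT event. A much-simplified, standalone sketch of that trailing-call idea (this is not Home Assistant's Debouncer helper; class name, timings and prints are illustrative):

import asyncio
from collections.abc import Awaitable, Callable

class SimpleDebouncer:
    """Much-simplified stand-in for homeassistant.helpers.debounce.Debouncer (immediate=False)."""

    def __init__(self, cooldown: float, function: Callable[[], Awaitable[None]]) -> None:
        self._cooldown = cooldown
        self._function = function
        self._task: asyncio.Task | None = None

    async def async_call(self) -> None:
        # Bursts of calls inside the cooldown window collapse into one trailing run.
        if self._task is None or self._task.done():
            self._task = asyncio.create_task(self._delayed_run())

    async def _delayed_run(self) -> None:
        await asyncio.sleep(self._cooldown)
        await self._function()

async def request_full_state() -> None:
    print("full state requested once")

async def main() -> None:
    debouncer = SimpleDebouncer(cooldown=2, function=request_full_state)
    for _ in range(5):  # e.g. five quick temperature steps
        await debouncer.async_call()
        await asyncio.sleep(0.1)
    await asyncio.sleep(2.5)  # the single deferred request fires here

asyncio.run(main())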
+ if state.type == StateType.EVENT: + assert self._request_state_debouncer is not None + await self._request_state_debouncer.async_call() + + self.async_schedule_update_ha_state() + + def _set_hvac_action(self) -> None: + if self.target_temperature is None or self.current_temperature is None: + self._attr_hvac_action = HVACAction.IDLE + return + + self._attr_hvac_action = ( + HVACAction.HEATING + if self.target_temperature > self.current_temperature + else HVACAction.IDLE + ) + + async def _async_request_state(self) -> None: + request = self._message_factory.create_state_request([self._mqtt_output.id]) + await mqtt.async_publish(self.hass, request.topic, request.payload) diff --git a/homeassistant/components/qbus/const.py b/homeassistant/components/qbus/const.py index b9e42f13766..767a41f48cc 100644 --- a/homeassistant/components/qbus/const.py +++ b/homeassistant/components/qbus/const.py @@ -6,6 +6,7 @@ from homeassistant.const import Platform DOMAIN: Final = "qbus" PLATFORMS: list[Platform] = [ + Platform.CLIMATE, Platform.LIGHT, Platform.SWITCH, ] diff --git a/homeassistant/components/qbus/light.py b/homeassistant/components/qbus/light.py index 5ec76f5e807..3d2c763b8e3 100644 --- a/homeassistant/components/qbus/light.py +++ b/homeassistant/components/qbus/light.py @@ -51,7 +51,7 @@ class QbusLight(QbusEntity, LightEntity): super().__init__(mqtt_output) - self._set_state() + self._set_state(0) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" @@ -74,7 +74,6 @@ class QbusLight(QbusEntity, LightEntity): state.write_percentage(percentage) await self._async_publish_output_state(state) - self._set_state(percentage=percentage, on=on) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" @@ -82,7 +81,6 @@ class QbusLight(QbusEntity, LightEntity): state.write_on_off(on=False) await self._async_publish_output_state(state) - self._set_state(on=False) async def _state_received(self, msg: ReceiveMessage) -> None: output = self._message_factory.parse_output_state( @@ -91,20 +89,9 @@ class QbusLight(QbusEntity, LightEntity): if output is not None: percentage = round(output.read_percentage()) - self._set_state(percentage=percentage) + self._set_state(percentage) self.async_schedule_update_ha_state() - def _set_state( - self, *, percentage: int | None = None, on: bool | None = None - ) -> None: - if percentage is None: - # When turning on without brightness, we don't know the desired - # brightness. It will be set during _state_received(). - if on is True: - self._attr_is_on = True - else: - self._attr_is_on = False - self._attr_brightness = 0 - else: - self._attr_is_on = percentage > 0 - self._attr_brightness = value_to_brightness((1, 100), percentage) + def _set_state(self, percentage: int = 0) -> None: + self._attr_is_on = percentage > 0 + self._attr_brightness = value_to_brightness((1, 100), percentage) diff --git a/homeassistant/components/qbus/strings.json b/homeassistant/components/qbus/strings.json index e6df18c393c..f308c5b3519 100644 --- a/homeassistant/components/qbus/strings.json +++ b/homeassistant/components/qbus/strings.json @@ -15,5 +15,10 @@ "error": { "no_controller": "No controllers were found" } + }, + "exceptions": { + "invalid_preset": { + "message": "Preset mode \"{preset}\" is not valid. Valid preset modes are: {options}." 
+ } } } diff --git a/homeassistant/components/qbus/switch.py b/homeassistant/components/qbus/switch.py index 002ad43e904..e1feccf4450 100644 --- a/homeassistant/components/qbus/switch.py +++ b/homeassistant/components/qbus/switch.py @@ -57,7 +57,6 @@ class QbusSwitch(QbusEntity, SwitchEntity): state.write_value(True) await self._async_publish_output_state(state) - self._attr_is_on = True async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" @@ -65,7 +64,6 @@ class QbusSwitch(QbusEntity, SwitchEntity): state.write_value(False) await self._async_publish_output_state(state) - self._attr_is_on = False async def _state_received(self, msg: ReceiveMessage) -> None: output = self._message_factory.parse_output_state( diff --git a/homeassistant/components/qnap/config_flow.py b/homeassistant/components/qnap/config_flow.py index 75f41a27f69..504883b55e9 100644 --- a/homeassistant/components/qnap/config_flow.py +++ b/homeassistant/components/qnap/config_flow.py @@ -70,8 +70,8 @@ class QnapConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except TypeError: errors["base"] = "invalid_auth" - except Exception as error: # noqa: BLE001 - _LOGGER.error(error) + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "unknown" else: unique_id = stats["system"]["serial_number"] diff --git a/homeassistant/components/rabbitair/config_flow.py b/homeassistant/components/rabbitair/config_flow.py index f4487a73b58..43959e1e42c 100644 --- a/homeassistant/components/rabbitair/config_flow.py +++ b/homeassistant/components/rabbitair/config_flow.py @@ -74,8 +74,8 @@ class RabbitAirConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_host" except TimeoutConnect: errors["base"] = "timeout_connect" - except Exception as err: # noqa: BLE001 - _LOGGER.debug("Unexpected exception: %s", err) + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: user_input[CONF_MAC] = info["mac"] diff --git a/homeassistant/components/rachio/strings.json b/homeassistant/components/rachio/strings.json index 308403d805d..d51a1d5f920 100644 --- a/homeassistant/components/rachio/strings.json +++ b/homeassistant/components/rachio/strings.json @@ -3,7 +3,7 @@ "step": { "user": { "title": "Connect to your Rachio device", - "description": "You will need the API Key from https://app.rach.io/. Go to Settings, then select 'GET API KEY'.", + "description": "You will need the API key from https://app.rach.io/. 
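A side note on the qnap and rabbitair config-flow changes above: replacing _LOGGER.error(error) and _LOGGER.debug(...) with _LOGGER.exception(...) logs the message at ERROR level and appends the full traceback automatically, so the exception binding in the except clause is no longer needed. A small self-contained illustration:

import logging

logging.basicConfig(level=logging.DEBUG)
_LOGGER = logging.getLogger("demo")

try:
    raise ValueError("boom")
except Exception:
    # Logged at ERROR level with the full traceback appended; nothing is lost
    # by dropping the "as error" binding from the except clause.
    _LOGGER.exception("Unexpected error")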
Go to Settings, then select 'GET API KEY'.", "data": { "api_key": "[%key:common::config_flow::data::api_key%]" } @@ -70,7 +70,7 @@ }, "start_watering": { "name": "Start watering", - "description": "Start a single zone, a schedule or any number of smart hose timers.", + "description": "Starts a single zone, a schedule or any number of smart hose timers.", "fields": { "duration": { "name": "Duration", @@ -80,7 +80,7 @@ }, "pause_watering": { "name": "Pause watering", - "description": "Pause any currently running zones or schedules.", + "description": "Pauses any currently running zones or schedules.", "fields": { "devices": { "name": "Devices", @@ -94,7 +94,7 @@ }, "resume_watering": { "name": "Resume watering", - "description": "Resume any paused zone runs or schedules.", + "description": "Resumes any paused zone runs or schedules.", "fields": { "devices": { "name": "[%key:component::rachio::services::pause_watering::fields::devices::name%]", @@ -104,7 +104,7 @@ }, "stop_watering": { "name": "Stop watering", - "description": "Stop any currently running zones or schedules.", + "description": "Stops any currently running zones or schedules.", "fields": { "devices": { "name": "[%key:component::rachio::services::pause_watering::fields::devices::name%]", diff --git a/homeassistant/components/recorder/auto_repairs/schema.py b/homeassistant/components/recorder/auto_repairs/schema.py index 1373f466bc2..cf3addd4f20 100644 --- a/homeassistant/components/recorder/auto_repairs/schema.py +++ b/homeassistant/components/recorder/auto_repairs/schema.py @@ -175,7 +175,7 @@ def _validate_db_schema_precision( # Mark the session as read_only to ensure that the test data is not committed # to the database and we always rollback when the scope is exited with session_scope(session=instance.get_session(), read_only=True) as session: - db_object = table_object(**{column: PRECISE_NUMBER for column in columns}) + db_object = table_object(**dict.fromkeys(columns, PRECISE_NUMBER)) table = table_object.__tablename__ try: session.add(db_object) @@ -184,7 +184,7 @@ def _validate_db_schema_precision( _check_columns( schema_errors=schema_errors, stored={column: getattr(db_object, column) for column in columns}, - expected={column: PRECISE_NUMBER for column in columns}, + expected=dict.fromkeys(columns, PRECISE_NUMBER), columns=columns, table_name=table, supports="double precision", diff --git a/homeassistant/components/recorder/const.py b/homeassistant/components/recorder/const.py index 36ff63a0496..4797eecda0f 100644 --- a/homeassistant/components/recorder/const.py +++ b/homeassistant/components/recorder/const.py @@ -54,6 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36 EVENT_TYPE_IDS_SCHEMA_VERSION = 37 STATES_META_SCHEMA_VERSION = 38 LAST_REPORTED_SCHEMA_VERSION = 43 +CIRCULAR_MEAN_SCHEMA_VERSION = 49 LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28 LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43 diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index eaf72b74cdc..7b8043b9201 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -79,7 +79,13 @@ from .db_schema import ( StatisticsShortTerm, ) from .executor import DBInterruptibleThreadPoolExecutor -from .models import DatabaseEngine, StatisticData, StatisticMetaData, UnsupportedDialect +from .models import ( + DatabaseEngine, + StatisticData, + StatisticMeanType, + StatisticMetaData, + UnsupportedDialect, +) from .pool import POOL_SIZE, MutexPool, RecorderPool 
from .table_managers.event_data import EventDataManager from .table_managers.event_types import EventTypeManager @@ -123,8 +129,6 @@ from .util import ( _LOGGER = logging.getLogger(__name__) -DEFAULT_URL = "sqlite:///{hass_config_path}" - # Controls how often we clean up # States and Events objects EXPIRE_AFTER_COMMITS = 120 @@ -613,6 +617,17 @@ class Recorder(threading.Thread): table: type[Statistics | StatisticsShortTerm], ) -> None: """Schedule import of statistics.""" + if "mean_type" not in metadata: + # Backwards compatibility for old metadata format + # Can be removed after 2026.4 + metadata["mean_type"] = ( # type: ignore[unreachable] + StatisticMeanType.ARITHMETIC + if metadata.get("has_mean") + else StatisticMeanType.NONE + ) + # Remove deprecated has_mean as it's not needed anymore in core + metadata.pop("has_mean", None) + self.queue_task(ImportStatisticsTask(metadata, stats, table)) @callback diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index d1a2405406e..6566cadf64c 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -58,6 +58,7 @@ from .const import ALL_DOMAIN_EXCLUDE_ATTRS, SupportedDialect from .models import ( StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, bytes_to_ulid_or_none, bytes_to_uuid_hex_or_none, @@ -77,7 +78,7 @@ class LegacyBase(DeclarativeBase): """Base class for tables, used for schema migration.""" -SCHEMA_VERSION = 48 +SCHEMA_VERSION = 50 _LOGGER = logging.getLogger(__name__) @@ -203,11 +204,11 @@ UINT_32_TYPE = BigInteger().with_variant( "mariadb", ) JSON_VARIANT_CAST = Text().with_variant( - postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call] + postgresql.JSON(none_as_null=True), "postgresql", ) JSONB_VARIANT_CAST = Text().with_variant( - postgresql.JSONB(none_as_null=True), # type: ignore[no-untyped-call] + postgresql.JSONB(none_as_null=True), "postgresql", ) DATETIME_TYPE = ( @@ -719,6 +720,7 @@ class StatisticsBase: start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + mean_weight: Mapped[float | None] = mapped_column(DOUBLE_TYPE) min: Mapped[float | None] = mapped_column(DOUBLE_TYPE) max: Mapped[float | None] = mapped_column(DOUBLE_TYPE) last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) @@ -740,6 +742,7 @@ class StatisticsBase: start=None, start_ts=stats["start"].timestamp(), mean=stats.get("mean"), + mean_weight=stats.get("mean_weight"), min=stats.get("min"), max=stats.get("max"), last_reset=None, @@ -763,6 +766,7 @@ class StatisticsBase: start=None, start_ts=stats["start_ts"], mean=stats.get("mean"), + mean_weight=stats.get("mean_weight"), min=stats.get("min"), max=stats.get("max"), last_reset=None, @@ -848,6 +852,9 @@ class _StatisticsMeta: has_mean: Mapped[bool | None] = mapped_column(Boolean) has_sum: Mapped[bool | None] = mapped_column(Boolean) name: Mapped[str | None] = mapped_column(String(255)) + mean_type: Mapped[StatisticMeanType] = mapped_column( + SmallInteger, nullable=False, default=StatisticMeanType.NONE.value + ) # See StatisticMeanType @staticmethod def from_meta(meta: StatisticMetaData) -> StatisticsMeta: diff --git a/homeassistant/components/recorder/manifest.json b/homeassistant/components/recorder/manifest.json index 40513c8ea24..f5336e2a85b 100644 --- 
a/homeassistant/components/recorder/manifest.json +++ b/homeassistant/components/recorder/manifest.json @@ -7,8 +7,8 @@ "iot_class": "local_push", "quality_scale": "internal", "requirements": [ - "SQLAlchemy==2.0.38", - "fnv-hash-fast==1.2.6", + "SQLAlchemy==2.0.39", + "fnv-hash-fast==1.4.0", "psutil-home-assistant==0.0.1" ] } diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 3aa12f2b1f9..58af15c2aa7 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -9,7 +9,7 @@ from dataclasses import dataclass, replace as dataclass_replace from datetime import timedelta import logging from time import time -from typing import TYPE_CHECKING, Any, cast, final +from typing import TYPE_CHECKING, Any, TypedDict, cast, final from uuid import UUID import sqlalchemy @@ -81,7 +81,7 @@ from .db_schema import ( StatisticsRuns, StatisticsShortTerm, ) -from .models import process_timestamp +from .models import StatisticMeanType, process_timestamp from .models.time import datetime_to_timestamp_or_none from .queries import ( batch_cleanup_entity_ids, @@ -144,24 +144,32 @@ class _ColumnTypesForDialect: big_int_type: str timestamp_type: str context_bin_type: str + small_int_type: str + double_type: str _MYSQL_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER(20)", timestamp_type=DOUBLE_PRECISION_TYPE_SQL, context_bin_type=f"BLOB({CONTEXT_ID_BIN_MAX_LENGTH})", + small_int_type="SMALLINT", + double_type=DOUBLE_PRECISION_TYPE_SQL, ) _POSTGRESQL_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER", timestamp_type=DOUBLE_PRECISION_TYPE_SQL, context_bin_type="BYTEA", + small_int_type="SMALLINT", + double_type=DOUBLE_PRECISION_TYPE_SQL, ) _SQLITE_COLUMN_TYPES = _ColumnTypesForDialect( big_int_type="INTEGER", timestamp_type="FLOAT", context_bin_type="BLOB", + small_int_type="INTEGER", + double_type="FLOAT", ) _COLUMN_TYPES_FOR_DIALECT: dict[SupportedDialect | None, _ColumnTypesForDialect] = { @@ -712,6 +720,11 @@ def _modify_columns( raise +class _FKAlterDict(TypedDict): + old_fk: ForeignKeyConstraint + columns: list[str] + + def _update_states_table_with_foreign_key_options( session_maker: Callable[[], Session], engine: Engine ) -> None: @@ -729,7 +742,7 @@ def _update_states_table_with_foreign_key_options( inspector = sqlalchemy.inspect(engine) tmp_states_table = Table(TABLE_STATES, MetaData()) - alters = [ + alters: list[_FKAlterDict] = [ { "old_fk": ForeignKeyConstraint( (), (), name=foreign_key["name"], table=tmp_states_table @@ -755,14 +768,14 @@ def _update_states_table_with_foreign_key_options( with session_scope(session=session_maker()) as session: try: connection = session.connection() - connection.execute(DropConstraint(alter["old_fk"])) # type: ignore[no-untyped-call] + connection.execute(DropConstraint(alter["old_fk"])) for fkc in states_key_constraints: if fkc.column_keys == alter["columns"]: # AddConstraint mutates the constraint passed to it, we need to # undo that to avoid changing the behavior of the table schema. 
# https://github.com/sqlalchemy/sqlalchemy/blob/96f1172812f858fead45cdc7874abac76f45b339/lib/sqlalchemy/sql/ddl.py#L746-L748 create_rule = fkc._create_rule # noqa: SLF001 - add_constraint = AddConstraint(fkc) # type: ignore[no-untyped-call] + add_constraint = AddConstraint(fkc) fkc._create_rule = create_rule # noqa: SLF001 connection.execute(add_constraint) except (InternalError, OperationalError): @@ -800,7 +813,7 @@ def _drop_foreign_key_constraints( with session_scope(session=session_maker()) as session: try: connection = session.connection() - connection.execute(DropConstraint(drop)) # type: ignore[no-untyped-call] + connection.execute(DropConstraint(drop)) except (InternalError, OperationalError): _LOGGER.exception( "Could not drop foreign constraints in %s table on %s", @@ -845,7 +858,7 @@ def _restore_foreign_key_constraints( # undo that to avoid changing the behavior of the table schema. # https://github.com/sqlalchemy/sqlalchemy/blob/96f1172812f858fead45cdc7874abac76f45b339/lib/sqlalchemy/sql/ddl.py#L746-L748 create_rule = constraint._create_rule # noqa: SLF001 - add_constraint = AddConstraint(constraint) # type: ignore[no-untyped-call] + add_constraint = AddConstraint(constraint) constraint._create_rule = create_rule # noqa: SLF001 try: _add_constraint(session_maker, add_constraint, table, column) @@ -1988,6 +2001,42 @@ class _SchemaVersion48Migrator(_SchemaVersionMigrator, target_version=48): _migrate_columns_to_timestamp(self.instance, self.session_maker, self.engine) +class _SchemaVersion49Migrator(_SchemaVersionMigrator, target_version=49): + def _apply_update(self) -> None: + """Version specific update method.""" + _add_columns( + self.session_maker, + "statistics_meta", + [ + f"mean_type {self.column_types.small_int_type} NOT NULL DEFAULT {StatisticMeanType.NONE.value}" + ], + ) + + for table in ("statistics", "statistics_short_term"): + _add_columns( + self.session_maker, + table, + [f"mean_weight {self.column_types.double_type}"], + ) + + with session_scope(session=self.session_maker()) as session: + connection = session.connection() + connection.execute( + text( + "UPDATE statistics_meta SET mean_type=:mean_type WHERE has_mean=true" + ), + {"mean_type": StatisticMeanType.ARITHMETIC.value}, + ) + + +class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50): + def _apply_update(self) -> None: + """Version specific update method.""" + with session_scope(session=self.session_maker()) as session: + connection = session.connection() + connection.execute(text("UPDATE statistics_meta SET has_mean=NULL")) + + def _migrate_statistics_columns_to_timestamp_removing_duplicates( hass: HomeAssistant, instance: Recorder, diff --git a/homeassistant/components/recorder/models/__init__.py b/homeassistant/components/recorder/models/__init__.py index ea7a6c86854..8f76982a900 100644 --- a/homeassistant/components/recorder/models/__init__.py +++ b/homeassistant/components/recorder/models/__init__.py @@ -17,6 +17,7 @@ from .statistics import ( RollingWindowStatisticPeriod, StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, StatisticPeriod, StatisticResult, @@ -37,6 +38,7 @@ __all__ = [ "RollingWindowStatisticPeriod", "StatisticData", "StatisticDataTimestamp", + "StatisticMeanType", "StatisticMetaData", "StatisticPeriod", "StatisticResult", diff --git a/homeassistant/components/recorder/models/statistics.py b/homeassistant/components/recorder/models/statistics.py index ad4d82067c4..08da12d6b17 100644 --- 
a/homeassistant/components/recorder/models/statistics.py +++ b/homeassistant/components/recorder/models/statistics.py @@ -3,7 +3,8 @@ from __future__ import annotations from datetime import datetime, timedelta -from typing import Literal, TypedDict +from enum import IntEnum +from typing import Literal, NotRequired, TypedDict class StatisticResult(TypedDict): @@ -36,6 +37,7 @@ class StatisticMixIn(TypedDict, total=False): min: float max: float mean: float + mean_weight: float class StatisticData(StatisticDataBase, StatisticMixIn, total=False): @@ -50,10 +52,20 @@ class StatisticDataTimestamp(StatisticDataTimestampBase, StatisticMixIn, total=F last_reset_ts: float | None +class StatisticMeanType(IntEnum): + """Statistic mean type.""" + + NONE = 0 + ARITHMETIC = 1 + CIRCULAR = 2 + + class StatisticMetaData(TypedDict): """Statistic meta data class.""" - has_mean: bool + # has_mean is deprecated, use mean_type instead. has_mean will be removed in 2026.4 + has_mean: NotRequired[bool] + mean_type: StatisticMeanType has_sum: bool name: str | None source: str diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 97fe73c54fe..2507a66899e 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -9,12 +9,23 @@ from datetime import datetime, timedelta from functools import lru_cache, partial from itertools import chain, groupby import logging +import math from operator import itemgetter import re from time import time as time_time -from typing import TYPE_CHECKING, Any, Literal, TypedDict, cast +from typing import TYPE_CHECKING, Any, Literal, Required, TypedDict, cast -from sqlalchemy import Select, and_, bindparam, func, lambda_stmt, select, text +from sqlalchemy import ( + Label, + Select, + and_, + bindparam, + case, + func, + lambda_stmt, + select, + text, +) from sqlalchemy.engine.row import Row from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm.session import Session @@ -29,6 +40,7 @@ from homeassistant.helpers.singleton import singleton from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.util import dt as dt_util from homeassistant.util.collection import chunked_or_all +from homeassistant.util.enum import try_parse_enum from homeassistant.util.unit_conversion import ( AreaConverter, BaseUnitConverter, @@ -74,6 +86,7 @@ from .db_schema import ( from .models import ( StatisticData, StatisticDataTimestamp, + StatisticMeanType, StatisticMetaData, StatisticResult, datetime_to_timestamp_or_none, @@ -113,11 +126,54 @@ QUERY_STATISTICS_SHORT_TERM = ( StatisticsShortTerm.sum, ) + +def query_circular_mean(table: type[StatisticsBase]) -> tuple[Label, Label]: + """Return the sqlalchemy function for circular mean and the mean_weight. + + The result must be taken modulo 360 to normalize it to the range [0, 360). + """ + # Postgres doesn't support modulo for double precision and + # the other dbs return the remainder instead of the modulo, + # meaning negative values are possible. For these reasons + # we need to normalize the result to be in the range [0, 360) + # in Python.
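A quick illustration of why that normalization happens in Python rather than in SQL: as the comment above explains, the databases' remainder-style operators can hand back negative angles, while Python's % always lands in [0, 360) for a positive modulus (values here are made up):

import math

angle = math.degrees(math.atan2(-0.5, math.sqrt(3) / 2))  # about -30 degrees
print(angle % 360)            # about 330.0 -- Python's modulo folds it into [0, 360)
print(math.fmod(angle, 360))  # about -30.0 -- a remainder-style operation stays negative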
+ # https://en.wikipedia.org/wiki/Circular_mean + radians = func.radians(table.mean) + weight = func.sqrt( + func.power(func.sum(func.sin(radians) * table.mean_weight), 2) + + func.power(func.sum(func.cos(radians) * table.mean_weight), 2) + ) + return ( + func.degrees( + func.atan2(func.sum(func.sin(radians)), func.sum(func.cos(radians))) + ).label("mean"), + weight.label("mean_weight"), + ) + + QUERY_STATISTICS_SUMMARY_MEAN = ( StatisticsShortTerm.metadata_id, - func.avg(StatisticsShortTerm.mean), func.min(StatisticsShortTerm.min), func.max(StatisticsShortTerm.max), + case( + ( + StatisticsMeta.mean_type == StatisticMeanType.ARITHMETIC, + func.avg(StatisticsShortTerm.mean), + ), + ( + StatisticsMeta.mean_type == StatisticMeanType.CIRCULAR, + query_circular_mean(StatisticsShortTerm)[0], + ), + else_=None, + ), + case( + ( + StatisticsMeta.mean_type == StatisticMeanType.CIRCULAR, + query_circular_mean(StatisticsShortTerm)[1], + ), + else_=None, + ), + StatisticsMeta.mean_type, ) QUERY_STATISTICS_SUMMARY_SUM = ( @@ -136,31 +192,28 @@ QUERY_STATISTICS_SUMMARY_SUM = ( STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = { - **{unit: AreaConverter for unit in AreaConverter.VALID_UNITS}, - **{ - unit: BloodGlucoseConcentrationConverter - for unit in BloodGlucoseConcentrationConverter.VALID_UNITS - }, - **{unit: ConductivityConverter for unit in ConductivityConverter.VALID_UNITS}, - **{unit: DataRateConverter for unit in DataRateConverter.VALID_UNITS}, - **{unit: DistanceConverter for unit in DistanceConverter.VALID_UNITS}, - **{unit: DurationConverter for unit in DurationConverter.VALID_UNITS}, - **{unit: ElectricCurrentConverter for unit in ElectricCurrentConverter.VALID_UNITS}, - **{ - unit: ElectricPotentialConverter - for unit in ElectricPotentialConverter.VALID_UNITS - }, - **{unit: EnergyConverter for unit in EnergyConverter.VALID_UNITS}, - **{unit: EnergyDistanceConverter for unit in EnergyDistanceConverter.VALID_UNITS}, - **{unit: InformationConverter for unit in InformationConverter.VALID_UNITS}, - **{unit: MassConverter for unit in MassConverter.VALID_UNITS}, - **{unit: PowerConverter for unit in PowerConverter.VALID_UNITS}, - **{unit: PressureConverter for unit in PressureConverter.VALID_UNITS}, - **{unit: SpeedConverter for unit in SpeedConverter.VALID_UNITS}, - **{unit: TemperatureConverter for unit in TemperatureConverter.VALID_UNITS}, - **{unit: UnitlessRatioConverter for unit in UnitlessRatioConverter.VALID_UNITS}, - **{unit: VolumeConverter for unit in VolumeConverter.VALID_UNITS}, - **{unit: VolumeFlowRateConverter for unit in VolumeFlowRateConverter.VALID_UNITS}, + **dict.fromkeys(AreaConverter.VALID_UNITS, AreaConverter), + **dict.fromkeys( + BloodGlucoseConcentrationConverter.VALID_UNITS, + BloodGlucoseConcentrationConverter, + ), + **dict.fromkeys(ConductivityConverter.VALID_UNITS, ConductivityConverter), + **dict.fromkeys(DataRateConverter.VALID_UNITS, DataRateConverter), + **dict.fromkeys(DistanceConverter.VALID_UNITS, DistanceConverter), + **dict.fromkeys(DurationConverter.VALID_UNITS, DurationConverter), + **dict.fromkeys(ElectricCurrentConverter.VALID_UNITS, ElectricCurrentConverter), + **dict.fromkeys(ElectricPotentialConverter.VALID_UNITS, ElectricPotentialConverter), + **dict.fromkeys(EnergyConverter.VALID_UNITS, EnergyConverter), + **dict.fromkeys(EnergyDistanceConverter.VALID_UNITS, EnergyDistanceConverter), + **dict.fromkeys(InformationConverter.VALID_UNITS, InformationConverter), + **dict.fromkeys(MassConverter.VALID_UNITS, 
MassConverter), + **dict.fromkeys(PowerConverter.VALID_UNITS, PowerConverter), + **dict.fromkeys(PressureConverter.VALID_UNITS, PressureConverter), + **dict.fromkeys(SpeedConverter.VALID_UNITS, SpeedConverter), + **dict.fromkeys(TemperatureConverter.VALID_UNITS, TemperatureConverter), + **dict.fromkeys(UnitlessRatioConverter.VALID_UNITS, UnitlessRatioConverter), + **dict.fromkeys(VolumeConverter.VALID_UNITS, VolumeConverter), + **dict.fromkeys(VolumeFlowRateConverter.VALID_UNITS, VolumeFlowRateConverter), } @@ -183,6 +236,24 @@ def mean(values: list[float]) -> float | None: return sum(values) / len(values) +DEG_TO_RAD = math.pi / 180 +RAD_TO_DEG = 180 / math.pi + + +def weighted_circular_mean(values: Iterable[tuple[float, float]]) -> float: + """Return the weighted circular mean of the values.""" + sin_sum = sum(math.sin(x * DEG_TO_RAD) * weight for x, weight in values) + cos_sum = sum(math.cos(x * DEG_TO_RAD) * weight for x, weight in values) + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + +def circular_mean(values: list[float]) -> float: + """Return the circular mean of the values.""" + sin_sum = sum(math.sin(x * DEG_TO_RAD) for x in values) + cos_sum = sum(math.cos(x * DEG_TO_RAD) for x in values) + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + _LOGGER = logging.getLogger(__name__) @@ -375,11 +446,19 @@ def _compile_hourly_statistics_summary_mean_stmt( start_time_ts: float, end_time_ts: float ) -> StatementLambdaElement: """Generate the summary mean statement for hourly statistics.""" + # Due to the fact that we support different mean types (see StatisticMeanType) + # we need to join here with the StatisticsMeta table to get the mean type + # and then use a case statement to compute the mean based on the mean type. + # As we use the StatisticsMeta.mean_type in the select case statement, we need + # to group by it as well.
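+ # Roughly, the statement below compiles to: SELECT metadata_id, MIN(min), MAX(max), + # CASE WHEN mean_type = 1 THEN AVG(mean) WHEN mean_type = 2 THEN <circular mean> END, + # CASE WHEN mean_type = 2 THEN <mean weight> END, mean_type FROM statistics_short_term + # JOIN statistics_meta ON statistics_short_term.metadata_id = statistics_meta.id + # WHERE start_ts >= :start AND start_ts < :end GROUP BY metadata_id, mean_type + # ORDER BY metadata_id (where mean_type 1 = ARITHMETIC and 2 = CIRCULAR).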
return lambda_stmt( lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN) .filter(StatisticsShortTerm.start_ts >= start_time_ts) .filter(StatisticsShortTerm.start_ts < end_time_ts) - .group_by(StatisticsShortTerm.metadata_id) + .join( + StatisticsMeta, and_(StatisticsShortTerm.metadata_id == StatisticsMeta.id) + ) + .group_by(StatisticsShortTerm.metadata_id, StatisticsMeta.mean_type) .order_by(StatisticsShortTerm.metadata_id) ) @@ -421,10 +500,17 @@ def _compile_hourly_statistics(session: Session, start: datetime) -> None: if stats: for stat in stats: - metadata_id, _mean, _min, _max = stat + metadata_id, _min, _max, _mean, _mean_weight, _mean_type = stat + if ( + try_parse_enum(StatisticMeanType, _mean_type) + is StatisticMeanType.CIRCULAR + ): + # Normalize the circular mean to be in the range [0, 360) + _mean = _mean % 360 summary[metadata_id] = { "start_ts": start_time_ts, "mean": _mean, + "mean_weight": _mean_weight, "min": _min, "max": _max, } @@ -830,7 +916,7 @@ def _statistic_by_id_from_metadata( "display_unit_of_measurement": get_display_unit( hass, meta["statistic_id"], meta["unit_of_measurement"] ), - "has_mean": meta["has_mean"], + "mean_type": meta["mean_type"], "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], @@ -849,7 +935,9 @@ def _flatten_list_statistic_ids_metadata_result( { "statistic_id": _id, "display_unit_of_measurement": info["display_unit_of_measurement"], - "has_mean": info["has_mean"], + "has_mean": info["mean_type"] + == StatisticMeanType.ARITHMETIC, # Can be removed with 2026.4 + "mean_type": info["mean_type"], "has_sum": info["has_sum"], "name": info.get("name"), "source": info["source"], @@ -904,7 +992,7 @@ def list_statistic_ids( continue result[key] = { "display_unit_of_measurement": meta["unit_of_measurement"], - "has_mean": meta["has_mean"], + "mean_type": meta["mean_type"], "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], @@ -922,6 +1010,7 @@ def _reduce_statistics( period_start_end: Callable[[float], tuple[float, float]], period: timedelta, types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to daily or monthly statistics.""" result: dict[str, list[StatisticsRow]] = defaultdict(list) @@ -949,7 +1038,13 @@ def _reduce_statistics( "end": end, } if _want_mean: - row["mean"] = mean(mean_values) if mean_values else None + row["mean"] = None + if mean_values: + match metadata[statistic_id][1]["mean_type"]: + case StatisticMeanType.ARITHMETIC: + row["mean"] = mean(mean_values) + case StatisticMeanType.CIRCULAR: + row["mean"] = circular_mean(mean_values) mean_values.clear() if _want_min: row["min"] = min(min_values) if min_values else None @@ -966,8 +1061,9 @@ def _reduce_statistics( result[statistic_id].append(row) if _want_max and (_max := statistic.get("max")) is not None: max_values.append(_max) - if _want_mean and (_mean := statistic.get("mean")) is not None: - mean_values.append(_mean) + if _want_mean: + if (_mean := statistic.get("mean")) is not None: + mean_values.append(_mean) if _want_min and (_min := statistic.get("min")) is not None: min_values.append(_min) prev_stat = statistic @@ -1014,11 +1110,12 @@ def reduce_day_ts_factory() -> tuple[ def _reduce_statistics_per_day( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, 
list[StatisticsRow]]: """Reduce hourly statistics to daily statistics.""" _same_day_ts, _day_start_end_ts = reduce_day_ts_factory() return _reduce_statistics( - stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types + stats, _same_day_ts, _day_start_end_ts, timedelta(days=1), types, metadata ) @@ -1062,11 +1159,12 @@ def reduce_week_ts_factory() -> tuple[ def _reduce_statistics_per_week( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to weekly statistics.""" _same_week_ts, _week_start_end_ts = reduce_week_ts_factory() return _reduce_statistics( - stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types + stats, _same_week_ts, _week_start_end_ts, timedelta(days=7), types, metadata ) @@ -1115,11 +1213,12 @@ def reduce_month_ts_factory() -> tuple[ def _reduce_statistics_per_month( stats: dict[str, list[StatisticsRow]], types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]], + metadata: dict[str, tuple[int, StatisticMetaData]], ) -> dict[str, list[StatisticsRow]]: """Reduce hourly statistics to monthly statistics.""" _same_month_ts, _month_start_end_ts = reduce_month_ts_factory() return _reduce_statistics( - stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types + stats, _same_month_ts, _month_start_end_ts, timedelta(days=31), types, metadata ) @@ -1163,27 +1262,41 @@ def _generate_max_mean_min_statistic_in_sub_period_stmt( return stmt +class _MaxMinMeanStatisticSubPeriod(TypedDict, total=False): + max: float + mean_acc: float + min: float + duration: float + circular_means: Required[list[tuple[float, float]]] + + def _get_max_mean_min_statistic_in_sub_period( session: Session, - result: dict[str, float], + result: _MaxMinMeanStatisticSubPeriod, start_time: datetime | None, end_time: datetime | None, table: type[StatisticsBase], types: set[Literal["max", "mean", "min", "change"]], - metadata_id: int, + metadata: tuple[int, StatisticMetaData], ) -> None: """Return max, mean and min during the period.""" # Calculate max, mean, min + mean_type = metadata[1]["mean_type"] columns = select() if "max" in types: columns = columns.add_columns(func.max(table.max)) if "mean" in types: - columns = columns.add_columns(func.avg(table.mean)) - columns = columns.add_columns(func.count(table.mean)) + match mean_type: + case StatisticMeanType.ARITHMETIC: + columns = columns.add_columns(func.avg(table.mean)) + columns = columns.add_columns(func.count(table.mean)) + case StatisticMeanType.CIRCULAR: + columns = columns.add_columns(*query_circular_mean(table)) if "min" in types: columns = columns.add_columns(func.min(table.min)) + stmt = _generate_max_mean_min_statistic_in_sub_period_stmt( - columns, start_time, end_time, table, metadata_id + columns, start_time, end_time, table, metadata[0] ) stats = cast(Sequence[Row[Any]], execute_stmt_lambda_element(session, stmt)) if not stats: @@ -1191,11 +1304,21 @@ def _get_max_mean_min_statistic_in_sub_period( if "max" in types and (new_max := stats[0].max) is not None: old_max = result.get("max") result["max"] = max(new_max, old_max) if old_max is not None else new_max - if "mean" in types and stats[0].avg is not None: + if "mean" in types: # https://github.com/sqlalchemy/sqlalchemy/issues/9127 - duration = stats[0].count * table.duration.total_seconds() # type: ignore[operator] - result["duration"] = result.get("duration", 0.0) + duration 
- result["mean_acc"] = result.get("mean_acc", 0.0) + stats[0].avg * duration + match mean_type: + case StatisticMeanType.ARITHMETIC: + duration = stats[0].count * table.duration.total_seconds() # type: ignore[operator] + if stats[0].avg is not None: + result["duration"] = result.get("duration", 0.0) + duration + result["mean_acc"] = ( + result.get("mean_acc", 0.0) + stats[0].avg * duration + ) + case StatisticMeanType.CIRCULAR: + if (new_circular_mean := stats[0].mean) is not None and ( + weight := stats[0].mean_weight + ) is not None: + result["circular_means"].append((new_circular_mean, weight)) if "min" in types and (new_min := stats[0].min) is not None: old_min = result.get("min") result["min"] = min(new_min, old_min) if old_min is not None else new_min @@ -1210,15 +1333,15 @@ def _get_max_mean_min_statistic( tail_start_time: datetime | None, tail_end_time: datetime | None, tail_only: bool, - metadata_id: int, + metadata: tuple[int, StatisticMetaData], types: set[Literal["max", "mean", "min", "change"]], ) -> dict[str, float | None]: """Return max, mean and min during the period. - The mean is a time weighted average, combining hourly and 5-minute statistics if + The mean is time weighted, combining hourly and 5-minute statistics if necessary. """ - max_mean_min: dict[str, float] = {} + max_mean_min = _MaxMinMeanStatisticSubPeriod(circular_means=[]) result: dict[str, float | None] = {} if tail_start_time is not None: @@ -1230,7 +1353,7 @@ def _get_max_mean_min_statistic( tail_end_time, StatisticsShortTerm, types, - metadata_id, + metadata, ) if not tail_only: @@ -1241,7 +1364,7 @@ def _get_max_mean_min_statistic( main_end_time, Statistics, types, - metadata_id, + metadata, ) if head_start_time is not None: @@ -1252,16 +1375,23 @@ def _get_max_mean_min_statistic( head_end_time, StatisticsShortTerm, types, - metadata_id, + metadata, ) if "max" in types: result["max"] = max_mean_min.get("max") if "mean" in types: - if "mean_acc" not in max_mean_min: - result["mean"] = None - else: - result["mean"] = max_mean_min["mean_acc"] / max_mean_min["duration"] + mean_value = None + match metadata[1]["mean_type"]: + case StatisticMeanType.CIRCULAR: + if circular_means := max_mean_min["circular_means"]: + mean_value = weighted_circular_mean(circular_means) + case StatisticMeanType.ARITHMETIC: + if (mean_value := max_mean_min.get("mean_acc")) is not None and ( + duration := max_mean_min.get("duration") + ) is not None: + mean_value = mean_value / duration + result["mean"] = mean_value if "min" in types: result["min"] = max_mean_min.get("min") return result @@ -1562,7 +1692,7 @@ def statistic_during_period( tail_start_time, tail_end_time, tail_only, - metadata_id, + metadata, types, ) @@ -1645,7 +1775,7 @@ def _extract_metadata_and_discard_impossible_columns( has_sum = False for metadata_id, stats_metadata in metadata.values(): metadata_ids.append(metadata_id) - has_mean |= stats_metadata["has_mean"] + has_mean |= stats_metadata["mean_type"] is not StatisticMeanType.NONE has_sum |= stats_metadata["has_sum"] if not has_mean: types.discard("mean") @@ -1801,13 +1931,13 @@ def _statistics_during_period_with_session( ) if period == "day": - result = _reduce_statistics_per_day(result, types) + result = _reduce_statistics_per_day(result, types, metadata) if period == "week": - result = _reduce_statistics_per_week(result, types) + result = _reduce_statistics_per_week(result, types, metadata) if period == "month": - result = _reduce_statistics_per_month(result, types) + result = 
_reduce_statistics_per_month(result, types, metadata) if "change" in _types: _augment_result_with_change( diff --git a/homeassistant/components/recorder/strings.json b/homeassistant/components/recorder/strings.json index 43c2ecdc14f..0c8d47548bf 100644 --- a/homeassistant/components/recorder/strings.json +++ b/homeassistant/components/recorder/strings.json @@ -43,15 +43,15 @@ "fields": { "entity_id": { "name": "Entities to remove", - "description": "List of entities for which the data is to be removed from the recorder database." + "description": "List of entities for which the data is to be removed from the Recorder database." }, "domains": { "name": "Domains to remove", - "description": "List of domains for which the data needs to be removed from the recorder database." + "description": "List of domains for which the data needs to be removed from the Recorder database." }, "entity_globs": { "name": "Entity globs to remove", - "description": "List of glob patterns used to select the entities for which the data is to be removed from the recorder database." + "description": "List of glob patterns used to select the entities for which the data is to be removed from the Recorder database." }, "keep_days": { "name": "[%key:component::recorder::services::purge::fields::keep_days::name%]", diff --git a/homeassistant/components/recorder/table_managers/statistics_meta.py b/homeassistant/components/recorder/table_managers/statistics_meta.py index 77fc34518db..634e9565c12 100644 --- a/homeassistant/components/recorder/table_managers/statistics_meta.py +++ b/homeassistant/components/recorder/table_managers/statistics_meta.py @@ -4,16 +4,18 @@ from __future__ import annotations import logging import threading -from typing import TYPE_CHECKING, Final, Literal +from typing import TYPE_CHECKING, Any, Final, Literal from lru import LRU from sqlalchemy import lambda_stmt, select +from sqlalchemy.orm import InstrumentedAttribute from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import true from sqlalchemy.sql.lambdas import StatementLambdaElement +from ..const import CIRCULAR_MEAN_SCHEMA_VERSION from ..db_schema import StatisticsMeta -from ..models import StatisticMetaData +from ..models import StatisticMeanType, StatisticMetaData from ..util import execute_stmt_lambda_element if TYPE_CHECKING: @@ -28,7 +30,6 @@ QUERY_STATISTIC_META = ( StatisticsMeta.statistic_id, StatisticsMeta.source, StatisticsMeta.unit_of_measurement, - StatisticsMeta.has_mean, StatisticsMeta.has_sum, StatisticsMeta.name, ) @@ -37,24 +38,38 @@ INDEX_ID: Final = 0 INDEX_STATISTIC_ID: Final = 1 INDEX_SOURCE: Final = 2 INDEX_UNIT_OF_MEASUREMENT: Final = 3 -INDEX_HAS_MEAN: Final = 4 -INDEX_HAS_SUM: Final = 5 -INDEX_NAME: Final = 6 +INDEX_HAS_SUM: Final = 4 +INDEX_NAME: Final = 5 +INDEX_MEAN_TYPE: Final = 6 def _generate_get_metadata_stmt( statistic_ids: set[str] | None = None, statistic_type: Literal["mean", "sum"] | None = None, statistic_source: str | None = None, + schema_version: int = 0, ) -> StatementLambdaElement: - """Generate a statement to fetch metadata.""" - stmt = lambda_stmt(lambda: select(*QUERY_STATISTIC_META)) + """Generate a statement to fetch metadata with the passed filters. + + Depending on the schema version, either mean_type (added in version 49) or has_mean column is used. 
+ """ + columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTIC_META) + if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + columns.append(StatisticsMeta.mean_type) + else: + columns.append(StatisticsMeta.has_mean) + stmt = lambda_stmt(lambda: select(*columns)) if statistic_ids: stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids)) if statistic_source is not None: stmt += lambda q: q.where(StatisticsMeta.source == statistic_source) if statistic_type == "mean": - stmt += lambda q: q.where(StatisticsMeta.has_mean == true()) + if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + stmt += lambda q: q.where( + StatisticsMeta.mean_type != StatisticMeanType.NONE + ) + else: + stmt += lambda q: q.where(StatisticsMeta.has_mean == true()) elif statistic_type == "sum": stmt += lambda q: q.where(StatisticsMeta.has_sum == true()) return stmt @@ -100,14 +115,34 @@ class StatisticsMetaManager: for row in execute_stmt_lambda_element( session, _generate_get_metadata_stmt( - statistic_ids, statistic_type, statistic_source + statistic_ids, + statistic_type, + statistic_source, + self.recorder.schema_version, ), orm_rows=False, ): statistic_id = row[INDEX_STATISTIC_ID] row_id = row[INDEX_ID] + if self.recorder.schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION: + try: + mean_type = StatisticMeanType(row[INDEX_MEAN_TYPE]) + except ValueError: + _LOGGER.warning( + "Invalid mean type found for statistic_id: %s, mean_type: %s. Skipping", + statistic_id, + row[INDEX_MEAN_TYPE], + ) + continue + else: + mean_type = ( + StatisticMeanType.ARITHMETIC + if row[INDEX_MEAN_TYPE] + else StatisticMeanType.NONE + ) meta = { - "has_mean": row[INDEX_HAS_MEAN], + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": row[INDEX_HAS_SUM], "name": row[INDEX_NAME], "source": row[INDEX_SOURCE], @@ -157,9 +192,18 @@ class StatisticsMetaManager: This call is not thread-safe and must be called from the recorder thread. """ + if "mean_type" not in new_metadata: + # To maintain backward compatibility after adding 'mean_type' in schema version 49, + # we must still check for its presence. Even though type hints suggest it should always exist, + # custom integrations might omit it, so we need to guard against that. 
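+ # For example, legacy metadata such as {"has_mean": True, "has_sum": False, ...} + # is upgraded to StatisticMeanType.ARITHMETIC below, while {"has_mean": False, ...} + # maps to StatisticMeanType.NONE.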
+ new_metadata["mean_type"] = ( # type: ignore[unreachable] + StatisticMeanType.ARITHMETIC + if new_metadata["has_mean"] + else StatisticMeanType.NONE + ) metadata_id, old_metadata = old_metadata_dict[statistic_id] if not ( - old_metadata["has_mean"] != new_metadata["has_mean"] + old_metadata["mean_type"] != new_metadata["mean_type"] or old_metadata["has_sum"] != new_metadata["has_sum"] or old_metadata["name"] != new_metadata["name"] or old_metadata["unit_of_measurement"] @@ -170,7 +214,7 @@ class StatisticsMetaManager: self._assert_in_recorder_thread() session.query(StatisticsMeta).filter_by(statistic_id=statistic_id).update( { - StatisticsMeta.has_mean: new_metadata["has_mean"], + StatisticsMeta.mean_type: new_metadata["mean_type"], StatisticsMeta.has_sum: new_metadata["has_sum"], StatisticsMeta.name: new_metadata["name"], StatisticsMeta.unit_of_measurement: new_metadata["unit_of_measurement"], diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index d23ecab3dac..f4058943971 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -37,7 +37,7 @@ from homeassistant.util.unit_conversion import ( VolumeFlowRateConverter, ) -from .models import StatisticPeriod +from .models import StatisticMeanType, StatisticPeriod from .statistics import ( STATISTIC_UNIT_TO_UNIT_CONVERTER, async_add_external_statistics, @@ -532,6 +532,10 @@ def ws_import_statistics( ) -> None: """Import statistics.""" metadata = msg["metadata"] + # The WS command will be changed in a follow up PR + metadata["mean_type"] = ( + StatisticMeanType.ARITHMETIC if metadata["has_mean"] else StatisticMeanType.NONE + ) stats = msg["stats"] if valid_entity_id(metadata["statistic_id"]): diff --git a/homeassistant/components/remote_calendar/__init__.py b/homeassistant/components/remote_calendar/__init__.py new file mode 100644 index 00000000000..910eeae8268 --- /dev/null +++ b/homeassistant/components/remote_calendar/__init__.py @@ -0,0 +1,33 @@ +"""The Remote Calendar integration.""" + +import logging + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import RemoteCalendarConfigEntry, RemoteCalendarDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +PLATFORMS: list[Platform] = [Platform.CALENDAR] + + +async def async_setup_entry( + hass: HomeAssistant, entry: RemoteCalendarConfigEntry +) -> bool: + """Set up Remote Calendar from a config entry.""" + hass.data.setdefault(DOMAIN, {}) + coordinator = RemoteCalendarDataUpdateCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: RemoteCalendarConfigEntry +) -> bool: + """Handle unload of an entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/remote_calendar/calendar.py b/homeassistant/components/remote_calendar/calendar.py new file mode 100644 index 00000000000..bd83a5f18cc --- /dev/null +++ b/homeassistant/components/remote_calendar/calendar.py @@ -0,0 +1,92 @@ +"""Calendar platform for a Remote Calendar.""" + +from datetime import datetime +import logging + +from ical.event import Event + +from homeassistant.components.calendar import CalendarEntity, CalendarEvent +from 
homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util import dt as dt_util + +from . import RemoteCalendarConfigEntry +from .const import CONF_CALENDAR_NAME +from .coordinator import RemoteCalendarDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: RemoteCalendarConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the remote calendar platform.""" + coordinator = entry.runtime_data + entity = RemoteCalendarEntity(coordinator, entry) + async_add_entities([entity]) + + +class RemoteCalendarEntity( + CoordinatorEntity[RemoteCalendarDataUpdateCoordinator], CalendarEntity +): + """A calendar entity backed by a remote iCalendar url.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: RemoteCalendarDataUpdateCoordinator, + entry: RemoteCalendarConfigEntry, + ) -> None: + """Initialize RemoteCalendarEntity.""" + super().__init__(coordinator) + self._attr_name = entry.data[CONF_CALENDAR_NAME] + self._attr_unique_id = entry.entry_id + + @property + def event(self) -> CalendarEvent | None: + """Return the next upcoming event.""" + now = dt_util.now() + events = self.coordinator.data.timeline_tz(now.tzinfo).active_after(now) + if event := next(events, None): + return _get_calendar_event(event) + return None + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Get all events in a specific time frame.""" + events = self.coordinator.data.timeline_tz(start_date.tzinfo).overlapping( + start_date, + end_date, + ) + return [_get_calendar_event(event) for event in events] + + +def _get_calendar_event(event: Event) -> CalendarEvent: + """Return a CalendarEvent from an API event.""" + + return CalendarEvent( + summary=event.summary, + start=( + dt_util.as_local(event.start) + if isinstance(event.start, datetime) + else event.start + ), + end=( + dt_util.as_local(event.end) + if isinstance(event.end, datetime) + else event.end + ), + description=event.description, + uid=event.uid, + rrule=event.rrule.as_rrule_str() if event.rrule else None, + recurrence_id=event.recurrence_id, + location=event.location, + ) diff --git a/homeassistant/components/remote_calendar/config_flow.py b/homeassistant/components/remote_calendar/config_flow.py new file mode 100644 index 00000000000..cc9f45e2767 --- /dev/null +++ b/homeassistant/components/remote_calendar/config_flow.py @@ -0,0 +1,82 @@ +"""Config flow for Remote Calendar integration.""" + +from http import HTTPStatus +import logging +from typing import Any + +from httpx import HTTPError, InvalidURL +from ical.calendar_stream import IcsCalendarStream +from ical.exceptions import CalendarParseError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_URL +from homeassistant.helpers.httpx_client import get_async_client + +from .const import CONF_CALENDAR_NAME, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_CALENDAR_NAME): str, + vol.Required(CONF_URL): str, + } +) + + +class RemoteCalendarConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Remote Calendar.""" 
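+ # Single "user" step: webcal:// URLs are rewritten to https://, the calendar is + # fetched once, and an entry is only created if the response parses as valid ICS + # (a CalendarParseError is reported as the "invalid_ics_file" error).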
+ + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + if user_input is None: + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA + ) + errors: dict = {} + _LOGGER.debug("User input: %s", user_input) + self._async_abort_entries_match( + {CONF_CALENDAR_NAME: user_input[CONF_CALENDAR_NAME]} + ) + if user_input[CONF_URL].startswith("webcal://"): + user_input[CONF_URL] = user_input[CONF_URL].replace( + "webcal://", "https://", 1 + ) + self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]}) + client = get_async_client(self.hass) + try: + res = await client.get(user_input[CONF_URL], follow_redirects=True) + if res.status_code == HTTPStatus.FORBIDDEN: + errors["base"] = "forbidden" + return self.async_show_form( + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + ) + res.raise_for_status() + except (HTTPError, InvalidURL) as err: + errors["base"] = "cannot_connect" + _LOGGER.debug("An error occurred: %s", err) + else: + try: + await self.hass.async_add_executor_job( + IcsCalendarStream.calendar_from_ics, res.text + ) + except CalendarParseError as err: + errors["base"] = "invalid_ics_file" + _LOGGER.debug("Invalid .ics file: %s", err) + else: + return self.async_create_entry( + title=user_input[CONF_CALENDAR_NAME], data=user_input + ) + + return self.async_show_form( + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/remote_calendar/const.py b/homeassistant/components/remote_calendar/const.py new file mode 100644 index 00000000000..060d7633111 --- /dev/null +++ b/homeassistant/components/remote_calendar/const.py @@ -0,0 +1,4 @@ +"""Constants for the Remote Calendar integration.""" + +DOMAIN = "remote_calendar" +CONF_CALENDAR_NAME = "calendar_name" diff --git a/homeassistant/components/remote_calendar/coordinator.py b/homeassistant/components/remote_calendar/coordinator.py new file mode 100644 index 00000000000..6caec297c1a --- /dev/null +++ b/homeassistant/components/remote_calendar/coordinator.py @@ -0,0 +1,71 @@ +"""Data UpdateCoordinator for the Remote Calendar integration.""" + +from datetime import timedelta +import logging + +from httpx import HTTPError, InvalidURL +from ical.calendar import Calendar +from ical.calendar_stream import IcsCalendarStream +from ical.exceptions import CalendarParseError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +type RemoteCalendarConfigEntry = ConfigEntry[RemoteCalendarDataUpdateCoordinator] + +_LOGGER = logging.getLogger(__name__) +SCAN_INTERVAL = timedelta(days=1) + + +class RemoteCalendarDataUpdateCoordinator(DataUpdateCoordinator[Calendar]): + """Class to manage fetching calendar data.""" + + config_entry: RemoteCalendarConfigEntry + ics: str + + def __init__( + self, + hass: HomeAssistant, + config_entry: RemoteCalendarConfigEntry, + ) -> None: + """Initialize data updater.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + always_update=True, + ) + self._client = get_async_client(hass) + self._url = config_entry.data[CONF_URL] + + async def _async_update_data(self) -> Calendar: + """Update data from the 
URL.""" + try: + res = await self._client.get(self._url, follow_redirects=True) + res.raise_for_status() + except (HTTPError, InvalidURL) as err: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="unable_to_fetch", + translation_placeholders={"err": str(err)}, + ) from err + try: + # calendar_from_ics will dynamically load packages + # the first time it is called, so we need to do it + # in a separate thread to avoid blocking the event loop + self.ics = res.text + return await self.hass.async_add_executor_job( + IcsCalendarStream.calendar_from_ics, self.ics + ) + except CalendarParseError as err: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="unable_to_parse", + translation_placeholders={"err": str(err)}, + ) from err diff --git a/homeassistant/components/remote_calendar/diagnostics.py b/homeassistant/components/remote_calendar/diagnostics.py new file mode 100644 index 00000000000..5ebfb3d3812 --- /dev/null +++ b/homeassistant/components/remote_calendar/diagnostics.py @@ -0,0 +1,25 @@ +"""Provides diagnostics for the remote calendar.""" + +import datetime +from typing import Any + +from ical.diagnostics import redact_ics + +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from . import RemoteCalendarConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: RemoteCalendarConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + payload: dict[str, Any] = { + "now": dt_util.now().isoformat(), + "timezone": str(dt_util.get_default_time_zone()), + "system_timezone": str(datetime.datetime.now().astimezone().tzinfo), + } + payload["ics"] = "\n".join(redact_ics(coordinator.ics)) + return payload diff --git a/homeassistant/components/remote_calendar/manifest.json b/homeassistant/components/remote_calendar/manifest.json new file mode 100644 index 00000000000..256f5baf0ff --- /dev/null +++ b/homeassistant/components/remote_calendar/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "remote_calendar", + "name": "Remote Calendar", + "codeowners": ["@Thomas55555"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/remote_calendar", + "integration_type": "service", + "iot_class": "cloud_polling", + "loggers": ["ical"], + "quality_scale": "silver", + "requirements": ["ical==9.0.3"] +} diff --git a/homeassistant/components/remote_calendar/quality_scale.yaml b/homeassistant/components/remote_calendar/quality_scale.yaml new file mode 100644 index 00000000000..964b63d7116 --- /dev/null +++ b/homeassistant/components/remote_calendar/quality_scale.yaml @@ -0,0 +1,98 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: + status: exempt + comment: | + No unique identifier. + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + dependency-transparency: done + action-setup: + status: exempt + comment: | + There are no actions. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: No actions available.
+ brands: done + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: | + There are no actions. + reauthentication-flow: + status: exempt + comment: | + There is no authentication required. + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: no configuration options + + # Gold + devices: + status: exempt + comment: No devices. One URL is always assigned to one calendar. + diagnostics: done + discovery-update-info: + status: todo + comment: No discovery protocol available. + discovery: + status: exempt + comment: No discovery protocol available. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: No devices. One URL is always assigned to one calendar. + entity-category: done + entity-device-class: + status: exempt + comment: No devices classes for calendars. + entity-disabled-by-default: + status: exempt + comment: Only one entity per entry. + entity-translations: + status: exempt + comment: Entity name is defined by the user, so no translation possible. + exception-translations: done + icon-translations: + status: exempt + comment: Only the default icon is used. + reconfiguration-flow: + status: exempt + comment: no configuration possible + repair-issues: todo + stale-devices: + status: exempt + comment: No devices. One URL is always assigned to one calendar. + + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/remote_calendar/strings.json b/homeassistant/components/remote_calendar/strings.json new file mode 100644 index 00000000000..fff2d4abbb3 --- /dev/null +++ b/homeassistant/components/remote_calendar/strings.json @@ -0,0 +1,34 @@ +{ + "title": "Remote Calendar", + "config": { + "step": { + "user": { + "description": "Please choose a name for the calendar to be imported", + "data": { + "calendar_name": "Calendar name", + "url": "Calendar URL" + }, + "data_description": { + "calendar_name": "The name of the calendar shown in the UI.", + "url": "The URL of the remote calendar." 
+ } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "forbidden": "The server understood the request but refuses to authorize it.", + "invalid_ics_file": "[%key:component::local_calendar::config::error::invalid_ics_file%]" + } + }, + "exceptions": { + "unable_to_fetch": { + "message": "Unable to fetch calendar data: {err}" + }, + "unable_to_parse": { + "message": "Unable to parse calendar data: {err}" + } + } +} diff --git a/homeassistant/components/remote_rpi_gpio/binary_sensor.py b/homeassistant/components/remote_rpi_gpio/binary_sensor.py index 42e8517c1e8..1d970bb3541 100644 --- a/homeassistant/components/remote_rpi_gpio/binary_sensor.py +++ b/homeassistant/components/remote_rpi_gpio/binary_sensor.py @@ -2,6 +2,7 @@ from __future__ import annotations +from gpiozero import DigitalInputDevice import requests import voluptuous as vol @@ -48,10 +49,10 @@ def setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Raspberry PI GPIO devices.""" - address = config["host"] + address = config[CONF_HOST] invert_logic = config[CONF_INVERT_LOGIC] pull_mode = config[CONF_PULL_MODE] - ports = config["ports"] + ports = config[CONF_PORTS] bouncetime = config[CONF_BOUNCETIME] / 1000 devices = [] @@ -71,9 +72,11 @@ class RemoteRPiGPIOBinarySensor(BinarySensorEntity): _attr_should_poll = False - def __init__(self, name, sensor, invert_logic): + def __init__( + self, name: str | None, sensor: DigitalInputDevice, invert_logic: bool + ) -> None: """Initialize the RPi binary sensor.""" - self._name = name + self._attr_name = name self._invert_logic = invert_logic self._state = False self._sensor = sensor @@ -90,20 +93,10 @@ class RemoteRPiGPIOBinarySensor(BinarySensorEntity): self._sensor.when_activated = read_gpio @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def is_on(self): + def is_on(self) -> bool: """Return the state of the entity.""" return self._state != self._invert_logic - @property - def device_class(self): - """Return the class of this sensor, from DEVICE_CLASSES.""" - return - def update(self) -> None: """Update the GPIO state.""" try: diff --git a/homeassistant/components/remote_rpi_gpio/switch.py b/homeassistant/components/remote_rpi_gpio/switch.py index 91b389c5a1e..25f95045e4b 100644 --- a/homeassistant/components/remote_rpi_gpio/switch.py +++ b/homeassistant/components/remote_rpi_gpio/switch.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import Any +from gpiozero import LED import voluptuous as vol from homeassistant.components.switch import ( @@ -57,37 +58,23 @@ def setup_platform( class RemoteRPiGPIOSwitch(SwitchEntity): """Representation of a Remote Raspberry Pi GPIO.""" + _attr_assumed_state = True _attr_should_poll = False - def __init__(self, name, led): + def __init__(self, name: str | None, led: LED) -> None: """Initialize the pin.""" - self._name = name or DEVICE_DEFAULT_NAME - self._state = False + self._attr_name = name or DEVICE_DEFAULT_NAME + self._attr_is_on = False self._switch = led - @property - def name(self): - """Return the name of the switch.""" - return self._name - - @property - def assumed_state(self): - """If unable to access real state of the entity.""" - return True - - @property - def is_on(self): - """Return true if device is on.""" - return self._state - def turn_on(self, **kwargs: Any) -> None: 
"""Turn the device on.""" write_output(self._switch, 1) - self._state = True + self._attr_is_on = True self.schedule_update_ha_state() def turn_off(self, **kwargs: Any) -> None: """Turn the device off.""" write_output(self._switch, 0) - self._state = False + self._attr_is_on = False self.schedule_update_ha_state() diff --git a/homeassistant/components/renault/config_flow.py b/homeassistant/components/renault/config_flow.py index 70544a5637f..90d2c11613c 100644 --- a/homeassistant/components/renault/config_flow.py +++ b/homeassistant/components/renault/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any import aiohttp @@ -16,6 +17,8 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_KAMEREON_ACCOUNT_ID, CONF_LOCALE, DOMAIN from .renault_hub import RenaultHub +_LOGGER = logging.getLogger(__name__) + USER_SCHEMA = vol.Schema( { vol.Required(CONF_LOCALE): vol.In(AVAILABLE_LOCALES.keys()), @@ -54,7 +57,8 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): ) except (aiohttp.ClientConnectionError, GigyaException): errors["base"] = "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: if login_success: diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index 71ca5428740..99ca91c5bdf 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -28,7 +28,7 @@ from homeassistant.helpers.event import async_call_later from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN +from .const import CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN from .exceptions import PasswordIncompatible, ReolinkException, UserNotAdmin from .host import ReolinkHost from .services import async_setup_services @@ -67,9 +67,7 @@ async def async_setup_entry( hass: HomeAssistant, config_entry: ReolinkConfigEntry ) -> bool: """Set up Reolink from a config entry.""" - host = ReolinkHost( - hass, config_entry.data, config_entry.options, config_entry.entry_id - ) + host = ReolinkHost(hass, config_entry.data, config_entry.options, config_entry) try: await host.async_init() @@ -100,6 +98,7 @@ async def async_setup_entry( or host.api.use_https != config_entry.data[CONF_USE_HTTPS] or host.api.supported(None, "privacy_mode") != config_entry.data.get(CONF_SUPPORTS_PRIVACY_MODE) + or host.api.baichuan.port != config_entry.data.get(CONF_BC_PORT) ): if host.api.port != config_entry.data[CONF_PORT]: _LOGGER.warning( @@ -108,10 +107,21 @@ async def async_setup_entry( config_entry.data[CONF_PORT], host.api.port, ) + if ( + config_entry.data.get(CONF_BC_PORT, host.api.baichuan.port) + != host.api.baichuan.port + ): + _LOGGER.warning( + "Baichuan port of Reolink %s, changed from %s to %s", + host.api.nvr_name, + config_entry.data.get(CONF_BC_PORT), + host.api.baichuan.port, + ) data = { **config_entry.data, CONF_PORT: host.api.port, CONF_USE_HTTPS: host.api.use_https, + CONF_BC_PORT: host.api.baichuan.port, CONF_SUPPORTS_PRIVACY_MODE: host.api.supported(None, "privacy_mode"), } hass.config_entries.async_update_entry(config_entry, data=data) diff --git a/homeassistant/components/reolink/binary_sensor.py b/homeassistant/components/reolink/binary_sensor.py 
index 4e90bfc9eef..39910bbc52a 100644 --- a/homeassistant/components/reolink/binary_sensor.py +++ b/homeassistant/components/reolink/binary_sensor.py @@ -25,7 +25,11 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription +from .entity import ( + ReolinkChannelCoordinatorEntity, + ReolinkChannelEntityDescription, + ReolinkEntityDescription, +) from .util import ReolinkConfigEntry, ReolinkData PARALLEL_UPDATES = 0 @@ -41,6 +45,18 @@ class ReolinkBinarySensorEntityDescription( value: Callable[[Host, int], bool] +@dataclass(frozen=True, kw_only=True) +class ReolinkSmartAIBinarySensorEntityDescription( + BinarySensorEntityDescription, + ReolinkEntityDescription, +): + """A class that describes Smart AI binary sensor entities.""" + + smart_type: str + value: Callable[[Host, int, int], bool] + supported: Callable[[Host, int, int], bool] = lambda api, ch, loc: True + + BINARY_PUSH_SENSORS = ( ReolinkBinarySensorEntityDescription( key="motion", @@ -121,6 +137,142 @@ BINARY_SENSORS = ( ), ) +BINARY_SMART_AI_SENSORS = ( + ReolinkSmartAIBinarySensorEntityDescription( + key="crossline_person", + smart_type="crossline", + cmd_id=33, + translation_key="crossline_person", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "crossline", loc, "people") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_crossline") + and "people" in api.baichuan.smart_ai_type_list(ch, "crossline", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="crossline_vehicle", + smart_type="crossline", + cmd_id=33, + translation_key="crossline_vehicle", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "crossline", loc, "vehicle") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_crossline") + and "vehicle" in api.baichuan.smart_ai_type_list(ch, "crossline", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="crossline_dog_cat", + smart_type="crossline", + cmd_id=33, + translation_key="crossline_dog_cat", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "crossline", loc, "dog_cat") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_crossline") + and "dog_cat" in api.baichuan.smart_ai_type_list(ch, "crossline", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="intrusion_person", + smart_type="intrusion", + cmd_id=33, + translation_key="intrusion_person", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "intrusion", loc, "people") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_intrusion") + and "people" in api.baichuan.smart_ai_type_list(ch, "intrusion", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="intrusion_vehicle", + smart_type="intrusion", + cmd_id=33, + translation_key="intrusion_vehicle", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "intrusion", loc, "vehicle") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_intrusion") + and "vehicle" in api.baichuan.smart_ai_type_list(ch, "intrusion", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="intrusion_dog_cat", + smart_type="intrusion", + cmd_id=33, + translation_key="intrusion_dog_cat", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "intrusion", loc, "dog_cat") + ), + supported=lambda api, ch, 
loc: ( + api.supported(ch, "ai_intrusion") + and "dog_cat" in api.baichuan.smart_ai_type_list(ch, "intrusion", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="linger_person", + smart_type="loitering", + cmd_id=33, + translation_key="linger_person", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "loitering", loc, "people") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_linger") + and "people" in api.baichuan.smart_ai_type_list(ch, "loitering", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="linger_vehicle", + smart_type="loitering", + cmd_id=33, + translation_key="linger_vehicle", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "loitering", loc, "vehicle") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_linger") + and "vehicle" in api.baichuan.smart_ai_type_list(ch, "loitering", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="linger_dog_cat", + smart_type="loitering", + cmd_id=33, + translation_key="linger_dog_cat", + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_state(ch, "loitering", loc, "dog_cat") + ), + supported=lambda api, ch, loc: ( + api.supported(ch, "ai_linger") + and "dog_cat" in api.baichuan.smart_ai_type_list(ch, "loitering", loc) + ), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="forgotten_item", + smart_type="legacy", + cmd_id=33, + translation_key="forgotten_item", + value=lambda api, ch, loc: (api.baichuan.smart_ai_state(ch, "legacy", loc)), + supported=lambda api, ch, loc: api.supported(ch, "ai_forgotten_item"), + ), + ReolinkSmartAIBinarySensorEntityDescription( + key="taken_item", + smart_type="loss", + cmd_id=33, + translation_key="taken_item", + value=lambda api, ch, loc: (api.baichuan.smart_ai_state(ch, "loss", loc)), + supported=lambda api, ch, loc: api.supported(ch, "ai_taken_item"), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -129,18 +281,29 @@ async def async_setup_entry( ) -> None: """Set up a Reolink IP Camera.""" reolink_data: ReolinkData = config_entry.runtime_data + api = reolink_data.host.api - entities: list[ReolinkBinarySensorEntity] = [] - for channel in reolink_data.host.api.channels: + entities: list[ReolinkBinarySensorEntity | ReolinkSmartAIBinarySensorEntity] = [] + for channel in api.channels: entities.extend( ReolinkPushBinarySensorEntity(reolink_data, channel, entity_description) for entity_description in BINARY_PUSH_SENSORS - if entity_description.supported(reolink_data.host.api, channel) + if entity_description.supported(api, channel) ) entities.extend( ReolinkBinarySensorEntity(reolink_data, channel, entity_description) for entity_description in BINARY_SENSORS - if entity_description.supported(reolink_data.host.api, channel) + if entity_description.supported(api, channel) + ) + entities.extend( + ReolinkSmartAIBinarySensorEntity( + reolink_data, channel, location, entity_description + ) + for entity_description in BINARY_SMART_AI_SENSORS + for location in api.baichuan.smart_location_list( + channel, entity_description.key + ) + if entity_description.supported(api, channel, location) ) async_add_entities(entities) @@ -198,3 +361,40 @@ class ReolinkPushBinarySensorEntity(ReolinkBinarySensorEntity): async def _async_handle_event(self, event: str) -> None: """Handle incoming event for motion detection.""" self.async_write_ha_state() + + +class ReolinkSmartAIBinarySensorEntity( + ReolinkChannelCoordinatorEntity, BinarySensorEntity +): + """Binary-sensor class for Reolink IP camera 
Smart AI sensors.""" + + entity_description: ReolinkSmartAIBinarySensorEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + channel: int, + location: int, + entity_description: ReolinkSmartAIBinarySensorEntityDescription, + ) -> None: + """Initialize Reolink binary sensor.""" + self.entity_description = entity_description + super().__init__(reolink_data, channel) + unique_index = self._host.api.baichuan.smart_ai_index( + channel, entity_description.smart_type, location + ) + self._attr_unique_id = f"{self._attr_unique_id}_{unique_index}" + + self._location = location + self._attr_translation_placeholders = { + "zone_name": self._host.api.baichuan.smart_ai_name( + channel, entity_description.smart_type, location + ) + } + + @property + def is_on(self) -> bool: + """State of the sensor.""" + return self.entity_description.value( + self._host.api, self._channel, self._location + ) diff --git a/homeassistant/components/reolink/config_flow.py b/homeassistant/components/reolink/config_flow.py index 7943cadef21..12ccd455be3 100644 --- a/homeassistant/components/reolink/config_flow.py +++ b/homeassistant/components/reolink/config_flow.py @@ -8,6 +8,7 @@ import logging from typing import Any from reolink_aio.api import ALLOWED_SPECIAL_CHARS +from reolink_aio.baichuan import DEFAULT_BC_PORT from reolink_aio.exceptions import ( ApiError, CredentialsInvalidError, @@ -37,7 +38,7 @@ from homeassistant.helpers import config_validation as cv, selector from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo -from .const import CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN +from .const import CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN from .exceptions import ( PasswordIncompatible, ReolinkException, @@ -287,6 +288,7 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): if not errors: user_input[CONF_PORT] = host.api.port user_input[CONF_USE_HTTPS] = host.api.use_https + user_input[CONF_BC_PORT] = host.api.baichuan.port user_input[CONF_SUPPORTS_PRIVACY_MODE] = host.api.supported( None, "privacy_mode" ) @@ -326,8 +328,9 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): if errors: data_schema = data_schema.extend( { - vol.Optional(CONF_PORT): cv.positive_int, + vol.Optional(CONF_PORT): cv.port, vol.Required(CONF_USE_HTTPS, default=False): bool, + vol.Required(CONF_BC_PORT, default=DEFAULT_BC_PORT): cv.port, } ) diff --git a/homeassistant/components/reolink/const.py b/homeassistant/components/reolink/const.py index 7bd93337c46..026d1219881 100644 --- a/homeassistant/components/reolink/const.py +++ b/homeassistant/components/reolink/const.py @@ -3,4 +3,5 @@ DOMAIN = "reolink" CONF_USE_HTTPS = "use_https" +CONF_BC_PORT = "baichuan_port" CONF_SUPPORTS_PRIVACY_MODE = "privacy_mode_supported" diff --git a/homeassistant/components/reolink/diagnostics.py b/homeassistant/components/reolink/diagnostics.py index 693f2ba59a4..1d0e5d919e7 100644 --- a/homeassistant/components/reolink/diagnostics.py +++ b/homeassistant/components/reolink/diagnostics.py @@ -25,6 +25,14 @@ async def async_get_config_entry_diagnostics( IPC_cam[ch]["firmware version"] = api.camera_sw_version(ch) IPC_cam[ch]["encoding main"] = await api.get_encoding(ch) + chimes: dict[int, dict[str, Any]] = {} + for chime in api.chime_list: + chimes[chime.dev_id] = {} + chimes[chime.dev_id]["channel"] = chime.channel + chimes[chime.dev_id]["name"] = chime.name + chimes[chime.dev_id]["online"] = chime.online + 
chimes[chime.dev_id]["event_types"] = chime.chime_event_types + return { "model": api.model, "hardware version": api.hardware_version, @@ -41,9 +49,11 @@ async def async_get_config_entry_diagnostics( "channels": api.channels, "stream channels": api.stream_channels, "IPC cams": IPC_cam, + "Chimes": chimes, "capabilities": api.capabilities, "cmd list": host.update_cmd, "firmware ch list": host.firmware_ch_list, "api versions": api.checked_api_versions, "abilities": api.abilities, + "BC_abilities": api.baichuan.abilities, } diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index 55ce4ce891e..ec598de663d 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -178,8 +178,13 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): else: self._dev_id = f"{self._host.unique_id}_ch{dev_ch}" + connections = set() + if mac := self._host.api.baichuan.mac_address(dev_ch): + connections.add((CONNECTION_NETWORK_MAC, mac)) + self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, self._dev_id)}, + connections=connections, via_device=(DOMAIN, self._host.unique_id), name=self._host.api.camera_name(dev_ch), model=self._host.api.camera_model(dev_ch), diff --git a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index 2f646ba9090..a027177f1fc 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -12,6 +12,7 @@ from typing import Any, Literal import aiohttp from aiohttp.web import Request from reolink_aio.api import ALLOWED_SPECIAL_CHARS, Host +from reolink_aio.baichuan import DEFAULT_BC_PORT from reolink_aio.enums import SubType from reolink_aio.exceptions import NotSupportedError, ReolinkError, SubscriptionError @@ -33,14 +34,14 @@ from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.helpers.storage import Store from homeassistant.util.ssl import SSLCipherList -from .const import CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN +from .const import CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN from .exceptions import ( PasswordIncompatible, ReolinkSetupException, ReolinkWebhookException, UserNotAdmin, ) -from .util import get_store +from .util import ReolinkConfigEntry, get_store DEFAULT_TIMEOUT = 30 FIRST_TCP_PUSH_TIMEOUT = 10 @@ -66,11 +67,11 @@ class ReolinkHost: hass: HomeAssistant, config: Mapping[str, Any], options: Mapping[str, Any], - config_entry_id: str | None = None, + config_entry: ReolinkConfigEntry | None = None, ) -> None: """Initialize Reolink Host. 
Could be either NVR, or Camera.""" self._hass: HomeAssistant = hass - self._config_entry_id = config_entry_id + self._config_entry = config_entry self._config = config self._unique_id: str = "" @@ -91,6 +92,7 @@ class ReolinkHost: protocol=options[CONF_PROTOCOL], timeout=DEFAULT_TIMEOUT, aiohttp_get_session_callback=get_aiohttp_session, + bc_port=config.get(CONF_BC_PORT, DEFAULT_BC_PORT), ) self.last_wake: float = 0 @@ -149,15 +151,33 @@ class ReolinkHost: async def async_init(self) -> None: """Connect to Reolink host.""" if not self._api.valid_password(): + if ( + len(self._config[CONF_PASSWORD]) >= 32 + and self._config_entry is not None + ): + ir.async_create_issue( + self._hass, + DOMAIN, + f"password_too_long_{self._config_entry.entry_id}", + is_fixable=True, + severity=ir.IssueSeverity.ERROR, + translation_key="password_too_long", + translation_placeholders={"name": self._config_entry.title}, + ) + raise PasswordIncompatible( - "Reolink password contains incompatible special character, " - "please change the password to only contain characters: " - f"a-z, A-Z, 0-9 or {ALLOWED_SPECIAL_CHARS}" + "Reolink password contains incompatible special character or " + "is too long, please change the password to only contain characters: " + f"a-z, A-Z, 0-9 or {ALLOWED_SPECIAL_CHARS} " + "and not be longer than 31 characters" ) store: Store[str] | None = None - if self._config_entry_id is not None: - store = get_store(self._hass, self._config_entry_id) + if self._config_entry is not None: + ir.async_delete_issue( + self._hass, DOMAIN, f"password_too_long_{self._config_entry.entry_id}" + ) + store = get_store(self._hass, self._config_entry.entry_id) if self._config.get(CONF_SUPPORTS_PRIVACY_MODE) and ( data := await store.async_load() ): diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index 26198a11594..7d1dba099ed 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -54,6 +54,72 @@ "state": { "on": "mdi:sleep" } + }, + "crossline_person": { + "default": "mdi:fence", + "state": { + "on": "mdi:fence-electric" + } + }, + "crossline_vehicle": { + "default": "mdi:fence", + "state": { + "on": "mdi:fence-electric" + } + }, + "crossline_dog_cat": { + "default": "mdi:fence", + "state": { + "on": "mdi:fence-electric" + } + }, + "intrusion_person": { + "default": "mdi:location-enter", + "state": { + "on": "mdi:alert-circle-outline" + } + }, + "intrusion_vehicle": { + "default": "mdi:location-enter", + "state": { + "on": "mdi:alert-circle-outline" + } + }, + "intrusion_dog_cat": { + "default": "mdi:location-enter", + "state": { + "on": "mdi:alert-circle-outline" + } + }, + "linger_person": { + "default": "mdi:account-switch", + "state": { + "on": "mdi:account-alert" + } + }, + "linger_vehicle": { + "default": "mdi:account-switch", + "state": { + "on": "mdi:account-alert" + } + }, + "linger_dog_cat": { + "default": "mdi:account-switch", + "state": { + "on": "mdi:account-alert" + } + }, + "forgotten_item": { + "default": "mdi:package-variant-closed-plus", + "state": { + "on": "mdi:package-variant-closed-check" + } + }, + "taken_item": { + "default": "mdi:package-variant-closed-minus", + "state": { + "on": "mdi:package-variant-closed-check" + } } }, "button": { @@ -151,6 +217,21 @@ "ai_animal_sensitivity": { "default": "mdi:paw" }, + "crossline_sensitivity": { + "default": "mdi:fence" + }, + "intrusion_sensitivity": { + "default": "mdi:location-enter" + }, + "linger_sensitivity": { + "default": 
"mdi:account-switch" + }, + "forgotten_item_sensitivity": { + "default": "mdi:package-variant-closed-plus" + }, + "taken_item_sensitivity": { + "default": "mdi:package-variant-closed-minus" + }, "ai_face_delay": { "default": "mdi:face-recognition" }, @@ -169,6 +250,18 @@ "ai_animal_delay": { "default": "mdi:paw" }, + "intrusion_delay": { + "default": "mdi:location-enter" + }, + "linger_delay": { + "default": "mdi:account-switch" + }, + "forgotten_item_delay": { + "default": "mdi:package-variant-closed-plus" + }, + "taken_item_delay": { + "default": "mdi:package-variant-closed-minus" + }, "auto_quick_reply_time": { "default": "mdi:message-reply-text-outline" }, @@ -284,6 +377,9 @@ }, "sub_bit_rate": { "default": "mdi:play-speed" + }, + "scene_mode": { + "default": "mdi:view-list" } }, "sensor": { @@ -299,6 +395,9 @@ "battery_state": { "default": "mdi:battery-charging" }, + "day_night_state": { + "default": "mdi:theme-light-dark" + }, "wifi_signal": { "default": "mdi:wifi" }, diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index f923efdbbf2..82b9586cccc 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -19,5 +19,5 @@ "iot_class": "local_push", "loggers": ["reolink_aio"], "quality_scale": "platinum", - "requirements": ["reolink-aio==0.12.1"] + "requirements": ["reolink-aio==0.13.0"] } diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index 48382df4cbc..2a6fb740ee0 100644 --- a/homeassistant/components/reolink/number.py +++ b/homeassistant/components/reolink/number.py @@ -9,6 +9,7 @@ from typing import Any from reolink_aio.api import Chime, Host from homeassistant.components.number import ( + NumberDeviceClass, NumberEntity, NumberEntityDescription, NumberMode, @@ -44,6 +45,19 @@ class ReolinkNumberEntityDescription( value: Callable[[Host, int], float | None] +@dataclass(frozen=True, kw_only=True) +class ReolinkSmartAINumberEntityDescription( + NumberEntityDescription, + ReolinkChannelEntityDescription, +): + """A class that describes smart AI number entities.""" + + smart_type: str + method: Callable[[Host, int, int, float], Any] + mode: NumberMode = NumberMode.AUTO + value: Callable[[Host, int, int], float | None] + + @dataclass(frozen=True, kw_only=True) class ReolinkHostNumberEntityDescription( NumberEntityDescription, @@ -125,6 +139,7 @@ NUMBER_ENTITIES = ( cmd_key="GetPtzGuard", translation_key="guard_return_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=10, @@ -248,6 +263,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_face_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -264,6 +280,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_person_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -280,6 +297,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_vehicle_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -296,6 
+314,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_package_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -312,6 +331,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_pet_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -330,6 +350,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiAlarm", translation_key="ai_animal_delay", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, entity_registry_enabled_default=False, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, @@ -346,6 +367,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAutoReply", translation_key="auto_quick_reply_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=1, @@ -385,6 +407,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiCfg", translation_key="auto_track_disappear_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=1, @@ -400,6 +423,7 @@ NUMBER_ENTITIES = ( cmd_key="GetAiCfg", translation_key="auto_track_stop_time", entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, native_step=1, native_unit_of_measurement=UnitOfTime.SECONDS, native_min_value=1, @@ -493,6 +517,168 @@ NUMBER_ENTITIES = ( ), ) +SMART_AI_NUMBER_ENTITIES = ( + ReolinkSmartAINumberEntityDescription( + key="crossline_sensitivity", + smart_type="crossline", + cmd_id=527, + translation_key="crossline_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_crossline"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "crossline", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "crossline", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="intrusion_sensitivity", + smart_type="intrusion", + cmd_id=529, + translation_key="intrusion_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_intrusion"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "intrusion", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "intrusion", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="linger_sensitivity", + smart_type="loitering", + cmd_id=531, + translation_key="linger_sensitivity", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_linger"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "loitering", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loitering", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="forgotten_item_sensitivity", + smart_type="legacy", + cmd_id=549, + 
translation_key="forgotten_item_sensitivity", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_forgotten_item"), + value=lambda api, ch, loc: ( + api.baichuan.smart_ai_sensitivity(ch, "legacy", loc) + ), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "legacy", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="taken_item_sensitivity", + smart_type="loss", + cmd_id=551, + translation_key="taken_item_sensitivity", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=100, + supported=lambda api, ch: api.supported(ch, "ai_taken_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_sensitivity(ch, "loss", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loss", loc, sensitivity=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="intrusion_delay", + smart_type="intrusion", + cmd_id=529, + translation_key="intrusion_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=0, + native_max_value=10, + supported=lambda api, ch: api.supported(ch, "ai_intrusion"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "intrusion", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "intrusion", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="linger_delay", + smart_type="loitering", + cmd_id=531, + translation_key="linger_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=10, + supported=lambda api, ch: api.supported(ch, "ai_linger"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "loitering", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loitering", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="forgotten_item_delay", + smart_type="legacy", + cmd_id=549, + translation_key="forgotten_item_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=30, + supported=lambda api, ch: api.supported(ch, "ai_forgotten_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "legacy", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "legacy", loc, delay=int(value) + ), + ), + ReolinkSmartAINumberEntityDescription( + key="taken_item_delay", + smart_type="loss", + cmd_id=551, + translation_key="taken_item_delay", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.DURATION, + native_step=1, + native_unit_of_measurement=UnitOfTime.SECONDS, + native_min_value=1, + native_max_value=30, + supported=lambda api, ch: api.supported(ch, "ai_taken_item"), + value=lambda api, ch, loc: api.baichuan.smart_ai_delay(ch, "loss", loc), + method=lambda api, ch, loc, value: api.baichuan.set_smart_ai( + ch, "loss", loc, delay=int(value) + ), + ), +) + HOST_NUMBER_ENTITIES = ( 
ReolinkHostNumberEntityDescription( key="alarm_volume", @@ -542,22 +728,32 @@ async def async_setup_entry( ) -> None: """Set up a Reolink number entities.""" reolink_data: ReolinkData = config_entry.runtime_data + api = reolink_data.host.api entities: list[NumberEntity] = [ ReolinkNumberEntity(reolink_data, channel, entity_description) for entity_description in NUMBER_ENTITIES - for channel in reolink_data.host.api.channels - if entity_description.supported(reolink_data.host.api, channel) + for channel in api.channels + if entity_description.supported(api, channel) ] + entities.extend( + ReolinkSmartAINumberEntity(reolink_data, channel, location, entity_description) + for entity_description in SMART_AI_NUMBER_ENTITIES + for channel in api.channels + for location in api.baichuan.smart_location_list( + channel, entity_description.smart_type + ) + if entity_description.supported(api, channel) + ) entities.extend( ReolinkHostNumberEntity(reolink_data, entity_description) for entity_description in HOST_NUMBER_ENTITIES - if entity_description.supported(reolink_data.host.api) + if entity_description.supported(api) ) entities.extend( ReolinkChimeNumberEntity(reolink_data, chime, entity_description) for entity_description in CHIME_NUMBER_ENTITIES - for chime in reolink_data.host.api.chime_list + for chime in api.chime_list ) async_add_entities(entities) @@ -599,6 +795,51 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): self.async_write_ha_state() +class ReolinkSmartAINumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): + """Base smart AI number entity class for Reolink IP cameras.""" + + entity_description: ReolinkSmartAINumberEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + channel: int, + location: int, + entity_description: ReolinkSmartAINumberEntityDescription, + ) -> None: + """Initialize Reolink number entity.""" + self.entity_description = entity_description + super().__init__(reolink_data, channel) + + unique_index = self._host.api.baichuan.smart_ai_index( + channel, entity_description.smart_type, location + ) + self._attr_unique_id = f"{self._attr_unique_id}_{unique_index}" + + self._location = location + self._attr_mode = entity_description.mode + self._attr_translation_placeholders = { + "zone_name": self._host.api.baichuan.smart_ai_name( + channel, entity_description.smart_type, location + ) + } + + @property + def native_value(self) -> float | None: + """State of the number entity.""" + return self.entity_description.value( + self._host.api, self._channel, self._location + ) + + @raise_translated_error + async def async_set_native_value(self, value: float) -> None: + """Update the current value.""" + await self.entity_description.method( + self._host.api, self._channel, self._location, value + ) + self.async_write_ha_state() + + class ReolinkHostNumberEntity(ReolinkHostCoordinatorEntity, NumberEntity): """Base number entity class for Reolink Host.""" diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index c0b20da0238..e5d66ed3901 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -30,6 +30,8 @@ from .entity import ( ReolinkChannelEntityDescription, ReolinkChimeCoordinatorEntity, ReolinkChimeEntityDescription, + ReolinkHostCoordinatorEntity, + ReolinkHostEntityDescription, ) from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error @@ -49,6 +51,18 @@ class ReolinkSelectEntityDescription( value: 
Callable[[Host, int], str] | None = None +@dataclass(frozen=True, kw_only=True) +class ReolinkHostSelectEntityDescription( + SelectEntityDescription, + ReolinkHostEntityDescription, +): + """A class that describes host select entities.""" + + get_options: Callable[[Host], list[str]] + method: Callable[[Host, str], Any] + value: Callable[[Host], str] + + @dataclass(frozen=True, kw_only=True) class ReolinkChimeSelectEntityDescription( SelectEntityDescription, @@ -238,6 +252,19 @@ SELECT_ENTITIES = ( ), ) +HOST_SELECT_ENTITIES = ( + ReolinkHostSelectEntityDescription( + key="scene_mode", + cmd_key="GetScene", + translation_key="scene_mode", + entity_category=EntityCategory.CONFIG, + get_options=lambda api: api.baichuan.scene_names, + supported=lambda api: api.supported(None, "scenes"), + value=lambda api: api.baichuan.active_scene, + method=lambda api, name: api.baichuan.set_scene(scene_name=name), + ), +) + CHIME_SELECT_ENTITIES = ( ReolinkChimeSelectEntityDescription( key="motion_tone", @@ -300,12 +327,19 @@ async def async_setup_entry( """Set up a Reolink select entities.""" reolink_data: ReolinkData = config_entry.runtime_data - entities: list[ReolinkSelectEntity | ReolinkChimeSelectEntity] = [ + entities: list[ + ReolinkSelectEntity | ReolinkHostSelectEntity | ReolinkChimeSelectEntity + ] = [ ReolinkSelectEntity(reolink_data, channel, entity_description) for entity_description in SELECT_ENTITIES for channel in reolink_data.host.api.channels if entity_description.supported(reolink_data.host.api, channel) ] + entities.extend( + ReolinkHostSelectEntity(reolink_data, entity_description) + for entity_description in HOST_SELECT_ENTITIES + if entity_description.supported(reolink_data.host.api) + ) entities.extend( ReolinkChimeSelectEntity(reolink_data, chime, entity_description) for entity_description in CHIME_SELECT_ENTITIES @@ -360,6 +394,33 @@ class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): self.async_write_ha_state() +class ReolinkHostSelectEntity(ReolinkHostCoordinatorEntity, SelectEntity): + """Base select entity class for Reolink Host.""" + + entity_description: ReolinkHostSelectEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + entity_description: ReolinkHostSelectEntityDescription, + ) -> None: + """Initialize Reolink select entity.""" + self.entity_description = entity_description + super().__init__(reolink_data) + self._attr_options = entity_description.get_options(self._host.api) + + @property + def current_option(self) -> str | None: + """Return the current option.""" + return self.entity_description.value(self._host.api) + + @raise_translated_error + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.entity_description.method(self._host.api, option) + self.async_write_ha_state() + + class ReolinkChimeSelectEntity(ReolinkChimeCoordinatorEntity, SelectEntity): """Base select entity class for Reolink IP cameras.""" diff --git a/homeassistant/components/reolink/sensor.py b/homeassistant/components/reolink/sensor.py index ecad555b481..85de03dd1a3 100644 --- a/homeassistant/components/reolink/sensor.py +++ b/homeassistant/components/reolink/sensor.py @@ -107,6 +107,17 @@ SENSORS = ( value=lambda api, ch: BatteryEnum(api.battery_status(ch)).name, supported=lambda api, ch: api.supported(ch, "battery"), ), + ReolinkSensorEntityDescription( + key="day_night_state", + cmd_id=33, + cmd_key="296", + translation_key="day_night_state", + device_class=SensorDeviceClass.ENUM, + 
entity_category=EntityCategory.DIAGNOSTIC, + options=["day", "night", "led_day"], + value=lambda api, ch: api.baichuan.day_night_state(ch), + supported=lambda api, ch: api.supported(ch, "day_night_state"), + ), ) HOST_SENSORS = ( diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 335ed92d32e..9a6db7b5d67 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -8,13 +8,15 @@ "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]", "use_https": "Enable HTTPS", + "baichuan_port": "Basic service port", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" }, "data_description": { "host": "The hostname or IP address of your Reolink device. For example: '192.168.1.25'.", - "port": "The port to connect to the Reolink device. For HTTP normally: '80', for HTTPS normally '443'.", + "port": "The HTTP(s) port to connect to the Reolink device API. For HTTP normally: '80', for HTTPS normally '443'.", "use_https": "Use a HTTPS (SSL) connection to the Reolink device.", + "baichuan_port": "The 'Basic Service Port' to connect to the Reolink device over TCP. Normally '9000' unless manually changed in the Reolink desktop client.", "username": "Username to login to the Reolink device itself. Not the Reolink cloud account.", "password": "Password to login to the Reolink device itself. Not the Reolink cloud account." } @@ -29,7 +31,7 @@ "cannot_connect": "Failed to connect, check the IP address of the camera", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "not_admin": "User needs to be admin, user \"{username}\" has authorisation level \"{userlevel}\"", - "password_incompatible": "Password contains incompatible special character, only these characters are allowed: a-z, A-Z, 0-9 or {special_chars}", + "password_incompatible": "Password contains incompatible special character or is too long, maximum 31 characters and only these characters are allowed: a-z, A-Z, 0-9 or {special_chars}", "unknown": "[%key:common::config_flow::error::unknown%]", "update_needed": "Failed to login because of outdated firmware, please update the firmware to version {needed_firmware} using the Reolink Download Center: {download_center_url}, currently version {current_firmware} is installed", "webhook_exception": "Home Assistant URL is not available, go to Settings > System > Network > Home Assistant URL and correct the URLs, see {more_info}" @@ -100,7 +102,13 @@ "message": "Error trying to update Reolink firmware: {err}" }, "config_entry_not_ready": { - "message": "Error while trying to setup {host}: {err}" + "message": "Error while trying to set up {host}: {err}" + }, + "update_already_running": { + "message": "Reolink firmware update already running, wait on completion before starting another" + }, + "firmware_rate_limit": { + "message": "Reolink firmware update server reached hourly rate limit: updating can be tried again in 1 hour" } }, "issues": { @@ -127,6 +135,10 @@ "hub_switch_deprecated": { "title": "Reolink Home Hub switches deprecated", "description": "The redundant 'Record', 'Email on event', 'FTP upload', 'Push notifications', and 'Buzzer on event' switches on the Reolink Home Hub are deprecated since the new firmware no longer supports these. Please use the equally named switches under each of the camera devices connected to the Home Hub instead. 
To remove this issue, please adjust automations accordingly and disable the switch entities mentioned." + }, + "password_too_long": { + "title": "Reolink password too long", + "description": "The password for \"{name}\" is more than 31 characters long, this is no longer compatible with the Reolink API. Please change the password using the Reolink app/client to a password that is shorter than 32 characters. After changing the password, fill in the new password in the Reolink Re-authentication flow to continue using this integration. The latest version of the Reolink app/client also has a password limit of 31 characters." } }, "services": { @@ -335,6 +347,83 @@ "off": "Awake", "on": "Sleeping" } + }, + "crossline_person": { + "name": "Crossline {zone_name} person", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "crossline_vehicle": { + "name": "Crossline {zone_name} vehicle", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "crossline_dog_cat": { + "name": "Crossline {zone_name} animal", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "intrusion_person": { + "name": "Intrusion {zone_name} person", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "intrusion_vehicle": { + "name": "Intrusion {zone_name} vehicle", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "intrusion_dog_cat": { + "name": "Intrusion {zone_name} animal", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "linger_person": { + "name": "Linger {zone_name} person", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "linger_vehicle": { + "name": "Linger {zone_name} vehicle", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "linger_dog_cat": { + "name": "Linger {zone_name} animal", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "forgotten_item": { + "name": "Item forgotten {zone_name}", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } + }, + "taken_item": { + "name": "Item taken {zone_name}", + "state": { + "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", + "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + } } }, "button": { @@ -479,6 +568,21 @@ "ai_animal_sensitivity": { "name": "AI animal sensitivity" }, + "crossline_sensitivity": { + "name": "AI crossline {zone_name} sensitivity" +
}, + "intrusion_sensitivity": { + "name": "AI intrusion {zone_name} sensitivity" + }, + "linger_sensitivity": { + "name": "AI linger {zone_name} sensitivity" + }, + "forgotten_item_sensitivity": { + "name": "AI item forgotten {zone_name} sensitivity" + }, + "taken_item_sensitivity": { + "name": "AI item taken {zone_name} sensitivity" + }, "ai_face_delay": { "name": "AI face delay" }, @@ -497,6 +601,18 @@ "ai_animal_delay": { "name": "AI animal delay" }, + "intrusion_delay": { + "name": "AI intrusion {zone_name} delay" + }, + "linger_delay": { + "name": "AI linger {zone_name} delay" + }, + "forgotten_item_delay": { + "name": "AI item forgotten {zone_name} delay" + }, + "taken_item_delay": { + "name": "AI item taken {zone_name} delay" + }, "auto_quick_reply_time": { "name": "Auto quick reply time" }, @@ -720,6 +836,15 @@ }, "sub_bit_rate": { "name": "Fluent bit rate" + }, + "scene_mode": { + "name": "Scene mode", + "state": { + "off": "[%key:common::state::off%]", + "disarm": "Disarmed", + "home": "Home", + "away": "Away" + } } }, "sensor": { @@ -746,6 +871,14 @@ "chargecomplete": "Charge complete" } }, + "day_night_state": { + "name": "Day night state", + "state": { + "day": "Color", + "night": "Black & white", + "led_day": "Color with floodlight" + } + }, "hdd_storage": { "name": "HDD {hdd_index} storage" }, diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index 0f106c0f2cc..af87a75eece 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -162,6 +162,7 @@ SWITCH_ENTITIES = ( ReolinkSwitchEntityDescription( key="manual_record", cmd_key="GetManualRec", + cmd_id=588, translation_key="manual_record", entity_category=EntityCategory.CONFIG, supported=lambda api, ch: api.supported(ch, "manual_record"), diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index 0744d66fb5b..a7c883003b7 100644 --- a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -31,7 +31,7 @@ from .entity import ( ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) -from .util import ReolinkConfigEntry, ReolinkData +from .util import ReolinkConfigEntry, ReolinkData, raise_translated_error PARALLEL_UPDATES = 0 RESUME_AFTER_INSTALL = 15 @@ -184,6 +184,7 @@ class ReolinkUpdateBaseEntity( f"## Release notes\n\n{new_firmware.release_notes}" ) + @raise_translated_error async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: @@ -196,6 +197,8 @@ class ReolinkUpdateBaseEntity( try: await self._host.api.update_firmware(self._channel) except ReolinkError as err: + if err.translation_key: + raise raise HomeAssistantError( translation_domain=DOMAIN, translation_key="firmware_install_error", diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index a5556b66a33..12b4825caeb 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.storage import Store +from homeassistant.helpers.translation import async_get_exception_message from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN @@ -97,6 +98,30 @@ def get_device_uid_and_ch( return (device_uid, ch, 
is_chime) +def check_translation_key(err: ReolinkError) -> str | None: + """Check if the translation key from the upstream library is present.""" + if not err.translation_key: + return None + if async_get_exception_message(DOMAIN, err.translation_key) == err.translation_key: + # translation key not found in strings.json + return None + return err.translation_key + + +_EXCEPTION_TO_TRANSLATION_KEY = { + ApiError: "api_error", + InvalidContentTypeError: "invalid_content_type", + CredentialsInvalidError: "invalid_credentials", + LoginError: "login_error", + NoDataError: "no_data", + UnexpectedDataError: "unexpected_data", + NotSupportedError: "not_supported", + SubscriptionError: "subscription_error", + ReolinkConnectionError: "connection_error", + ReolinkTimeoutError: "timeout", +} + + # Decorators def raise_translated_error[**P, R]( func: Callable[P, Awaitable[R]], @@ -110,73 +135,14 @@ def raise_translated_error[**P, R]( except InvalidParameterError as err: raise ServiceValidationError( translation_domain=DOMAIN, - translation_key="invalid_parameter", - translation_placeholders={"err": str(err)}, - ) from err - except ApiError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="api_error", - translation_placeholders={"err": str(err)}, - ) from err - except InvalidContentTypeError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="invalid_content_type", - translation_placeholders={"err": str(err)}, - ) from err - except CredentialsInvalidError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="invalid_credentials", - translation_placeholders={"err": str(err)}, - ) from err - except LoginError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="login_error", - translation_placeholders={"err": str(err)}, - ) from err - except NoDataError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="no_data", - translation_placeholders={"err": str(err)}, - ) from err - except UnexpectedDataError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="unexpected_data", - translation_placeholders={"err": str(err)}, - ) from err - except NotSupportedError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="not_supported", - translation_placeholders={"err": str(err)}, - ) from err - except SubscriptionError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="subscription_error", - translation_placeholders={"err": str(err)}, - ) from err - except ReolinkConnectionError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="connection_error", - translation_placeholders={"err": str(err)}, - ) from err - except ReolinkTimeoutError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="timeout", + translation_key=check_translation_key(err) or "invalid_parameter", translation_placeholders={"err": str(err)}, ) from err except ReolinkError as err: raise HomeAssistantError( translation_domain=DOMAIN, - translation_key="unexpected", + translation_key=check_translation_key(err) + or _EXCEPTION_TO_TRANSLATION_KEY.get(type(err), "unexpected"), translation_placeholders={"err": str(err)}, ) from err diff --git a/homeassistant/components/rflink/sensor.py b/homeassistant/components/rflink/sensor.py index 027c39da70f..97d0b811509 100644 --- a/homeassistant/components/rflink/sensor.py +++ 
b/homeassistant/components/rflink/sensor.py @@ -236,7 +236,8 @@ SENSOR_TYPES = ( key="winddirection", name="Wind direction", icon="mdi:compass", - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, native_unit_of_measurement=DEGREE, ), SensorEntityDescription( diff --git a/homeassistant/components/rfxtrx/sensor.py b/homeassistant/components/rfxtrx/sensor.py index 4b256279445..6669b1367df 100644 --- a/homeassistant/components/rfxtrx/sensor.py +++ b/homeassistant/components/rfxtrx/sensor.py @@ -161,7 +161,8 @@ SENSOR_TYPES = ( RfxtrxSensorEntityDescription( key="Wind direction", translation_key="wind_direction", - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, native_unit_of_measurement=DEGREE, ), RfxtrxSensorEntityDescription( diff --git a/homeassistant/components/risco/const.py b/homeassistant/components/risco/const.py index 078e26c43b5..ef3280fe232 100644 --- a/homeassistant/components/risco/const.py +++ b/homeassistant/components/risco/const.py @@ -30,9 +30,9 @@ RISCO_ARM = "arm" RISCO_PARTIAL_ARM = "partial_arm" RISCO_STATES = [RISCO_ARM, RISCO_PARTIAL_ARM, *RISCO_GROUPS] -DEFAULT_RISCO_GROUPS_TO_HA = { - group: AlarmControlPanelState.ARMED_HOME for group in RISCO_GROUPS -} +DEFAULT_RISCO_GROUPS_TO_HA = dict.fromkeys( + RISCO_GROUPS, AlarmControlPanelState.ARMED_HOME +) DEFAULT_RISCO_STATES_TO_HA = { RISCO_ARM: AlarmControlPanelState.ARMED_AWAY, RISCO_PARTIAL_ARM: AlarmControlPanelState.ARMED_HOME, diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index c382a56cde7..8140b58b86c 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -23,6 +23,8 @@ from roborock.web_api import RoborockApiClient from homeassistant.const import CONF_USERNAME, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_BASE_URL, CONF_USER_DATA, DOMAIN, PLATFORMS from .coordinator import ( @@ -44,7 +46,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> entry.async_on_unload(entry.add_update_listener(update_listener)) user_data = UserData.from_dict(entry.data[CONF_USER_DATA]) - api_client = RoborockApiClient(entry.data[CONF_USERNAME], entry.data[CONF_BASE_URL]) + api_client = RoborockApiClient( + entry.data[CONF_USERNAME], + entry.data[CONF_BASE_URL], + session=async_get_clientsession(hass), + ) _LOGGER.debug("Getting home data") try: home_data = await api_client.get_home_data_v2(user_data) @@ -83,7 +89,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> # Get a Coordinator if the device is available or if we have connected to the device before coordinators = await asyncio.gather( *build_setup_functions( - hass, entry, device_map, user_data, product_info, home_data.rooms + hass, + entry, + device_map, + user_data, + product_info, + home_data.rooms, + api_client, ), return_exceptions=True, ) @@ -105,6 +117,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> translation_key="no_coordinators", ) valid_coordinators = RoborockCoordinators(v1_coords, a01_coords) + await 
asyncio.gather( + *(coord.refresh_coordinator_map() for coord in valid_coordinators.v1) + ) async def on_stop(_: Any) -> None: _LOGGER.debug("Shutting down roborock") @@ -125,6 +140,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + device_registry = dr.async_get(hass) + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry_id=entry.entry_id + ) + for device in device_entries: + # Remove any devices that are no longer in the account. + # The API returns all devices, even if they are offline + device_duids = { + identifier[1].replace("_dock", "") for identifier in device.identifiers + } + if any(device_duid in device_map for device_duid in device_duids): + continue + _LOGGER.info( + "Removing device: %s because it no longer exists in your account", + device.name, + ) + device_registry.async_update_device( + device_id=device.id, + remove_config_entry_id=entry.entry_id, + ) + return True @@ -135,6 +171,7 @@ def build_setup_functions( user_data: UserData, product_info: dict[str, HomeDataProduct], home_data_rooms: list[HomeDataRoom], + api_client: RoborockApiClient, ) -> list[ Coroutine[ Any, @@ -151,6 +188,7 @@ device, product_info[device.product_id], home_data_rooms, + api_client, ) for device in device_map.values() ] @@ -163,11 +201,12 @@ async def setup_device( device: HomeDataDevice, product_info: HomeDataProduct, home_data_rooms: list[HomeDataRoom], + api_client: RoborockApiClient, ) -> RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01 | None: """Set up a coordinator for a given device.""" if device.pv == "1.0": return await setup_device_v1( - hass, entry, user_data, device, product_info, home_data_rooms + hass, entry, user_data, device, product_info, home_data_rooms, api_client ) if device.pv == "A01": return await setup_device_a01(hass, entry, user_data, device, product_info) @@ -187,6 +226,7 @@ async def setup_device_v1( device: HomeDataDevice, product_info: HomeDataProduct, home_data_rooms: list[HomeDataRoom], + api_client: RoborockApiClient, ) -> RoborockDataUpdateCoordinator | None: """Set up a device Coordinator.""" mqtt_client = await hass.async_add_executor_job( @@ -208,7 +248,15 @@ async def setup_device_v1( await mqtt_client.async_release() raise coordinator = RoborockDataUpdateCoordinator( - hass, entry, device, networking, product_info, mqtt_client, home_data_rooms + hass, + entry, + device, + networking, + product_info, + mqtt_client, + home_data_rooms, + api_client, + user_data, ) try: await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/roborock/binary_sensor.py b/homeassistant/components/roborock/binary_sensor.py index db557f055dc..a2c34f5c59d 100644 --- a/homeassistant/components/roborock/binary_sensor.py +++ b/homeassistant/components/roborock/binary_sensor.py @@ -5,6 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from roborock.containers import RoborockStateCode from roborock.roborock_typing import DeviceProp from homeassistant.components.binary_sensor import ( @@ -12,19 +13,23 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.const import EntityCategory +from homeassistant.const import ATTR_BATTERY_CHARGING, EntityCategory from homeassistant.core import HomeAssistant from
homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockBinarySensorDescription(BinarySensorEntityDescription): """A class that describes Roborock binary sensors.""" value_fn: Callable[[DeviceProp], bool | int | None] + # If it is a dock entity + is_dock_entity: bool = False BINARY_SENSOR_DESCRIPTIONS = [ @@ -34,6 +39,7 @@ BINARY_SENSOR_DESCRIPTIONS = [ device_class=BinarySensorDeviceClass.RUNNING, entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda data: data.status.dry_status, + is_dock_entity=True, ), RoborockBinarySensorDescription( key="water_box_carriage_status", @@ -63,6 +69,13 @@ BINARY_SENSOR_DESCRIPTIONS = [ entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda data: data.status.in_cleaning, ), + RoborockBinarySensorDescription( + key=ATTR_BATTERY_CHARGING, + device_class=BinarySensorDeviceClass.BATTERY_CHARGING, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda data: data.status.state + in (RoborockStateCode.charging, RoborockStateCode.charging_complete), + ), ] @@ -97,6 +110,7 @@ class RoborockBinarySensorEntity(RoborockCoordinatedEntityV1, BinarySensorEntity super().__init__( f"{description.key}_{coordinator.duid_slug}", coordinator, + is_dock_entity=description.is_dock_entity, ) self.entity_description = description diff --git a/homeassistant/components/roborock/button.py b/homeassistant/components/roborock/button.py index 33e9502aca1..fea38524fe0 100644 --- a/homeassistant/components/roborock/button.py +++ b/homeassistant/components/roborock/button.py @@ -2,7 +2,10 @@ from __future__ import annotations +import asyncio from dataclasses import dataclass +import itertools +from typing import Any from roborock.roborock_typing import RoborockCommand @@ -12,7 +15,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator -from .entity import RoborockEntityV1 +from .entity import RoborockEntity, RoborockEntityV1 + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -65,14 +70,34 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up Roborock button platform.""" + routines_lists = await asyncio.gather( + *[coordinator.get_routines() for coordinator in config_entry.runtime_data.v1], + ) async_add_entities( - RoborockButtonEntity( - coordinator, - description, + itertools.chain( + ( + RoborockButtonEntity( + coordinator, + description, + ) + for coordinator in config_entry.runtime_data.v1 + for description in CONSUMABLE_BUTTON_DESCRIPTIONS + if isinstance(coordinator, RoborockDataUpdateCoordinator) + ), + ( + RoborockRoutineButtonEntity( + coordinator, + ButtonEntityDescription( + key=str(routine.id), + name=routine.name, + ), + ) + for coordinator, routines in zip( + config_entry.runtime_data.v1, routines_lists, strict=True + ) + for routine in routines + ), ) - for coordinator in config_entry.runtime_data.v1 - for description in CONSUMABLE_BUTTON_DESCRIPTIONS - if isinstance(coordinator, RoborockDataUpdateCoordinator) ) @@ -97,3 +122,28 @@ class RoborockButtonEntity(RoborockEntityV1, ButtonEntity): async def async_press(self) -> None: """Press the button.""" await self.send(self.entity_description.command, self.entity_description.param) 
+ + +class RoborockRoutineButtonEntity(RoborockEntity, ButtonEntity): + """A class to define Roborock routines button entities.""" + + entity_description: ButtonEntityDescription + + def __init__( + self, + coordinator: RoborockDataUpdateCoordinator, + entity_description: ButtonEntityDescription, + ) -> None: + """Create a button entity.""" + super().__init__( + f"{entity_description.key}_{coordinator.duid_slug}", + coordinator.device_info, + coordinator.api, + ) + self._routine_id = int(entity_description.key) + self._coordinator = coordinator + self.entity_description = entity_description + + async def async_press(self, **kwargs: Any) -> None: + """Press the button.""" + await self._coordinator.execute_routines(self._routine_id) diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index 1a6b67286bb..886bebea9b6 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -21,14 +21,17 @@ import voluptuous as vol from homeassistant.config_entries import ( SOURCE_REAUTH, - ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) from homeassistant.const import CONF_USERNAME from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo +from . import RoborockConfigEntry from .const import ( CONF_BASE_URL, CONF_ENTRY_CODE, @@ -63,7 +66,9 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured(error="already_configured_account") self._username = username _LOGGER.debug("Requesting code for Roborock account") - self._client = RoborockApiClient(username) + self._client = RoborockApiClient( + username, session=async_get_clientsession(self.hass) + ) errors = await self._request_code() if not errors: return await self.async_step_code() @@ -134,13 +139,32 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle a flow started by a dhcp discovery.""" + await self._async_handle_discovery_without_unique_id() + device_registry = dr.async_get(self.hass) + device = device_registry.async_get_device( + connections={ + (dr.CONNECTION_NETWORK_MAC, dr.format_mac(discovery_info.macaddress)) + } + ) + if device is not None and any( + identifier[0] == DOMAIN for identifier in device.identifiers + ): + return self.async_abort(reason="already_configured") + return await self.async_step_user() + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self._username = entry_data[CONF_USERNAME] assert self._username - self._client = RoborockApiClient(self._username) + self._client = RoborockApiClient( + self._username, session=async_get_clientsession(self.hass) + ) return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -170,7 +194,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: RoborockConfigEntry, ) -> RoborockOptionsFlowHandler: """Create the options flow.""" return RoborockOptionsFlowHandler(config_entry) @@ -179,7 +203,7 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): class RoborockOptionsFlowHandler(OptionsFlow): """Handle an option 
flow for Roborock.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self, config_entry: RoborockConfigEntry) -> None: """Initialize options flow.""" self.options = deepcopy(dict(config_entry.options)) diff --git a/homeassistant/components/roborock/const.py b/homeassistant/components/roborock/const.py index cc8d34fbadc..e56fade7078 100644 --- a/homeassistant/components/roborock/const.py +++ b/homeassistant/components/roborock/const.py @@ -1,5 +1,7 @@ """Constants for Roborock.""" +from datetime import timedelta + from vacuum_map_parser_base.config.drawable import Drawable from homeassistant.const import Platform @@ -43,13 +45,21 @@ PLATFORMS = [ Platform.VACUUM, ] - -IMAGE_CACHE_INTERVAL = 90 +# This can be lowered in the future if we do not receive rate limiting issues. +IMAGE_CACHE_INTERVAL = timedelta(seconds=30) MAP_SLEEP = 3 GET_MAPS_SERVICE_NAME = "get_maps" +MAP_SCALE = 4 MAP_FILE_FORMAT = "PNG" MAP_FILENAME_SUFFIX = ".png" SET_VACUUM_GOTO_POSITION_SERVICE_NAME = "set_vacuum_goto_position" GET_VACUUM_CURRENT_POSITION_SERVICE_NAME = "get_vacuum_current_position" + + +A01_UPDATE_INTERVAL = timedelta(minutes=1) +V1_CLOUD_IN_CLEANING_INTERVAL = timedelta(seconds=30) +V1_CLOUD_NOT_CLEANING_INTERVAL = timedelta(minutes=1) +V1_LOCAL_IN_CLEANING_INTERVAL = timedelta(seconds=15) +V1_LOCAL_NOT_CLEANING_INTERVAL = timedelta(seconds=30) diff --git a/homeassistant/components/roborock/coordinator.py b/homeassistant/components/roborock/coordinator.py index 806651c9ac5..cc0bee1cd5f 100644 --- a/homeassistant/components/roborock/coordinator.py +++ b/homeassistant/components/roborock/coordinator.py @@ -5,29 +5,57 @@ from __future__ import annotations import asyncio from dataclasses import dataclass from datetime import timedelta +import io import logging from propcache.api import cached_property from roborock import HomeDataRoom from roborock.code_mappings import RoborockCategory -from roborock.containers import DeviceData, HomeDataDevice, HomeDataProduct, NetworkInfo +from roborock.containers import ( + DeviceData, + HomeDataDevice, + HomeDataProduct, + HomeDataScene, + NetworkInfo, + UserData, +) from roborock.exceptions import RoborockException from roborock.roborock_message import RoborockDyadDataProtocol, RoborockZeoProtocol from roborock.roborock_typing import DeviceProp from roborock.version_1_apis.roborock_local_client_v1 import RoborockLocalClientV1 from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1 from roborock.version_a01_apis import RoborockClientA01 +from roborock.web_api import RoborockApiClient +from vacuum_map_parser_base.config.color import ColorsPalette +from vacuum_map_parser_base.config.image_config import ImageConfig +from vacuum_map_parser_base.config.size import Sizes +from vacuum_map_parser_base.map_data import MapData +from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_CONNECTIONS from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from homeassistant.util import slugify +from homeassistant.util import dt as dt_util, slugify -from .const import DOMAIN +from .const import ( + A01_UPDATE_INTERVAL, + 
DEFAULT_DRAWABLES, + DOMAIN, + DRAWABLES, + IMAGE_CACHE_INTERVAL, + MAP_FILE_FORMAT, + MAP_SCALE, + MAP_SLEEP, + V1_CLOUD_IN_CLEANING_INTERVAL, + V1_CLOUD_NOT_CLEANING_INTERVAL, + V1_LOCAL_IN_CLEANING_INTERVAL, + V1_LOCAL_NOT_CLEANING_INTERVAL, +) from .models import RoborockA01HassDeviceInfo, RoborockHassDeviceInfo, RoborockMapInfo from .roborock_storage import RoborockMapStorage @@ -67,6 +95,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): product_info: HomeDataProduct, cloud_api: RoborockMqttClientV1, home_data_rooms: list[HomeDataRoom], + api_client: RoborockApiClient, + user_data: UserData, ) -> None: """Initialize.""" super().__init__( @@ -74,7 +104,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): _LOGGER, config_entry=config_entry, name=DOMAIN, - update_interval=SCAN_INTERVAL, + # Assume we can use the local api. + update_interval=V1_LOCAL_NOT_CLEANING_INTERVAL, ) self.roborock_device_info = RoborockHassDeviceInfo( device, @@ -89,7 +120,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): self.cloud_api = cloud_api self.device_info = DeviceInfo( name=self.roborock_device_info.device.name, - identifiers={(DOMAIN, self.roborock_device_info.device.duid)}, + identifiers={(DOMAIN, self.duid)}, manufacturer="Roborock", model=self.roborock_device_info.product.model, model_id=self.roborock_device_info.product.model, @@ -98,13 +129,62 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): self.current_map: int | None = None if mac := self.roborock_device_info.network_info.mac: - self.device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_NETWORK_MAC, mac)} + self.device_info[ATTR_CONNECTIONS] = { + (dr.CONNECTION_NETWORK_MAC, dr.format_mac(mac)) + } # Maps from map flag to map name self.maps: dict[int, RoborockMapInfo] = {} self._home_data_rooms = {str(room.id): room.name for room in home_data_rooms} self.map_storage = RoborockMapStorage( - hass, self.config_entry.entry_id, slugify(self.duid) + hass, self.config_entry.entry_id, self.duid_slug ) + self._user_data = user_data + self._api_client = api_client + self._is_cloud_api = False + drawables = [ + drawable + for drawable, default_value in DEFAULT_DRAWABLES.items() + if config_entry.options.get(DRAWABLES, {}).get(drawable, default_value) + ] + self.map_parser = RoborockMapDataParser( + ColorsPalette(), + Sizes({k: v * MAP_SCALE for k, v in Sizes.SIZES.items()}), + drawables, + ImageConfig(scale=MAP_SCALE), + [], + ) + + @cached_property + def dock_device_info(self) -> DeviceInfo: + """Gets the device info for the dock. + + This must happen after the coordinator does the first update. + Which will be the case when this is called. 
+ """ + dock_type = self.roborock_device_info.props.status.dock_type + return DeviceInfo( + name=f"{self.roborock_device_info.device.name} Dock", + identifiers={(DOMAIN, f"{self.duid}_dock")}, + manufacturer="Roborock", + model=f"{self.roborock_device_info.product.model} Dock", + model_id=str(dock_type.value) if dock_type is not None else "Unknown", + sw_version=self.roborock_device_info.device.fv, + ) + + def parse_map_data_v1( + self, map_bytes: bytes + ) -> tuple[bytes | None, MapData | None]: + """Parse map_bytes and return MapData and the image.""" + try: + parsed_map = self.map_parser.parse(map_bytes) + except (IndexError, ValueError) as err: + _LOGGER.debug("Exception when parsing map contents: %s", err) + return None, None + if parsed_map.image is None: + return None, None + img_byte_arr = io.BytesIO() + parsed_map.image.data.save(img_byte_arr, format=MAP_FILE_FORMAT) + return img_byte_arr.getvalue(), parsed_map async def _async_setup(self) -> None: """Set up the coordinator.""" @@ -115,17 +195,68 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): try: maps = await self.api.get_multi_maps_list() except RoborockException as err: - raise UpdateFailed("Failed to get map data: {err}") from err + _LOGGER.debug("Failed to get maps: %s", err) + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="map_failure", + translation_placeholders={"error": str(err)}, + ) from err # Rooms names populated later with calls to `set_current_map_rooms` for each map + roborock_maps = maps.map_info if (maps and maps.map_info) else () + stored_images = await asyncio.gather( + *[ + self.map_storage.async_load_map(roborock_map.mapFlag) + for roborock_map in roborock_maps + ] + ) self.maps = { roborock_map.mapFlag: RoborockMapInfo( flag=roborock_map.mapFlag, name=roborock_map.name or f"Map {roborock_map.mapFlag}", rooms={}, + image=image, + last_updated=dt_util.utcnow() - IMAGE_CACHE_INTERVAL, + map_data=None, ) - for roborock_map in (maps.map_info if (maps and maps.map_info) else ()) + for image, roborock_map in zip(stored_images, roborock_maps, strict=False) } + async def update_map(self) -> None: + """Update the currently selected map.""" + # The current map was set in the props update, so these can be done without + # worry of applying them to the wrong map. + if self.current_map is None: + # This exists as a safeguard/ to keep mypy happy. + return + try: + response = await self.cloud_api.get_map_v1() + except RoborockException as ex: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="map_failure", + ) from ex + if not isinstance(response, bytes): + _LOGGER.debug("Failed to parse map contents: %s", response) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="map_failure", + ) + parsed_image, parsed_map = self.parse_map_data_v1(response) + if parsed_image is None or parsed_map is None: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="map_failure", + ) + current_roborock_map_info = self.maps[self.current_map] + if parsed_image != self.maps[self.current_map].image: + await self.map_storage.async_save_map( + self.current_map, + parsed_image, + ) + current_roborock_map_info.image = parsed_image + current_roborock_map_info.last_updated = dt_util.utcnow() + current_roborock_map_info.map_data = parsed_map + async def _verify_api(self) -> None: """Verify that the api is reachable. 
If it is not, switch clients.""" if isinstance(self.api, RoborockLocalClientV1): @@ -134,11 +265,13 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): except RoborockException: _LOGGER.warning( "Using the cloud API for device %s. This is not recommended as it can lead to rate limiting. We recommend making your vacuum accessible by your Home Assistant instance", - self.roborock_device_info.device.duid, + self.duid, ) await self.api.async_disconnect() # We use the cloud api if the local api fails to connect. self.api = self.cloud_api + self.update_interval = V1_CLOUD_NOT_CLEANING_INTERVAL + self._is_cloud_api = True # Right now this should never be called if the cloud api is the primary api, # but in the future if it is, a new else should be added. @@ -158,16 +291,45 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): async def _async_update_data(self) -> DeviceProp: """Update data via library.""" + previous_state = self.roborock_device_info.props.status.state_name try: # Update device props and standard api information await self._update_device_prop() # Set the new map id from the updated device props self._set_current_map() # Get the rooms for that map id. + + # If the vacuum is currently cleaning and it has been IMAGE_CACHE_INTERVAL + # since the last map update, you can update the map. + new_status = self.roborock_device_info.props.status + if self.current_map is not None and ( + ( + new_status.in_cleaning + and (dt_util.utcnow() - self.maps[self.current_map].last_updated) + > IMAGE_CACHE_INTERVAL + ) + or previous_state != new_status.state_name + ): + try: + await self.update_map() + except HomeAssistantError as err: + _LOGGER.debug("Failed to update map: %s", err) await self.set_current_map_rooms() except RoborockException as ex: _LOGGER.debug("Failed to update data: %s", ex) - raise UpdateFailed(ex) from ex + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_data_fail", + ) from ex + if self.roborock_device_info.props.status.in_cleaning: + if self._is_cloud_api: + self.update_interval = V1_CLOUD_IN_CLEANING_INTERVAL + else: + self.update_interval = V1_LOCAL_IN_CLEANING_INTERVAL + elif self._is_cloud_api: + self.update_interval = V1_CLOUD_NOT_CLEANING_INTERVAL + else: + self.update_interval = V1_LOCAL_NOT_CLEANING_INTERVAL return self.roborock_device_info.props def _set_current_map(self) -> None: @@ -194,6 +356,34 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): for room in room_mapping or () } + async def get_routines(self) -> list[HomeDataScene]: + """Get routines.""" + try: + return await self._api_client.get_scenes(self._user_data, self.duid) + except RoborockException as err: + _LOGGER.error("Failed to get routines %s", err) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_failed", + translation_placeholders={ + "command": "get_scenes", + }, + ) from err + + async def execute_routines(self, routine_id: int) -> None: + """Execute routines.""" + try: + await self._api_client.execute_scene(self._user_data, routine_id) + except RoborockException as err: + _LOGGER.error("Failed to execute routines %s %s", routine_id, err) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_failed", + translation_placeholders={ + "command": "execute_scene", + }, + ) from err + @cached_property def duid(self) -> str: """Get the unique id of the device as specified by Roborock.""" @@ -204,6 +394,43 @@ class 
RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): """Get the slug of the duid.""" return slugify(self.duid) + async def refresh_coordinator_map(self) -> None: + """Get the starting map information for all maps for this device. + + The following steps must be done synchronously. + Only one map can be loaded at a time per device. + """ + cur_map = self.current_map + # This won't be None at this point as the coordinator will have run first. + if cur_map is None: + # If we don't have a cur map(shouldn't happen) just + # return as we can't do anything. + return + map_flags = sorted(self.maps, key=lambda data: data == cur_map, reverse=True) + for map_flag in map_flags: + if map_flag != cur_map: + # Only change the map and sleep if we have multiple maps. + await self.api.load_multi_map(map_flag) + self.current_map = map_flag + # We cannot get the map until the roborock servers fully process the + # map change. + await asyncio.sleep(MAP_SLEEP) + tasks = [self.set_current_map_rooms()] + # The image is set within async_setup, so if it exists, we have it here. + if self.maps[map_flag].image is None: + # If we don't have a cached map, let's update it here so that it can be + # cached in the future. + tasks.append(self.update_map()) + # If either of these fail, we don't care, and we want to continue. + await asyncio.gather(*tasks, return_exceptions=True) + + if len(self.maps) != 1: + # Set the map back to the map the user previously had selected so that it + # does not change the end user's app. + # Only needs to happen when we changed maps above. + await self.api.load_multi_map(cur_map) + self.current_map = cur_map + class RoborockDataUpdateCoordinatorA01( DataUpdateCoordinator[ @@ -228,7 +455,7 @@ class RoborockDataUpdateCoordinatorA01( _LOGGER, config_entry=config_entry, name=DOMAIN, - update_interval=SCAN_INTERVAL, + update_interval=A01_UPDATE_INTERVAL, ) self.api = api self.device_info = DeviceInfo( diff --git a/homeassistant/components/roborock/entity.py b/homeassistant/components/roborock/entity.py index 4a16ada5967..404f239c93a 100644 --- a/homeassistant/components/roborock/entity.py +++ b/homeassistant/components/roborock/entity.py @@ -8,7 +8,11 @@ from roborock.containers import Consumable, Status from roborock.exceptions import RoborockException from roborock.roborock_message import RoborockDataProtocol from roborock.roborock_typing import RoborockCommand -from roborock.version_1_apis.roborock_client_v1 import AttributeCache, RoborockClientV1 +from roborock.version_1_apis.roborock_client_v1 import ( + CLOUD_REQUIRED, + AttributeCache, + RoborockClientV1, +) from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1 from roborock.version_a01_apis import RoborockClientA01 @@ -53,14 +57,16 @@ class RoborockEntityV1(RoborockEntity): """Get an item from the api cache.""" return self._api.cache[attribute] - async def send( - self, + @classmethod + async def _send_command( + cls, command: RoborockCommand | str, + api: RoborockClientV1, params: dict[str, Any] | list[Any] | int | None = None, ) -> dict: - """Send a command to a vacuum cleaner.""" + """Send a Roborock command with params to a given api.""" try: - response: dict = await self._api.send_command(command, params) + response: dict = await api.send_command(command, params) except RoborockException as err: if isinstance(command, RoborockCommand): command_name = command.name @@ -75,6 +81,14 @@ class RoborockEntityV1(RoborockEntity): ) from err return response + async def send( + self, + command: 
RoborockCommand | str, + params: dict[str, Any] | list[Any] | int | None = None, + ) -> dict: + """Send a command to a vacuum cleaner.""" + return await self._send_command(command, self._api, params) + @property def api(self) -> RoborockClientV1: """Returns the api.""" @@ -107,12 +121,15 @@ class RoborockCoordinatedEntityV1( listener_request: list[RoborockDataProtocol] | RoborockDataProtocol | None = None, + is_dock_entity: bool = False, ) -> None: """Initialize the coordinated Roborock Device.""" RoborockEntityV1.__init__( self, unique_id=unique_id, - device_info=coordinator.device_info, + device_info=coordinator.device_info + if not is_dock_entity + else coordinator.dock_device_info, api=coordinator.api, ) CoordinatorEntity.__init__(self, coordinator=coordinator) @@ -152,7 +169,10 @@ class RoborockCoordinatedEntityV1( params: dict[str, Any] | list[Any] | int | None = None, ) -> dict: """Overloads normal send command but refreshes coordinator.""" - res = await super().send(command, params) + if command in CLOUD_REQUIRED: + res = await self._send_command(command, self.coordinator.cloud_api, params) + else: + res = await self._send_command(command, self._api, params) await self.coordinator.async_refresh() return res diff --git a/homeassistant/components/roborock/image.py b/homeassistant/components/roborock/image.py index 3bd2fec2d90..d1c19331ba4 100644 --- a/homeassistant/components/roborock/image.py +++ b/homeassistant/components/roborock/image.py @@ -1,38 +1,21 @@ """Support for Roborock image.""" -import asyncio -from collections.abc import Callable from datetime import datetime -import io import logging -from roborock import RoborockCommand -from vacuum_map_parser_base.config.color import ColorsPalette -from vacuum_map_parser_base.config.image_config import ImageConfig -from vacuum_map_parser_base.config.size import Sizes -from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser - from homeassistant.components.image import ImageEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from homeassistant.util import dt as dt_util -from .const import ( - DEFAULT_DRAWABLES, - DOMAIN, - DRAWABLES, - IMAGE_CACHE_INTERVAL, - MAP_FILE_FORMAT, - MAP_SLEEP, -) from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -41,30 +24,6 @@ async def async_setup_entry( ) -> None: """Set up Roborock image platform.""" - drawables = [ - drawable - for drawable, default_value in DEFAULT_DRAWABLES.items() - if config_entry.options.get(DRAWABLES, {}).get(drawable, default_value) - ] - parser = RoborockMapDataParser( - ColorsPalette(), Sizes(), drawables, ImageConfig(), [] - ) - - def parse_image(map_bytes: bytes) -> bytes | None: - try: - parsed_map = parser.parse(map_bytes) - except (IndexError, ValueError) as err: - _LOGGER.debug("Exception when parsing map contents: %s", err) - return None - if parsed_map.image is None: - return None - img_byte_arr = io.BytesIO() - parsed_map.image.data.save(img_byte_arr, format=MAP_FILE_FORMAT) - return img_byte_arr.getvalue() - - await asyncio.gather( - *(refresh_coordinators(hass, coord) for coord in config_entry.runtime_data.v1) - ) 
async_add_entities( ( RoborockMap( @@ -73,7 +32,6 @@ async def async_setup_entry( coord, map_info.flag, map_info.name, - parse_image, ) for coord in config_entry.runtime_data.v1 for map_info in coord.maps.values() @@ -95,14 +53,12 @@ class RoborockMap(RoborockCoordinatedEntityV1, ImageEntity): coordinator: RoborockDataUpdateCoordinator, map_flag: int, map_name: str, - parser: Callable[[bytes], bytes | None], ) -> None: """Initialize a Roborock map.""" RoborockCoordinatedEntityV1.__init__(self, unique_id, coordinator) ImageEntity.__init__(self, coordinator.hass) self.config_entry = config_entry self._attr_name = map_name - self.parser = parser self.map_flag = map_flag self.cached_map = b"" self._attr_entity_category = EntityCategory.DIAGNOSTIC @@ -115,81 +71,19 @@ class RoborockMap(RoborockCoordinatedEntityV1, ImageEntity): async def async_added_to_hass(self) -> None: """When entity is added to hass load any previously cached maps from disk.""" await super().async_added_to_hass() - content = await self.coordinator.map_storage.async_load_map(self.map_flag) - self.cached_map = content or b"" - self._attr_image_last_updated = dt_util.utcnow() + self._attr_image_last_updated = self.coordinator.maps[ + self.map_flag + ].last_updated self.async_write_ha_state() def _handle_coordinator_update(self) -> None: - # Bump last updated every third time the coordinator runs, so that async_image - # will be called and we will evaluate on the new coordinator data if we should - # update the cache. - if self.is_selected and ( - ( - (dt_util.utcnow() - self.image_last_updated).total_seconds() - > IMAGE_CACHE_INTERVAL - and self.coordinator.roborock_device_info.props.status is not None - and bool(self.coordinator.roborock_device_info.props.status.in_cleaning) - ) - or self.cached_map == b"" - ): - # This will tell async_image it should update. - self._attr_image_last_updated = dt_util.utcnow() + # If the coordinator has updated the map, we can update the image. + self._attr_image_last_updated = self.coordinator.maps[ + self.map_flag + ].last_updated + super()._handle_coordinator_update() async def async_image(self) -> bytes | None: - """Update the image if it is not cached.""" - if self.is_selected: - response = await asyncio.gather( - *( - self.cloud_api.get_map_v1(), - self.coordinator.set_current_map_rooms(), - ), - return_exceptions=True, - ) - if ( - not isinstance(response[0], bytes) - or (content := self.parser(response[0])) is None - ): - _LOGGER.debug("Failed to parse map contents: %s", response[0]) - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="map_failure", - ) - if self.cached_map != content: - self.cached_map = content - await self.coordinator.map_storage.async_save_map( - self.map_flag, - content, - ) - return self.cached_map - - -async def refresh_coordinators( - hass: HomeAssistant, coord: RoborockDataUpdateCoordinator -) -> None: - """Get the starting map information for all maps for this device. - - The following steps must be done synchronously. - Only one map can be loaded at a time per device. - """ - cur_map = coord.current_map - # This won't be None at this point as the coordinator will have run first. - assert cur_map is not None - map_flags = sorted(coord.maps, key=lambda data: data == cur_map, reverse=True) - for map_flag in map_flags: - if map_flag != cur_map: - # Only change the map and sleep if we have multiple maps. 
- await coord.api.send_command(RoborockCommand.LOAD_MULTI_MAP, [map_flag]) - coord.current_map = map_flag - # We cannot get the map until the roborock servers fully process the - # map change. - await asyncio.sleep(MAP_SLEEP) - await coord.set_current_map_rooms() - - if len(coord.maps) != 1: - # Set the map back to the map the user previously had selected so that it - # does not change the end user's app. - # Only needs to happen when we changed maps above. - await coord.cloud_api.send_command(RoborockCommand.LOAD_MULTI_MAP, [cur_map]) - coord.current_map = cur_map + """Get the cached image.""" + return self.coordinator.maps[self.map_flag].image diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index 1b143591203..531590d5d6e 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -3,11 +3,23 @@ "name": "Roborock", "codeowners": ["@Lash-L", "@allenporter"], "config_flow": true, + "dhcp": [ + { + "macaddress": "249E7D*" + }, + { + "macaddress": "B04A39*" + }, + { + "hostname": "roborock-*" + } + ], "documentation": "https://www.home-assistant.io/integrations/roborock", "iot_class": "local_polling", "loggers": ["roborock"], + "quality_scale": "silver", "requirements": [ - "python-roborock==2.12.2", + "python-roborock==2.16.1", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/homeassistant/components/roborock/models.py b/homeassistant/components/roborock/models.py index 4b8ab43b4a1..ab40f23d574 100644 --- a/homeassistant/components/roborock/models.py +++ b/homeassistant/components/roborock/models.py @@ -1,10 +1,12 @@ """Roborock Models.""" from dataclasses import dataclass +from datetime import datetime from typing import Any from roborock.containers import HomeDataDevice, HomeDataProduct, NetworkInfo from roborock.roborock_typing import DeviceProp +from vacuum_map_parser_base.map_data import MapData @dataclass @@ -48,3 +50,13 @@ class RoborockMapInfo: flag: int name: str rooms: dict[int, str] + image: bytes | None + last_updated: datetime + map_data: MapData | None + + @property + def current_room(self) -> str | None: + """Get the currently active room for this map if any.""" + if self.map_data is None or self.map_data.vacuum_room is None: + return None + return self.rooms.get(self.map_data.vacuum_room) diff --git a/homeassistant/components/roborock/number.py b/homeassistant/components/roborock/number.py index a710eeefb90..73ac14fca71 100644 --- a/homeassistant/components/roborock/number.py +++ b/homeassistant/components/roborock/number.py @@ -22,6 +22,8 @@ from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockNumberDescription(NumberEntityDescription): diff --git a/homeassistant/components/roborock/quality_scale.yaml b/homeassistant/components/roborock/quality_scale.yaml new file mode 100644 index 00000000000..32ddb145f90 --- /dev/null +++ b/homeassistant/components/roborock/quality_scale.yaml @@ -0,0 +1,75 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow: done + config-flow-test-coverage: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: + status: done + comment: 
The config flow verifies credentials and the cloud APIs. + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery: done + discovery-update-info: + status: exempt + comment: Devices do not support discovery. + docs-data-update: done + docs-examples: done + docs-known-limitations: + status: todo + comment: Documentation does not describe known limitations like rate limiting + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: + status: todo + comment: | + There are good troubleshooting steps, however we should update the "cloud vs local" + and rate limiting documentation with more information. + docs-use-cases: + status: todo + comment: | + The docs describe controlling the vacuum, though it does not describe more + interesting potential integrations with the Home Assistant ecosystem. + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: There are no noisy entities. + entity-translations: done + exception-translations: done + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: todo + comment: The Cloud vs Local API warning should probably be a repair issue. + stale-devices: done + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/roborock/select.py b/homeassistant/components/roborock/select.py index 6133eed0652..208020dccab 100644 --- a/homeassistant/components/roborock/select.py +++ b/homeassistant/components/roborock/select.py @@ -4,9 +4,9 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass -from roborock.containers import Status +from roborock.code_mappings import RoborockDockDustCollectionModeCode from roborock.roborock_message import RoborockDataProtocol -from roborock.roborock_typing import RoborockCommand +from roborock.roborock_typing import DeviceProp, RoborockCommand from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory @@ -17,6 +17,8 @@ from .const import MAP_SLEEP from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockSelectDescription(SelectEntityDescription): @@ -25,13 +27,15 @@ class RoborockSelectDescription(SelectEntityDescription): # The command that the select entity will send to the api. api_command: RoborockCommand # Gets the current value of the select entity. - value_fn: Callable[[Status], str | None] + value_fn: Callable[[DeviceProp], str | None] # Gets all options of the select entity. - options_lambda: Callable[[Status], list[str] | None] + options_lambda: Callable[[DeviceProp], list[str] | None] # Takes the value from the select entity and converts it for the api.
- parameter_lambda: Callable[[str, Status], list[int]] + parameter_lambda: Callable[[str, DeviceProp], list[int]] protocol_listener: RoborockDataProtocol | None = None + # If it is a dock entity + is_dock_entity: bool = False SELECT_DESCRIPTIONS: list[RoborockSelectDescription] = [ @@ -39,24 +43,38 @@ SELECT_DESCRIPTIONS: list[RoborockSelectDescription] = [ key="water_box_mode", translation_key="mop_intensity", api_command=RoborockCommand.SET_WATER_BOX_CUSTOM_MODE, - value_fn=lambda data: data.water_box_mode_name, + value_fn=lambda data: data.status.water_box_mode_name, entity_category=EntityCategory.CONFIG, - options_lambda=lambda data: data.water_box_mode.keys() - if data.water_box_mode is not None + options_lambda=lambda data: data.status.water_box_mode.keys() + if data.status.water_box_mode is not None else None, - parameter_lambda=lambda key, status: [status.get_mop_intensity_code(key)], + parameter_lambda=lambda key, prop: [prop.status.get_mop_intensity_code(key)], protocol_listener=RoborockDataProtocol.WATER_BOX_MODE, ), RoborockSelectDescription( key="mop_mode", translation_key="mop_mode", api_command=RoborockCommand.SET_MOP_MODE, - value_fn=lambda data: data.mop_mode_name, + value_fn=lambda data: data.status.mop_mode_name, entity_category=EntityCategory.CONFIG, - options_lambda=lambda data: data.mop_mode.keys() - if data.mop_mode is not None + options_lambda=lambda data: data.status.mop_mode.keys() + if data.status.mop_mode is not None else None, - parameter_lambda=lambda key, status: [status.get_mop_mode_code(key)], + parameter_lambda=lambda key, prop: [prop.status.get_mop_mode_code(key)], + ), + RoborockSelectDescription( + key="dust_collection_mode", + translation_key="dust_collection_mode", + api_command=RoborockCommand.SET_DUST_COLLECTION_MODE, + value_fn=lambda data: data.dust_collection_mode_name, + entity_category=EntityCategory.CONFIG, + options_lambda=lambda data: RoborockDockDustCollectionModeCode.keys() + if data.dust_collection_mode_name is not None + else None, + parameter_lambda=lambda key, _: [ + RoborockDockDustCollectionModeCode.as_dict().get(key) + ], + is_dock_entity=True, ), ] @@ -74,7 +92,7 @@ async def async_setup_entry( for description in SELECT_DESCRIPTIONS if ( options := description.options_lambda( - coordinator.roborock_device_info.props.status + coordinator.roborock_device_info.props ) ) is not None @@ -104,6 +122,7 @@ class RoborockSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): f"{entity_description.key}_{coordinator.duid_slug}", coordinator, entity_description.protocol_listener, + is_dock_entity=entity_description.is_dock_entity, ) self._attr_options = options @@ -111,27 +130,28 @@ class RoborockSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): """Set the option.""" await self.send( self.entity_description.api_command, - self.entity_description.parameter_lambda(option, self._device_status), + self.entity_description.parameter_lambda(option, self.coordinator.data), ) @property def current_option(self) -> str | None: - """Get the current status of the select entity from device_status.""" - return self.entity_description.value_fn(self._device_status) + """Get the current status of the select entity from device props.""" + return self.entity_description.value_fn(self.coordinator.data) class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): """A class to let you set the selected map on Roborock vacuum.""" - _attr_entity_category = EntityCategory.DIAGNOSTIC + _attr_entity_category = EntityCategory.CONFIG 
_attr_translation_key = "selected_map" async def async_select_option(self, option: str) -> None: """Set the option.""" for map_id, map_ in self.coordinator.maps.items(): if map_.name == option: - await self.send( + await self._send_command( RoborockCommand.LOAD_MULTI_MAP, + self.api, [map_id], ) # Update the current map id manually so that nothing gets broken @@ -140,6 +160,7 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): # We need to wait after updating the map # so that other commands will be executed correctly. await asyncio.sleep(MAP_SLEEP) + await self.coordinator.async_refresh() break @property diff --git a/homeassistant/components/roborock/sensor.py b/homeassistant/components/roborock/sensor.py index f95dc5fa98f..33ecaf74d4f 100644 --- a/homeassistant/components/roborock/sensor.py +++ b/homeassistant/components/roborock/sensor.py @@ -36,7 +36,13 @@ from .coordinator import ( RoborockDataUpdateCoordinator, RoborockDataUpdateCoordinatorA01, ) -from .entity import RoborockCoordinatedEntityA01, RoborockCoordinatedEntityV1 +from .entity import ( + RoborockCoordinatedEntityA01, + RoborockCoordinatedEntityV1, + RoborockEntity, +) + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -47,6 +53,9 @@ class RoborockSensorDescription(SensorEntityDescription): protocol_listener: RoborockDataProtocol | None = None + # If it is a dock entity + is_dock_entity: bool = False + @dataclass(frozen=True, kw_only=True) class RoborockSensorDescriptionA01(SensorEntityDescription): @@ -197,6 +206,7 @@ SENSOR_DESCRIPTIONS = [ entity_category=EntityCategory.DIAGNOSTIC, device_class=SensorDeviceClass.ENUM, options=RoborockDockErrorCode.keys(), + is_dock_entity=True, ), RoborockSensorDescription( key="mop_clean_remaining", @@ -205,6 +215,7 @@ SENSOR_DESCRIPTIONS = [ value_fn=lambda data: data.status.rdt, translation_key="mop_drying_remaining_time", entity_category=EntityCategory.DIAGNOSTIC, + is_dock_entity=True, ), ] @@ -299,7 +310,7 @@ async def async_setup_entry( ) -> None: """Set up the Roborock vacuum sensors.""" coordinators = config_entry.runtime_data - async_add_entities( + entities: list[RoborockEntity] = [ RoborockSensorEntity( coordinator, description, @@ -307,8 +318,9 @@ async def async_setup_entry( for coordinator in coordinators.v1 for description in SENSOR_DESCRIPTIONS if description.value_fn(coordinator.roborock_device_info.props) is not None - ) - async_add_entities( + ] + entities.extend(RoborockCurrentRoom(coordinator) for coordinator in coordinators.v1) + entities.extend( RoborockSensorEntityA01( coordinator, description, @@ -317,6 +329,7 @@ async def async_setup_entry( for description in A01_SENSOR_DESCRIPTIONS if description.data_protocol in coordinator.data ) + async_add_entities(entities) class RoborockSensorEntity(RoborockCoordinatedEntityV1, SensorEntity): @@ -335,6 +348,7 @@ class RoborockSensorEntity(RoborockCoordinatedEntityV1, SensorEntity): f"{description.key}_{coordinator.duid_slug}", coordinator, description.protocol_listener, + is_dock_entity=description.is_dock_entity, ) @property @@ -345,6 +359,42 @@ class RoborockSensorEntity(RoborockCoordinatedEntityV1, SensorEntity): ) +class RoborockCurrentRoom(RoborockCoordinatedEntityV1, SensorEntity): + """Representation of a Current Room Sensor.""" + + _attr_device_class = SensorDeviceClass.ENUM + _attr_translation_key = "current_room" + _attr_entity_category = EntityCategory.DIAGNOSTIC + + def __init__( + self, + coordinator: RoborockDataUpdateCoordinator, + ) -> None: + """Initialize 
the entity.""" + super().__init__( + f"current_room_{coordinator.duid_slug}", + coordinator, + None, + is_dock_entity=False, + ) + + @property + def options(self) -> list[str]: + """Return the currently valid rooms.""" + if self.coordinator.current_map is not None: + return list( + self.coordinator.maps[self.coordinator.current_map].rooms.values() + ) + return [] + + @property + def native_value(self) -> str | None: + """Return the value reported by the sensor.""" + if self.coordinator.current_map is not None: + return self.coordinator.maps[self.coordinator.current_map].current_room + return None + + class RoborockSensorEntityA01(RoborockCoordinatedEntityA01, SensorEntity): """Representation of a A01 Roborock sensor.""" diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index eb058ea74e3..caad67e4ce6 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -5,12 +5,18 @@ "description": "Enter your Roborock email address.", "data": { "username": "[%key:common::config_flow::data::email%]" + }, + "data_description": { + "username": "The email address used to sign in to the Roborock app." } }, "code": { "description": "Type the verification code sent to your email", "data": { "code": "Verification code" + }, + "data_description": { + "code": "The verification code sent to your email." } }, "reauth_confirm": { @@ -54,6 +60,25 @@ "vacuum_position": "Vacuum position", "virtual_walls": "Virtual walls", "zones": "Zones" + }, + "data_description": { + "charger": "Show the charger on the map.", + "cleaned_area": "Show the area cleaned on the map.", + "goto_path": "Show the go-to path on the map.", + "ignored_obstacles": "Show ignored obstacles on the map.", + "ignored_obstacles_with_photo": "Show ignored obstacles with photos on the map.", + "mop_path": "Show the mop path on the map.", + "no_carpet_zones": "Show the no carpet zones on the map.", + "no_go_zones": "Show the no-go zones on the map.", + "no_mopping_zones": "Show the no-mop zones on the map.", + "obstacles": "Show obstacles on the map.", + "obstacles_with_photo": "Show obstacles with photos on the map.", + "path": "Show the path on the map.", + "predicted_path": "Show the predicted path on the map.", + "room_names": "Show room names on the map.", + "vacuum_position": "Show the vacuum position on the map.", + "virtual_walls": "Show virtual walls on the map.", + "zones": "Show zones on the map." 
} } } @@ -156,6 +181,9 @@ "countdown": { "name": "Countdown" }, + "current_room": { + "name": "Current room" + }, "dock_error": { "name": "Dock error", "state": { @@ -353,6 +381,15 @@ }, "selected_map": { "name": "Selected map" + }, + "dust_collection_mode": { + "name": "Empty mode", + "state": { + "smart": "Smart", + "light": "Light", + "balanced": "[%key:component::roborock::entity::vacuum::roborock::state_attributes::fan_speed::state::balanced%]", + "max": "[%key:component::roborock::entity::select::mop_intensity::state::max%]" + } } }, "switch": { @@ -420,6 +457,12 @@ "map_failure": { "message": "Something went wrong creating the map" }, + "position_not_found": { + "message": "Robot position not found" + }, + "update_data_fail": { + "message": "Failed to update data" + }, "no_coordinators": { "message": "No devices were able to successfully setup" }, diff --git a/homeassistant/components/roborock/switch.py b/homeassistant/components/roborock/switch.py index 0171d59abfd..44feccdebac 100644 --- a/homeassistant/components/roborock/switch.py +++ b/homeassistant/components/roborock/switch.py @@ -24,6 +24,8 @@ from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockSwitchDescription(SwitchEntityDescription): @@ -35,6 +37,8 @@ class RoborockSwitchDescription(SwitchEntityDescription): update_value: Callable[[AttributeCache, bool], Coroutine[Any, Any, None]] # Attribute from cache attribute: str + # If it is a dock entity + is_dock_entity: bool = False SWITCH_DESCRIPTIONS: list[RoborockSwitchDescription] = [ @@ -47,6 +51,7 @@ SWITCH_DESCRIPTIONS: list[RoborockSwitchDescription] = [ key="child_lock", translation_key="child_lock", entity_category=EntityCategory.CONFIG, + is_dock_entity=True, ), RoborockSwitchDescription( cache_key=CacheableAttribute.flow_led_status, @@ -57,6 +62,7 @@ SWITCH_DESCRIPTIONS: list[RoborockSwitchDescription] = [ key="status_indicator", translation_key="status_indicator", entity_category=EntityCategory.CONFIG, + is_dock_entity=True, ), RoborockSwitchDescription( cache_key=CacheableAttribute.dnd_timer, @@ -147,7 +153,13 @@ class RoborockSwitch(RoborockEntityV1, SwitchEntity): ) -> None: """Initialize the entity.""" self.entity_description = entity_description - super().__init__(unique_id, coordinator.device_info, coordinator.api) + super().__init__( + unique_id, + coordinator.device_info + if not entity_description.is_dock_entity + else coordinator.dock_device_info, + coordinator.api, + ) async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the switch.""" diff --git a/homeassistant/components/roborock/time.py b/homeassistant/components/roborock/time.py index 6aa70e300e5..83d341fa2dd 100644 --- a/homeassistant/components/roborock/time.py +++ b/homeassistant/components/roborock/time.py @@ -24,6 +24,8 @@ from .entity import RoborockEntityV1 _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class RoborockTimeDescription(TimeEntityDescription): diff --git a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index 59abc888673..058fffbdb1c 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -1,11 +1,14 @@ """Support for Roborock vacuum class.""" -from dataclasses import asdict from typing import Any from roborock.code_mappings import RoborockStateCode from roborock.roborock_message import RoborockDataProtocol from 
roborock.roborock_typing import RoborockCommand +from vacuum_map_parser_base.config.color import ColorsPalette +from vacuum_map_parser_base.config.image_config import ImageConfig +from vacuum_map_parser_base.config.size import Sizes +from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser import voluptuous as vol from homeassistant.components.vacuum import ( @@ -26,7 +29,6 @@ from .const import ( ) from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator from .entity import RoborockCoordinatedEntityV1 -from .image import ColorsPalette, ImageConfig, RoborockMapDataParser, Sizes STATE_CODE_TO_STATE = { RoborockStateCode.starting: VacuumActivity.IDLE, # "Starting" @@ -54,6 +56,8 @@ STATE_CODE_TO_STATE = { RoborockStateCode.device_offline: VacuumActivity.ERROR, # "Device offline" } +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, @@ -201,7 +205,14 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity): """Get map information such as map id and room ids.""" return { "maps": [ - asdict(vacuum_map) for vacuum_map in self.coordinator.maps.values() + { + "flag": vacuum_map.flag, + "name": vacuum_map.name, + # JsonValueType does not accept a int as a key - was not a + # issue with previous asdict() implementation. + "rooms": vacuum_map.rooms, # type: ignore[dict-item] + } + for vacuum_map in self.coordinator.maps.values() ] } @@ -210,13 +221,18 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity): map_data = await self.coordinator.cloud_api.get_map_v1() if not isinstance(map_data, bytes): - raise HomeAssistantError("Failed to retrieve map data.") + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="map_failure", + ) parser = RoborockMapDataParser(ColorsPalette(), Sizes(), [], ImageConfig(), []) parsed_map = parser.parse(map_data) robot_position = parsed_map.vacuum_position if robot_position is None: - raise HomeAssistantError("Robot position not found") + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="position_not_found" + ) return { "x": robot_position.x, diff --git a/homeassistant/components/roon/media_player.py b/homeassistant/components/roon/media_player.py index 0460e2cfc6e..4a87601a24f 100644 --- a/homeassistant/components/roon/media_player.py +++ b/homeassistant/components/roon/media_player.py @@ -329,6 +329,11 @@ class RoonDevice(MediaPlayerEntity): """Album artist of current playing media (Music track only).""" return self.media_artist + @property + def media_content_type(self) -> str: + """Return the media type.""" + return MediaType.MUSIC + @property def supports_standby(self): """Return power state of source controls.""" diff --git a/homeassistant/components/rova/strings.json b/homeassistant/components/rova/strings.json index 3b89fc789ee..21f4146bf78 100644 --- a/homeassistant/components/rova/strings.json +++ b/homeassistant/components/rova/strings.json @@ -4,7 +4,7 @@ "user": { "title": "Provide your address details", "data": { - "zip_code": "Your zip code", + "zip_code": "Your ZIP code", "house_number": "Your house number", "house_number_suffix": "A suffix for your house number" } diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index 6a30efd64f8..5bb69e7f121 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -39,7 +39,7 @@ "samsungctl[websocket]==0.7.1", "samsungtvws[async,encrypted]==2.7.2", 
"wakeonlan==2.1.0", - "async-upnp-client==0.43.0" + "async-upnp-client==0.44.0" ], "ssdp": [ { diff --git a/homeassistant/components/schedule/strings.json b/homeassistant/components/schedule/strings.json index 8638e4a8a84..bb81c029dbf 100644 --- a/homeassistant/components/schedule/strings.json +++ b/homeassistant/components/schedule/strings.json @@ -28,7 +28,7 @@ }, "get_schedule": { "name": "Get schedule", - "description": "Retrieve one or multiple schedules." + "description": "Retrieves the configured time ranges of one or multiple schedules." } } } diff --git a/homeassistant/components/search/manifest.json b/homeassistant/components/search/manifest.json index cd372139451..42a54fe8b55 100644 --- a/homeassistant/components/search/manifest.json +++ b/homeassistant/components/search/manifest.json @@ -1,7 +1,6 @@ { "domain": "search", "name": "Search", - "after_dependencies": ["scene", "group", "automation", "script"], "codeowners": ["@home-assistant/core"], "dependencies": ["websocket_api"], "documentation": "https://www.home-assistant.io/integrations/search", diff --git a/homeassistant/components/sensibo/manifest.json b/homeassistant/components/sensibo/manifest.json index e6398c5076e..610695aaf7b 100644 --- a/homeassistant/components/sensibo/manifest.json +++ b/homeassistant/components/sensibo/manifest.json @@ -14,5 +14,6 @@ }, "iot_class": "cloud_polling", "loggers": ["pysensibo"], + "quality_scale": "platinum", "requirements": ["pysensibo==1.1.0"] } diff --git a/homeassistant/components/sensibo/quality_scale.yaml b/homeassistant/components/sensibo/quality_scale.yaml index c21cf100e9d..3d71d0ad3ba 100644 --- a/homeassistant/components/sensibo/quality_scale.yaml +++ b/homeassistant/components/sensibo/quality_scale.yaml @@ -19,9 +19,9 @@ rules: comment: | No integrations services. common-modules: done - docs-high-level-description: todo + docs-high-level-description: done docs-installation-instructions: done - docs-removal-instructions: todo + docs-removal-instructions: done docs-actions: done brands: done # Silver @@ -39,9 +39,7 @@ rules: comment: | Tests are very complex and needs a rewrite for future additions integration-owner: done - docs-installation-parameters: - status: todo - comment: configuration_basic + docs-installation-parameters: done docs-configuration-parameters: status: exempt comment: | @@ -71,13 +69,13 @@ rules: status: exempt comment: | This integration doesn't have any cases where raising an issue is needed. 
- docs-use-cases: todo - docs-supported-devices: todo - docs-supported-functions: todo - docs-data-update: todo - docs-known-limitations: todo - docs-troubleshooting: todo - docs-examples: todo + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done # Platinum async-dependency: done diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index 6aba2be52fc..0fbcda461c8 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -330,7 +330,7 @@ "timer_on_switch": { "name": "Timer", "state_attributes": { - "id": { "name": "Id" }, + "id": { "name": "ID" }, "turn_on": { "name": "Turns on", "state": { @@ -594,7 +594,7 @@ "issues": { "deprecated_entity_horizontalswing": { "title": "The Sensibo {name} entity is deprecated", - "description": "The Sensibo entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to use the `horizontal_swing` attribute part of the `climate` entity instead.\n, Disable the `{entity}` and reload the config entry or restart Home Assistant to fix this issue." + "description": "The Sensibo entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to use the `horizontal_swing` attribute part of the `climate` entity instead.\nDisable `{entity}` and reload the config entry or restart Home Assistant to fix this issue." } } } diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index 89f39d4fb8c..e06ee85cd03 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -44,6 +44,7 @@ from .const import ( # noqa: F401 DEVICE_CLASSES_SCHEMA, DOMAIN, NON_NUMERIC_DEVICE_CLASSES, + STATE_CLASS_UNITS, STATE_CLASSES, STATE_CLASSES_SCHEMA, UNIT_CONVERTERS, @@ -675,22 +676,13 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ): # Deduce the precision by finding the decimal point, if any value_s = str(value) - precision = ( - len(value_s) - value_s.index(".") - 1 if "." in value_s else 0 - ) - # Scale the precision when converting to a larger unit # For example 1.1 Wh should be rendered as 0.0011 kWh, not 0.0 kWh - ratio_log = max( - 0, - log10( - converter.get_unit_ratio( - native_unit_of_measurement, unit_of_measurement - ) - ), + precision = ( + len(value_s) - value_s.index(".") - 1 if "." 
in value_s else 0 + ) + converter.get_unit_floored_log_ratio( + native_unit_of_measurement, unit_of_measurement ) - precision = precision + floor(ratio_log) - value = f"{converted_numerical_value:z.{precision}f}" else: value = converted_numerical_value @@ -722,6 +714,18 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): report_issue, ) + # Validate unit of measurement used for sensors with a state class + if ( + state_class + and (units := STATE_CLASS_UNITS.get(state_class)) is not None + and native_unit_of_measurement not in units + ): + raise ValueError( + f"Sensor {self.entity_id} ({type(self)}) is using native unit of " + f"measurement '{native_unit_of_measurement}' which is not a valid unit " + f"for the state class ('{state_class}') it is using; expected one of {units};" + ) + return value def _display_precision_or_none(self) -> int | None: diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 1edb87f4bce..63af8e5bf52 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -186,7 +186,7 @@ class SensorDeviceClass(StrEnum): DURATION = "duration" """Fixed duration. - Unit of measurement: `d`, `h`, `min`, `s`, `ms` + Unit of measurement: `d`, `h`, `min`, `s`, `ms`, `µs` """ ENERGY = "energy" @@ -491,6 +491,9 @@ class SensorStateClass(StrEnum): MEASUREMENT = "measurement" """The state represents a measurement in present time.""" + MEASUREMENT_ANGLE = "measurement_angle" + """The state represents an angle measurement in present time. Currently only degrees are supported.""" + TOTAL = "total" """The state represents a total amount. @@ -558,6 +561,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { UnitOfTime.MINUTES, UnitOfTime.SECONDS, UnitOfTime.MILLISECONDS, + UnitOfTime.MICROSECONDS, }, SensorDeviceClass.ENERGY: set(UnitOfEnergy), SensorDeviceClass.ENERGY_DISTANCE: set(UnitOfEnergyDistance), @@ -692,6 +696,11 @@ DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = { SensorStateClass.TOTAL, SensorStateClass.TOTAL_INCREASING, }, - SensorDeviceClass.WIND_DIRECTION: set(), + SensorDeviceClass.WIND_DIRECTION: {SensorStateClass.MEASUREMENT_ANGLE}, SensorDeviceClass.WIND_SPEED: {SensorStateClass.MEASUREMENT}, } + + +STATE_CLASS_UNITS: dict[SensorStateClass | str, set[type[StrEnum] | str | None]] = { + SensorStateClass.MEASUREMENT_ANGLE: {DEGREE}, +} diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index 675d24b9240..cb80fa7d2ce 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -5,6 +5,7 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Callable, Iterable from contextlib import suppress +from dataclasses import dataclass import datetime import itertools import logging @@ -21,6 +22,7 @@ from homeassistant.components.recorder import ( ) from homeassistant.components.recorder.models import ( StatisticData, + StatisticMeanType, StatisticMetaData, StatisticResult, ) @@ -52,10 +54,22 @@ from .const import ( _LOGGER = logging.getLogger(__name__) + +@dataclass +class _StatisticsConfig: + types: set[str] + mean_type: StatisticMeanType = StatisticMeanType.NONE + + DEFAULT_STATISTICS = { - SensorStateClass.MEASUREMENT: {"mean", "min", "max"}, - SensorStateClass.TOTAL: {"sum"}, - SensorStateClass.TOTAL_INCREASING: {"sum"}, + SensorStateClass.MEASUREMENT:
_StatisticsConfig( + {"mean", "min", "max"}, StatisticMeanType.ARITHMETIC + ), + SensorStateClass.MEASUREMENT_ANGLE: _StatisticsConfig( + {"mean"}, StatisticMeanType.CIRCULAR + ), + SensorStateClass.TOTAL: _StatisticsConfig({"sum"}), + SensorStateClass.TOTAL_INCREASING: _StatisticsConfig({"sum"}), } EQUIVALENT_UNITS = { @@ -76,8 +90,15 @@ WARN_NEGATIVE: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_total_increasing_nega # Keep track of entities for which a warning about unsupported unit has been logged WARN_UNSUPPORTED_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unsupported_unit") WARN_UNSTABLE_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unstable_unit") +# Keep track of entities for which a warning about statistics mean algorithm change has been logged +WARN_STATISTICS_MEAN_CHANGED: HassKey[set[str]] = HassKey( + f"{DOMAIN}_warn_statistics_mean_change" +) # Link to dev statistics where issues around LTS can be fixed LINK_DEV_STATISTICS = "https://my.home-assistant.io/redirect/developer_statistics" +STATE_CLASS_REMOVED_ISSUE = "state_class_removed" +UNITS_CHANGED_ISSUE = "units_changed" +MEAN_TYPE_CHANGED_ISSUE = "mean_type_changed" def _get_sensor_states(hass: HomeAssistant) -> list[State]: @@ -99,7 +120,7 @@ def _get_sensor_states(hass: HomeAssistant) -> list[State]: ] -def _time_weighted_average( +def _time_weighted_arithmetic_mean( fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime ) -> float: """Calculate a time weighted average. @@ -134,16 +155,44 @@ def _time_weighted_average( duration = end - old_start_time accumulated += old_fstate * duration.total_seconds() - period_seconds = (end - start).total_seconds() - if period_seconds == 0: - # If the only state changed that happened was at the exact moment - # at the end of the period, we can't calculate a meaningful average - # so we return 0.0 since it represents a time duration smaller than - # we can measure. This probably means the precision of statistics - # column schema in the database is incorrect but it is actually possible - # to happen if the state change event fired at the exact microsecond - return 0.0 - return accumulated / period_seconds + return accumulated / (end - start).total_seconds() + + +def _time_weighted_circular_mean( + fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime +) -> float: + """Calculate a time weighted circular mean. + + The circular mean is calculated by weighting the states by duration in seconds between + state changes. + Note: there's no interpolation of values between state changes. 
+ """ + old_fstate: float | None = None + old_start_time: datetime.datetime | None = None + values: list[tuple[float, float]] = [] + + for fstate, state in fstates: + # The recorder will give us the last known state, which may be well + # before the requested start time for the statistics + start_time = max(state.last_updated, start) + if old_start_time is None: + # Adjust start time, if there was no last known state + start = start_time + else: + duration = (start_time - old_start_time).total_seconds() + assert old_fstate is not None + values.append((old_fstate, duration)) + + old_fstate = fstate + old_start_time = start_time + + if old_fstate is not None: + # Add last value weighted by duration until end of the period + assert old_start_time is not None + duration = (end - old_start_time).total_seconds() + values.append((old_fstate, duration)) + + return statistics.weighted_circular_mean(values) def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]: @@ -371,7 +420,7 @@ def reset_detected( return fstate < 0.9 * previous_fstate -def _wanted_statistics(sensor_states: list[State]) -> dict[str, set[str]]: +def _wanted_statistics(sensor_states: list[State]) -> dict[str, _StatisticsConfig]: """Prepare a dict with wanted statistics for entities.""" return { state.entity_id: DEFAULT_STATISTICS[state.attributes[ATTR_STATE_CLASS]] @@ -415,7 +464,9 @@ def compile_statistics( # noqa: C901 wanted_statistics = _wanted_statistics(sensor_states) # Get history between start and end entities_full_history = [ - i.entity_id for i in sensor_states if "sum" in wanted_statistics[i.entity_id] + i.entity_id + for i in sensor_states + if "sum" in wanted_statistics[i.entity_id].types ] history_list: dict[str, list[State]] = {} if entities_full_history: @@ -430,7 +481,7 @@ def compile_statistics( # noqa: C901 entities_significant_history = [ i.entity_id for i in sensor_states - if "sum" not in wanted_statistics[i.entity_id] + if "sum" not in wanted_statistics[i.entity_id].types ] if entities_significant_history: _history_list = history.get_full_significant_states_with_session( @@ -447,7 +498,11 @@ def compile_statistics( # noqa: C901 entity_id = _state.entity_id # If there are no recent state changes, the sensor's state may already be pruned # from the recorder. Get the state from the state machine instead. 
- if not (entity_history := history_list.get(entity_id, [_state])): + try: + entity_history = history_list[entity_id] + except KeyError: + entity_history = [_state] if _state.last_changed < end else [] + if not entity_history: continue if not (float_states := _entity_history_to_float_and_state(entity_history)): continue @@ -476,7 +531,7 @@ def compile_statistics( # noqa: C901 continue state_class: str = _state.attributes[ATTR_STATE_CLASS] to_process.append((entity_id, statistics_unit, state_class, valid_float_states)) - if "sum" in wanted_statistics[entity_id]: + if "sum" in wanted_statistics[entity_id].types: to_query.add(entity_id) last_stats = statistics.get_latest_short_term_statistics_with_session( @@ -488,6 +543,10 @@ def compile_statistics( # noqa: C901 state_class, valid_float_states, ) in to_process: + mean_type = StatisticMeanType.NONE + if "mean" in wanted_statistics[entity_id].types: + mean_type = wanted_statistics[entity_id].mean_type + # Check metadata if old_metadata := old_metadatas.get(entity_id): if not _equivalent_units( @@ -513,10 +572,34 @@ def compile_statistics( # noqa: C901 ) continue + if ( + mean_type is not StatisticMeanType.NONE + and (old_mean_type := old_metadata[1]["mean_type"]) + is not StatisticMeanType.NONE + and mean_type != old_mean_type + ): + if WARN_STATISTICS_MEAN_CHANGED not in hass.data: + hass.data[WARN_STATISTICS_MEAN_CHANGED] = set() + if entity_id not in hass.data[WARN_STATISTICS_MEAN_CHANGED]: + hass.data[WARN_STATISTICS_MEAN_CHANGED].add(entity_id) + _LOGGER.warning( + ( + "The statistics mean algorithm for %s have changed from %s to %s." + " Generation of long term statistics will be suppressed" + " unless it changes back or go to %s to delete the old" + " statistics" + ), + entity_id, + old_mean_type.name, + mean_type.name, + LINK_DEV_STATISTICS, + ) + continue + # Set meta data meta: StatisticMetaData = { - "has_mean": "mean" in wanted_statistics[entity_id], - "has_sum": "sum" in wanted_statistics[entity_id], + "mean_type": mean_type, + "has_sum": "sum" in wanted_statistics[entity_id].types, "name": None, "source": RECORDER_DOMAIN, "statistic_id": entity_id, @@ -525,19 +608,26 @@ def compile_statistics( # noqa: C901 # Make calculations stat: StatisticData = {"start": start} - if "max" in wanted_statistics[entity_id]: + if "max" in wanted_statistics[entity_id].types: stat["max"] = max( *itertools.islice(zip(*valid_float_states, strict=False), 1) ) - if "min" in wanted_statistics[entity_id]: + if "min" in wanted_statistics[entity_id].types: stat["min"] = min( *itertools.islice(zip(*valid_float_states, strict=False), 1) ) - if "mean" in wanted_statistics[entity_id]: - stat["mean"] = _time_weighted_average(valid_float_states, start, end) + match mean_type: + case StatisticMeanType.ARITHMETIC: + stat["mean"] = _time_weighted_arithmetic_mean( + valid_float_states, start, end + ) + case StatisticMeanType.CIRCULAR: + stat["mean"] = _time_weighted_circular_mean( + valid_float_states, start, end + ) - if "sum" in wanted_statistics[entity_id]: + if "sum" in wanted_statistics[entity_id].types: last_reset = old_last_reset = None new_state = old_state = None _sum = 0.0 @@ -661,18 +751,25 @@ def list_statistic_ids( attributes = state.attributes state_class = attributes[ATTR_STATE_CLASS] provided_statistics = DEFAULT_STATISTICS[state_class] - if statistic_type is not None and statistic_type not in provided_statistics: + if ( + statistic_type is not None + and statistic_type not in provided_statistics.types + ): continue if ( - (has_sum := "sum" in 
provided_statistics) + (has_sum := "sum" in provided_statistics.types) and ATTR_LAST_RESET not in attributes and state_class == SensorStateClass.MEASUREMENT ): continue + mean_type = StatisticMeanType.NONE + if "mean" in provided_statistics.types: + mean_type = provided_statistics.mean_type + result[entity_id] = { - "has_mean": "mean" in provided_statistics, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": RECORDER_DOMAIN, @@ -702,7 +799,7 @@ def _update_issues( if numeric and state_class is None: # Sensor no longer has a valid state class report_issue( - "state_class_removed", + STATE_CLASS_REMOVED_ISSUE, entity_id, {"statistic_id": entity_id}, ) @@ -713,7 +810,7 @@ def _update_issues( if numeric and not _equivalent_units({state_unit, metadata_unit}): # The unit has changed, and it's not possible to convert report_issue( - "units_changed", + UNITS_CHANGED_ISSUE, entity_id, { "statistic_id": entity_id, @@ -727,7 +824,7 @@ def _update_issues( valid_units = (unit or "" for unit in converter.VALID_UNITS) valid_units_str = ", ".join(sorted(valid_units)) report_issue( - "units_changed", + UNITS_CHANGED_ISSUE, entity_id, { "statistic_id": entity_id, @@ -737,6 +834,23 @@ def _update_issues( }, ) + if ( + (metadata_mean_type := metadata[1]["mean_type"]) is not None + and state_class + and (state_mean_type := DEFAULT_STATISTICS[state_class].mean_type) + != metadata_mean_type + ): + # The mean type has changed and the old statistics are not valid anymore + report_issue( + MEAN_TYPE_CHANGED_ISSUE, + entity_id, + { + "statistic_id": entity_id, + "metadata_mean_type": metadata_mean_type, + "state_mean_type": state_mean_type, + }, + ) + def update_statistics_issues( hass: HomeAssistant, @@ -759,7 +873,11 @@ def update_statistics_issues( issue.domain != DOMAIN or not (issue_data := issue.data) or issue_data.get("issue_type") - not in ("state_class_removed", "units_changed") + not in ( + STATE_CLASS_REMOVED_ISSUE, + UNITS_CHANGED_ISSUE, + MEAN_TYPE_CHANGED_ISSUE, + ) ): continue issues.add(issue.issue_id) diff --git a/homeassistant/components/sensor/strings.json b/homeassistant/components/sensor/strings.json index ae414a178e9..123c30da72e 100644 --- a/homeassistant/components/sensor/strings.json +++ b/homeassistant/components/sensor/strings.json @@ -278,10 +278,10 @@ "name": "Timestamp" }, "volatile_organic_compounds": { - "name": "VOCs" + "name": "Volatile organic compounds" }, "volatile_organic_compounds_parts": { - "name": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]" + "name": "Volatile organic compounds parts" }, "voltage": { "name": "Voltage" @@ -309,6 +309,10 @@ } }, "issues": { + "mean_type_changed": { + "title": "The mean type of {statistic_id} has changed", + "description": "" + }, "state_class_removed": { "title": "{statistic_id} no longer has a state class", "description": "" diff --git a/homeassistant/components/sentry/strings.json b/homeassistant/components/sentry/strings.json index efcdb631f3c..22f7b355e0e 100644 --- a/homeassistant/components/sentry/strings.json +++ b/homeassistant/components/sentry/strings.json @@ -24,7 +24,7 @@ "event_handled": "Send handled events", "event_third_party_packages": "Send events from third-party packages", "logging_event_level": "The log level Sentry will register an event for", - "logging_level": "The log level Sentry will record logs as breadcrums for", + "logging_level": "The log level Sentry will record events as breadcrumbs for", "tracing": "Enable performance tracing", "tracing_sample_rate": 
"Tracing sample rate; between 0.0 and 1.0 (1.0 = 100%)" } diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index 5ca58ec7d01..ee28c41f18b 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import Final +from aioshelly.ble.const import BLE_SCRIPT_NAME from aioshelly.block_device import BlockDevice from aioshelly.common import ConnectionOptions from aioshelly.const import DEFAULT_COAP_PORT, RPC_GENERATIONS @@ -11,12 +12,19 @@ from aioshelly.exceptions import ( DeviceConnectionError, InvalidAuthError, MacAddressMismatchError, + RpcCallError, ) -from aioshelly.rpc_device import RpcDevice +from aioshelly.rpc_device import RpcDevice, bluetooth_mac_from_primary_mac import voluptuous as vol from homeassistant.components.bluetooth import async_remove_scanner -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.const import ( + CONF_HOST, + CONF_MODEL, + CONF_PASSWORD, + CONF_USERNAME, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import ( @@ -53,6 +61,7 @@ from .utils import ( get_coap_context, get_device_entry_gen, get_http_port, + get_rpc_scripts_event_types, get_ws_context, ) @@ -102,6 +111,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ShellyConfigEntry) -> bool: """Set up Shelly from a config entry.""" + entry.runtime_data = ShellyEntryData([]) + # The custom component for Shelly devices uses shelly domain as well as core # integration. If the user removes the custom component but doesn't remove the # config entry, core integration will try to configure that config entry with an @@ -153,13 +164,14 @@ async def _async_setup_block_entry( device_entry = None sleep_period = entry.data.get(CONF_SLEEP_PERIOD) - runtime_data = entry.runtime_data = ShellyEntryData(BLOCK_SLEEPING_PLATFORMS) + runtime_data = entry.runtime_data + runtime_data.platforms = BLOCK_SLEEPING_PLATFORMS # Some old firmware have a wrong sleep period hardcoded value. 
# Following code block will force the right value for affected devices if ( sleep_period == BLOCK_WRONG_SLEEP_PERIOD - and entry.data["model"] in MODELS_WITH_WRONG_SLEEP_PERIOD + and entry.data[CONF_MODEL] in MODELS_WITH_WRONG_SLEEP_PERIOD ): LOGGER.warning( "Updating stored sleep period for %s: from %s to %s", @@ -180,13 +192,25 @@ async def _async_setup_block_entry( if not device.firmware_supported: async_create_issue_unsupported_firmware(hass, entry) await device.shutdown() - raise ConfigEntryNotReady + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="firmware_unsupported", + translation_placeholders={"device": entry.title}, + ) except (DeviceConnectionError, MacAddressMismatchError) as err: await device.shutdown() - raise ConfigEntryNotReady(repr(err)) from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err except InvalidAuthError as err: await device.shutdown() - raise ConfigEntryAuthFailed(repr(err)) from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"device": entry.title}, + ) from err runtime_data.block = ShellyBlockCoordinator(hass, entry, device) runtime_data.block.async_setup() @@ -252,7 +276,8 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) device_entry = None sleep_period = entry.data.get(CONF_SLEEP_PERIOD) - runtime_data = entry.runtime_data = ShellyEntryData(RPC_SLEEPING_PLATFORMS) + runtime_data = entry.runtime_data + runtime_data.platforms = RPC_SLEEPING_PLATFORMS if sleep_period == 0: # Not a sleeping device, finish setup @@ -263,13 +288,30 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) if not device.firmware_supported: async_create_issue_unsupported_firmware(hass, entry) await device.shutdown() - raise ConfigEntryNotReady - except (DeviceConnectionError, MacAddressMismatchError) as err: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="firmware_unsupported", + translation_placeholders={"device": entry.title}, + ) + runtime_data.rpc_supports_scripts = await device.supports_scripts() + if runtime_data.rpc_supports_scripts: + runtime_data.rpc_script_events = await get_rpc_scripts_event_types( + device, ignore_scripts=[BLE_SCRIPT_NAME] + ) + except (DeviceConnectionError, MacAddressMismatchError, RpcCallError) as err: await device.shutdown() - raise ConfigEntryNotReady(repr(err)) from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="device_communication_error", + translation_placeholders={"device": entry.title}, + ) from err except InvalidAuthError as err: await device.shutdown() - raise ConfigEntryAuthFailed(repr(err)) from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_error", + translation_placeholders={"device": entry.title}, + ) from err runtime_data.rpc = ShellyRpcCoordinator(hass, entry, device) runtime_data.rpc.async_setup() @@ -339,4 +381,5 @@ async def async_remove_entry(hass: HomeAssistant, entry: ShellyConfigEntry) -> N if get_device_entry_gen(entry) in RPC_GENERATIONS and ( mac_address := entry.unique_id ): - async_remove_scanner(hass, mac_address) + source = dr.format_mac(bluetooth_mac_from_primary_mac(mac_address)).upper() + async_remove_scanner(hass, source) diff --git a/homeassistant/components/shelly/binary_sensor.py 
b/homeassistant/components/shelly/binary_sensor.py index ed2ac68d264..b74578f1fb3 100644 --- a/homeassistant/components/shelly/binary_sensor.py +++ b/homeassistant/components/shelly/binary_sensor.py @@ -130,6 +130,7 @@ SENSORS: dict[tuple[str, str], BlockBinarySensorDescription] = { device_class=BinarySensorDeviceClass.GAS, translation_key="gas", value=lambda value: value in ["mild", "heavy"], + # Deprecated, remove in 2025.10 extra_state_attributes=lambda block: {"detected": block.gas}, ), ("sensor", "smoke"): BlockBinarySensorDescription( diff --git a/homeassistant/components/shelly/bluetooth/__init__.py b/homeassistant/components/shelly/bluetooth/__init__.py index d7eb020d671..2b772bd1b78 100644 --- a/homeassistant/components/shelly/bluetooth/__init__.py +++ b/homeassistant/components/shelly/bluetooth/__init__.py @@ -7,15 +7,22 @@ from typing import TYPE_CHECKING from aioshelly.ble import async_start_scanner, create_scanner from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT, BLE_SCAN_RESULT_VERSION -from homeassistant.components.bluetooth import async_register_scanner +from homeassistant.components.bluetooth import ( + BluetoothScanningMode, + async_register_scanner, +) from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback -from homeassistant.helpers.device_registry import format_mac from ..const import BLEScannerMode if TYPE_CHECKING: from ..coordinator import ShellyRpcCoordinator +BLE_SCANNER_MODE_TO_BLUETOOTH_SCANNING_MODE = { + BLEScannerMode.PASSIVE: BluetoothScanningMode.PASSIVE, + BLEScannerMode.ACTIVE: BluetoothScanningMode.ACTIVE, +} + async def async_connect_scanner( hass: HomeAssistant, @@ -26,8 +33,13 @@ async def async_connect_scanner( """Connect scanner.""" device = coordinator.device entry = coordinator.config_entry - source = format_mac(coordinator.mac).upper() - scanner = create_scanner(source, entry.title) + bluetooth_scanning_mode = BLE_SCANNER_MODE_TO_BLUETOOTH_SCANNING_MODE[scanner_mode] + scanner = create_scanner( + coordinator.bluetooth_source, + entry.title, + requested_mode=bluetooth_scanning_mode, + current_mode=bluetooth_scanning_mode, + ) unload_callbacks = [ async_register_scanner( hass, diff --git a/homeassistant/components/shelly/button.py b/homeassistant/components/shelly/button.py index 1f3c555a64b..06dffba5ead 100644 --- a/homeassistant/components/shelly/button.py +++ b/homeassistant/components/shelly/button.py @@ -2,12 +2,13 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine +from collections.abc import Callable from dataclasses import dataclass from functools import partial from typing import TYPE_CHECKING, Any, Final -from aioshelly.const import RPC_GENERATIONS +from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY, RPC_GENERATIONS +from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError from homeassistant.components.button import ( ButtonDeviceClass, @@ -16,15 +17,20 @@ from homeassistant.components.button import ( ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + CONNECTION_NETWORK_MAC, + DeviceInfo, +) from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from 
homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import slugify -from .const import LOGGER, SHELLY_GAS_MODELS +from .const import DOMAIN, LOGGER, SHELLY_GAS_MODELS from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator -from .utils import get_device_entry_gen +from .utils import get_device_entry_gen, get_rpc_key_ids @dataclass(frozen=True, kw_only=True) @@ -33,7 +39,7 @@ class ShellyButtonDescription[ ](ButtonEntityDescription): """Class to describe a Button entity.""" - press_action: Callable[[_ShellyCoordinatorT], Coroutine[Any, Any, None]] + press_action: str supported: Callable[[_ShellyCoordinatorT], bool] = lambda _: True @@ -44,14 +50,14 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [ name="Reboot", device_class=ButtonDeviceClass.RESTART, entity_category=EntityCategory.CONFIG, - press_action=lambda coordinator: coordinator.device.trigger_reboot(), + press_action="trigger_reboot", ), ShellyButtonDescription[ShellyBlockCoordinator]( key="self_test", name="Self test", translation_key="self_test", entity_category=EntityCategory.DIAGNOSTIC, - press_action=lambda coordinator: coordinator.device.trigger_shelly_gas_self_test(), + press_action="trigger_shelly_gas_self_test", supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS, ), ShellyButtonDescription[ShellyBlockCoordinator]( @@ -59,7 +65,7 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [ name="Mute", translation_key="mute", entity_category=EntityCategory.CONFIG, - press_action=lambda coordinator: coordinator.device.trigger_shelly_gas_mute(), + press_action="trigger_shelly_gas_mute", supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS, ), ShellyButtonDescription[ShellyBlockCoordinator]( @@ -67,11 +73,22 @@ BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [ name="Unmute", translation_key="unmute", entity_category=EntityCategory.CONFIG, - press_action=lambda coordinator: coordinator.device.trigger_shelly_gas_unmute(), + press_action="trigger_shelly_gas_unmute", supported=lambda coordinator: coordinator.device.model in SHELLY_GAS_MODELS, ), ] +BLU_TRV_BUTTONS: Final[list[ShellyButtonDescription]] = [ + ShellyButtonDescription[ShellyRpcCoordinator]( + key="calibrate", + name="Calibrate", + translation_key="calibrate", + entity_category=EntityCategory.CONFIG, + press_action="trigger_blu_trv_calibration", + supported=lambda coordinator: coordinator.device.model == MODEL_BLU_GATEWAY, + ), +] + @callback def async_migrate_unique_ids( @@ -123,14 +140,28 @@ async def async_setup_entry( hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator) ) - async_add_entities( + entities: list[ShellyButton | ShellyBluTrvButton] = [] + + entities.extend( ShellyButton(coordinator, button) for button in BUTTONS if button.supported(coordinator) ) + if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER): + if TYPE_CHECKING: + assert isinstance(coordinator, ShellyRpcCoordinator) -class ShellyButton( + entities.extend( + ShellyBluTrvButton(coordinator, button, id_) + for id_ in blutrv_key_ids + for button in BLU_TRV_BUTTONS + ) + + async_add_entities(entities) + + +class ShellyBaseButton( CoordinatorEntity[ShellyRpcCoordinator | ShellyBlockCoordinator], ButtonEntity ): """Defines a Shelly base button.""" @@ -148,14 +179,98 @@ class ShellyButton( ) -> None: """Initialize Shelly button.""" super().__init__(coordinator) + self.entity_description = description + async def 
async_press(self) -> None: + """Triggers the Shelly button press service.""" + try: + await self._press_method() + except DeviceConnectionError as err: + self.coordinator.last_update_success = False + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, + ) from err + except RpcCallError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="rpc_call_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, + ) from err + except InvalidAuthError: + await self.coordinator.async_shutdown_device_and_start_reauth() + + async def _press_method(self) -> None: + """Press method.""" + raise NotImplementedError + + +class ShellyButton(ShellyBaseButton): + """Defines a Shelly button.""" + + def __init__( + self, + coordinator: ShellyRpcCoordinator | ShellyBlockCoordinator, + description: ShellyButtonDescription[ + ShellyRpcCoordinator | ShellyBlockCoordinator + ], + ) -> None: + """Initialize Shelly button.""" + super().__init__(coordinator, description) + self._attr_name = f"{coordinator.device.name} {description.name}" self._attr_unique_id = f"{coordinator.mac}_{description.key}" self._attr_device_info = DeviceInfo( connections={(CONNECTION_NETWORK_MAC, coordinator.mac)} ) - async def async_press(self) -> None: - """Triggers the Shelly button press service.""" - await self.entity_description.press_action(self.coordinator) + async def _press_method(self) -> None: + """Press method.""" + method = getattr(self.coordinator.device, self.entity_description.press_action) + + if TYPE_CHECKING: + assert method is not None + + await method() + + +class ShellyBluTrvButton(ShellyBaseButton): + """Represent a Shelly BLU TRV button.""" + + def __init__( + self, + coordinator: ShellyRpcCoordinator, + description: ShellyButtonDescription, + id_: int, + ) -> None: + """Initialize.""" + super().__init__(coordinator, description) + + ble_addr: str = coordinator.device.config[f"{BLU_TRV_IDENTIFIER}:{id_}"]["addr"] + device_name = ( + coordinator.device.config[f"{BLU_TRV_IDENTIFIER}:{id_}"]["name"] + or f"shellyblutrv-{ble_addr.replace(':', '')}" + ) + self._attr_name = f"{device_name} {description.name}" + self._attr_unique_id = f"{ble_addr}_{description.key}" + self._attr_device_info = DeviceInfo( + connections={(CONNECTION_BLUETOOTH, ble_addr)} + ) + self._id = id_ + + async def _press_method(self) -> None: + """Press method.""" + method = getattr(self.coordinator.device, self.entity_description.press_action) + + if TYPE_CHECKING: + assert method is not None + + await method(self._id) diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index a3ec9be7cb0..498f2d3dba9 100644 --- a/homeassistant/components/shelly/climate.py +++ b/homeassistant/components/shelly/climate.py @@ -7,7 +7,12 @@ from dataclasses import asdict, dataclass from typing import Any, cast from aioshelly.block_device import Block -from aioshelly.const import BLU_TRV_IDENTIFIER, BLU_TRV_MODEL_NAME, RPC_GENERATIONS +from aioshelly.const import ( + BLU_TRV_IDENTIFIER, + BLU_TRV_MODEL_NAME, + BLU_TRV_TIMEOUT, + RPC_GENERATIONS, +) from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError from homeassistant.components.climate import ( @@ -36,7 +41,6 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from .const import ( 
BLU_TRV_TEMPERATURE_SETTINGS, - BLU_TRV_TIMEOUT, DOMAIN, LOGGER, NOT_CALIBRATED_ISSUE_ID, @@ -322,8 +326,12 @@ class BlockSleepingClimate( except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {kwargs}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index 5c5e187a0f4..200a88ea24c 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -7,12 +7,7 @@ from typing import Any, Final from aioshelly.block_device import BlockDevice from aioshelly.common import ConnectionOptions, get_info -from aioshelly.const import ( - BLOCK_GENERATIONS, - DEFAULT_HTTP_PORT, - MODEL_WALL_DISPLAY, - RPC_GENERATIONS, -) +from aioshelly.const import BLOCK_GENERATIONS, DEFAULT_HTTP_PORT, RPC_GENERATIONS from aioshelly.exceptions import ( CustomPortNotSupported, DeviceConnectionError, @@ -22,15 +17,11 @@ from aioshelly.exceptions import ( from aioshelly.rpc_device import RpcDevice import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import ( CONF_HOST, CONF_MAC, + CONF_MODEL, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, @@ -48,7 +39,7 @@ from .const import ( LOGGER, BLEScannerMode, ) -from .coordinator import async_reconnect_soon +from .coordinator import ShellyConfigEntry, async_reconnect_soon from .utils import ( get_block_device_sleep_period, get_coap_context, @@ -116,7 +107,9 @@ async def validate_input( return { "title": rpc_device.name, CONF_SLEEP_PERIOD: sleep_period, - "model": rpc_device.xmod_info.get("p") or rpc_device.shelly.get("model"), + CONF_MODEL: ( + rpc_device.xmod_info.get("p") or rpc_device.shelly.get(CONF_MODEL) + ), CONF_GEN: gen, } @@ -136,7 +129,7 @@ async def validate_input( return { "title": block_device.name, CONF_SLEEP_PERIOD: sleep_period, - "model": block_device.model, + CONF_MODEL: block_device.model, CONF_GEN: gen, } @@ -191,14 +184,14 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if device_info["model"]: + if device_info[CONF_MODEL]: return self.async_create_entry( title=device_info["title"], data={ CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT], CONF_SLEEP_PERIOD: device_info[CONF_SLEEP_PERIOD], - "model": device_info["model"], + CONF_MODEL: device_info[CONF_MODEL], CONF_GEN: device_info[CONF_GEN], }, ) @@ -230,7 +223,7 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - if device_info["model"]: + if device_info[CONF_MODEL]: return self.async_create_entry( title=device_info["title"], data={ @@ -238,7 +231,7 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): CONF_HOST: self.host, CONF_PORT: self.port, CONF_SLEEP_PERIOD: device_info[CONF_SLEEP_PERIOD], - "model": device_info["model"], + CONF_MODEL: device_info[CONF_MODEL], CONF_GEN: device_info[CONF_GEN], }, ) @@ -336,7 +329,7 @@ class 
ShellyConfigFlow(ConfigFlow, domain=DOMAIN): """Handle discovery confirm.""" errors: dict[str, str] = {} - if not self.device_info["model"]: + if not self.device_info[CONF_MODEL]: errors["base"] = "firmware_not_fully_provisioned" model = "Shelly" else: @@ -345,9 +338,9 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry( title=self.device_info["title"], data={ - "host": self.host, + CONF_HOST: self.host, CONF_SLEEP_PERIOD: self.device_info[CONF_SLEEP_PERIOD], - "model": self.device_info["model"], + CONF_MODEL: self.device_info[CONF_MODEL], CONF_GEN: self.device_info[CONF_GEN], }, ) @@ -356,8 +349,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="confirm_discovery", description_placeholders={ - "model": model, - "host": self.host, + CONF_MODEL: model, + CONF_HOST: self.host, }, errors=errors, ) @@ -455,19 +448,17 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: + def async_get_options_flow(config_entry: ShellyConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" return OptionsFlowHandler() @classmethod @callback - def async_supports_options_flow(cls, config_entry: ConfigEntry) -> bool: + def async_supports_options_flow(cls, config_entry: ShellyConfigEntry) -> bool: """Return options flow support for this handler.""" - return ( - get_device_entry_gen(config_entry) in RPC_GENERATIONS - and not config_entry.data.get(CONF_SLEEP_PERIOD) - and config_entry.data.get("model") != MODEL_WALL_DISPLAY - ) + return get_device_entry_gen( + config_entry + ) in RPC_GENERATIONS and not config_entry.data.get(CONF_SLEEP_PERIOD) class OptionsFlowHandler(OptionsFlow): @@ -477,6 +468,13 @@ class OptionsFlowHandler(OptionsFlow): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle options flow.""" + if ( + supports_scripts := self.config_entry.runtime_data.rpc_supports_scripts + ) is None: + return self.async_abort(reason="cannot_connect") + if not supports_scripts: + return self.async_abort(reason="no_scripts_support") + if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index d47f2b0ae80..43fb6df18d0 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -25,6 +25,7 @@ from aioshelly.const import ( MODEL_VALVE, MODEL_VINTAGE_V2, MODEL_WALL_DISPLAY, + MODEL_WALL_DISPLAY_X2, ) from homeassistant.components.number import NumberMode @@ -245,6 +246,7 @@ GEN2_RELEASE_URL = "https://shelly-api-docs.shelly.cloud/gen2/changelog/" GEN2_BETA_RELEASE_URL = f"{GEN2_RELEASE_URL}#unreleased" DEVICES_WITHOUT_FIRMWARE_CHANGELOG = ( MODEL_WALL_DISPLAY, + MODEL_WALL_DISPLAY_X2, MODEL_MOTION, MODEL_MOTION_2, MODEL_VALVE, @@ -271,9 +273,6 @@ API_WS_URL = "/api/shelly/ws" COMPONENT_ID_PATTERN = re.compile(r"[a-z\d]+:\d+") -# value confirmed by Shelly team -BLU_TRV_TIMEOUT = 60 - ROLE_TO_DEVICE_CLASS_MAP = { "current_humidity": SensorDeviceClass.HUMIDITY, "current_temperature": SensorDeviceClass.TEMPERATURE, diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 7b4da241043..4a1ea72f38a 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -18,6 +18,7 @@ from aioshelly.exceptions import ( RpcCallError, 
) from aioshelly.rpc_device import RpcDevice, RpcUpdateType +from aioshelly.rpc_device.utils import bluetooth_mac_from_primary_mac from propcache.api import cached_property from homeassistant.components.bluetooth import async_remove_scanner @@ -25,6 +26,7 @@ from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import ( ATTR_DEVICE_ID, CONF_HOST, + CONF_MODEL, EVENT_HOMEASSISTANT_STOP, Platform, ) @@ -86,6 +88,8 @@ class ShellyEntryData: rest: ShellyRestCoordinator | None = None rpc: ShellyRpcCoordinator | None = None rpc_poll: ShellyRpcPollingCoordinator | None = None + rpc_script_events: dict[int, list[str]] | None = None + rpc_supports_scripts: bool | None = None type ShellyConfigEntry = ConfigEntry[ShellyEntryData] @@ -138,7 +142,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( @cached_property def model(self) -> str: """Model of the device.""" - return cast(str, self.config_entry.data["model"]) + return cast(str, self.config_entry.data[CONF_MODEL]) @cached_property def mac(self) -> str: @@ -375,14 +379,23 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]): if self.sleep_period: # Sleeping device, no point polling it, just mark it unavailable raise UpdateFailed( - f"Sleeping device did not update within {self.sleep_period} seconds interval" + translation_domain=DOMAIN, + translation_key="update_error_sleeping_device", + translation_placeholders={ + "device": self.name, + "period": str(self.sleep_period), + }, ) LOGGER.debug("Polling Shelly Block Device - %s", self.name) try: await self.device.update() except DeviceConnectionError as err: - raise UpdateFailed(repr(err)) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() @@ -467,7 +480,11 @@ class ShellyRestCoordinator(ShellyCoordinatorBase[BlockDevice]): return await self.device.update_shelly() except (DeviceConnectionError, MacAddressMismatchError) as err: - raise UpdateFailed(repr(err)) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() else: @@ -496,6 +513,15 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): self._connect_task: asyncio.Task | None = None entry.async_on_unload(entry.add_update_listener(self._async_update_listener)) + @cached_property + def bluetooth_source(self) -> str: + """Return the Bluetooth source address. + + This is the Bluetooth MAC address of the device that is used + for the Bluetooth scanner. 
+ """ + return format_mac(bluetooth_mac_from_primary_mac(self.mac)).upper() + async def async_device_online(self, source: str) -> None: """Handle device going online.""" if not self.sleep_period: @@ -624,7 +650,12 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): if self.sleep_period: # Sleeping device, no point polling it, just mark it unavailable raise UpdateFailed( - f"Sleeping device did not update within {self.sleep_period} seconds interval" + translation_domain=DOMAIN, + translation_key="update_error_sleeping_device", + translation_placeholders={ + "device": self.name, + "period": str(self.sleep_period), + }, ) async with self._connection_lock: @@ -632,7 +663,11 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): return if not await self._async_device_connect_task(): - raise UpdateFailed("Device reconnect error") + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error_reconnect_error", + translation_placeholders={"device": self.name}, + ) async def _async_disconnected(self, reconnect: bool) -> None: """Handle device disconnected.""" @@ -682,7 +717,8 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): is updated. """ if not self.sleep_period: - await self._async_connect_ble_scanner() + if self.config_entry.runtime_data.rpc_supports_scripts: + await self._async_connect_ble_scanner() else: await self._async_setup_outbound_websocket() @@ -706,7 +742,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): ) if ble_scanner_mode == BLEScannerMode.DISABLED and self.connected: await async_stop_scanner(self.device) - async_remove_scanner(self.hass, format_mac(self.mac).upper()) + async_remove_scanner(self.hass, self.bluetooth_source) return if await async_ensure_ble_enabled(self.device): # BLE enable required a reboot, don't bother connecting @@ -808,13 +844,21 @@ class ShellyRpcPollingCoordinator(ShellyCoordinatorBase[RpcDevice]): async def _async_update_data(self) -> None: """Fetch data.""" if not self.device.connected: - raise UpdateFailed("Device disconnected") + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error_device_disconnected", + translation_placeholders={"device": self.name}, + ) LOGGER.debug("Polling Shelly RPC Device - %s", self.name) try: await self.device.poll() except (DeviceConnectionError, RpcCallError) as err: - raise UpdateFailed(f"Device disconnected: {err!r}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_error", + translation_placeholders={"device": self.name}, + ) from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/device_trigger.py b/homeassistant/components/shelly/device_trigger.py index 6e96eb5ed21..740e6aae9b2 100644 --- a/homeassistant/components/shelly/device_trigger.py +++ b/homeassistant/components/shelly/device_trigger.py @@ -105,7 +105,9 @@ async def async_validate_trigger_config( return config raise InvalidDeviceAutomationConfig( - f"Invalid ({CONF_TYPE},{CONF_SUBTYPE}): {trigger}" + translation_domain=DOMAIN, + translation_key="invalid_trigger", + translation_placeholders={"trigger": str(trigger)}, ) @@ -137,7 +139,11 @@ async def async_get_triggers( return triggers - raise InvalidDeviceAutomationConfig(f"Device not found: {device_id}") + raise InvalidDeviceAutomationConfig( + translation_domain=DOMAIN, + translation_key="device_not_found", + translation_placeholders={"device": device_id}, + ) async def async_attach_trigger( diff 
--git a/homeassistant/components/shelly/diagnostics.py b/homeassistant/components/shelly/diagnostics.py index 9250206b8ab..2a9699e0a08 100644 --- a/homeassistant/components/shelly/diagnostics.py +++ b/homeassistant/components/shelly/diagnostics.py @@ -6,9 +6,14 @@ from typing import Any from homeassistant.components.bluetooth import async_scanner_by_source from homeassistant.components.diagnostics import async_redact_data -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import ( + ATTR_MODEL, + ATTR_NAME, + ATTR_SW_VERSION, + CONF_PASSWORD, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import format_mac from .coordinator import ShellyConfigEntry from .utils import get_rpc_ws_url @@ -31,9 +36,9 @@ async def async_get_config_entry_diagnostics( block_coordinator = shelly_entry_data.block assert block_coordinator device_info = { - "name": block_coordinator.name, - "model": block_coordinator.model, - "sw_version": block_coordinator.sw_version, + ATTR_NAME: block_coordinator.name, + ATTR_MODEL: block_coordinator.model, + ATTR_SW_VERSION: block_coordinator.sw_version, } if block_coordinator.device.initialized: device_settings = { @@ -66,9 +71,9 @@ async def async_get_config_entry_diagnostics( rpc_coordinator = shelly_entry_data.rpc assert rpc_coordinator device_info = { - "name": rpc_coordinator.name, - "model": rpc_coordinator.model, - "sw_version": rpc_coordinator.sw_version, + ATTR_NAME: rpc_coordinator.name, + ATTR_MODEL: rpc_coordinator.model, + ATTR_SW_VERSION: rpc_coordinator.sw_version, } if rpc_coordinator.device.initialized: device_settings = { @@ -88,8 +93,7 @@ async def async_get_config_entry_diagnostics( if k in ["sys", "wifi"] } - source = format_mac(rpc_coordinator.mac).upper() - if scanner := async_scanner_by_source(hass, source): + if scanner := async_scanner_by_source(hass, rpc_coordinator.bluetooth_source): bluetooth = { "scanner": await scanner.async_diagnostics(), } diff --git a/homeassistant/components/shelly/entity.py b/homeassistant/components/shelly/entity.py index 001727c74b3..9ed3f47b41a 100644 --- a/homeassistant/components/shelly/entity.py +++ b/homeassistant/components/shelly/entity.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_registry import RegistryEntry from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CONF_SLEEP_PERIOD, LOGGER +from .const import CONF_SLEEP_PERIOD, DOMAIN, LOGGER from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .utils import ( async_remove_shelly_entity, @@ -296,7 +296,6 @@ class RpcEntityDescription(EntityDescription): value: Callable[[Any, Any], Any] | None = None available: Callable[[dict], bool] | None = None removal_condition: Callable[[dict, dict, str], bool] | None = None - extra_state_attributes: Callable[[dict, dict], dict | None] | None = None use_polling_coordinator: bool = False supported: Callable = lambda _: False unit: Callable[[dict], str | None] | None = None @@ -313,7 +312,6 @@ class RestEntityDescription(EntityDescription): name: str = "" value: Callable[[dict, Any], Any] | None = None - extra_state_attributes: Callable[[dict], dict | None] | None = None class ShellyBlockEntity(CoordinatorEntity[ShellyBlockCoordinator]): @@ -347,8 +345,12 @@ class ShellyBlockEntity(CoordinatorEntity[ShellyBlockCoordinator]): except DeviceConnectionError as err: self.coordinator.last_update_success = 
False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {kwargs}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() @@ -408,13 +410,21 @@ class ShellyRpcEntity(CoordinatorEntity[ShellyRpcCoordinator]): except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Call RPC for {self.name} connection error, method: {method}, params:" - f" {params}, error: {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except RpcCallError as err: raise HomeAssistantError( - f"Call RPC for {self.name} request error, method: {method}, params:" - f" {params}, error: {err!r}" + translation_domain=DOMAIN, + translation_key="rpc_call_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/event.py b/homeassistant/components/shelly/event.py index bfd705f447a..ec5810581b1 100644 --- a/homeassistant/components/shelly/event.py +++ b/homeassistant/components/shelly/event.py @@ -34,7 +34,6 @@ from .utils import ( get_device_entry_gen, get_rpc_entity_name, get_rpc_key_instances, - get_rpc_script_event_types, is_block_momentary_input, is_rpc_momentary_input, ) @@ -109,18 +108,15 @@ async def async_setup_entry( script_instances = get_rpc_key_instances( coordinator.device.status, SCRIPT_EVENT.key ) + script_events = config_entry.runtime_data.rpc_script_events for script in script_instances: script_name = get_rpc_entity_name(coordinator.device, script) if script_name == BLE_SCRIPT_NAME: continue - event_types = await get_rpc_script_event_types( - coordinator.device, int(script.split(":")[-1]) - ) - if not event_types: - continue - - entities.append(ShellyRpcScriptEvent(coordinator, script, event_types)) + script_id = int(script.split(":")[-1]) + if script_events and (event_types := script_events[script_id]): + entities.append(ShellyRpcScriptEvent(coordinator, script, event_types)) # If a script is removed, from the device configuration, we need to remove orphaned entities async_remove_orphaned_entities( diff --git a/homeassistant/components/shelly/icons.json b/homeassistant/components/shelly/icons.json index f93abf6b854..08b269a73c5 100644 --- a/homeassistant/components/shelly/icons.json +++ b/homeassistant/components/shelly/icons.json @@ -23,12 +23,18 @@ "gas_concentration": { "default": "mdi:gauge" }, + "gas_detected": { + "default": "mdi:gas-burner" + }, "lamp_life": { "default": "mdi:progress-wrench" }, "operation": { "default": "mdi:cog-transfer" }, + "self_test": { + "default": "mdi:progress-wrench" + }, "tilt": { "default": "mdi:angle-acute" }, diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index 722fd4c128a..e863720e476 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioshelly"], - "requirements": ["aioshelly==13.1.0"], + 
"requirements": ["aioshelly==13.4.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/shelly/number.py b/homeassistant/components/shelly/number.py index 59716f39c7f..c629eb4a57a 100644 --- a/homeassistant/components/shelly/number.py +++ b/homeassistant/components/shelly/number.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from typing import TYPE_CHECKING, Any, Final, cast from aioshelly.block_device import Block -from aioshelly.const import RPC_GENERATIONS +from aioshelly.const import BLU_TRV_TIMEOUT, RPC_GENERATIONS from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError from homeassistant.components.number import ( @@ -25,7 +25,7 @@ from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceIn from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_registry import RegistryEntry -from .const import BLU_TRV_TIMEOUT, CONF_SLEEP_PERIOD, LOGGER, VIRTUAL_NUMBER_MODE_MAP +from .const import CONF_SLEEP_PERIOD, DOMAIN, LOGGER, VIRTUAL_NUMBER_MODE_MAP from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( BlockEntityDescription, @@ -324,8 +324,12 @@ class BlockSleepingNumber(ShellySleepingBlockAttributeEntity, RestoreNumber): except DeviceConnectionError as err: self.coordinator.last_update_success = False raise HomeAssistantError( - f"Setting state for entity {self.name} failed, state: {params}, error:" - f" {err!r}" + translation_domain=DOMAIN, + translation_key="device_communication_action_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py index 183a1aa06a1..79e4c97aead 100644 --- a/homeassistant/components/shelly/sensor.py +++ b/homeassistant/components/shelly/sensor.py @@ -39,7 +39,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_registry import RegistryEntry from homeassistant.helpers.typing import StateType -from .const import CONF_SLEEP_PERIOD, ROLE_TO_DEVICE_CLASS_MAP, SHAIR_MAX_WORK_HOURS +from .const import CONF_SLEEP_PERIOD, ROLE_TO_DEVICE_CLASS_MAP from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( BlockEntityDescription, @@ -58,6 +58,7 @@ from .utils import ( async_remove_orphaned_entities, get_device_entry_gen, get_device_uptime, + get_shelly_air_lamp_life, get_virtual_component_ids, is_rpc_wifi_stations_disabled, ) @@ -355,8 +356,9 @@ SENSORS: dict[tuple[str, str], BlockSensorDescription] = { name="Lamp life", native_unit_of_measurement=PERCENTAGE, translation_key="lamp_life", - value=lambda value: 100 - (value / 3600 / SHAIR_MAX_WORK_HOURS), + value=get_shelly_air_lamp_life, suggested_display_precision=1, + # Deprecated, remove in 2025.10 extra_state_attributes=lambda block: { "Operational hours": round(cast(int, block.totalWorkTime) / 3600, 1) }, @@ -374,9 +376,10 @@ SENSORS: dict[tuple[str, str], BlockSensorDescription] = { key="sensor|sensorOp", name="Operation", device_class=SensorDeviceClass.ENUM, - options=["unknown", "warmup", "normal", "fault"], + options=["warmup", "normal", "fault"], translation_key="operation", - value=lambda value: value, + value=lambda value: None if value == "unknown" else value, + # 
Deprecated, remove in 2025.10 extra_state_attributes=lambda block: {"self_test": block.selfTest}, ), ("valve", "valve"): BlockSensorDescription( @@ -391,11 +394,33 @@ SENSORS: dict[tuple[str, str], BlockSensorDescription] = { "failure", "opened", "opening", - "unknown", ], + value=lambda value: None if value == "unknown" else value, entity_category=EntityCategory.DIAGNOSTIC, removal_condition=lambda _, block: block.valve == "not_connected", ), + ("sensor", "gas"): BlockSensorDescription( + key="sensor|gas", + name="Gas detected", + translation_key="gas_detected", + device_class=SensorDeviceClass.ENUM, + options=[ + "none", + "mild", + "heavy", + "test", + ], + value=lambda value: None if value == "unknown" else value, + entity_category=EntityCategory.DIAGNOSTIC, + ), + ("sensor", "selfTest"): BlockSensorDescription( + key="sensor|selfTest", + name="Self test", + translation_key="self_test", + device_class=SensorDeviceClass.ENUM, + options=["not_completed", "completed", "running", "pending"], + entity_category=EntityCategory.DIAGNOSTIC, + ), } REST_SENSORS: Final = { diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index eb869b54e4c..afc3f92a3ce 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -17,12 +17,20 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "Username for the device's web panel.", + "password": "Password for the device's web panel." } }, "reauth_confirm": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::shelly::config::step::credentials::data_description::username%]", + "password": "[%key:component::shelly::config::step::credentials::data_description::password%]" } }, "confirm_discovery": { @@ -87,8 +95,15 @@ "description": "Bluetooth scanning can be active or passive. With active, the Shelly requests data from nearby devices; with passive, the Shelly receives unsolicited data from nearby devices.", "data": { "ble_scanner_mode": "Bluetooth scanner mode" + }, + "data_description": { + "ble_scanner_mode": "The scanner mode to use for Bluetooth scanning." } } + }, + "abort": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "no_scripts_support": "Device does not support scripts and cannot be used as a Bluetooth scanner." 
} }, "selector": { @@ -106,7 +121,6 @@ "state_attributes": { "detected": { "state": { - "unknown": "Unknown", "none": "None", "mild": "Mild", "heavy": "Heavy", @@ -139,9 +153,26 @@ } }, "sensor": { + "gas_detected": { + "state": { + "none": "None", + "mild": "Mild", + "heavy": "Heavy", + "test": "Test" + }, + "state_attributes": { + "options": { + "state": { + "none": "[%key:component::shelly::entity::sensor::gas_detected::state::none%]", + "mild": "[%key:component::shelly::entity::sensor::gas_detected::state::mild%]", + "heavy": "[%key:component::shelly::entity::sensor::gas_detected::state::heavy%]", + "test": "[%key:component::shelly::entity::sensor::gas_detected::state::test%]" + } + } + } + }, "operation": { "state": { - "unknown": "Unknown", "warmup": "Warm-up", "normal": "Normal", "fault": "Fault" @@ -157,6 +188,24 @@ } } }, + "self_test": { + "state": { + "not_completed": "Not completed", + "completed": "Completed", + "running": "Running", + "pending": "Pending" + }, + "state_attributes": { + "options": { + "state": { + "not_completed": "[%key:component::shelly::entity::sensor::self_test::state::not_completed%]", + "completed": "[%key:component::shelly::entity::sensor::self_test::state::completed%]", + "running": "[%key:component::shelly::entity::sensor::self_test::state::running%]", + "pending": "[%key:component::shelly::entity::sensor::self_test::state::pending%]" + } + } + } + }, "valve_status": { "state": { "checking": "Checking", @@ -164,12 +213,52 @@ "closing": "Closing", "failure": "Failure", "opened": "Opened", - "opening": "Opening", - "unknown": "[%key:component::shelly::entity::sensor::operation::state::unknown%]" + "opening": "Opening" } } } }, + "exceptions": { + "auth_error": { + "message": "Authentication failed for {device}, please update your credentials" + }, + "device_communication_error": { + "message": "Device communication error occurred for {device}" + }, + "device_communication_action_error": { + "message": "Device communication error occurred while calling action for {entity} of {device}" + }, + "device_not_found": { + "message": "{device} not found while configuring device automation triggers" + }, + "firmware_unsupported": { + "message": "{device} is running an unsupported firmware, please update the firmware" + }, + "invalid_trigger": { + "message": "Invalid device automation trigger (type, subtype): {trigger}" + }, + "ota_update_connection_error": { + "message": "Device communication error occurred while triggering OTA update for {device}" + }, + "ota_update_rpc_error": { + "message": "RPC call error occurred while triggering OTA update for {device}" + }, + "rpc_call_action_error": { + "message": "RPC call error occurred while calling action for {entity} of {device}" + }, + "update_error": { + "message": "An error occurred while retrieving data from {device}" + }, + "update_error_device_disconnected": { + "message": "An error occurred while retrieving data from {device} because it is disconnected" + }, + "update_error_reconnect_error": { + "message": "An error occurred while reconnecting to {device}" + }, + "update_error_sleeping_device": { + "message": "Sleeping device did not update within {period} seconds interval" + } + }, "issues": { "device_not_calibrated": { "title": "Shelly device {device_name} is not calibrated", diff --git a/homeassistant/components/shelly/switch.py b/homeassistant/components/shelly/switch.py index 41826706945..ce9e4f065fb 100644 --- a/homeassistant/components/shelly/switch.py +++ b/homeassistant/components/shelly/switch.py @@ 
-7,8 +7,9 @@ from dataclasses import dataclass from typing import Any, cast from aioshelly.block_device import Block -from aioshelly.const import MODEL_2, MODEL_25, MODEL_WALL_DISPLAY, RPC_GENERATIONS +from aioshelly.const import RPC_GENERATIONS +from homeassistant.components.climate import DOMAIN as CLIMATE_PLATFORM from homeassistant.components.switch import ( DOMAIN as SWITCH_PLATFORM, SwitchEntity, @@ -20,28 +21,22 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.entity_registry import RegistryEntry from homeassistant.helpers.restore_state import RestoreEntity -from .const import CONF_SLEEP_PERIOD, MOTION_MODELS from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( BlockEntityDescription, RpcEntityDescription, - ShellyBlockEntity, + ShellyBlockAttributeEntity, ShellyRpcAttributeEntity, - ShellyRpcEntity, ShellySleepingBlockAttributeEntity, async_setup_entry_attribute_entities, async_setup_entry_rpc, ) from .utils import ( async_remove_orphaned_entities, - async_remove_shelly_entity, get_device_entry_gen, - get_rpc_key_ids, get_virtual_component_ids, - is_block_channel_type_light, - is_rpc_channel_type_light, - is_rpc_thermostat_internal_actuator, - is_rpc_thermostat_mode, + is_block_exclude_from_relay, + is_rpc_exclude_from_relay, ) @@ -50,11 +45,20 @@ class BlockSwitchDescription(BlockEntityDescription, SwitchEntityDescription): """Class to describe a BLOCK switch.""" -MOTION_SWITCH = BlockSwitchDescription( - key="sensor|motionActive", - name="Motion detection", - entity_category=EntityCategory.CONFIG, -) +BLOCK_RELAY_SWITCHES = { + ("relay", "output"): BlockSwitchDescription( + key="relay|output", + removal_condition=is_block_exclude_from_relay, + ) +} + +BLOCK_SLEEPING_MOTION_SWITCH = { + ("sensor", "motionActive"): BlockSwitchDescription( + key="sensor|motionActive", + name="Motion detection", + entity_category=EntityCategory.CONFIG, + ) +} @dataclass(frozen=True, kw_only=True) @@ -67,6 +71,18 @@ class RpcSwitchDescription(RpcEntityDescription, SwitchEntityDescription): method_params_fn: Callable[[int | None, bool], dict] +RPC_RELAY_SWITCHES = { + "switch": RpcSwitchDescription( + key="switch", + sub_key="output", + removal_condition=is_rpc_exclude_from_relay, + is_on=lambda status: bool(status["output"]), + method_on="Switch.Set", + method_off="Switch.Set", + method_params_fn=lambda id, value: {"id": id, "on": value}, + ), +} + RPC_SWITCHES = { "boolean": RpcSwitchDescription( key="boolean", @@ -111,46 +127,17 @@ def async_setup_block_entry( coordinator = config_entry.runtime_data.block assert coordinator - # Add Shelly Motion as a switch - if coordinator.model in MOTION_MODELS: - async_setup_entry_attribute_entities( - hass, - config_entry, - async_add_entities, - {("sensor", "motionActive"): MOTION_SWITCH}, - BlockSleepingMotionSwitch, - ) - return + async_setup_entry_attribute_entities( + hass, config_entry, async_add_entities, BLOCK_RELAY_SWITCHES, BlockRelaySwitch + ) - if config_entry.data[CONF_SLEEP_PERIOD]: - return - - # In roller mode the relay blocks exist but do not contain required info - if ( - coordinator.model in [MODEL_2, MODEL_25] - and coordinator.device.settings["mode"] != "relay" - ): - return - - relay_blocks = [] - assert coordinator.device.blocks - for block in coordinator.device.blocks: - if block.type != "relay" or ( - block.channel is not None - and is_block_channel_type_light( - coordinator.device.settings, int(block.channel) - ) - ): 
- continue - - relay_blocks.append(block) - unique_id = f"{coordinator.mac}-{block.type}_{block.channel}" - async_remove_shelly_entity(hass, "light", unique_id) - - if not relay_blocks: - return - - async_add_entities(BlockRelaySwitch(coordinator, block) for block in relay_blocks) + async_setup_entry_attribute_entities( + hass, + config_entry, + async_add_entities, + BLOCK_SLEEPING_MOTION_SWITCH, + BlockSleepingMotionSwitch, + ) @callback @@ -162,32 +149,10 @@ def async_setup_rpc_entry( """Set up entities for RPC device.""" coordinator = config_entry.runtime_data.rpc assert coordinator - switch_key_ids = get_rpc_key_ids(coordinator.device.status, "switch") - switch_ids = [] - for id_ in switch_key_ids: - if is_rpc_channel_type_light(coordinator.device.config, id_): - continue - - if coordinator.model == MODEL_WALL_DISPLAY: - # There are three configuration scenarios for WallDisplay: - # - relay mode (no thermostat) - # - thermostat mode using the internal relay as an actuator - # - thermostat mode using an external (from another device) relay as - # an actuator - if not is_rpc_thermostat_mode(id_, coordinator.device.status): - # The device is not in thermostat mode, we need to remove a climate - # entity - unique_id = f"{coordinator.mac}-thermostat:{id_}" - async_remove_shelly_entity(hass, "climate", unique_id) - elif is_rpc_thermostat_internal_actuator(coordinator.device.status): - # The internal relay is an actuator, skip this ID so as not to create - # a switch entity - continue - - switch_ids.append(id_) - unique_id = f"{coordinator.mac}-switch:{id_}" - async_remove_shelly_entity(hass, "light", unique_id) + async_setup_entry_rpc( + hass, config_entry, async_add_entities, RPC_RELAY_SWITCHES, RpcRelaySwitch + ) async_setup_entry_rpc( hass, config_entry, async_add_entities, RPC_SWITCHES, RpcSwitch @@ -218,10 +183,16 @@ def async_setup_rpc_entry( "script", ) - if not switch_ids: - return - - async_add_entities(RpcRelaySwitch(coordinator, id_) for id_ in switch_ids) + # If the climate is removed from the device configuration, we need + # to remove orphaned entities + async_remove_orphaned_entities( + hass, + config_entry.entry_id, + coordinator.mac, + CLIMATE_PLATFORM, + coordinator.device.status, + "thermostat", + ) class BlockSleepingMotionSwitch( @@ -272,13 +243,22 @@ class BlockSleepingMotionSwitch( self.last_state = last_state -class BlockRelaySwitch(ShellyBlockEntity, SwitchEntity): +class BlockRelaySwitch(ShellyBlockAttributeEntity, SwitchEntity): """Entity that controls a relay on Block based Shelly devices.""" - def __init__(self, coordinator: ShellyBlockCoordinator, block: Block) -> None: + entity_description: BlockSwitchDescription + + def __init__( + self, + coordinator: ShellyBlockCoordinator, + block: Block, + attribute: str, + description: BlockSwitchDescription, + ) -> None: """Initialize relay switch.""" - super().__init__(coordinator, block) + super().__init__(coordinator, block, attribute, description) self.control_result: dict[str, Any] | None = None + self._attr_unique_id: str = f"{coordinator.mac}-{block.description}" @property def is_on(self) -> bool: @@ -305,28 -285,6 @@ class BlockRelaySwitch(ShellyBlockEntity, SwitchEntity): super()._update_callback() -class RpcRelaySwitch(ShellyRpcEntity, SwitchEntity): - """Entity that controls a relay on RPC based Shelly devices.""" - - def __init__(self, coordinator: ShellyRpcCoordinator, id_: int) -> None: - """Initialize relay switch.""" - super().__init__(coordinator, f"switch:{id_}") - self._id = id_ - - @property - def 
is_on(self) -> bool: - """If switch is on.""" - return bool(self.status["output"]) - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn on relay.""" - await self.call_rpc("Switch.Set", {"id": self._id, "on": True}) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn off relay.""" - await self.call_rpc("Switch.Set", {"id": self._id, "on": False}) - - class RpcSwitch(ShellyRpcAttributeEntity, SwitchEntity): """Entity that controls a switch on RPC based Shelly devices.""" @@ -351,3 +309,21 @@ class RpcSwitch(ShellyRpcAttributeEntity, SwitchEntity): self.entity_description.method_off, self.entity_description.method_params_fn(self._id, False), ) + + +class RpcRelaySwitch(RpcSwitch): + """Entity that controls a switch on RPC based Shelly devices.""" + + # False to avoid double naming as True is inherited from base class + _attr_has_entity_name = False + + def __init__( + self, + coordinator: ShellyRpcCoordinator, + key: str, + attribute: str, + description: RpcEntityDescription, + ) -> None: + """Initialize the switch.""" + super().__init__(coordinator, key, attribute, description) + self._attr_unique_id: str = f"{coordinator.mac}-{key}" diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index b1aa84b2640..12ce6dc70cd 100644 --- a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -25,7 +25,14 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity -from .const import CONF_SLEEP_PERIOD, OTA_BEGIN, OTA_ERROR, OTA_PROGRESS, OTA_SUCCESS +from .const import ( + CONF_SLEEP_PERIOD, + DOMAIN, + OTA_BEGIN, + OTA_ERROR, + OTA_PROGRESS, + OTA_SUCCESS, +) from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator from .entity import ( RestEntityDescription, @@ -198,7 +205,11 @@ class RestUpdateEntity(ShellyRestAttributeEntity, UpdateEntity): try: result = await self.coordinator.device.trigger_ota_update(beta=beta) except DeviceConnectionError as err: - raise HomeAssistantError(f"Error starting OTA update: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_connection_error", + translation_placeholders={"device": self.coordinator.name}, + ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() else: @@ -310,9 +321,20 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): try: await self.coordinator.device.trigger_ota_update(beta=beta) except DeviceConnectionError as err: - raise HomeAssistantError(f"OTA update connection error: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_connection_error", + translation_placeholders={"device": self.coordinator.name}, + ) from err except RpcCallError as err: - raise HomeAssistantError(f"OTA update request error: {err!r}") from err + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="ota_update_rpc_error", + translation_placeholders={ + "entity": self.entity_id, + "device": self.coordinator.name, + }, + ) from err except InvalidAuthError: await self.coordinator.async_shutdown_device_and_start_reauth() else: diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index 2e81f745819..474e2bb9410 100644 --- a/homeassistant/components/shelly/utils.py +++ 
b/homeassistant/components/shelly/utils.py @@ -6,7 +6,7 @@ from collections.abc import Iterable from datetime import datetime, timedelta from ipaddress import IPv4Address, IPv6Address, ip_address from types import MappingProxyType -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from aiohttp.web import Request, WebSocketResponse from aioshelly.block_device import COAP, Block, BlockDevice @@ -28,7 +28,12 @@ from yarl import URL from homeassistant.components import network from homeassistant.components.http import HomeAssistantView from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PORT, EVENT_HOMEASSISTANT_STOP +from homeassistant.const import ( + CONF_HOST, + CONF_MODEL, + CONF_PORT, + EVENT_HOMEASSISTANT_STOP, +) from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import ( device_registry as dr, @@ -54,6 +59,7 @@ from .const import ( GEN2_RELEASE_URL, LOGGER, RPC_INPUTS_EVENTS_TYPES, + SHAIR_MAX_WORK_HOURS, SHBTN_INPUTS_EVENTS_TYPES, SHBTN_MODELS, SHELLY_EMIT_EVENT_PATTERN, @@ -175,6 +181,18 @@ def is_block_momentary_input( return button_type in momentary_types +def is_block_exclude_from_relay(settings: dict[str, Any], block: Block) -> bool: + """Return true if block should be excluded from switch platform.""" + + if settings.get("mode") == "roller": + return True + + if TYPE_CHECKING: + assert block.channel is not None + + return is_block_channel_type_light(settings, int(block.channel)) + + def get_device_uptime(uptime: float, last_uptime: datetime | None) -> datetime: """Return device uptime string, tolerate up to 5 seconds deviation.""" delta_uptime = utcnow() - timedelta(seconds=uptime) @@ -310,7 +328,7 @@ def get_info_gen(info: dict[str, Any]) -> int: def get_model_name(info: dict[str, Any]) -> str: """Return the device model name.""" if get_info_gen(info) in RPC_GENERATIONS: - return cast(str, MODEL_NAMES.get(info["model"], info["model"])) + return cast(str, MODEL_NAMES.get(info[CONF_MODEL], info[CONF_MODEL])) return cast(str, MODEL_NAMES.get(info["type"], info["type"])) @@ -502,7 +520,7 @@ def async_create_issue_unsupported_firmware( translation_key="unsupported_firmware", translation_placeholders={ "device_name": entry.title, - "ip_address": entry.data["host"], + "ip_address": entry.data[CONF_HOST], }, ) @@ -627,3 +645,39 @@ async def get_rpc_script_event_types(device: RpcDevice, id: int) -> list[str]: code_response = await device.script_getcode(id) matches = SHELLY_EMIT_EVENT_PATTERN.finditer(code_response["data"]) return sorted([*{str(event_type.group(1)) for event_type in matches}]) + + +def is_rpc_exclude_from_relay( + settings: dict[str, Any], status: dict[str, Any], channel: str +) -> bool: + """Return true if rpc channel should be excluded from switch platform.""" + ch = int(channel.split(":")[1]) + if is_rpc_thermostat_internal_actuator(status): + return True + + return is_rpc_channel_type_light(settings, ch) + + +def get_shelly_air_lamp_life(lamp_seconds: int) -> float: + """Return Shelly Air lamp life in percentage.""" + lamp_hours = lamp_seconds / 3600 + if lamp_hours >= SHAIR_MAX_WORK_HOURS: + return 0.0 + return 100 * (1 - lamp_hours / SHAIR_MAX_WORK_HOURS) + + +async def get_rpc_scripts_event_types( + device: RpcDevice, ignore_scripts: list[str] +) -> dict[int, list[str]]: + """Return a dict of all scripts and their event types.""" + script_instances = get_rpc_key_instances(device.status, "script") + script_events = {} + for script in script_instances: + 
script_name = get_rpc_entity_name(device, script) + if script_name in ignore_scripts: + continue + + script_id = int(script.split(":")[-1]) + script_events[script_id] = await get_rpc_script_event_types(device, script_id) + + return script_events diff --git a/homeassistant/components/simplisafe/binary_sensor.py b/homeassistant/components/simplisafe/binary_sensor.py index e1f69ed8113..38a80ddd354 100644 --- a/homeassistant/components/simplisafe/binary_sensor.py +++ b/homeassistant/components/simplisafe/binary_sensor.py @@ -34,6 +34,7 @@ SUPPORTED_BATTERY_SENSOR_TYPES = [ DeviceTypes.PANIC_BUTTON, DeviceTypes.REMOTE, DeviceTypes.SIREN, + DeviceTypes.OUTDOOR_ALARM_SECURITY_BELL_BOX, DeviceTypes.SMOKE, DeviceTypes.SMOKE_AND_CARBON_MONOXIDE, DeviceTypes.TEMPERATURE, @@ -47,6 +48,7 @@ TRIGGERED_SENSOR_TYPES = { DeviceTypes.MOTION: BinarySensorDeviceClass.MOTION, DeviceTypes.MOTION_V2: BinarySensorDeviceClass.MOTION, DeviceTypes.SIREN: BinarySensorDeviceClass.SAFETY, + DeviceTypes.OUTDOOR_ALARM_SECURITY_BELL_BOX: BinarySensorDeviceClass.SAFETY, DeviceTypes.SMOKE: BinarySensorDeviceClass.SMOKE, # Although this sensor can technically apply to both smoke and carbon, we use the # SMOKE device class for simplicity: diff --git a/homeassistant/components/sky_remote/config_flow.py b/homeassistant/components/sky_remote/config_flow.py index 13cddf99332..51cf9c9bf64 100644 --- a/homeassistant/components/sky_remote/config_flow.py +++ b/homeassistant/components/sky_remote/config_flow.py @@ -12,6 +12,8 @@ from homeassistant.helpers import config_validation as cv from .const import DEFAULT_PORT, DOMAIN, LEGACY_PORT +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_HOST): cv.string, @@ -21,7 +23,7 @@ DATA_SCHEMA = vol.Schema( async def async_find_box_port(host: str) -> int: """Find port box uses for communication.""" - logging.debug("Attempting to find port to connect to %s on", host) + _LOGGER.debug("Attempting to find port to connect to %s on", host) remote = RemoteControl(host, DEFAULT_PORT) try: await remote.check_connectable() @@ -46,12 +48,12 @@ class SkyRemoteConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - logging.debug("user_input: %s", user_input) + _LOGGER.debug("user_input: %s", user_input) self._async_abort_entries_match(user_input) try: port = await async_find_box_port(user_input[CONF_HOST]) except SkyBoxConnectionError: - logging.exception("while finding port of skybox") + _LOGGER.exception("While finding port of skybox") errors["base"] = "cannot_connect" else: return self.async_create_entry( diff --git a/homeassistant/components/skybell/config_flow.py b/homeassistant/components/skybell/config_flow.py index a32441f4cf8..9893d0dd93a 100644 --- a/homeassistant/components/skybell/config_flow.py +++ b/homeassistant/components/skybell/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from aioskybell import Skybell, exceptions @@ -14,6 +15,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class SkybellFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Skybell.""" @@ -95,6 +98,7 @@ class SkybellFlowHandler(ConfigFlow, domain=DOMAIN): return None, "invalid_auth" except exceptions.SkybellException: return None, "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") 
return None, "unknown" return skybell.user_id, None diff --git a/homeassistant/components/slide_local/strings.json b/homeassistant/components/slide_local/strings.json index 67514ff0d50..10efa4bc4f2 100644 --- a/homeassistant/components/slide_local/strings.json +++ b/homeassistant/components/slide_local/strings.json @@ -25,7 +25,7 @@ }, "zeroconf_confirm": { "title": "Confirm setup for Slide", - "description": "Do you want to setup {host}?" + "description": "Do you want to set up {host}?" } }, "abort": { diff --git a/homeassistant/components/smappee/strings.json b/homeassistant/components/smappee/strings.json index 2966b5cd753..3037fbc98f6 100644 --- a/homeassistant/components/smappee/strings.json +++ b/homeassistant/components/smappee/strings.json @@ -15,7 +15,7 @@ } }, "zeroconf_confirm": { - "description": "Do you want to add the Smappee device with serialnumber `{serialnumber}` to Home Assistant?", + "description": "Do you want to add the Smappee device with serial number `{serialnumber}` to Home Assistant?", "title": "Discovered Smappee device" }, "pick_implementation": { diff --git a/homeassistant/components/smartthings/__init__.py b/homeassistant/components/smartthings/__init__.py index b615f76640c..c8ca1a819e0 100644 --- a/homeassistant/components/smartthings/__init__.py +++ b/homeassistant/components/smartthings/__init__.py @@ -3,25 +3,36 @@ from __future__ import annotations from collections.abc import Callable +import contextlib from dataclasses import dataclass +from http import HTTPStatus import logging -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING, Any, cast -from aiohttp import ClientError +from aiohttp import ClientResponseError from pysmartthings import ( Attribute, Capability, + ComponentStatus, Device, DeviceEvent, + Lifecycle, Scene, SmartThings, SmartThingsAuthenticationFailedError, + SmartThingsConnectionError, SmartThingsSinkError, Status, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( + ATTR_CONNECTIONS, + ATTR_HW_VERSION, + ATTR_MANUFACTURER, + ATTR_MODEL, + ATTR_SW_VERSION, + ATTR_VIA_DEVICE, CONF_ACCESS_TOKEN, CONF_TOKEN, EVENT_HOMEASSISTANT_STOP, @@ -29,14 +40,16 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.config_entry_oauth2_flow import ( OAuth2Session, async_get_config_entry_implementation, ) +from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries from .const import ( + BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES, CONF_INSTALLED_APP_ID, CONF_LOCATION_ID, CONF_SUBSCRIPTION_ID, @@ -44,6 +57,7 @@ from .const import ( EVENT_BUTTON, MAIN, OLD_DATA, + SENSOR_ATTRIBUTES_TO_CAPABILITIES, ) _LOGGER = logging.getLogger(__name__) @@ -55,6 +69,7 @@ class SmartThingsData: devices: dict[str, FullDevice] scenes: dict[str, Scene] + rooms: dict[str, str] client: SmartThings @@ -63,21 +78,28 @@ class FullDevice: """Define an object to hold device data.""" device: Device - status: dict[str, dict[Capability | str, dict[Attribute | str, Status]]] + status: dict[str, ComponentStatus] type SmartThingsConfigEntry = ConfigEntry[SmartThingsData] PLATFORMS = [ Platform.BINARY_SENSOR, + Platform.BUTTON, Platform.CLIMATE, 
Platform.COVER, + Platform.EVENT, Platform.FAN, Platform.LIGHT, Platform.LOCK, + Platform.MEDIA_PLAYER, + Platform.NUMBER, Platform.SCENE, + Platform.SELECT, Platform.SENSOR, Platform.SWITCH, + Platform.UPDATE, + Platform.VALVE, ] @@ -92,7 +114,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) try: await session.async_ensure_token_valid() - except ClientError as err: + except ClientResponseError as err: + if err.status == HTTPStatus.BAD_REQUEST: + raise ConfigEntryAuthFailed("Token not valid, trigger renewal") from err raise ConfigEntryNotReady from err client = SmartThings(session=async_get_clientsession(hass)) @@ -107,7 +131,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) client.refresh_token_function = _refresh_token def _handle_max_connections() -> None: - _LOGGER.debug("We hit the limit of max connections") + _LOGGER.debug( + "We hit the limit of max connections or we could not remove the old one, so retrying" + ) hass.config_entries.async_schedule_reload(entry.entry_id) client.max_connections_reached_callback = _handle_max_connections @@ -130,7 +156,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) if (old_identifier := entry.data.get(CONF_SUBSCRIPTION_ID)) is not None: _LOGGER.debug("Trying to delete old subscription %s", old_identifier) - await client.delete_subscription(old_identifier) + try: + await client.delete_subscription(old_identifier) + except SmartThingsConnectionError as err: + raise ConfigEntryNotReady("Could not delete old subscription") from err _LOGGER.debug("Trying to create a new subscription") try: @@ -156,6 +185,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) device_status: dict[str, FullDevice] = {} try: + rooms = { + room.room_id: room.name + for room in await client.get_rooms(location_id=entry.data[CONF_LOCATION_ID]) + } devices = await client.get_devices() for device in devices: status = process_status(await client.get_device_status(device.device_id)) @@ -163,11 +196,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) except SmartThingsAuthenticationFailedError as err: raise ConfigEntryAuthFailed from err + device_registry = dr.async_get(hass) + create_devices(device_registry, device_status, entry, rooms) + scenes = { scene.scene_id: scene for scene in await client.get_scenes(location_id=entry.data[CONF_LOCATION_ID]) } + def handle_deleted_device(device_id: str) -> None: + """Handle a deleted device.""" + dev_entry = device_registry.async_get_device( + identifiers={(DOMAIN, device_id)}, + ) + if dev_entry is not None: + device_registry.async_update_device( + dev_entry.id, remove_config_entry_id=entry.entry_id + ) + + entry.async_on_unload( + client.add_device_lifecycle_event_listener( + Lifecycle.DELETE, handle_deleted_device + ) + ) + entry.runtime_data = SmartThingsData( devices={ device_id: device @@ -176,8 +228,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) }, client=client, scenes=scenes, + rooms=rooms, ) + # Events are deprecated and will be removed in 2025.10 def handle_button_press(event: DeviceEvent) -> None: """Handle a button press.""" if ( @@ -210,7 +264,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - device_registry = dr.async_get(hass) device_entries = dr.async_entries_for_config_entry(device_registry, entry.entry_id) for 
device_entry in device_entries: device_id = next( @@ -218,7 +271,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmartThingsConfigEntry) for identifier in device_entry.identifiers if identifier[0] == DOMAIN ) - if device_id in entry.runtime_data.devices: + if device_id in device_status: continue device_registry.async_update_device( device_entry.id, remove_config_entry_id=entry.entry_id @@ -233,7 +286,8 @@ async def async_unload_entry( """Unload a config entry.""" client = entry.runtime_data.client if (subscription_id := entry.data.get(CONF_SUBSCRIPTION_ID)) is not None: - await client.delete_subscription(subscription_id) + with contextlib.suppress(SmartThingsConnectionError): + await client.delete_subscription(subscription_id) return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @@ -246,9 +300,166 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, version=3, data={OLD_DATA: dict(entry.data)} ) + if entry.minor_version < 2: + + def migrate_entities(entity_entry: RegistryEntry) -> dict[str, Any] | None: + if entity_entry.domain == "binary_sensor": + device_id, attribute = entity_entry.unique_id.split(".") + if ( + capability := BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES.get( + attribute + ) + ) is None: + return None + new_unique_id = ( + f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}" + ) + return { + "new_unique_id": new_unique_id, + } + if entity_entry.domain in {"cover", "climate", "fan", "light", "lock"}: + return {"new_unique_id": f"{entity_entry.unique_id}_{MAIN}"} + if entity_entry.domain == "sensor": + delimiter = "." if " " not in entity_entry.unique_id else " " + if delimiter not in entity_entry.unique_id: + return None + device_id, attribute = entity_entry.unique_id.split( + delimiter, maxsplit=1 + ) + if ( + capability := SENSOR_ATTRIBUTES_TO_CAPABILITIES.get(attribute) + ) is None: + if attribute in { + "energy_meter", + "power_meter", + "deltaEnergy_meter", + "powerEnergy_meter", + "energySaved_meter", + }: + return { + "new_unique_id": f"{device_id}_{MAIN}_{Capability.POWER_CONSUMPTION_REPORT}_{Attribute.POWER_CONSUMPTION}_{attribute}", + } + if attribute in { + "X Coordinate", + "Y Coordinate", + "Z Coordinate", + }: + new_attribute = { + "X Coordinate": "x_coordinate", + "Y Coordinate": "y_coordinate", + "Z Coordinate": "z_coordinate", + }[attribute] + return { + "new_unique_id": f"{device_id}_{MAIN}_{Capability.THREE_AXIS}_{Attribute.THREE_AXIS}_{new_attribute}", + } + if attribute in { + Attribute.MACHINE_STATE, + Attribute.COMPLETION_TIME, + }: + capability = determine_machine_type( + hass, entry.entry_id, device_id + ) + if capability is None: + return None + return { + "new_unique_id": f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}", + } + return None + return { + "new_unique_id": f"{device_id}_{MAIN}_{capability}_{attribute}_{attribute}", + } + + if entity_entry.domain == "switch": + return { + "new_unique_id": f"{entity_entry.unique_id}_{MAIN}_{Capability.SWITCH}_{Attribute.SWITCH}_{Attribute.SWITCH}", + } + + return None + + await async_migrate_entries(hass, entry.entry_id, migrate_entities) + hass.config_entries.async_update_entry( + entry, + minor_version=2, + ) + return True +def determine_machine_type( + hass: HomeAssistant, + entry_id: str, + device_id: str, +) -> Capability | None: + """Determine the machine type for a device.""" + entity_registry = er.async_get(hass) + entries = er.async_entries_for_config_entry(entity_registry, entry_id) + device_entries = [entry for 
entry in entries if device_id in entry.unique_id] + for entry in device_entries: + if Attribute.DISHWASHER_JOB_STATE in entry.unique_id: + return Capability.DISHWASHER_OPERATING_STATE + if Attribute.WASHER_JOB_STATE in entry.unique_id: + return Capability.WASHER_OPERATING_STATE + if Attribute.DRYER_JOB_STATE in entry.unique_id: + return Capability.DRYER_OPERATING_STATE + if Attribute.OVEN_JOB_STATE in entry.unique_id: + return Capability.OVEN_OPERATING_STATE + return None + + +def create_devices( + device_registry: dr.DeviceRegistry, + devices: dict[str, FullDevice], + entry: SmartThingsConfigEntry, + rooms: dict[str, str], +) -> None: + """Create devices in the device registry.""" + for device in sorted( + devices.values(), key=lambda d: d.device.parent_device_id or "" + ): + kwargs: dict[str, Any] = {} + if device.device.hub is not None: + kwargs = { + ATTR_SW_VERSION: device.device.hub.firmware_version, + ATTR_MODEL: device.device.hub.hardware_type, + } + if device.device.hub.mac_address: + kwargs[ATTR_CONNECTIONS] = { + (dr.CONNECTION_NETWORK_MAC, device.device.hub.mac_address) + } + if device.device.parent_device_id and device.device.parent_device_id in devices: + kwargs[ATTR_VIA_DEVICE] = (DOMAIN, device.device.parent_device_id) + if (ocf := device.device.ocf) is not None: + kwargs.update( + { + ATTR_MANUFACTURER: ocf.manufacturer_name, + ATTR_MODEL: ( + (ocf.model_number.split("|")[0]) if ocf.model_number else None + ), + ATTR_HW_VERSION: ocf.hardware_version, + ATTR_SW_VERSION: ocf.firmware_version, + } + ) + if (viper := device.device.viper) is not None: + kwargs.update( + { + ATTR_MANUFACTURER: viper.manufacturer_name, + ATTR_MODEL: viper.model_name, + ATTR_HW_VERSION: viper.hardware_version, + ATTR_SW_VERSION: viper.software_version, + } + ) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, device.device.device_id)}, + configuration_url="https://account.smartthings.com", + name=device.device.label, + suggested_area=( + rooms.get(device.device.room_id) if device.device.room_id else None + ), + **kwargs, + ) + + KEEP_CAPABILITY_QUIRK: dict[ Capability | str, Callable[[dict[Attribute | str, Status]], bool] ] = { @@ -262,14 +473,32 @@ KEEP_CAPABILITY_QUIRK: dict[ } -def process_status( - status: dict[str, dict[Capability | str, dict[Attribute | str, Status]]], -) -> dict[str, dict[Capability | str, dict[Attribute | str, Status]]]: +def process_status(status: dict[str, ComponentStatus]) -> dict[str, ComponentStatus]: """Remove disabled capabilities from status.""" if (main_component := status.get(MAIN)) is None: return status if ( - disabled_capabilities_capability := main_component.get( + disabled_components_capability := main_component.get( + Capability.CUSTOM_DISABLED_COMPONENTS + ) + ) is not None: + disabled_components = cast( + list[str], + disabled_components_capability[Attribute.DISABLED_COMPONENTS].value, + ) + if disabled_components is not None: + for component in disabled_components: + if component in status: + del status[component] + for component_status in status.values(): + process_component_status(component_status) + return status + + +def process_component_status(status: ComponentStatus) -> None: + """Remove disabled capabilities from component status.""" + if ( + disabled_capabilities_capability := status.get( Capability.CUSTOM_DISABLED_CAPABILITIES ) ) is not None: @@ -279,9 +508,8 @@ def process_status( ) if disabled_capabilities is not None: for capability in disabled_capabilities: - if capability in main_component 
and ( + if capability in status and ( capability not in KEEP_CAPABILITY_QUIRK - or not KEEP_CAPABILITY_QUIRK[capability](main_component[capability]) + or not KEEP_CAPABILITY_QUIRK[capability](status[capability]) ): - del main_component[capability] - return status + del status[capability] diff --git a/homeassistant/components/smartthings/binary_sensor.py b/homeassistant/components/smartthings/binary_sensor.py index 99cbd3f9353..0fe0e7fe919 100644 --- a/homeassistant/components/smartthings/binary_sensor.py +++ b/homeassistant/components/smartthings/binary_sensor.py @@ -2,22 +2,26 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass -from pysmartthings import Attribute, Capability, SmartThings +from pysmartthings import Attribute, Capability, Category, SmartThings, Status from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import FullDevice, SmartThingsConfigEntry -from .const import MAIN +from .const import INVALID_SWITCH_CATEGORIES, MAIN from .entity import SmartThingsEntity +from .util import deprecate_entity @dataclass(frozen=True, kw_only=True) @@ -25,6 +29,13 @@ class SmartThingsBinarySensorEntityDescription(BinarySensorEntityDescription): """Describe a SmartThings binary sensor entity.""" is_on_key: str + category_device_class: dict[Category | str, BinarySensorDeviceClass] | None = None + category: set[Category] | None = None + exists_fn: Callable[[str], bool] | None = None + component_translation_key: dict[str, str] | None = None + deprecated_fn: Callable[ + [dict[str, dict[Capability | str, dict[Attribute | str, Status]]]], str | None + ] = lambda _: None CAPABILITY_TO_SENSORS: dict[ @@ -43,6 +54,29 @@ CAPABILITY_TO_SENSORS: dict[ key=Attribute.CONTACT, device_class=BinarySensorDeviceClass.DOOR, is_on_key="open", + category_device_class={ + Category.GARAGE_DOOR: BinarySensorDeviceClass.GARAGE_DOOR, + Category.DOOR: BinarySensorDeviceClass.DOOR, + Category.WINDOW: BinarySensorDeviceClass.WINDOW, + }, + exists_fn=lambda key: key in {"freezer", "cooler"}, + component_translation_key={ + "freezer": "freezer_door", + "cooler": "cooler_door", + }, + deprecated_fn=( + lambda status: "fridge_door" + if "freezer" in status and "cooler" in status + else None + ), + ) + }, + Capability.CUSTOM_DRYER_WRINKLE_PREVENT: { + Attribute.OPERATING_STATE: SmartThingsBinarySensorEntityDescription( + key=Attribute.OPERATING_STATE, + translation_key="dryer_wrinkle_prevent_active", + is_on_key="running", + entity_category=EntityCategory.DIAGNOSTIC, ) }, Capability.FILTER_STATUS: { @@ -53,6 +87,13 @@ CAPABILITY_TO_SENSORS: dict[ is_on_key="replace", ) }, + Capability.SAMSUNG_CE_KIDS_LOCK: { + Attribute.LOCK_STATE: SmartThingsBinarySensorEntityDescription( + key=Attribute.LOCK_STATE, + translation_key="child_lock", + is_on_key="locked", + ) + }, Capability.MOTION_SENSOR: { Attribute.MOTION: SmartThingsBinarySensorEntityDescription( key=Attribute.MOTION, @@ -67,6 +108,13 @@ CAPABILITY_TO_SENSORS: dict[ is_on_key="present", ) }, + Capability.REMOTE_CONTROL_STATUS: { + Attribute.REMOTE_CONTROL_ENABLED: SmartThingsBinarySensorEntityDescription( + key=Attribute.REMOTE_CONTROL_ENABLED, + 
translation_key="remote_control", + is_on_key="true", + ) + }, Capability.SOUND_SENSOR: { Attribute.SOUND: SmartThingsBinarySensorEntityDescription( key=Attribute.SOUND, @@ -74,6 +122,14 @@ CAPABILITY_TO_SENSORS: dict[ is_on_key="detected", ) }, + Capability.SWITCH: { + Attribute.SWITCH: SmartThingsBinarySensorEntityDescription( + key=Attribute.SWITCH, + device_class=BinarySensorDeviceClass.POWER, + is_on_key="on", + category=INVALID_SWITCH_CATEGORIES, + ) + }, Capability.TAMPER_ALERT: { Attribute.TAMPER: SmartThingsBinarySensorEntityDescription( key=Attribute.TAMPER, @@ -88,6 +144,7 @@ CAPABILITY_TO_SENSORS: dict[ translation_key="valve", device_class=BinarySensorDeviceClass.OPENING, is_on_key="open", + deprecated_fn=lambda _: "valve", ) }, Capability.WATER_SENSOR: { @@ -97,9 +154,25 @@ CAPABILITY_TO_SENSORS: dict[ is_on_key="wet", ) }, + Capability.SAMSUNG_CE_DOOR_STATE: { + Attribute.DOOR_STATE: SmartThingsBinarySensorEntityDescription( + key=Attribute.DOOR_STATE, + translation_key="door", + device_class=BinarySensorDeviceClass.OPENING, + is_on_key="open", + ) + }, } +def get_main_component_category( + device: FullDevice, +) -> Category | str: + """Get the main component of a device.""" + main = device.device.components[MAIN] + return main.user_category or main.manufacturer_category + + async def async_setup_entry( hass: HomeAssistant, entry: SmartThingsConfigEntry, @@ -107,15 +180,64 @@ async def async_setup_entry( ) -> None: """Add binary sensors for a config entry.""" entry_data = entry.runtime_data - async_add_entities( - SmartThingsBinarySensor( - entry_data.client, device, description, capability, attribute - ) - for device in entry_data.devices.values() - for capability, attribute_map in CAPABILITY_TO_SENSORS.items() - if capability in device.status[MAIN] - for attribute, description in attribute_map.items() - ) + entities = [] + + entity_registry = er.async_get(hass) + + for device in entry_data.devices.values(): # pylint: disable=too-many-nested-blocks + for capability, attribute_map in CAPABILITY_TO_SENSORS.items(): + for attribute, description in attribute_map.items(): + for component in device.status: + if ( + capability in device.status[component] + and ( + component == MAIN + or ( + description.exists_fn is not None + and description.exists_fn(component) + ) + ) + and ( + not description.category + or get_main_component_category(device) + in description.category + ) + ): + if ( + component == MAIN + and (issue := description.deprecated_fn(device.status)) + is not None + ): + if deprecate_entity( + hass, + entity_registry, + BINARY_SENSOR_DOMAIN, + f"{device.device.device_id}_{component}_{capability}_{attribute}_{attribute}", + f"deprecated_binary_{issue}", + ): + entities.append( + SmartThingsBinarySensor( + entry_data.client, + device, + description, + capability, + attribute, + component, + ) + ) + continue + entities.append( + SmartThingsBinarySensor( + entry_data.client, + device, + description, + capability, + attribute, + component, + ) + ) + + async_add_entities(entities) class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): @@ -130,13 +252,31 @@ class SmartThingsBinarySensor(SmartThingsEntity, BinarySensorEntity): entity_description: SmartThingsBinarySensorEntityDescription, capability: Capability, attribute: Attribute, + component: str, ) -> None: """Init the class.""" - super().__init__(client, device, {capability}) + super().__init__(client, device, {capability}, component=component) self._attribute = attribute self.capability = capability 
self.entity_description = entity_description - self._attr_unique_id = f"{device.device.device_id}.{attribute}" + self._attr_unique_id = f"{device.device.device_id}_{component}_{capability}_{attribute}_{attribute}" + if ( + entity_description.category_device_class + and (category := get_main_component_category(device)) + in entity_description.category_device_class + ): + self._attr_device_class = entity_description.category_device_class[category] + self._attr_name = None + if ( + entity_description.component_translation_key is not None + and ( + translation_key := entity_description.component_translation_key.get( + component + ) + ) + is not None + ): + self._attr_translation_key = translation_key @property def is_on(self) -> bool: diff --git a/homeassistant/components/smartthings/button.py b/homeassistant/components/smartthings/button.py new file mode 100644 index 00000000000..00fbaa0e2c4 --- /dev/null +++ b/homeassistant/components/smartthings/button.py @@ -0,0 +1,78 @@ +"""Support for button entities through the SmartThings cloud API.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from pysmartthings import Capability, Command, SmartThings + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + + +@dataclass(frozen=True, kw_only=True) +class SmartThingsButtonDescription(ButtonEntityDescription): + """Class describing SmartThings button entities.""" + + key: Capability + command: Command + + +CAPABILITIES_TO_BUTTONS: dict[Capability | str, SmartThingsButtonDescription] = { + Capability.OVEN_OPERATING_STATE: SmartThingsButtonDescription( + key=Capability.OVEN_OPERATING_STATE, + translation_key="stop", + command=Command.STOP, + ), + Capability.CUSTOM_WATER_FILTER: SmartThingsButtonDescription( + key=Capability.CUSTOM_WATER_FILTER, + translation_key="reset_water_filter", + command=Command.RESET_WATER_FILTER, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add button entities for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsButtonEntity( + entry_data.client, device, CAPABILITIES_TO_BUTTONS[capability] + ) + for device in entry_data.devices.values() + for capability in device.status[MAIN] + if capability in CAPABILITIES_TO_BUTTONS + ) + + +class SmartThingsButtonEntity(SmartThingsEntity, ButtonEntity): + """Define a SmartThings button.""" + + entity_description: SmartThingsButtonDescription + + def __init__( + self, + client: SmartThings, + device: FullDevice, + entity_description: SmartThingsButtonDescription, + ) -> None: + """Initialize the instance.""" + super().__init__(client, device, set()) + self.entity_description = entity_description + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{entity_description.key}_{entity_description.command}" + + async def async_press(self) -> None: + """Press the button.""" + await self.execute_device_command( + self.entity_description.key, + self.entity_description.command, + ) diff --git a/homeassistant/components/smartthings/climate.py b/homeassistant/components/smartthings/climate.py index e20f191352f..49499732c24 100644 --- a/homeassistant/components/smartthings/climate.py +++ 
b/homeassistant/components/smartthings/climate.py @@ -281,7 +281,7 @@ class SmartThingsThermostat(SmartThingsEntity, ClimateEntity): return [ state for mode in supported_thermostat_modes - if (state := AC_MODE_TO_STATE.get(mode)) is not None + if (state := MODE_TO_STATE.get(mode)) is not None ] @property @@ -466,12 +466,14 @@ class SmartThingsAirConditioner(SmartThingsEntity, ClimateEntity): Capability.DEMAND_RESPONSE_LOAD_CONTROL, Attribute.DEMAND_RESPONSE_LOAD_CONTROL_STATUS, ) - return { - "drlc_status_duration": drlc_status["duration"], - "drlc_status_level": drlc_status["drlcLevel"], - "drlc_status_start": drlc_status["start"], - "drlc_status_override": drlc_status["override"], - } + res = {} + for key in ("duration", "start", "override", "drlcLevel"): + if key in drlc_status: + dict_key = {"drlcLevel": "drlc_status_level"}.get( + key, f"drlc_status_{key}" + ) + res[dict_key] = drlc_status[key] + return res @property def fan_mode(self) -> str: diff --git a/homeassistant/components/smartthings/config_flow.py b/homeassistant/components/smartthings/config_flow.py index d2654348527..03c8e4bfa66 100644 --- a/homeassistant/components/smartthings/config_flow.py +++ b/homeassistant/components/smartthings/config_flow.py @@ -20,6 +20,7 @@ class SmartThingsConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN): """Handle configuration of SmartThings integrations.""" VERSION = 3 + MINOR_VERSION = 2 DOMAIN = DOMAIN @property diff --git a/homeassistant/components/smartthings/const.py b/homeassistant/components/smartthings/const.py index 2ba59ade4e8..8f27b785688 100644 --- a/homeassistant/components/smartthings/const.py +++ b/homeassistant/components/smartthings/const.py @@ -1,5 +1,7 @@ """Constants used by the SmartThings component and platforms.""" +from pysmartthings import Attribute, Capability, Category + DOMAIN = "smartthings" SCOPES = [ @@ -35,3 +37,84 @@ OLD_DATA = "old_data" CONF_SUBSCRIPTION_ID = "subscription_id" EVENT_BUTTON = "smartthings.button" + +BINARY_SENSOR_ATTRIBUTES_TO_CAPABILITIES: dict[str, str] = { + Attribute.ACCELERATION: Capability.ACCELERATION_SENSOR, + Attribute.CONTACT: Capability.CONTACT_SENSOR, + Attribute.FILTER_STATUS: Capability.FILTER_STATUS, + Attribute.MOTION: Capability.MOTION_SENSOR, + Attribute.PRESENCE: Capability.PRESENCE_SENSOR, + Attribute.SOUND: Capability.SOUND_SENSOR, + Attribute.TAMPER: Capability.TAMPER_ALERT, + Attribute.VALVE: Capability.VALVE, + Attribute.WATER: Capability.WATER_SENSOR, +} + +SENSOR_ATTRIBUTES_TO_CAPABILITIES: dict[str, str] = { + Attribute.LIGHTING_MODE: Capability.ACTIVITY_LIGHTING_MODE, + Attribute.AIR_CONDITIONER_MODE: Capability.AIR_CONDITIONER_MODE, + Attribute.AIR_QUALITY: Capability.AIR_QUALITY_SENSOR, + Attribute.ALARM: Capability.ALARM, + Attribute.BATTERY: Capability.BATTERY, + Attribute.BMI_MEASUREMENT: Capability.BODY_MASS_INDEX_MEASUREMENT, + Attribute.BODY_WEIGHT_MEASUREMENT: Capability.BODY_WEIGHT_MEASUREMENT, + Attribute.CARBON_DIOXIDE: Capability.CARBON_DIOXIDE_MEASUREMENT, + Attribute.CARBON_MONOXIDE: Capability.CARBON_MONOXIDE_MEASUREMENT, + Attribute.CARBON_MONOXIDE_LEVEL: Capability.CARBON_MONOXIDE_MEASUREMENT, + Attribute.DISHWASHER_JOB_STATE: Capability.DISHWASHER_OPERATING_STATE, + Attribute.DRYER_MODE: Capability.DRYER_MODE, + Attribute.DRYER_JOB_STATE: Capability.DRYER_OPERATING_STATE, + Attribute.DUST_LEVEL: Capability.DUST_SENSOR, + Attribute.FINE_DUST_LEVEL: Capability.DUST_SENSOR, + Attribute.ENERGY: Capability.ENERGY_METER, + Attribute.EQUIVALENT_CARBON_DIOXIDE_MEASUREMENT: 
Capability.EQUIVALENT_CARBON_DIOXIDE_MEASUREMENT, + Attribute.FORMALDEHYDE_LEVEL: Capability.FORMALDEHYDE_MEASUREMENT, + Attribute.GAS_METER: Capability.GAS_METER, + Attribute.GAS_METER_CALORIFIC: Capability.GAS_METER, + Attribute.GAS_METER_TIME: Capability.GAS_METER, + Attribute.GAS_METER_VOLUME: Capability.GAS_METER, + Attribute.ILLUMINANCE: Capability.ILLUMINANCE_MEASUREMENT, + Attribute.INFRARED_LEVEL: Capability.INFRARED_LEVEL, + Attribute.INPUT_SOURCE: Capability.MEDIA_INPUT_SOURCE, + Attribute.PLAYBACK_REPEAT_MODE: Capability.MEDIA_PLAYBACK_REPEAT, + Attribute.PLAYBACK_SHUFFLE: Capability.MEDIA_PLAYBACK_SHUFFLE, + Attribute.PLAYBACK_STATUS: Capability.MEDIA_PLAYBACK, + Attribute.ODOR_LEVEL: Capability.ODOR_SENSOR, + Attribute.OVEN_MODE: Capability.OVEN_MODE, + Attribute.OVEN_JOB_STATE: Capability.OVEN_OPERATING_STATE, + Attribute.OVEN_SETPOINT: Capability.OVEN_SETPOINT, + Attribute.POWER: Capability.POWER_METER, + Attribute.POWER_SOURCE: Capability.POWER_SOURCE, + Attribute.REFRIGERATION_SETPOINT: Capability.REFRIGERATION_SETPOINT, + Attribute.HUMIDITY: Capability.RELATIVE_HUMIDITY_MEASUREMENT, + Attribute.ROBOT_CLEANER_CLEANING_MODE: Capability.ROBOT_CLEANER_CLEANING_MODE, + Attribute.ROBOT_CLEANER_MOVEMENT: Capability.ROBOT_CLEANER_MOVEMENT, + Attribute.ROBOT_CLEANER_TURBO_MODE: Capability.ROBOT_CLEANER_TURBO_MODE, + Attribute.LQI: Capability.SIGNAL_STRENGTH, + Attribute.RSSI: Capability.SIGNAL_STRENGTH, + Attribute.SMOKE: Capability.SMOKE_DETECTOR, + Attribute.TEMPERATURE: Capability.TEMPERATURE_MEASUREMENT, + Attribute.COOLING_SETPOINT: Capability.THERMOSTAT_COOLING_SETPOINT, + Attribute.THERMOSTAT_FAN_MODE: Capability.THERMOSTAT_FAN_MODE, + Attribute.HEATING_SETPOINT: Capability.THERMOSTAT_HEATING_SETPOINT, + Attribute.THERMOSTAT_MODE: Capability.THERMOSTAT_MODE, + Attribute.THERMOSTAT_OPERATING_STATE: Capability.THERMOSTAT_OPERATING_STATE, + Attribute.THERMOSTAT_SETPOINT: Capability.THERMOSTAT_SETPOINT, + Attribute.TV_CHANNEL: Capability.TV_CHANNEL, + Attribute.TV_CHANNEL_NAME: Capability.TV_CHANNEL, + Attribute.TVOC_LEVEL: Capability.TVOC_MEASUREMENT, + Attribute.ULTRAVIOLET_INDEX: Capability.ULTRAVIOLET_INDEX, + Attribute.VERY_FINE_DUST_LEVEL: Capability.VERY_FINE_DUST_SENSOR, + Attribute.VOLTAGE: Capability.VOLTAGE_MEASUREMENT, + Attribute.WASHER_MODE: Capability.WASHER_MODE, + Attribute.WASHER_JOB_STATE: Capability.WASHER_OPERATING_STATE, +} + +INVALID_SWITCH_CATEGORIES = { + Category.CLOTHING_CARE_MACHINE, + Category.COOKTOP, + Category.DRYER, + Category.WASHER, + Category.MICROWAVE, + Category.DISHWASHER, +} diff --git a/homeassistant/components/smartthings/cover.py b/homeassistant/components/smartthings/cover.py index 29250031be4..0b68409443d 100644 --- a/homeassistant/components/smartthings/cover.py +++ b/homeassistant/components/smartthings/cover.py @@ -55,7 +55,10 @@ class SmartThingsCover(SmartThingsEntity, CoverEntity): _state: CoverState | None = None def __init__( - self, client: SmartThings, device: FullDevice, capability: Capability + self, + client: SmartThings, + device: FullDevice, + capability: Capability, ) -> None: """Initialize the cover class.""" super().__init__( @@ -123,6 +126,7 @@ class SmartThingsCover(SmartThingsEntity, CoverEntity): Capability.WINDOW_SHADE_LEVEL, Attribute.SHADE_LEVEL ) + # Deprecated, remove in 2025.10 self._attr_extra_state_attributes = {} if self.supports_capability(Capability.BATTERY): self._attr_extra_state_attributes[ATTR_BATTERY_LEVEL] = ( diff --git a/homeassistant/components/smartthings/entity.py 
b/homeassistant/components/smartthings/entity.py index 5a2ce560f75..5544297a4c6 100644 --- a/homeassistant/components/smartthings/entity.py +++ b/homeassistant/components/smartthings/entity.py @@ -8,9 +8,9 @@ from pysmartthings import ( Attribute, Capability, Command, + ComponentStatus, DeviceEvent, SmartThings, - Status, ) from homeassistant.helpers.device_registry import DeviceInfo @@ -27,43 +27,27 @@ class SmartThingsEntity(Entity): _attr_has_entity_name = True def __init__( - self, client: SmartThings, device: FullDevice, capabilities: set[Capability] + self, + client: SmartThings, + device: FullDevice, + capabilities: set[Capability], + *, + component: str = MAIN, ) -> None: """Initialize the instance.""" self.client = client self.capabilities = capabilities - self._internal_state: dict[Capability | str, dict[Attribute | str, Status]] = { - capability: device.status[MAIN][capability] + self.component = component + self._internal_state: ComponentStatus = { + capability: device.status[component][capability] for capability in capabilities - if capability in device.status[MAIN] + if capability in device.status[component] } self.device = device - self._attr_unique_id = device.device.device_id + self._attr_unique_id = f"{device.device.device_id}_{component}" self._attr_device_info = DeviceInfo( - configuration_url="https://account.smartthings.com", identifiers={(DOMAIN, device.device.device_id)}, - name=device.device.label, ) - if (ocf := device.device.ocf) is not None: - self._attr_device_info.update( - { - "manufacturer": ocf.manufacturer_name, - "model": ( - (ocf.model_number.split("|")[0]) if ocf.model_number else None - ), - "hw_version": ocf.hardware_version, - "sw_version": ocf.firmware_version, - } - ) - if (viper := device.device.viper) is not None: - self._attr_device_info.update( - { - "manufacturer": viper.manufacturer_name, - "model": viper.model_name, - "hw_version": viper.hardware_version, - "sw_version": viper.software_version, - } - ) async def async_added_to_hass(self) -> None: """Subscribe to updates.""" @@ -72,7 +56,7 @@ class SmartThingsEntity(Entity): self.async_on_remove( self.client.add_device_capability_event_listener( self.device.device.device_id, - MAIN, + self.component, capability, self._update_handler, ) @@ -86,7 +70,7 @@ class SmartThingsEntity(Entity): def supports_capability(self, capability: Capability) -> bool: """Test if device supports a capability.""" - return capability in self.device.status[MAIN] + return capability in self.device.status[self.component] def get_attribute_value(self, capability: Capability, attribute: Attribute) -> Any: """Get the value of a device attribute.""" @@ -111,5 +95,5 @@ class SmartThingsEntity(Entity): if argument is not None: kwargs["argument"] = argument await self.client.execute_device_command( - self.device.device.device_id, capability, command, MAIN, **kwargs + self.device.device.device_id, capability, command, self.component, **kwargs ) diff --git a/homeassistant/components/smartthings/event.py b/homeassistant/components/smartthings/event.py new file mode 100644 index 00000000000..0439e6391f4 --- /dev/null +++ b/homeassistant/components/smartthings/event.py @@ -0,0 +1,63 @@ +"""Support for events through the SmartThings cloud API.""" + +from __future__ import annotations + +from typing import cast + +from pysmartthings import Attribute, Capability, Component, DeviceEvent, SmartThings + +from homeassistant.components.event import EventDeviceClass, EventEntity +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import FullDevice, SmartThingsConfigEntry +from .entity import SmartThingsEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add events for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsButtonEvent( + entry_data.client, device, device.device.components[component] + ) + for device in entry_data.devices.values() + for component, capabilities in device.status.items() + if Capability.BUTTON in capabilities + ) + + +class SmartThingsButtonEvent(SmartThingsEntity, EventEntity): + """Define a SmartThings event.""" + + _attr_device_class = EventDeviceClass.BUTTON + _attr_translation_key = "button" + + def __init__( + self, + client: SmartThings, + device: FullDevice, + component: Component, + ) -> None: + """Init the class.""" + super().__init__(client, device, {Capability.BUTTON}, component=component.id) + self._attr_name = component.label + self._attr_unique_id = ( + f"{device.device.device_id}_{component.id}_{Capability.BUTTON}" + ) + + @property + def event_types(self) -> list[str]: + """Return the event types.""" + return self.get_attribute_value( + Capability.BUTTON, Attribute.SUPPORTED_BUTTON_VALUES + ) + + def _update_handler(self, event: DeviceEvent) -> None: + if event.attribute is Attribute.BUTTON: + self._trigger_event(cast(str, event.value)) + super()._update_handler(event) diff --git a/homeassistant/components/smartthings/icons.json b/homeassistant/components/smartthings/icons.json new file mode 100644 index 00000000000..214a9953a5a --- /dev/null +++ b/homeassistant/components/smartthings/icons.json @@ -0,0 +1,63 @@ +{ + "entity": { + "binary_sensor": { + "dryer_wrinkle_prevent_active": { + "default": "mdi:tumble-dryer", + "state": { + "on": "mdi:tumble-dryer-alert" + } + }, + "remote_control": { + "default": "mdi:remote-off", + "state": { + "on": "mdi:remote" + } + }, + "child_lock": { + "default": "mdi:lock-open", + "state": { + "on": "mdi:lock" + } + } + }, + "button": { + "reset_water_filter": { + "default": "mdi:reload" + }, + "stop": { + "default": "mdi:stop" + } + }, + "number": { + "washer_rinse_cycles": { + "default": "mdi:waves-arrow-up" + } + }, + "select": { + "operating_state": { + "state": { + "run": "mdi:play", + "pause": "mdi:pause", + "stop": "mdi:stop" + } + } + }, + "switch": { + "bubble_soak": { + "default": "mdi:water-off", + "state": { + "on": "mdi:water" + } + }, + "wrinkle_prevent": { + "default": "mdi:tumble-dryer", + "state": { + "off": "mdi:tumble-dryer-off" + } + }, + "ice_maker": { + "default": "mdi:delete-variant" + } + } + } +} diff --git a/homeassistant/components/smartthings/manifest.json b/homeassistant/components/smartthings/manifest.json index a456a6bef2f..2af3e5c193b 100644 --- a/homeassistant/components/smartthings/manifest.json +++ b/homeassistant/components/smartthings/manifest.json @@ -29,5 +29,6 @@ "documentation": "https://www.home-assistant.io/integrations/smartthings", "iot_class": "cloud_push", "loggers": ["pysmartthings"], - "requirements": ["pysmartthings==2.7.4"] + "quality_scale": "bronze", + "requirements": ["pysmartthings==3.0.1"] } diff --git a/homeassistant/components/smartthings/media_player.py b/homeassistant/components/smartthings/media_player.py new file mode 100644 index 00000000000..9a676d2efb6 --- /dev/null +++ b/homeassistant/components/smartthings/media_player.py @@ -0,0 +1,355 @@ 
+"""Support for media players through the SmartThings cloud API.""" + +from __future__ import annotations + +from typing import Any + +from pysmartthings import Attribute, Capability, Category, Command, SmartThings + +from homeassistant.components.media_player import ( + MediaPlayerDeviceClass, + MediaPlayerEntity, + MediaPlayerEntityFeature, + MediaPlayerState, + RepeatMode, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + +MEDIA_PLAYER_CAPABILITIES = ( + Capability.AUDIO_MUTE, + Capability.AUDIO_VOLUME, + Capability.MEDIA_PLAYBACK, +) + +CONTROLLABLE_SOURCES = ["bluetooth", "wifi"] + +DEVICE_CLASS_MAP: dict[Category | str, MediaPlayerDeviceClass] = { + Category.NETWORK_AUDIO: MediaPlayerDeviceClass.SPEAKER, + Category.SPEAKER: MediaPlayerDeviceClass.SPEAKER, + Category.TELEVISION: MediaPlayerDeviceClass.TV, + Category.RECEIVER: MediaPlayerDeviceClass.RECEIVER, +} + +VALUE_TO_STATE = { + "buffering": MediaPlayerState.BUFFERING, + "paused": MediaPlayerState.PAUSED, + "playing": MediaPlayerState.PLAYING, + "stopped": MediaPlayerState.IDLE, + "fast forwarding": MediaPlayerState.BUFFERING, + "rewinding": MediaPlayerState.BUFFERING, +} + +REPEAT_MODE_TO_HA = { + "all": RepeatMode.ALL, + "one": RepeatMode.ONE, + "off": RepeatMode.OFF, +} + +HA_REPEAT_MODE_TO_SMARTTHINGS = {v: k for k, v in REPEAT_MODE_TO_HA.items()} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add media players for a config entry.""" + entry_data = entry.runtime_data + + async_add_entities( + SmartThingsMediaPlayer(entry_data.client, device) + for device in entry_data.devices.values() + if all( + capability in device.status[MAIN] + for capability in MEDIA_PLAYER_CAPABILITIES + ) + ) + + +class SmartThingsMediaPlayer(SmartThingsEntity, MediaPlayerEntity): + """Define a SmartThings media player.""" + + _attr_name = None + + def __init__(self, client: SmartThings, device: FullDevice) -> None: + """Initialize the media_player class.""" + super().__init__( + client, + device, + { + Capability.AUDIO_MUTE, + Capability.AUDIO_TRACK_DATA, + Capability.AUDIO_VOLUME, + Capability.MEDIA_INPUT_SOURCE, + Capability.MEDIA_PLAYBACK, + Capability.MEDIA_PLAYBACK_REPEAT, + Capability.MEDIA_PLAYBACK_SHUFFLE, + Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, + Capability.SWITCH, + }, + ) + self._attr_supported_features = self._determine_features() + self._attr_device_class = DEVICE_CLASS_MAP.get( + device.device.components[MAIN].user_category + or device.device.components[MAIN].manufacturer_category, + ) + + def _determine_features(self) -> MediaPlayerEntityFeature: + flags = MediaPlayerEntityFeature(0) + playback_commands = self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.SUPPORTED_PLAYBACK_COMMANDS + ) + if "play" in playback_commands: + flags |= MediaPlayerEntityFeature.PLAY + if "pause" in playback_commands: + flags |= MediaPlayerEntityFeature.PAUSE + if "stop" in playback_commands: + flags |= MediaPlayerEntityFeature.STOP + if "rewind" in playback_commands: + flags |= MediaPlayerEntityFeature.PREVIOUS_TRACK + if "fastForward" in playback_commands: + flags |= MediaPlayerEntityFeature.NEXT_TRACK + if self.supports_capability(Capability.AUDIO_VOLUME): + flags |= ( + MediaPlayerEntityFeature.VOLUME_SET + | 
MediaPlayerEntityFeature.VOLUME_STEP + ) + if self.supports_capability(Capability.AUDIO_MUTE): + flags |= MediaPlayerEntityFeature.VOLUME_MUTE + if self.supports_capability(Capability.SWITCH): + flags |= ( + MediaPlayerEntityFeature.TURN_ON | MediaPlayerEntityFeature.TURN_OFF + ) + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + flags |= MediaPlayerEntityFeature.SELECT_SOURCE + if self.supports_capability(Capability.MEDIA_PLAYBACK_SHUFFLE): + flags |= MediaPlayerEntityFeature.SHUFFLE_SET + if self.supports_capability(Capability.MEDIA_PLAYBACK_REPEAT): + flags |= MediaPlayerEntityFeature.REPEAT_SET + return flags + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the media player off.""" + await self.execute_device_command( + Capability.SWITCH, + Command.OFF, + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the media player on.""" + await self.execute_device_command( + Capability.SWITCH, + Command.ON, + ) + + async def async_mute_volume(self, mute: bool) -> None: + """Mute volume.""" + await self.execute_device_command( + Capability.AUDIO_MUTE, + Command.SET_MUTE, + argument="muted" if mute else "unmuted", + ) + + async def async_set_volume_level(self, volume: float) -> None: + """Set volume level.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.SET_VOLUME, + argument=int(volume * 100), + ) + + async def async_volume_up(self) -> None: + """Increase volume.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.VOLUME_UP, + ) + + async def async_volume_down(self) -> None: + """Decrease volume.""" + await self.execute_device_command( + Capability.AUDIO_VOLUME, + Command.VOLUME_DOWN, + ) + + async def async_media_play(self) -> None: + """Play media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.PLAY, + ) + + async def async_media_pause(self) -> None: + """Pause media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.PAUSE, + ) + + async def async_media_stop(self) -> None: + """Stop media.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.STOP, + ) + + async def async_media_previous_track(self) -> None: + """Previous track.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.REWIND, + ) + + async def async_media_next_track(self) -> None: + """Next track.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK, + Command.FAST_FORWARD, + ) + + async def async_select_source(self, source: str) -> None: + """Select source.""" + await self.execute_device_command( + Capability.MEDIA_INPUT_SOURCE, + Command.SET_INPUT_SOURCE, + argument=source, + ) + + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set shuffle mode.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK_SHUFFLE, + Command.SET_PLAYBACK_SHUFFLE, + argument="enabled" if shuffle else "disabled", + ) + + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set repeat mode.""" + await self.execute_device_command( + Capability.MEDIA_PLAYBACK_REPEAT, + Command.SET_PLAYBACK_REPEAT_MODE, + argument=HA_REPEAT_MODE_TO_SMARTTHINGS[repeat], + ) + + @property + def media_title(self) -> str | None: + """Title of current playing media.""" + if ( + not self.supports_capability(Capability.AUDIO_TRACK_DATA) + or ( + track_data := self.get_attribute_value( + Capability.AUDIO_TRACK_DATA, Attribute.AUDIO_TRACK_DATA + ) + ) + is None + ): + return None + return track_data.get("title", 
None) + + @property + def media_artist(self) -> str | None: + """Artist of current playing media.""" + if ( + not self.supports_capability(Capability.AUDIO_TRACK_DATA) + or ( + track_data := self.get_attribute_value( + Capability.AUDIO_TRACK_DATA, Attribute.AUDIO_TRACK_DATA + ) + ) + is None + ): + return None + return track_data.get("artist") + + @property + def state(self) -> MediaPlayerState | None: + """State of the media player.""" + if self.supports_capability(Capability.SWITCH): + if self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH) == "on": + if ( + self.source is not None + and self.source in CONTROLLABLE_SOURCES + and self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + in VALUE_TO_STATE + ): + return VALUE_TO_STATE[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + ] + return MediaPlayerState.ON + return MediaPlayerState.OFF + return VALUE_TO_STATE[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK, Attribute.PLAYBACK_STATUS + ) + ] + + @property + def is_volume_muted(self) -> bool: + """Returns if the volume is muted.""" + return ( + self.get_attribute_value(Capability.AUDIO_MUTE, Attribute.MUTE) == "muted" + ) + + @property + def volume_level(self) -> float: + """Volume level.""" + return self.get_attribute_value(Capability.AUDIO_VOLUME, Attribute.VOLUME) / 100 + + @property + def source(self) -> str | None: + """Input source.""" + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + return self.get_attribute_value( + Capability.MEDIA_INPUT_SOURCE, Attribute.INPUT_SOURCE + ) + if self.supports_capability(Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE): + return self.get_attribute_value( + Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, Attribute.INPUT_SOURCE + ) + return None + + @property + def source_list(self) -> list[str] | None: + """List of input sources.""" + if self.supports_capability(Capability.MEDIA_INPUT_SOURCE): + return self.get_attribute_value( + Capability.MEDIA_INPUT_SOURCE, Attribute.SUPPORTED_INPUT_SOURCES + ) + if self.supports_capability(Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE): + return self.get_attribute_value( + Capability.SAMSUNG_VD_AUDIO_INPUT_SOURCE, + Attribute.SUPPORTED_INPUT_SOURCES, + ) + return None + + @property + def shuffle(self) -> bool | None: + """Returns if shuffle mode is set.""" + if self.supports_capability(Capability.MEDIA_PLAYBACK_SHUFFLE): + return ( + self.get_attribute_value( + Capability.MEDIA_PLAYBACK_SHUFFLE, Attribute.PLAYBACK_SHUFFLE + ) + == "enabled" + ) + return None + + @property + def repeat(self) -> RepeatMode | None: + """Returns if repeat mode is set.""" + if self.supports_capability(Capability.MEDIA_PLAYBACK_REPEAT): + return REPEAT_MODE_TO_HA[ + self.get_attribute_value( + Capability.MEDIA_PLAYBACK_REPEAT, Attribute.PLAYBACK_REPEAT_MODE + ) + ] + return None diff --git a/homeassistant/components/smartthings/number.py b/homeassistant/components/smartthings/number.py new file mode 100644 index 00000000000..2f2ac7903f2 --- /dev/null +++ b/homeassistant/components/smartthings/number.py @@ -0,0 +1,76 @@ +"""Support for number entities through the SmartThings cloud API.""" + +from __future__ import annotations + +from pysmartthings import Attribute, Capability, Command, SmartThings + +from homeassistant.components.number import NumberEntity, NumberMode +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add number entities for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsWasherRinseCyclesNumberEntity(entry_data.client, device) + for device in entry_data.devices.values() + if Capability.CUSTOM_WASHER_RINSE_CYCLES in device.status[MAIN] + ) + + +class SmartThingsWasherRinseCyclesNumberEntity(SmartThingsEntity, NumberEntity): + """Define a SmartThings number.""" + + _attr_translation_key = "washer_rinse_cycles" + _attr_native_step = 1.0 + _attr_mode = NumberMode.BOX + + def __init__(self, client: SmartThings, device: FullDevice) -> None: + """Initialize the instance.""" + super().__init__(client, device, {Capability.CUSTOM_WASHER_RINSE_CYCLES}) + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{Capability.CUSTOM_WASHER_RINSE_CYCLES}_{Attribute.WASHER_RINSE_CYCLES}_{Attribute.WASHER_RINSE_CYCLES}" + + @property + def options(self) -> list[int]: + """Return the list of options.""" + values = self.get_attribute_value( + Capability.CUSTOM_WASHER_RINSE_CYCLES, + Attribute.SUPPORTED_WASHER_RINSE_CYCLES, + ) + return [int(value) for value in values] if values else [] + + @property + def native_value(self) -> float | None: + """Return the current value.""" + return int( + self.get_attribute_value( + Capability.CUSTOM_WASHER_RINSE_CYCLES, Attribute.WASHER_RINSE_CYCLES + ) + ) + + @property + def native_min_value(self) -> float: + """Return the minimum value.""" + return min(self.options) + + @property + def native_max_value(self) -> float: + """Return the maximum value.""" + return max(self.options) + + async def async_set_native_value(self, value: float) -> None: + """Set the value.""" + await self.execute_device_command( + Capability.CUSTOM_WASHER_RINSE_CYCLES, + Command.SET_WASHER_RINSE_CYCLES, + str(int(value)), + ) diff --git a/homeassistant/components/smartthings/quality_scale.yaml b/homeassistant/components/smartthings/quality_scale.yaml new file mode 100644 index 00000000000..be8a9039617 --- /dev/null +++ b/homeassistant/components/smartthings/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: + status: exempt + comment: | + This integration works via push. + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. 
+ docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: No options to configure + docs-installation-parameters: + status: exempt + comment: No parameters needed during installation + entity-unavailable: todo + integration-owner: done + log-when-unavailable: todo + parallel-updates: todo + reauthentication-flow: done + test-coverage: todo + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + This integration connects via the cloud. + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: | + This integration does not have any entities that are disabled by default. + entity-translations: done + exception-translations: todo + icon-translations: todo + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: done + # Platinum + async-dependency: done + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/smartthings/select.py b/homeassistant/components/smartthings/select.py new file mode 100644 index 00000000000..f0a483b1329 --- /dev/null +++ b/homeassistant/components/smartthings/select.py @@ -0,0 +1,127 @@ +"""Support for select entities through the SmartThings cloud API.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from pysmartthings import Attribute, Capability, Command, SmartThings + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + + +@dataclass(frozen=True, kw_only=True) +class SmartThingsSelectDescription(SelectEntityDescription): + """Class describing SmartThings select entities.""" + + key: Capability + requires_remote_control_status: bool + options_attribute: Attribute + status_attribute: Attribute + command: Command + + +CAPABILITIES_TO_SELECT: dict[Capability | str, SmartThingsSelectDescription] = { + Capability.DISHWASHER_OPERATING_STATE: SmartThingsSelectDescription( + key=Capability.DISHWASHER_OPERATING_STATE, + name=None, + translation_key="operating_state", + requires_remote_control_status=True, + options_attribute=Attribute.SUPPORTED_MACHINE_STATES, + status_attribute=Attribute.MACHINE_STATE, + command=Command.SET_MACHINE_STATE, + ), + Capability.DRYER_OPERATING_STATE: SmartThingsSelectDescription( + key=Capability.DRYER_OPERATING_STATE, + name=None, + translation_key="operating_state", + requires_remote_control_status=True, + options_attribute=Attribute.SUPPORTED_MACHINE_STATES, + status_attribute=Attribute.MACHINE_STATE, + command=Command.SET_MACHINE_STATE, + ), + Capability.WASHER_OPERATING_STATE: SmartThingsSelectDescription( + key=Capability.WASHER_OPERATING_STATE, + name=None, + translation_key="operating_state", + requires_remote_control_status=True, + options_attribute=Attribute.SUPPORTED_MACHINE_STATES, + status_attribute=Attribute.MACHINE_STATE, + command=Command.SET_MACHINE_STATE, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add select entities for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsSelectEntity( + entry_data.client, device, CAPABILITIES_TO_SELECT[capability] + ) + for device in entry_data.devices.values() + for capability in device.status[MAIN] + if capability in CAPABILITIES_TO_SELECT + ) + + +class SmartThingsSelectEntity(SmartThingsEntity, SelectEntity): + """Define a SmartThings select.""" + + entity_description: SmartThingsSelectDescription + + def __init__( + self, + client: SmartThings, + device: FullDevice, + entity_description: SmartThingsSelectDescription, + ) -> None: + """Initialize the instance.""" + capabilities = {entity_description.key} + if entity_description.requires_remote_control_status: + capabilities.add(Capability.REMOTE_CONTROL_STATUS) + super().__init__(client, device, capabilities) + self.entity_description = entity_description + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{entity_description.key}_{entity_description.status_attribute}_{entity_description.status_attribute}" + + @property + def options(self) -> list[str]: + """Return the list of options.""" + return self.get_attribute_value( + self.entity_description.key, self.entity_description.options_attribute + ) + + @property + def current_option(self) -> str | None: + """Return the current option.""" + return self.get_attribute_value( + self.entity_description.key, self.entity_description.status_attribute + ) + + async def async_select_option(self, option: str) -> None: + """Select an option.""" + if ( + self.entity_description.requires_remote_control_status + and self.get_attribute_value( + Capability.REMOTE_CONTROL_STATUS, Attribute.REMOTE_CONTROL_ENABLED + ) + == "false" + ): + raise ServiceValidationError( + "Can only be updated when remote control is enabled" + ) + await self.execute_device_command( + 
self.entity_description.key, + self.entity_description.command, + option, + ) diff --git a/homeassistant/components/smartthings/sensor.py b/homeassistant/components/smartthings/sensor.py index 08c9cb86c90..346516be480 100644 --- a/homeassistant/components/smartthings/sensor.py +++ b/homeassistant/components/smartthings/sensor.py @@ -7,9 +7,10 @@ from dataclasses import dataclass from datetime import datetime from typing import Any, cast -from pysmartthings import Attribute, Capability, SmartThings, Status +from pysmartthings import Attribute, Capability, ComponentStatus, SmartThings, Status from homeassistant.components.sensor import ( + DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -29,12 +30,14 @@ from homeassistant.const import ( UnitOfVolume, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.util import dt as dt_util from . import FullDevice, SmartThingsConfigEntry from .const import MAIN from .entity import SmartThingsEntity +from .util import deprecate_entity THERMOSTAT_CAPABILITIES = { Capability.TEMPERATURE_MEASUREMENT, @@ -128,11 +131,11 @@ class SmartThingsSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[Any], str | float | int | datetime | None] = lambda value: value extra_state_attributes_fn: Callable[[Any], dict[str, Any]] | None = None - unique_id_separator: str = "." capability_ignore_list: list[set[Capability]] | None = None options_attribute: Attribute | None = None exists_fn: Callable[[Status], bool] | None = None use_temperature_unit: bool = False + deprecated: Callable[[ComponentStatus], str | None] | None = None CAPABILITY_TO_SENSORS: dict[ @@ -189,6 +192,17 @@ CAPABILITY_TO_SENSORS: dict[ key=Attribute.VOLUME, translation_key="audio_volume", native_unit_of_measurement=PERCENTAGE, + deprecated=( + lambda status: "media_player" + if all( + capability in status + for capability in ( + Capability.AUDIO_MUTE, + Capability.MEDIA_PLAYBACK, + ) + ) + else None + ), ) ] }, @@ -225,7 +239,6 @@ CAPABILITY_TO_SENSORS: dict[ ) ] }, - # Haven't seen at devices yet Capability.CARBON_DIOXIDE_MEASUREMENT: { Attribute.CARBON_DIOXIDE: [ SmartThingsSensorEntityDescription( @@ -464,24 +477,25 @@ CAPABILITY_TO_SENSORS: dict[ device_class=SensorDeviceClass.ENUM, options_attribute=Attribute.SUPPORTED_INPUT_SOURCES, value_fn=lambda value: value.lower() if value else None, + deprecated=lambda _: "media_player", ) ] }, - # part of the proposed spec, Haven't seen at devices yet Capability.MEDIA_PLAYBACK_REPEAT: { Attribute.PLAYBACK_REPEAT_MODE: [ SmartThingsSensorEntityDescription( key=Attribute.PLAYBACK_REPEAT_MODE, translation_key="media_playback_repeat", + deprecated=lambda _: "media_player", ) ] }, - # part of the proposed spec, Haven't seen at devices yet Capability.MEDIA_PLAYBACK_SHUFFLE: { Attribute.PLAYBACK_SHUFFLE: [ SmartThingsSensorEntityDescription( key=Attribute.PLAYBACK_SHUFFLE, translation_key="media_playback_shuffle", + deprecated=lambda _: "media_player", ) ] }, @@ -500,6 +514,7 @@ CAPABILITY_TO_SENSORS: dict[ ], device_class=SensorDeviceClass.ENUM, value_fn=lambda value: MEDIA_PLAYBACK_STATE_MAP.get(value, value), + deprecated=lambda _: "media_player", ) ] }, @@ -678,6 +693,15 @@ CAPABILITY_TO_SENSORS: dict[ ) ] }, + Capability.RELATIVE_BRIGHTNESS: { + Attribute.BRIGHTNESS_INTENSITY: [ + SmartThingsSensorEntityDescription( + key=Attribute.BRIGHTNESS_INTENSITY, + 
translation_key="brightness_intensity", + state_class=SensorStateClass.MEASUREMENT, + ) + ] + }, Capability.RELATIVE_HUMIDITY_MEASUREMENT: { Attribute.HUMIDITY: [ SmartThingsSensorEntityDescription( @@ -849,21 +873,18 @@ CAPABILITY_TO_SENSORS: dict[ Capability.THREE_AXIS: { Attribute.THREE_AXIS: [ SmartThingsSensorEntityDescription( - key="X Coordinate", + key="x_coordinate", translation_key="x_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[0], ), SmartThingsSensorEntityDescription( - key="Y Coordinate", + key="y_coordinate", translation_key="y_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[1], ), SmartThingsSensorEntityDescription( - key="Z Coordinate", + key="z_coordinate", translation_key="z_coordinate", - unique_id_separator=" ", value_fn=lambda value: value[2], ), ] @@ -903,6 +924,16 @@ CAPABILITY_TO_SENSORS: dict[ ) ] }, + Capability.VERY_FINE_DUST_SENSOR: { + Attribute.VERY_FINE_DUST_LEVEL: [ + SmartThingsSensorEntityDescription( + key=Attribute.VERY_FINE_DUST_LEVEL, + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + device_class=SensorDeviceClass.PM1, + state_class=SensorStateClass.MEASUREMENT, + ) + ] + }, Capability.VOLTAGE_MEASUREMENT: { Attribute.VOLTAGE: [ SmartThingsSensorEntityDescription( @@ -985,25 +1016,67 @@ async def async_setup_entry( ) -> None: """Add sensors for a config entry.""" entry_data = entry.runtime_data - async_add_entities( - SmartThingsSensor(entry_data.client, device, description, capability, attribute) - for device in entry_data.devices.values() - for capability, attributes in CAPABILITY_TO_SENSORS.items() - if capability in device.status[MAIN] - for attribute, descriptions in attributes.items() - for description in descriptions - if ( - not description.capability_ignore_list - or not any( - all(capability in device.status[MAIN] for capability in capability_list) - for capability_list in description.capability_ignore_list - ) - ) - and ( - not description.exists_fn - or description.exists_fn(device.status[MAIN][capability][attribute]) - ) - ) + entities = [] + + entity_registry = er.async_get(hass) + + for device in entry_data.devices.values(): # pylint: disable=too-many-nested-blocks + for capability, attributes in CAPABILITY_TO_SENSORS.items(): + if capability in device.status[MAIN]: + for attribute, descriptions in attributes.items(): + for description in descriptions: + if ( + not description.capability_ignore_list + or not any( + all( + capability in device.status[MAIN] + for capability in capability_list + ) + for capability_list in description.capability_ignore_list + ) + ) and ( + not description.exists_fn + or description.exists_fn( + device.status[MAIN][capability][attribute] + ) + ): + if ( + description.deprecated + and ( + reason := description.deprecated( + device.status[MAIN] + ) + ) + is not None + ): + if deprecate_entity( + hass, + entity_registry, + SENSOR_DOMAIN, + f"{device.device.device_id}_{MAIN}_{capability}_{attribute}_{description.key}", + f"deprecated_{reason}", + ): + entities.append( + SmartThingsSensor( + entry_data.client, + device, + description, + capability, + attribute, + ) + ) + continue + entities.append( + SmartThingsSensor( + entry_data.client, + device, + description, + capability, + attribute, + ) + ) + + async_add_entities(entities) class SmartThingsSensor(SmartThingsEntity, SensorEntity): @@ -1024,7 +1097,7 @@ class SmartThingsSensor(SmartThingsEntity, SensorEntity): if entity_description.use_temperature_unit: 
capabilities_to_subscribe.add(Capability.TEMPERATURE_MEASUREMENT) super().__init__(client, device, capabilities_to_subscribe) - self._attr_unique_id = f"{device.device.device_id}{entity_description.unique_id_separator}{entity_description.key}" + self._attr_unique_id = f"{device.device.device_id}_{MAIN}_{capability}_{attribute}_{entity_description.key}" self._attribute = attribute self.capability = capability self.entity_description = entity_description diff --git a/homeassistant/components/smartthings/strings.json b/homeassistant/components/smartthings/strings.json index 844ebd12004..5e18dada260 100644 --- a/homeassistant/components/smartthings/strings.json +++ b/homeassistant/components/smartthings/strings.json @@ -33,13 +33,86 @@ "acceleration": { "name": "Acceleration" }, + "door": { + "name": "[%key:component::binary_sensor::entity_component::door::name%]" + }, + "dryer_wrinkle_prevent_active": { + "name": "Wrinkle prevent active" + }, "filter_status": { "name": "Filter status" }, + "freezer_door": { + "name": "Freezer door" + }, + "cooler_door": { + "name": "Cooler door" + }, + "remote_control": { + "name": "Remote control" + }, + "child_lock": { + "name": "Child lock" + }, "valve": { "name": "Valve" } }, + "button": { + "reset_water_filter": { + "name": "Reset water filter" + }, + "stop": { + "name": "[%key:common::action::stop%]" + } + }, + "event": { + "button": { + "state": { + "pushed": "Pushed", + "held": "Held", + "double": "Double", + "pushed_2x": "Pushed 2x", + "pushed_3x": "Pushed 3x", + "pushed_4x": "Pushed 4x", + "pushed_5x": "Pushed 5x", + "pushed_6x": "Pushed 6x", + "down": "Down", + "down_2x": "Down 2x", + "down_3x": "Down 3x", + "down_4x": "Down 4x", + "down_5x": "Down 5x", + "down_6x": "Down 6x", + "down_hold": "Down hold", + "up": "Up", + "up_2x": "Up 2x", + "up_3x": "Up 3x", + "up_4x": "Up 4x", + "up_5x": "Up 5x", + "up_6x": "Up 6x", + "up_hold": "Up hold", + "swipe_up": "Swipe up", + "swipe_down": "Swipe down", + "swipe_left": "Swipe left", + "swipe_right": "Swipe right" + } + } + }, + "number": { + "washer_rinse_cycles": { + "name": "Rinse cycles", + "unit_of_measurement": "cycles" + } + }, + "select": { + "operating_state": { + "state": { + "run": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::run%]", + "pause": "[%key:common::state::paused%]", + "stop": "[%key:component::smartthings::entity::sensor::dishwasher_machine_state::state::stop%]" + } + } + }, "sensor": { "lighting_mode": { "name": "Activity lighting mode" @@ -275,6 +348,9 @@ "refrigeration_setpoint": { "name": "[%key:component::smartthings::entity::sensor::oven_setpoint::name%]" }, + "brightness_intensity": { + "name": "Brightness intensity" + }, "robot_cleaner_cleaning_mode": { "name": "Cleaning mode", "state": { @@ -389,6 +465,59 @@ "freeze_protection": "Freeze protection" } } + }, + "switch": { + "bubble_soak": { + "name": "Bubble Soak" + }, + "wrinkle_prevent": { + "name": "Wrinkle prevent" + }, + "ice_maker": { + "name": "Ice maker" + } + } + }, + "issues": { + "deprecated_binary_valve": { + "title": "Valve binary sensor deprecated", + "description": "The valve binary sensor {entity_name} (`{entity_id}`) is deprecated and will be removed in the future. A valve entity with controls is available and should be used going forward. Please update your dashboards, templates accordingly and disable the entity to fix this issue." 
+ }, + "deprecated_binary_valve_scripts": { + "title": "[%key:component::smartthings::issues::deprecated_binary_valve::title%]", + "description": "The valve binary sensor {entity_name} (`{entity_id}`) is deprecated and will be removed in the future. The entity is used in the following automations or scripts:\n{items}\n\nA valve entity with controls is available and should be used going forward. Please use the new valve entity in the above automations or scripts and disable the entity to fix this issue." + }, + "deprecated_binary_fridge_door": { + "title": "Refrigerator door binary sensor deprecated", + "description": "The refrigerator door binary sensor {entity_name} (`{entity_id}`) is deprecated and will be removed in the future. Separate entities for cooler and freezer door are available and should be used going forward. Please update your dashboards, templates accordingly and disable the entity to fix this issue." + }, + "deprecated_binary_fridge_door_scripts": { + "title": "[%key:component::smartthings::issues::deprecated_binary_fridge_door::title%]", + "description": "The refrigerator door binary sensor {entity_name} (`{entity_id}`) is deprecated and will be removed in the future. The entity is used in the following automations or scripts:\n{items}\n\nSeparate entities for cooler and freezer door are available and should be used going forward. Please use them in the above automations or scripts and disable the entity to fix this issue." + }, + "deprecated_switch_appliance": { + "title": "Appliance switch deprecated", + "description": "The switch `{entity_id}` is deprecated because the actions did not work, so it has been replaced with a binary sensor instead.\n\nPlease update your dashboards, templates accordingly and disable the entity to fix this issue." + }, + "deprecated_switch_appliance_scripts": { + "title": "[%key:component::smartthings::issues::deprecated_switch_appliance::title%]", + "description": "The switch `{entity_id}` is deprecated because the actions did not work, so it has been replaced with a binary sensor instead.\n\nThe switch was used in the following automations or scripts:\n{items}\n\nPlease use the new binary sensor in the above automations or scripts and disable the entity to fix this issue." + }, + "deprecated_switch_media_player": { + "title": "[%key:component::smartthings::issues::deprecated_switch_appliance::title%]", + "description": "The switch `{entity_id}` is deprecated and a media player entity has been added to replace it.\n\nPlease update your dashboards, templates to use the new media player entity and disable the entity to fix this issue." + }, + "deprecated_switch_media_player_scripts": { + "title": "[%key:component::smartthings::issues::deprecated_switch_appliance::title%]", + "description": "The switch `{entity_id}` is deprecated and a media player entity has been added to replace it.\n\nThe switch was used in the following automations or scripts:\n{items}\n\nPlease use the new media player entity in the above automations or scripts and disable the entity to fix this issue." + }, + "deprecated_media_player": { + "title": "Media player sensors deprecated", + "description": "The sensor {entity_name} (`{entity_id}`) is deprecated because it has been replaced with a media player entity.\n\nPlease update your dashboards, templates to use the new media player entity and disable the entity to fix this issue."
+ }, + "deprecated_media_player_scripts": { + "title": "Deprecated sensor detected in some automations or scripts", + "description": "The sensor {entity_name} (`{entity_id}`) is deprecated because it has been replaced with a media player entity.\n\nThe sensor was used in the following automations or scripts:\n{items}\n\nPlease update the above automations or scripts to use the new media player entity and disable the entity to fix this issue." } } } diff --git a/homeassistant/components/smartthings/switch.py b/homeassistant/components/smartthings/switch.py index 380005f1b93..4e62957d3d4 100644 --- a/homeassistant/components/smartthings/switch.py +++ b/homeassistant/components/smartthings/switch.py @@ -2,17 +2,24 @@ from __future__ import annotations +from dataclasses import dataclass from typing import Any -from pysmartthings import Attribute, Capability, Command +from pysmartthings import Attribute, Capability, Command, SmartThings -from homeassistant.components.switch import SwitchEntity +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SwitchEntity, + SwitchEntityDescription, +) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from . import SmartThingsConfigEntry -from .const import MAIN +from . import FullDevice, SmartThingsConfigEntry +from .const import INVALID_SWITCH_CATEGORIES, MAIN from .entity import SmartThingsEntity +from .util import deprecate_entity CAPABILITIES = ( Capability.SWITCH_LEVEL, @@ -28,6 +35,58 @@ AC_CAPABILITIES = ( Capability.THERMOSTAT_COOLING_SETPOINT, ) +MEDIA_PLAYER_CAPABILITIES = ( + Capability.AUDIO_MUTE, + Capability.AUDIO_VOLUME, + Capability.MEDIA_PLAYBACK, +) + + +@dataclass(frozen=True, kw_only=True) +class SmartThingsSwitchEntityDescription(SwitchEntityDescription): + """Describe a SmartThings switch entity.""" + + status_attribute: Attribute + component_translation_key: dict[str, str] | None = None + + +@dataclass(frozen=True, kw_only=True) +class SmartThingsCommandSwitchEntityDescription(SmartThingsSwitchEntityDescription): + """Describe a SmartThings switch entity.""" + + command: Command + + +SWITCH = SmartThingsSwitchEntityDescription( + key=Capability.SWITCH, + status_attribute=Attribute.SWITCH, + name=None, +) +CAPABILITY_TO_COMMAND_SWITCHES: dict[ + Capability | str, SmartThingsCommandSwitchEntityDescription +] = { + Capability.CUSTOM_DRYER_WRINKLE_PREVENT: SmartThingsCommandSwitchEntityDescription( + key=Capability.CUSTOM_DRYER_WRINKLE_PREVENT, + translation_key="wrinkle_prevent", + status_attribute=Attribute.DRYER_WRINKLE_PREVENT, + command=Command.SET_DRYER_WRINKLE_PREVENT, + ) +} +CAPABILITY_TO_SWITCHES: dict[Capability | str, SmartThingsSwitchEntityDescription] = { + Capability.SAMSUNG_CE_WASHER_BUBBLE_SOAK: SmartThingsSwitchEntityDescription( + key=Capability.SAMSUNG_CE_WASHER_BUBBLE_SOAK, + translation_key="bubble_soak", + status_attribute=Attribute.STATUS, + ), + Capability.SWITCH: SmartThingsSwitchEntityDescription( + key=Capability.SWITCH, + status_attribute=Attribute.SWITCH, + component_translation_key={ + "icemaker": "ice_maker", + }, + ), +} + async def async_setup_entry( hass: HomeAssistant, @@ -36,35 +95,152 @@ async def async_setup_entry( ) -> None: """Add switches for a config entry.""" entry_data = entry.runtime_data - async_add_entities( - SmartThingsSwitch(entry_data.client, device, {Capability.SWITCH}) + entities: list[SmartThingsEntity] = [ + 
SmartThingsCommandSwitch( + entry_data.client, + device, + description, + Capability(capability), + ) for device in entry_data.devices.values() - if Capability.SWITCH in device.status[MAIN] - and not any(capability in device.status[MAIN] for capability in CAPABILITIES) - and not all(capability in device.status[MAIN] for capability in AC_CAPABILITIES) + for capability, description in CAPABILITY_TO_COMMAND_SWITCHES.items() + if capability in device.status[MAIN] + ] + entities.extend( + SmartThingsSwitch( + entry_data.client, + device, + description, + Capability(capability), + component, + ) + for device in entry_data.devices.values() + for capability, description in CAPABILITY_TO_SWITCHES.items() + for component in device.status + if capability in device.status[component] + and ( + (description.component_translation_key is None and component == MAIN) + or ( + description.component_translation_key is not None + and component in description.component_translation_key + ) + ) ) + entity_registry = er.async_get(hass) + for device in entry_data.devices.values(): + if ( + Capability.SWITCH in device.status[MAIN] + and not any( + capability in device.status[MAIN] for capability in CAPABILITIES + ) + and not all( + capability in device.status[MAIN] for capability in AC_CAPABILITIES + ) + ): + media_player = all( + capability in device.status[MAIN] + for capability in MEDIA_PLAYER_CAPABILITIES + ) + appliance = ( + device.device.components[MAIN].manufacturer_category + in INVALID_SWITCH_CATEGORIES + ) + if media_player or appliance: + issue = "media_player" if media_player else "appliance" + if deprecate_entity( + hass, + entity_registry, + SWITCH_DOMAIN, + f"{device.device.device_id}_{MAIN}_{Capability.SWITCH}_{Attribute.SWITCH}_{Attribute.SWITCH}", + f"deprecated_switch_{issue}", + ): + entities.append( + SmartThingsSwitch( + entry_data.client, + device, + SWITCH, + Capability.SWITCH, + ) + ) + continue + entities.append( + SmartThingsSwitch( + entry_data.client, + device, + SWITCH, + Capability.SWITCH, + ) + ) + async_add_entities(entities) class SmartThingsSwitch(SmartThingsEntity, SwitchEntity): """Define a SmartThings switch.""" - _attr_name = None + entity_description: SmartThingsSwitchEntityDescription + + def __init__( + self, + client: SmartThings, + device: FullDevice, + entity_description: SmartThingsSwitchEntityDescription, + capability: Capability, + component: str = MAIN, + ) -> None: + """Initialize the switch.""" + super().__init__(client, device, {capability}, component=component) + self.entity_description = entity_description + self.switch_capability = capability + self._attr_unique_id = f"{device.device.device_id}_{component}_{capability}_{entity_description.status_attribute}_{entity_description.status_attribute}" + if ( + translation_keys := entity_description.component_translation_key + ) is not None and ( + translation_key := translation_keys.get(component) + ) is not None: + self._attr_translation_key = translation_key async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" await self.execute_device_command( - Capability.SWITCH, + self.switch_capability, Command.OFF, ) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" await self.execute_device_command( - Capability.SWITCH, + self.switch_capability, Command.ON, ) @property def is_on(self) -> bool: - """Return true if light is on.""" - return self.get_attribute_value(Capability.SWITCH, Attribute.SWITCH) == "on" + """Return true if switch is on.""" + return ( + 
self.get_attribute_value( + self.switch_capability, self.entity_description.status_attribute + ) + == "on" + ) + + +class SmartThingsCommandSwitch(SmartThingsSwitch): + """Define a SmartThings command switch.""" + + entity_description: SmartThingsCommandSwitchEntityDescription + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.execute_device_command( + self.switch_capability, + self.entity_description.command, + "off", + ) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.execute_device_command( + self.switch_capability, + self.entity_description.command, + "on", + ) diff --git a/homeassistant/components/smartthings/update.py b/homeassistant/components/smartthings/update.py new file mode 100644 index 00000000000..bb226918596 --- /dev/null +++ b/homeassistant/components/smartthings/update.py @@ -0,0 +1,87 @@ +"""Support for update entities through the SmartThings cloud API.""" + +from __future__ import annotations + +from typing import Any + +from awesomeversion import AwesomeVersion +from pysmartthings import Attribute, Capability, Command + +from homeassistant.components.update import ( + UpdateDeviceClass, + UpdateEntity, + UpdateEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add update entities for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsUpdateEntity(entry_data.client, device, {Capability.FIRMWARE_UPDATE}) + for device in entry_data.devices.values() + if Capability.FIRMWARE_UPDATE in device.status[MAIN] + ) + + +def is_hex_version(version: str) -> bool: + """Check if the version is a hex version.""" + return len(version) == 8 and all(c in "0123456789abcdefABCDEF" for c in version) + + +class SmartThingsUpdateEntity(SmartThingsEntity, UpdateEntity): + """Define a SmartThings update entity.""" + + _attr_device_class = UpdateDeviceClass.FIRMWARE + _attr_supported_features = ( + UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + ) + + @property + def installed_version(self) -> str | None: + """Return the installed version of the entity.""" + return self.get_attribute_value( + Capability.FIRMWARE_UPDATE, Attribute.CURRENT_VERSION + ) + + @property + def latest_version(self) -> str | None: + """Return the available version of the entity.""" + return self.get_attribute_value( + Capability.FIRMWARE_UPDATE, Attribute.AVAILABLE_VERSION + ) + + @property + def in_progress(self) -> bool: + """Return if the entity is in progress.""" + return ( + self.get_attribute_value(Capability.FIRMWARE_UPDATE, Attribute.STATE) + == "updateInProgress" + ) + + async def async_install( + self, version: str | None, backup: bool, **kwargs: Any + ) -> None: + """Install the firmware update.""" + await self.execute_device_command( + Capability.FIRMWARE_UPDATE, + Command.UPDATE_FIRMWARE, + ) + + def version_is_newer(self, latest_version: str, installed_version: str) -> bool: + """Return if the latest version is newer.""" + if is_hex_version(latest_version): + latest_version = f"0x{latest_version}" + if is_hex_version(installed_version): + installed_version = f"0x{installed_version}" + return 
AwesomeVersion(latest_version) > AwesomeVersion(installed_version) diff --git a/homeassistant/components/smartthings/util.py b/homeassistant/components/smartthings/util.py new file mode 100644 index 00000000000..b21652ca629 --- /dev/null +++ b/homeassistant/components/smartthings/util.py @@ -0,0 +1,83 @@ +"""Utility functions for SmartThings integration.""" + +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) + +from .const import DOMAIN + + +def deprecate_entity( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + platform_domain: str, + entity_unique_id: str, + issue_string: str, +) -> bool: + """Create an issue for deprecated entities.""" + if entity_id := entity_registry.async_get_entity_id( + platform_domain, DOMAIN, entity_unique_id + ): + entity_entry = entity_registry.async_get(entity_id) + if not entity_entry: + return False + if entity_entry.disabled: + entity_registry.async_remove(entity_id) + async_delete_issue( + hass, + DOMAIN, + f"{issue_string}_{entity_id}", + ) + return False + translation_key = issue_string + placeholders = { + "entity_id": entity_id, + "entity_name": entity_entry.name or entity_entry.original_name or "Unknown", + } + if items := get_automations_and_scripts_using_entity(hass, entity_id): + translation_key = f"{translation_key}_scripts" + placeholders.update( + { + "items": "\n".join(items), + } + ) + async_create_issue( + hass, + DOMAIN, + f"{issue_string}_{entity_id}", + breaks_in_ha_version="2025.10.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key=translation_key, + translation_placeholders=placeholders, + ) + return True + return False + + +def get_automations_and_scripts_using_entity( + hass: HomeAssistant, + entity_id: str, +) -> list[str]: + """Get automations and scripts using an entity.""" + automations = automations_with_entity(hass, entity_id) + scripts = scripts_with_entity(hass, entity_id) + if not automations and not scripts: + return [] + + entity_reg = er.async_get(hass) + return [ + f"- [{item.original_name}](/config/{integration}/edit/{item.unique_id})" + for integration, entities in ( + ("automation", automations), + ("script", scripts), + ) + for entity_id in entities + if (item := entity_reg.async_get(entity_id)) + ] diff --git a/homeassistant/components/smartthings/valve.py b/homeassistant/components/smartthings/valve.py new file mode 100644 index 00000000000..4279d528f8b --- /dev/null +++ b/homeassistant/components/smartthings/valve.py @@ -0,0 +1,71 @@ +"""Support for valves through the SmartThings cloud API.""" + +from __future__ import annotations + +from pysmartthings import Attribute, Capability, Category, Command, SmartThings + +from homeassistant.components.valve import ( + ValveDeviceClass, + ValveEntity, + ValveEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import FullDevice, SmartThingsConfigEntry +from .const import MAIN +from .entity import SmartThingsEntity + +DEVICE_CLASS_MAP: dict[Category | str, ValveDeviceClass] = { + Category.WATER_VALVE: ValveDeviceClass.WATER, + Category.GAS_VALVE: ValveDeviceClass.GAS, +} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartThingsConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Add valves for a config entry.""" + entry_data = entry.runtime_data + async_add_entities( + SmartThingsValve(entry_data.client, device) + for device in entry_data.devices.values() + if Capability.VALVE in device.status[MAIN] + ) + + +class SmartThingsValve(SmartThingsEntity, ValveEntity): + """Define a SmartThings valve.""" + + _attr_supported_features = ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE + _attr_reports_position = False + _attr_name = None + + def __init__(self, client: SmartThings, device: FullDevice) -> None: + """Init the class.""" + super().__init__(client, device, {Capability.VALVE}) + self._attr_device_class = DEVICE_CLASS_MAP.get( + device.device.components[MAIN].user_category + or device.device.components[MAIN].manufacturer_category + ) + + async def async_open_valve(self) -> None: + """Open the valve.""" + await self.execute_device_command( + Capability.VALVE, + Command.OPEN, + ) + + async def async_close_valve(self) -> None: + """Close the valve.""" + await self.execute_device_command( + Capability.VALVE, + Command.CLOSE, + ) + + @property + def is_closed(self) -> bool: + """Return if the valve is closed.""" + return self.get_attribute_value(Capability.VALVE, Attribute.VALVE) == "closed" diff --git a/homeassistant/components/smarttub/strings.json b/homeassistant/components/smarttub/strings.json index 974e5fb7d37..79fa7a4820f 100644 --- a/homeassistant/components/smarttub/strings.json +++ b/homeassistant/components/smarttub/strings.json @@ -49,17 +49,17 @@ }, "snooze_reminder": { "name": "Snooze a reminder", - "description": "Delay a reminder, so that it won't trigger again for a period of time.", + "description": "Temporarily suppresses the maintenance reminder on a hot tub.", "fields": { "days": { "name": "Days", - "description": "The number of days to delay the reminder." + "description": "The number of days to snooze the reminder." 
} } }, "reset_reminder": { "name": "Reset a reminder", - "description": "Reset a reminder, and set the next time it will be triggered.", + "description": "Resets the maintenance reminder on a hot tub.", "fields": { "days": { "name": "[%key:component::smarttub::services::snooze_reminder::fields::days::name%]", diff --git a/homeassistant/components/smarty/config_flow.py b/homeassistant/components/smarty/config_flow.py index 9a55356a990..a7f0bdd4123 100644 --- a/homeassistant/components/smarty/config_flow.py +++ b/homeassistant/components/smarty/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Smarty integration.""" +import logging from typing import Any from pysmarty2 import Smarty @@ -10,6 +11,8 @@ from homeassistant.const import CONF_HOST, CONF_NAME from .const import DOMAIN +_LOGGER = logging.getLogger(__name__) + class SmartyConfigFlow(ConfigFlow, domain=DOMAIN): """Smarty config flow.""" @@ -20,7 +23,8 @@ class SmartyConfigFlow(ConfigFlow, domain=DOMAIN): try: if smarty.update(): return None - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") return "unknown" else: return "cannot_connect" diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py index 8f3e675ef6b..b3a6860e5b7 100644 --- a/homeassistant/components/smlight/__init__.py +++ b/homeassistant/components/smlight/__init__.py @@ -2,7 +2,7 @@ from __future__ import annotations -from pysmlight import Api2, Info, Radio +from pysmlight import Api2 from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant @@ -50,9 +50,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -def get_radio(info: Info, idx: int) -> Radio: - """Get the radio object from the info.""" - assert info.radios is not None - return info.radios[idx] diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index fcfc364d983..ce4f8f43233 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -51,14 +51,14 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): self.client = Api2(self._host, session=async_get_clientsession(self.hass)) try: - info = await self.client.get_info() - self._host = str(info.device_ip) - self._device_name = str(info.hostname) - - if info.model not in Devices: - return self.async_abort(reason="unsupported_device") - if not await self._async_check_auth_required(user_input): + info = await self.client.get_info() + self._host = str(info.device_ip) + self._device_name = str(info.hostname) + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + return await self._async_complete_entry(user_input) except SmlightConnectionError: errors["base"] = "cannot_connect" @@ -128,13 +128,13 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: try: - info = await self.client.get_info() - - if info.model not in Devices: - return self.async_abort(reason="unsupported_device") - if not await self._async_check_auth_required(user_input): - return await self._async_complete_entry(user_input) + info = await self.client.get_info() + + if info.model not in Devices: + return self.async_abort(reason="unsupported_device") + + return await 
self._async_complete_entry(user_input) except SmlightConnectionError: return self.async_abort(reason="cannot_connect") diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index 3f527d1fcd9..e9025203b8c 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -11,7 +11,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_push", - "requirements": ["pysmlight==0.2.3"], + "requirements": ["pysmlight==0.2.4"], "zeroconf": [ { "type": "_slzb-06._tcp.local." diff --git a/homeassistant/components/smlight/sensor.py b/homeassistant/components/smlight/sensor.py index 57a08d177d4..2f57843b5eb 100644 --- a/homeassistant/components/smlight/sensor.py +++ b/homeassistant/components/smlight/sensor.py @@ -37,7 +37,7 @@ class SmSensorEntityDescription(SensorEntityDescription): class SmInfoEntityDescription(SensorEntityDescription): """Class describing SMLIGHT information entities.""" - value_fn: Callable[[Info], StateType] + value_fn: Callable[[Info, int], StateType] INFO: list[SmInfoEntityDescription] = [ @@ -46,24 +46,25 @@ INFO: list[SmInfoEntityDescription] = [ translation_key="device_mode", device_class=SensorDeviceClass.ENUM, options=["eth", "wifi", "usb"], - value_fn=lambda x: x.coord_mode, + value_fn=lambda x, idx: x.coord_mode, ), SmInfoEntityDescription( key="firmware_channel", translation_key="firmware_channel", device_class=SensorDeviceClass.ENUM, options=["dev", "release"], - value_fn=lambda x: x.fw_channel, - ), - SmInfoEntityDescription( - key="zigbee_type", - translation_key="zigbee_type", - device_class=SensorDeviceClass.ENUM, - options=["coordinator", "router", "thread"], - value_fn=lambda x: x.zb_type, + value_fn=lambda x, idx: x.fw_channel, ), ] +RADIO_INFO = SmInfoEntityDescription( + key="zigbee_type", + translation_key="zigbee_type", + device_class=SensorDeviceClass.ENUM, + options=["coordinator", "router", "thread"], + value_fn=lambda x, idx: x.radios[idx].zb_type, +) + SENSORS: list[SmSensorEntityDescription] = [ SmSensorEntityDescription( @@ -102,6 +103,16 @@ SENSORS: list[SmSensorEntityDescription] = [ ), ] +EXTRA_SENSOR = SmSensorEntityDescription( + key="zigbee_temperature_2", + translation_key="zigbee_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda x: x.zb_temp2, +) + UPTIME: list[SmSensorEntityDescription] = [ SmSensorEntityDescription( key="core_uptime", @@ -127,8 +138,7 @@ async def async_setup_entry( ) -> None: """Set up SMLIGHT sensor based on a config entry.""" coordinator = entry.runtime_data.data - - async_add_entities( + entities: list[SmEntity] = list( chain( (SmInfoSensorEntity(coordinator, description) for description in INFO), (SmSensorEntity(coordinator, description) for description in SENSORS), @@ -136,6 +146,16 @@ async def async_setup_entry( ) ) + entities.extend( + SmInfoSensorEntity(coordinator, RADIO_INFO, idx) + for idx, _ in enumerate(coordinator.data.info.radios) + ) + + if coordinator.data.sensors.zb_temp2 is not None: + entities.append(SmSensorEntity(coordinator, EXTRA_SENSOR)) + + async_add_entities(entities) + class SmSensorEntity(SmEntity, SensorEntity): """Representation of a slzb sensor.""" @@ -172,17 +192,20 @@ class SmInfoSensorEntity(SmEntity, SensorEntity): self, coordinator: SmDataUpdateCoordinator, 
description: SmInfoEntityDescription, + idx: int = 0, ) -> None: """Initiate slzb sensor.""" super().__init__(coordinator) self.entity_description = description - self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + self.idx = idx + sensor = f"_{idx}" if idx else "" + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}{sensor}" @property def native_value(self) -> StateType: """Return the sensor value.""" - value = self.entity_description.value_fn(self.coordinator.data.info) + value = self.entity_description.value_fn(self.coordinator.data.info, self.idx) options = self.entity_description.options if isinstance(value, int) and options is not None: diff --git a/homeassistant/components/smlight/update.py b/homeassistant/components/smlight/update.py index 10d142e6221..3143f2f4290 100644 --- a/homeassistant/components/smlight/update.py +++ b/homeassistant/components/smlight/update.py @@ -22,7 +22,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from . import get_radio from .const import LOGGER from .coordinator import SmConfigEntry, SmFirmwareUpdateCoordinator, SmFwData from .entity import SmEntity @@ -56,7 +55,7 @@ CORE_UPDATE_ENTITY = SmUpdateEntityDescription( ZB_UPDATE_ENTITY = SmUpdateEntityDescription( key="zigbee_update", translation_key="zigbee_update", - installed_version=lambda x, idx: get_radio(x, idx).zb_version, + installed_version=lambda x, idx: x.radios[idx].zb_version, latest_version=zigbee_latest_version, ) @@ -75,7 +74,6 @@ async def async_setup_entry( entities = [SmUpdateEntity(coordinator, CORE_UPDATE_ENTITY)] radios = coordinator.data.info.radios - assert radios is not None entities.extend( SmUpdateEntity(coordinator, ZB_UPDATE_ENTITY, idx) diff --git a/homeassistant/components/snoo/__init__.py b/homeassistant/components/snoo/__init__.py index aaf0c828830..54834bf58ce 100644 --- a/homeassistant/components/snoo/__init__.py +++ b/homeassistant/components/snoo/__init__.py @@ -17,7 +17,13 @@ from .coordinator import SnooConfigEntry, SnooCoordinator _LOGGER = logging.getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.EVENT, + Platform.SELECT, + Platform.SENSOR, + Platform.SWITCH, +] async def async_setup_entry(hass: HomeAssistant, entry: SnooConfigEntry) -> bool: diff --git a/homeassistant/components/snoo/binary_sensor.py b/homeassistant/components/snoo/binary_sensor.py new file mode 100644 index 00000000000..3c91db5b86d --- /dev/null +++ b/homeassistant/components/snoo/binary_sensor.py @@ -0,0 +1,70 @@ +"""Support for Snoo Binary Sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from python_snoo.containers import SnooData + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, + EntityCategory, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import SnooConfigEntry +from .entity import SnooDescriptionEntity + + +@dataclass(frozen=True, kw_only=True) +class SnooBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes a Snoo Binary Sensor.""" + + value_fn: Callable[[SnooData], bool] + + +BINARY_SENSOR_DESCRIPTIONS: 
list[SnooBinarySensorEntityDescription] = [ + SnooBinarySensorEntityDescription( + key="left_clip", + translation_key="left_clip", + value_fn=lambda data: data.left_safety_clip, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + SnooBinarySensorEntityDescription( + key="right_clip", + translation_key="right_clip", + value_fn=lambda data: data.right_safety_clip, + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_category=EntityCategory.DIAGNOSTIC, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SnooConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Snoo device.""" + coordinators = entry.runtime_data + async_add_entities( + SnooBinarySensor(coordinator, description) + for coordinator in coordinators.values() + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class SnooBinarySensor(SnooDescriptionEntity, BinarySensorEntity): + """A binary sensor using Snoo coordinator.""" + + entity_description: SnooBinarySensorEntityDescription + + @property + def is_on(self) -> bool: + """Return true if the binary sensor is on.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/snoo/event.py b/homeassistant/components/snoo/event.py new file mode 100644 index 00000000000..1e50ee46d90 --- /dev/null +++ b/homeassistant/components/snoo/event.py @@ -0,0 +1,63 @@ +"""Support for Snoo Events.""" + +from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .coordinator import SnooConfigEntry +from .entity import SnooDescriptionEntity + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SnooConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Snoo device.""" + coordinators = entry.runtime_data + async_add_entities( + SnooEvent( + coordinator, + EventEntityDescription( + key="event", + translation_key="event", + event_types=[ + "timer", + "cry", + "command", + "safety_clip", + "long_activity_press", + "activity", + "power", + "status_requested", + "sticky_white_noise_updated", + "config_change", + ], + ), + ) + for coordinator in coordinators.values() + ) + + +class SnooEvent(SnooDescriptionEntity, EventEntity): + """An event using Snoo coordinator.""" + + @callback + def _async_handle_event(self) -> None: + """Handle the Snoo event.""" + self._trigger_event( + self.coordinator.data.event.value, + ) + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Add Event.""" + await super().async_added_to_hass() + if self.coordinator.data: + # If we were able to get data on startup - set it + # Otherwise, it will update when the coordinator gets data.
+ self._async_handle_event() + + def _handle_coordinator_update(self) -> None: + self._async_handle_event() + return super()._handle_coordinator_update() diff --git a/homeassistant/components/snoo/manifest.json b/homeassistant/components/snoo/manifest.json index 4084a7e3e79..839382b2d84 100644 --- a/homeassistant/components/snoo/manifest.json +++ b/homeassistant/components/snoo/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_push", "loggers": ["snoo"], "quality_scale": "bronze", - "requirements": ["python-snoo==0.6.4"] + "requirements": ["python-snoo==0.6.5"] } diff --git a/homeassistant/components/snoo/select.py b/homeassistant/components/snoo/select.py new file mode 100644 index 00000000000..44624ed1a2d --- /dev/null +++ b/homeassistant/components/snoo/select.py @@ -0,0 +1,78 @@ +"""Support for Snoo Select.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from python_snoo.containers import SnooData, SnooDevice, SnooLevels +from python_snoo.exceptions import SnooCommandException +from python_snoo.snoo import Snoo + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import DOMAIN +from .coordinator import SnooConfigEntry +from .entity import SnooDescriptionEntity + + +@dataclass(frozen=True, kw_only=True) +class SnooSelectEntityDescription(SelectEntityDescription): + """Describes a Snoo Select.""" + + value_fn: Callable[[SnooData], str] + set_value_fn: Callable[[Snoo, SnooDevice, str], Awaitable[None]] + + +SELECT_DESCRIPTIONS: list[SnooSelectEntityDescription] = [ + SnooSelectEntityDescription( + key="intensity", + translation_key="intensity", + value_fn=lambda data: data.state_machine.level.name, + set_value_fn=lambda snoo_api, device, state: snoo_api.set_level( + device, SnooLevels[state] + ), + options=[level.name for level in SnooLevels], + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SnooConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Snoo device.""" + coordinators = entry.runtime_data + async_add_entities( + SnooSelect(coordinator, description) + for coordinator in coordinators.values() + for description in SELECT_DESCRIPTIONS + ) + + +class SnooSelect(SnooDescriptionEntity, SelectEntity): + """A select using Snoo coordinator.""" + + entity_description: SnooSelectEntityDescription + + @property + def current_option(self) -> str | None: + """Return the selected entity option to represent the entity state.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + try: + await self.entity_description.set_value_fn( + self.coordinator.snoo, self.device, option + ) + except SnooCommandException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="select_failed", + translation_placeholders={"name": str(self.name), "option": option}, + ) from err diff --git a/homeassistant/components/snoo/strings.json b/homeassistant/components/snoo/strings.json index 567fa30fca7..72b0342c7f4 100644 --- a/homeassistant/components/snoo/strings.json +++ b/homeassistant/components/snoo/strings.json @@ -21,7 +21,47 @@ "already_configured":
"[%key:common::config_flow::abort::already_configured_device%]" } }, + "exceptions": { + "select_failed": { + "message": "Error while updating {name} to {option}" + }, + "switch_on_failed": { + "message": "Turning {name} on failed" + }, + "switch_off_failed": { + "message": "Turning {name} off failed" + } + }, "entity": { + "binary_sensor": { + "left_clip": { + "name": "Left safety clip" + }, + "right_clip": { + "name": "Right safety clip" + } + }, + "event": { + "event": { + "name": "Snoo event", + "state_attributes": { + "event_type": { + "state": { + "timer": "Timer", + "cry": "Cry", + "command": "Command sent", + "safety_clip": "Safety clip changed", + "long_activity_press": "Long activity press", + "activity": "Activity press", + "power": "Power button pressed", + "status_requested": "Status requested", + "sticky_white_noise_updated": "Sleepytime sounds updated", + "config_change": "Config changed" + } + } + } + } + }, "sensor": { "state": { "name": "State", @@ -39,6 +79,27 @@ "time_left": { "name": "Time left" } + }, + "select": { + "intensity": { + "name": "Intensity", + "state": { + "baseline": "[%key:component::snoo::entity::sensor::state::state::baseline%]", + "level1": "[%key:component::snoo::entity::sensor::state::state::level1%]", + "level2": "[%key:component::snoo::entity::sensor::state::state::level2%]", + "level3": "[%key:component::snoo::entity::sensor::state::state::level3%]", + "level4": "[%key:component::snoo::entity::sensor::state::state::level4%]", + "stop": "[%key:component::snoo::entity::sensor::state::state::stop%]" + } + } + }, + "switch": { + "sticky_white_noise": { + "name": "Sleepytime sounds" + }, + "hold": { + "name": "Level lock" + } } } } diff --git a/homeassistant/components/snoo/switch.py b/homeassistant/components/snoo/switch.py new file mode 100644 index 00000000000..2ed322d5f6b --- /dev/null +++ b/homeassistant/components/snoo/switch.py @@ -0,0 +1,105 @@ +"""Support for Snoo Switches.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from python_snoo.containers import SnooData, SnooDevice +from python_snoo.exceptions import SnooCommandException +from python_snoo.snoo import Snoo + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from .const import DOMAIN +from .coordinator import SnooConfigEntry +from .entity import SnooDescriptionEntity + + +@dataclass(frozen=True, kw_only=True) +class SnooSwitchEntityDescription(SwitchEntityDescription): + """Describes a Snoo sensor.""" + + value_fn: Callable[[SnooData], bool] + set_value_fn: Callable[[Snoo, SnooDevice, SnooData, bool], Awaitable[None]] + + +BINARY_SENSOR_DESCRIPTIONS: list[SnooSwitchEntityDescription] = [ + SnooSwitchEntityDescription( + key="sticky_white_noise", + translation_key="sticky_white_noise", + value_fn=lambda data: data.state_machine.sticky_white_noise == "on", + set_value_fn=lambda snoo_api, device, _, state: snoo_api.set_sticky_white_noise( + device, state + ), + ), + SnooSwitchEntityDescription( + key="hold", + translation_key="hold", + value_fn=lambda data: data.state_machine.hold == "on", + set_value_fn=lambda snoo_api, device, data, state: snoo_api.set_level( + device, data.state_machine.level, state + ), + ), +] + + +async def async_setup_entry( + hass: 
HomeAssistant, + entry: SnooConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Snoo device.""" + coordinators = entry.runtime_data + async_add_entities( + SnooSwitch(coordinator, description) + for coordinator in coordinators.values() + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class SnooSwitch(SnooDescriptionEntity, SwitchEntity): + """A switch using Snoo coordinator.""" + + entity_description: SnooSwitchEntityDescription + + @property + def is_on(self) -> bool | None: + """Return True if entity is on.""" + return self.entity_description.value_fn(self.coordinator.data) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + try: + await self.entity_description.set_value_fn( + self.coordinator.snoo, + self.coordinator.device, + self.coordinator.data, + True, + ) + except SnooCommandException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="switch_on_failed", + translation_placeholders={"name": str(self.name), "status": "on"}, + ) from err + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + try: + await self.entity_description.set_value_fn( + self.coordinator.snoo, + self.coordinator.device, + self.coordinator.data, + False, + ) + except SnooCommandException as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="switch_off_failed", + translation_placeholders={"name": str(self.name), "status": "off"}, + ) from err diff --git a/homeassistant/components/solaredge/strings.json b/homeassistant/components/solaredge/strings.json index 2b626987546..105a9282a6d 100644 --- a/homeassistant/components/solaredge/strings.json +++ b/homeassistant/components/solaredge/strings.json @@ -5,7 +5,7 @@ "title": "Define the API parameters for this installation", "data": { "name": "The name of this installation", - "site_id": "The SolarEdge site-id", + "site_id": "The SolarEdge site ID", "api_key": "[%key:common::config_flow::data::api_key%]" } } @@ -14,7 +14,7 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]", "site_not_active": "The site is not active", - "could_not_connect": "Could not connect to the solaredge API" + "could_not_connect": "Could not connect to the SolarEdge API" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" @@ -65,7 +65,7 @@ "name": "Grid power" }, "storage_power": { - "name": "Stored power" + "name": "Storage power" }, "purchased_energy": { "name": "Imported energy" diff --git a/homeassistant/components/solarlog/coordinator.py b/homeassistant/components/solarlog/coordinator.py index 6292b1332d7..48ebeece1ba 100644 --- a/homeassistant/components/solarlog/coordinator.py +++ b/homeassistant/components/solarlog/coordinator.py @@ -75,7 +75,7 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]): await self.solarlog.test_extended_data_available() if logged_in or await self.solarlog.test_extended_data_available(): device_list = await self.solarlog.update_device_list() - self.solarlog.set_enabled_devices({key: True for key in device_list}) + self.solarlog.set_enabled_devices(dict.fromkeys(device_list, True)) async def _async_update_data(self) -> SolarlogData: """Update the data from the SolarLog device.""" diff --git a/homeassistant/components/sonos/__init__.py b/homeassistant/components/sonos/__init__.py index d530fa21e39..24580971ae2 100644 
--- a/homeassistant/components/sonos/__init__.py +++ b/homeassistant/components/sonos/__init__.py @@ -7,6 +7,7 @@ from collections import OrderedDict from dataclasses import dataclass, field import datetime from functools import partial +from ipaddress import AddressValueError, IPv4Address import logging import socket from typing import Any, cast @@ -208,6 +209,14 @@ class SonosDiscoveryManager: async def async_subscribe_to_zone_updates(self, ip_address: str) -> None: """Test subscriptions and create SonosSpeakers based on results.""" + try: + _ = IPv4Address(ip_address) + except AddressValueError: + _LOGGER.debug( + "Sonos integration only supports IPv4 addresses, invalid ip_address received: %s", + ip_address, + ) + return soco = SoCo(ip_address) # Cache now to avoid household ID lookup during first ZoneGroupState processing await self.hass.async_add_executor_job( diff --git a/homeassistant/components/sonos/config_flow.py b/homeassistant/components/sonos/config_flow.py index 057cdb8ec08..b5e2c684281 100644 --- a/homeassistant/components/sonos/config_flow.py +++ b/homeassistant/components/sonos/config_flow.py @@ -31,6 +31,8 @@ class SonosDiscoveryFlowHandler(DiscoveryFlowHandler[Awaitable[bool]], domain=DO hostname = discovery_info.hostname if hostname is None or not hostname.lower().startswith("sonos"): return self.async_abort(reason="not_sonos_device") + if discovery_info.ip_address.version != 4: + return self.async_abort(reason="not_ipv4_address") if discovery_manager := self.hass.data.get(DATA_SONOS_DISCOVERY_MANAGER): host = discovery_info.host mdns_name = discovery_info.name diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 0c66484202f..a774de0ae5b 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -462,11 +462,20 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): """Play a favorite.""" uri = favorite.reference.get_uri() soco = self.coordinator.soco - if soco.music_source_from_uri(uri) in [ - MUSIC_SRC_RADIO, - MUSIC_SRC_LINE_IN, - ]: - soco.play_uri(uri, title=favorite.title, timeout=LONG_SERVICE_TIMEOUT) + if ( + soco.music_source_from_uri(uri) + in [ + MUSIC_SRC_RADIO, + MUSIC_SRC_LINE_IN, + ] + or favorite.reference.item_class == "object.item.audioItem.audioBook" + ): + soco.play_uri( + uri, + title=favorite.title, + meta=favorite.resource_meta_data, + timeout=LONG_SERVICE_TIMEOUT, + ) else: soco.clear_queue() soco.add_to_queue(favorite.reference, timeout=LONG_SERVICE_TIMEOUT) diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 07d2e2db4e0..433bb3cc36a 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -8,7 +8,8 @@ "abort": { "not_sonos_device": "Discovered device is not a Sonos device", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", - "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "not_ipv4_address": "No IPv4 address in SSDP discovery information" } }, "issues": { diff --git a/homeassistant/components/spotify/config_flow.py b/homeassistant/components/spotify/config_flow.py index d99fa7793df..3478887d64c 100644 --- a/homeassistant/components/spotify/config_flow.py +++ b/homeassistant/components/spotify/config_flow.py @@ -41,7 +41,8 @@ class SpotifyFlowHandler( try: 
current_user = await spotify.get_current_user() - except Exception: # noqa: BLE001 + except Exception: + self.logger.exception("Error while connecting to Spotify") return self.async_abort(reason="connection_error") name = current_user.display_name diff --git a/homeassistant/components/spotify/strings.json b/homeassistant/components/spotify/strings.json index 90e573a1706..66d837c503f 100644 --- a/homeassistant/components/spotify/strings.json +++ b/homeassistant/components/spotify/strings.json @@ -13,7 +13,7 @@ "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", "missing_configuration": "The Spotify integration is not configured. Please follow the documentation.", "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", - "reauth_account_mismatch": "The Spotify account authenticated with, does not match the account needed re-authentication.", + "reauth_account_mismatch": "The Spotify account authenticated with does not match the account that needed re-authentication.", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", diff --git a/homeassistant/components/sql/manifest.json b/homeassistant/components/sql/manifest.json index 2b00a5b0d65..37b5dc2b647 100644 --- a/homeassistant/components/sql/manifest.json +++ b/homeassistant/components/sql/manifest.json @@ -6,5 +6,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sql", "iot_class": "local_polling", - "requirements": ["SQLAlchemy==2.0.38", "sqlparse==0.5.0"] + "requirements": ["SQLAlchemy==2.0.39", "sqlparse==0.5.0"] } diff --git a/homeassistant/components/squeezebox/__init__.py b/homeassistant/components/squeezebox/__init__.py index fd641d3389d..78a97e38833 100644 --- a/homeassistant/components/squeezebox/__init__.py +++ b/homeassistant/components/squeezebox/__init__.py @@ -53,6 +53,7 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [ Platform.BINARY_SENSOR, + Platform.BUTTON, Platform.MEDIA_PLAYER, Platform.SENSOR, ] diff --git a/homeassistant/components/squeezebox/browse_media.py b/homeassistant/components/squeezebox/browse_media.py index 6bc1d2380cf..633f004993f 100644 --- a/homeassistant/components/squeezebox/browse_media.py +++ b/homeassistant/components/squeezebox/browse_media.py @@ -62,7 +62,7 @@ SQUEEZEBOX_ID_BY_TYPE: dict[str | MediaType, str] = { MediaType.APPS: "item_id", } -CONTENT_TYPE_MEDIA_CLASS: dict[str | MediaType, dict[str, MediaClass | None]] = { +CONTENT_TYPE_MEDIA_CLASS: dict[str | MediaType, dict[str, MediaClass | str]] = { "Favorites": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK}, "Apps": {"item": MediaClass.DIRECTORY, "children": MediaClass.APP}, "Radios": {"item": MediaClass.DIRECTORY, "children": MediaClass.APP}, @@ -76,7 +76,7 @@ CONTENT_TYPE_MEDIA_CLASS: dict[str | MediaType, dict[str, MediaClass | None]] = "Album Artists": {"item": MediaClass.DIRECTORY, "children": MediaClass.ARTIST}, MediaType.ALBUM: {"item": MediaClass.ALBUM, "children": MediaClass.TRACK}, MediaType.ARTIST: {"item": MediaClass.ARTIST, "children": MediaClass.ALBUM}, - MediaType.TRACK: {"item": MediaClass.TRACK, "children": None}, + MediaType.TRACK: {"item": MediaClass.TRACK, "children": ""}, MediaType.GENRE: {"item": MediaClass.GENRE, "children": MediaClass.ARTIST}, MediaType.PLAYLIST: {"item": MediaClass.PLAYLIST, "children": MediaClass.TRACK}, MediaType.APP: 
{"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK}, @@ -115,7 +115,7 @@ class BrowseData: str | MediaType, str | MediaType | None, ] = field(default_factory=dict) - content_type_media_class: dict[str | MediaType, dict[str, MediaClass | None]] = ( + content_type_media_class: dict[str | MediaType, dict[str, MediaClass | str]] = ( field(default_factory=dict) ) squeezebox_id_by_type: dict[str | MediaType, str] = field(default_factory=dict) @@ -130,16 +130,6 @@ class BrowseData: self.media_type_to_squeezebox.update(MEDIA_TYPE_TO_SQUEEZEBOX) -@dataclass -class BrowseItemResponse: - """Class for response data for browse item functions.""" - - child_item_type: str | MediaType - child_media_class: dict[str, MediaClass | None] - can_expand: bool - can_play: bool - - def _add_new_command_to_browse_data( browse_data: BrowseData, cmd: str | MediaType, type: str ) -> None: @@ -154,13 +144,14 @@ def _add_new_command_to_browse_data( def _build_response_apps_radios_category( - browse_data: BrowseData, - cmd: str | MediaType, -) -> BrowseItemResponse: + browse_data: BrowseData, cmd: str | MediaType, item: dict[str, Any] +) -> BrowseMedia: """Build item for App or radio category.""" - return BrowseItemResponse( - child_item_type=cmd, - child_media_class=browse_data.content_type_media_class[cmd], + return BrowseMedia( + media_content_id=item.get("id", ""), + title=item["title"], + media_content_type=cmd, + media_class=browse_data.content_type_media_class[cmd]["item"], can_expand=True, can_play=False, ) @@ -168,17 +159,74 @@ def _build_response_apps_radios_category( def _build_response_known_app( browse_data: BrowseData, search_type: str, item: dict[str, Any] -) -> BrowseItemResponse: +) -> BrowseMedia: """Build item for app or radio.""" - return BrowseItemResponse( - child_item_type=search_type, - child_media_class=browse_data.content_type_media_class[search_type], + return BrowseMedia( + media_content_id=item.get("id", ""), + title=item["title"], + media_content_type=search_type, + media_class=browse_data.content_type_media_class[search_type]["item"], can_play=bool(item["isaudio"] and item.get("url")), can_expand=item["hasitems"], ) +def _build_response_favorites(item: dict[str, Any]) -> BrowseMedia: + """Build item for Favorites.""" + if "album_id" in item: + return BrowseMedia( + media_content_id=str(item["album_id"]), + title=item["title"], + media_content_type=MediaType.ALBUM, + media_class=CONTENT_TYPE_MEDIA_CLASS[MediaType.ALBUM]["item"], + can_expand=True, + can_play=True, + ) + if item["hasitems"] and not item["isaudio"]: + return BrowseMedia( + media_content_id=item.get("id", ""), + title=item["title"], + media_content_type="Favorites", + media_class=CONTENT_TYPE_MEDIA_CLASS["Favorites"]["item"], + can_expand=True, + can_play=False, + ) + return BrowseMedia( + media_content_id=item.get("id", ""), + title=item["title"], + media_content_type="Favorites", + media_class=CONTENT_TYPE_MEDIA_CLASS[MediaType.TRACK]["item"], + can_expand=item["hasitems"], + can_play=bool(item["isaudio"] and item.get("url")), + ) + + +def _get_item_thumbnail( + item: dict[str, Any], + player: Player, + entity: MediaPlayerEntity, + item_type: str | MediaType | None, + search_type: str, + internal_request: bool, +) -> str | None: + """Construct path to thumbnail image.""" + item_thumbnail: str | None = None + if artwork_track_id := item.get("artwork_track_id"): + if internal_request: + item_thumbnail = player.generate_image_url_from_track_id(artwork_track_id) + elif item_type is not None: + item_thumbnail = 
entity.get_browse_image_url( + item_type, item.get("id", ""), artwork_track_id + ) + + elif search_type in ["Apps", "Radios"]: + item_thumbnail = player.generate_image_url(item["icon"]) + if item_thumbnail is None: + item_thumbnail = item.get("image_url") # will not be proxied by HA + return item_thumbnail + + async def build_item_response( entity: MediaPlayerEntity, player: Player, @@ -214,36 +262,11 @@ async def build_item_response( item_type = browse_data.content_type_to_child_type[search_type] children = [] - list_playable = [] for item in result["items"]: - item_id = str(item.get("id", "")) - item_thumbnail: str | None = None - - if item_type: - child_item_type: MediaType | str = item_type - child_media_class = CONTENT_TYPE_MEDIA_CLASS[item_type] - can_expand = child_media_class["children"] is not None - can_play = True - if search_type == "Favorites": - if "album_id" in item: - item_id = str(item["album_id"]) - child_item_type = MediaType.ALBUM - child_media_class = CONTENT_TYPE_MEDIA_CLASS[MediaType.ALBUM] - can_expand = True - can_play = True - elif item["hasitems"] and not item["isaudio"]: - child_item_type = "Favorites" - child_media_class = CONTENT_TYPE_MEDIA_CLASS["Favorites"] - can_expand = True - can_play = False - else: - child_item_type = "Favorites" - child_media_class = CONTENT_TYPE_MEDIA_CLASS[MediaType.TRACK] - can_expand = item["hasitems"] - can_play = item["isaudio"] and item.get("url") + child_media = _build_response_favorites(item) - if search_type in ["Apps", "Radios"]: + elif search_type in ["Apps", "Radios"]: # item["cmd"] contains the name of the command to use with the cli for the app # add the command to the dictionaries if item["title"] == "Search" or item.get("type") in UNPLAYABLE_TYPES: @@ -253,19 +276,12 @@ async def build_item_response( if app_cmd not in browse_data.known_apps_radios: browse_data.known_apps_radios.add(app_cmd) + _add_new_command_to_browse_data(browse_data, app_cmd, "item_id") - _add_new_command_to_browse_data(browse_data, app_cmd, "item_id") - - browse_item_response = _build_response_apps_radios_category( - browse_data, app_cmd + child_media = _build_response_apps_radios_category( + browse_data=browse_data, cmd=app_cmd, item=item ) - # Temporary variables until remainder of browse calls are restructured - child_item_type = browse_item_response.child_item_type - child_media_class = browse_item_response.child_media_class - can_expand = browse_item_response.can_expand - can_play = browse_item_response.can_play - elif search_type in browse_data.known_apps_radios: if ( item.get("title") in ["Search", None] @@ -274,43 +290,31 @@ async def build_item_response( # Skip searches in apps as they'd need UI continue - browse_item_response = _build_response_known_app( - browse_data, search_type, item - ) + child_media = _build_response_known_app(browse_data, search_type, item) - # Temporary variables until remainder of browse calls are restructured - child_item_type = browse_item_response.child_item_type - child_media_class = browse_item_response.child_media_class - can_expand = browse_item_response.can_expand - can_play = browse_item_response.can_play - - if artwork_track_id := item.get("artwork_track_id"): - if internal_request: - item_thumbnail = player.generate_image_url_from_track_id( - artwork_track_id - ) - elif item_type is not None: - item_thumbnail = entity.get_browse_image_url( - item_type, item_id, artwork_track_id - ) - elif search_type in ["Apps", "Radios"]: - item_thumbnail = player.generate_image_url(item["icon"]) - else: - 
item_thumbnail = item.get("image_url") # will not be proxied by HA - - assert child_media_class["item"] is not None - children.append( - BrowseMedia( + elif item_type: + child_media = BrowseMedia( + media_content_id=str(item.get("id", "")), title=item["title"], - media_class=child_media_class["item"], - media_content_id=item_id, - media_content_type=child_item_type, - can_play=can_play, - can_expand=can_expand, - thumbnail=item_thumbnail, + media_content_type=item_type, + media_class=CONTENT_TYPE_MEDIA_CLASS[item_type]["item"], + can_expand=CONTENT_TYPE_MEDIA_CLASS[item_type]["children"] + is not None, + can_play=True, ) + + assert child_media.media_class is not None + + child_media.thumbnail = _get_item_thumbnail( + item=item, + player=player, + entity=entity, + item_type=item_type, + search_type=search_type, + internal_request=internal_request, ) - list_playable.append(can_play) + + children.append(child_media) if children is None: raise BrowseError(f"Media not found: {search_type} / {search_id}") @@ -325,7 +329,7 @@ async def build_item_response( children_media_class=media_class["children"], media_content_id=search_id, media_content_type=search_type, - can_play=any(list_playable), + can_play=any(child.can_play for child in children), children=children, can_expand=True, ) diff --git a/homeassistant/components/squeezebox/button.py b/homeassistant/components/squeezebox/button.py new file mode 100644 index 00000000000..098df3a1b5c --- /dev/null +++ b/homeassistant/components/squeezebox/button.py @@ -0,0 +1,155 @@ +"""Platform for button integration for squeezebox.""" + +from __future__ import annotations + +from dataclasses import dataclass +import logging + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . import SqueezeboxConfigEntry +from .const import SIGNAL_PLAYER_DISCOVERED +from .coordinator import SqueezeBoxPlayerUpdateCoordinator +from .entity import SqueezeboxEntity + +_LOGGER = logging.getLogger(__name__) + +HARDWARE_MODELS_WITH_SCREEN = [ + "Squeezebox Boom", + "Squeezebox Radio", + "Transporter", + "Squeezebox Touch", + "Squeezebox", + "SliMP3", + "Squeezebox 1", + "Squeezebox 2", + "Squeezebox 3", +] + +HARDWARE_MODELS_WITH_TONE = [ + *HARDWARE_MODELS_WITH_SCREEN, + "Squeezebox Receiver", +] + + +@dataclass(frozen=True, kw_only=True) +class SqueezeboxButtonEntityDescription(ButtonEntityDescription): + """Squeezebox Button description.""" + + press_action: str + + +BUTTON_ENTITIES: tuple[SqueezeboxButtonEntityDescription, ...] = tuple( + SqueezeboxButtonEntityDescription( + key=f"preset_{i}", + translation_key="preset", + translation_placeholders={"index": str(i)}, + press_action=f"preset_{i}.single", + ) + for i in range(1, 7) +) + +SCREEN_BUTTON_ENTITIES: tuple[SqueezeboxButtonEntityDescription, ...] = ( + SqueezeboxButtonEntityDescription( + key="brightness_up", + translation_key="brightness_up", + press_action="brightness_up", + ), + SqueezeboxButtonEntityDescription( + key="brightness_down", + translation_key="brightness_down", + press_action="brightness_down", + ), +) + +TONE_BUTTON_ENTITIES: tuple[SqueezeboxButtonEntityDescription, ...] 
= ( + SqueezeboxButtonEntityDescription( + key="bass_up", + translation_key="bass_up", + press_action="bass_up", + ), + SqueezeboxButtonEntityDescription( + key="bass_down", + translation_key="bass_down", + press_action="bass_down", + ), + SqueezeboxButtonEntityDescription( + key="treble_up", + translation_key="treble_up", + press_action="treble_up", + ), + SqueezeboxButtonEntityDescription( + key="treble_down", + translation_key="treble_down", + press_action="treble_down", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SqueezeboxConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the Squeezebox button platform from a server config entry.""" + + # Add button entities when player discovered + async def _player_discovered( + player_coordinator: SqueezeBoxPlayerUpdateCoordinator, + ) -> None: + _LOGGER.debug( + "Setting up button entity for player %s, model %s", + player_coordinator.player.name, + player_coordinator.player.model, + ) + + entities: list[SqueezeboxButtonEntity] = [] + + entities.extend( + SqueezeboxButtonEntity(player_coordinator, description) + for description in BUTTON_ENTITIES + ) + + entities.extend( + SqueezeboxButtonEntity(player_coordinator, description) + for description in TONE_BUTTON_ENTITIES + if player_coordinator.player.model in HARDWARE_MODELS_WITH_TONE + ) + + entities.extend( + SqueezeboxButtonEntity(player_coordinator, description) + for description in SCREEN_BUTTON_ENTITIES + if player_coordinator.player.model in HARDWARE_MODELS_WITH_SCREEN + ) + + async_add_entities(entities) + + entry.async_on_unload( + async_dispatcher_connect(hass, SIGNAL_PLAYER_DISCOVERED, _player_discovered) + ) + + +class SqueezeboxButtonEntity(SqueezeboxEntity, ButtonEntity): + """Representation of Buttons for Squeezebox entities.""" + + entity_description: SqueezeboxButtonEntityDescription + + def __init__( + self, + coordinator: SqueezeBoxPlayerUpdateCoordinator, + entity_description: SqueezeboxButtonEntityDescription, + ) -> None: + """Initialize the SqueezeBox Button.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{format_mac(self._player.player_id)}_{entity_description.key}" + ) + + async def async_press(self) -> None: + """Execute the button action.""" + await self._player.async_query("button", self.entity_description.press_action) diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py index 2853ad14217..31dd5b003b7 100644 --- a/homeassistant/components/squeezebox/config_flow.py +++ b/homeassistant/components/squeezebox/config_flow.py @@ -151,7 +151,8 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): if server.http_status == HTTPStatus.UNAUTHORIZED: return "invalid_auth" return "cannot_connect" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unknown exception while validating connection") return "unknown" if "uuid" in status: diff --git a/homeassistant/components/squeezebox/entity.py b/homeassistant/components/squeezebox/entity.py index 027ca68edc6..2c443c24ffd 100644 --- a/homeassistant/components/squeezebox/entity.py +++ b/homeassistant/components/squeezebox/entity.py @@ -1,11 +1,37 @@ """Base class for Squeezebox Sensor entities.""" -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import ( + CONNECTION_NETWORK_MAC, + DeviceInfo, + format_mac, +) from homeassistant.helpers.entity import 
EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, STATUS_QUERY_UUID -from .coordinator import LMSStatusDataUpdateCoordinator +from .coordinator import ( + LMSStatusDataUpdateCoordinator, + SqueezeBoxPlayerUpdateCoordinator, +) + + +class SqueezeboxEntity(CoordinatorEntity[SqueezeBoxPlayerUpdateCoordinator]): + """Base entity class for Squeezebox entities.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SqueezeBoxPlayerUpdateCoordinator) -> None: + """Initialize the SqueezeBox entity.""" + super().__init__(coordinator) + self._player = coordinator.player + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, format_mac(self._player.player_id))}, + name=self._player.name, + connections={(CONNECTION_NETWORK_MAC, format_mac(self._player.player_id))}, + via_device=(DOMAIN, coordinator.server_uuid), + model=self._player.model, + manufacturer=self._player.creator, + ) class LMSStatusEntity(CoordinatorEntity[LMSStatusDataUpdateCoordinator]): diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 1767d92730a..40662477745 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -35,15 +35,10 @@ from homeassistant.helpers import ( entity_platform, entity_registry as er, ) -from homeassistant.helpers.device_registry import ( - CONNECTION_NETWORK_MAC, - DeviceInfo, - format_mac, -) +from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.start import async_at_start -from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util.dt import utcnow from .browse_media import ( @@ -68,6 +63,7 @@ from .const import ( SQUEEZEBOX_SOURCE_STRINGS, ) from .coordinator import SqueezeBoxPlayerUpdateCoordinator +from .entity import SqueezeboxEntity if TYPE_CHECKING: from . import SqueezeboxConfigEntry @@ -181,9 +177,7 @@ def get_announce_timeout(extra: dict) -> int | None: return announce_timeout -class SqueezeBoxMediaPlayerEntity( - CoordinatorEntity[SqueezeBoxPlayerUpdateCoordinator], MediaPlayerEntity -): +class SqueezeBoxMediaPlayerEntity(SqueezeboxEntity, MediaPlayerEntity): """Representation of the media player features of a SqueezeBox device. Wraps a pysqueezebox.Player() object. 
@@ -217,30 +211,10 @@ class SqueezeBoxMediaPlayerEntity( def __init__(self, coordinator: SqueezeBoxPlayerUpdateCoordinator) -> None: """Initialize the SqueezeBox device.""" super().__init__(coordinator) - player = coordinator.player - self._player = player self._query_result: bool | dict = {} self._remove_dispatcher: Callable | None = None self._previous_media_position = 0 - self._attr_unique_id = format_mac(player.player_id) - _manufacturer = None - if player.model.startswith("SqueezeLite") or "SqueezePlay" in player.model: - _manufacturer = "Ralph Irving" - elif ( - "Squeezebox" in player.model - or "Transporter" in player.model - or "Slim" in player.model - ): - _manufacturer = "Logitech" - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._attr_unique_id)}, - name=player.name, - connections={(CONNECTION_NETWORK_MAC, self._attr_unique_id)}, - via_device=(DOMAIN, coordinator.server_uuid), - model=player.model, - manufacturer=_manufacturer, - ) + self._attr_unique_id = format_mac(self._player.player_id) self._browse_data = BrowseData() @callback diff --git a/homeassistant/components/squeezebox/strings.json b/homeassistant/components/squeezebox/strings.json index ed569989b56..83c5d7dd5d0 100644 --- a/homeassistant/components/squeezebox/strings.json +++ b/homeassistant/components/squeezebox/strings.json @@ -63,6 +63,29 @@ } }, "entity": { + "button": { + "preset": { + "name": "Preset {index}" + }, + "brightness_up": { + "name": "Brightness up" + }, + "brightness_down": { + "name": "Brightness down" + }, + "bass_up": { + "name": "Bass up" + }, + "bass_down": { + "name": "Bass down" + }, + "treble_up": { + "name": "Treble up" + }, + "treble_down": { + "name": "Treble down" + } + }, "binary_sensor": { "rescan": { "name": "Library rescan" diff --git a/homeassistant/components/srp_energy/strings.json b/homeassistant/components/srp_energy/strings.json index eca4f465435..5fa97b00b57 100644 --- a/homeassistant/components/srp_energy/strings.json +++ b/homeassistant/components/srp_energy/strings.json @@ -3,10 +3,10 @@ "step": { "user": { "data": { - "id": "Account Id", + "id": "Account ID", "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "is_tou": "Is Time of Use Plan" + "is_tou": "Is Time-of-Use Price Plan" } } }, diff --git a/homeassistant/components/ssdp/manifest.json b/homeassistant/components/ssdp/manifest.json index 6e1fba8c3a3..93943b0a9ea 100644 --- a/homeassistant/components/ssdp/manifest.json +++ b/homeassistant/components/ssdp/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["async_upnp_client"], "quality_scale": "internal", - "requirements": ["async-upnp-client==0.43.0"] + "requirements": ["async-upnp-client==0.44.0"] } diff --git a/homeassistant/components/stookwijzer/diagnostics.py b/homeassistant/components/stookwijzer/diagnostics.py index 2849e0e976a..1f3ef4ee4ba 100644 --- a/homeassistant/components/stookwijzer/diagnostics.py +++ b/homeassistant/components/stookwijzer/diagnostics.py @@ -18,4 +18,5 @@ async def async_get_config_entry_diagnostics( "advice": client.advice, "air_quality_index": client.lki, "windspeed_ms": client.windspeed_ms, + "forecast": await client.async_get_forecast(), } diff --git a/homeassistant/components/stookwijzer/strings.json b/homeassistant/components/stookwijzer/strings.json index d7304fa1238..a028f1f19c5 100644 --- a/homeassistant/components/stookwijzer/strings.json +++ b/homeassistant/components/stookwijzer/strings.json @@ -29,7 +29,7 @@ }, 
"issues": { "location_migration_failed": { - "description": "The Stookwijzer integration was unable to automatically migrate your location to a new format the updated integrations uses.\n\nMake sure you are connected to the internet and restart Home Assistant to try again.\n\nIf this doesn't resolve the error, remove and re-add the integration.", + "description": "The Stookwijzer integration was unable to automatically migrate your location to a new format the updated integration uses.\n\nMake sure you are connected to the Internet and restart Home Assistant to try again.\n\nIf this doesn't resolve the error, remove and re-add the integration.", "title": "Migration of your location failed" } }, diff --git a/homeassistant/components/subaru/strings.json b/homeassistant/components/subaru/strings.json index 00da729dccd..7525e73f802 100644 --- a/homeassistant/components/subaru/strings.json +++ b/homeassistant/components/subaru/strings.json @@ -2,7 +2,7 @@ "config": { "step": { "user": { - "title": "Subaru Starlink Configuration", + "title": "Subaru Starlink configuration", "description": "Please enter your MySubaru credentials\nNOTE: Initial setup may take up to 30 seconds", "data": { "username": "[%key:common::config_flow::data::username%]", @@ -49,7 +49,7 @@ "options": { "step": { "init": { - "title": "Subaru Starlink Options", + "title": "Subaru Starlink options", "description": "When enabled, vehicle polling will send a remote command to your vehicle every 2 hours to obtain new sensor data. Without vehicle polling, new sensor data is only received when the vehicle automatically pushes data (normally after engine shutdown).", "data": { "update_enabled": "Enable vehicle polling" @@ -106,7 +106,7 @@ "fields": { "door": { "name": "Door", - "description": "One of the following: 'all', 'driver', 'tailgate'." + "description": "Which door(s) to open." } } } diff --git a/homeassistant/components/suez_water/strings.json b/homeassistant/components/suez_water/strings.json index be2d4849e76..a8632fcb24a 100644 --- a/homeassistant/components/suez_water/strings.json +++ b/homeassistant/components/suez_water/strings.json @@ -5,21 +5,21 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", - "counter_id": "Meter id" + "counter_id": "Meter ID" }, "data_description": { "username": "Enter your login associated with your {tout_sur_mon_eau} account", "password": "Enter your password associated with your {tout_sur_mon_eau} account", - "counter_id": "Enter your meter id (ex: 12345678). Should be found automatically during setup, if not see integration documentation for more information" + "counter_id": "Enter your meter ID (ex: 12345678). 
It should be found automatically during setup; if not, see the integration documentation for more information" }, - "description": "Connect your suez water {tout_sur_mon_eau} account to retrieve your water consumption" + "description": "Connect your Suez Water {tout_sur_mon_eau} account to retrieve your water consumption" } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]", - "counter_not_found": "Could not find meter id automatically" + "counter_not_found": "Could not find meter ID automatically" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" diff --git a/homeassistant/components/sunweg/__init__.py b/homeassistant/components/sunweg/__init__.py index 86da0a247b1..0dfed0e6bb3 100644 --- a/homeassistant/components/sunweg/__init__.py +++ b/homeassistant/components/sunweg/__init__.py @@ -1,197 +1,39 @@ """The Sun WEG inverter sensor integration.""" -import datetime -import json -import logging - -from sunweg.api import APIHelper -from sunweg.plant import Plant - -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers.typing import StateType, UndefinedType -from homeassistant.util import Throttle +from homeassistant.helpers import issue_registry as ir -from .const import CONF_PLANT_ID, DOMAIN, PLATFORMS, DeviceType -SCAN_INTERVAL = datetime.timedelta(minutes=5) -_LOGGER = logging.getLogger(__name__) +DOMAIN = "sunweg" -async def async_setup_entry( - hass: HomeAssistant, entry: config_entries.ConfigEntry -) -> bool: +async def async_setup_entry(hass: HomeAssistant, _: ConfigEntry) -> bool: """Load the saved entities.""" - api = APIHelper(entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD]) - if not await hass.async_add_executor_job(api.authenticate): - raise ConfigEntryAuthFailed("Username or Password may be incorrect!") - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = SunWEGData( - api, entry.data[CONF_PLANT_ID] + ir.async_create_issue( + hass, + DOMAIN, + DOMAIN, + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="integration_removed", + translation_placeholders={ + "issue": "https://github.com/rokam/sunweg/issues/13", + "entries": "/config/integrations/integration/sunweg", + }, ) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - hass.data[DOMAIN].pop(entry.entry_id) - if len(hass.data[DOMAIN]) == 0: - hass.data.pop(DOMAIN) - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + return True -class SunWEGData: - """The class for handling data retrieval.""" - - def __init__( - self, - api: APIHelper, - plant_id: int, - ) -> None: - """Initialize the probe.""" - - self.api = api - self.plant_id = plant_id - self.data: Plant = None - self.previous_values: dict = {} - - @Throttle(SCAN_INTERVAL) - def update(self) -> None: - """Update probe data.""" - _LOGGER.debug("Updating data for plant %s", self.plant_id) - try: - self.data = self.api.plant(self.plant_id) - for inverter in self.data.inverters: - self.api.complete_inverter(inverter) - except
json.decoder.JSONDecodeError: - _LOGGER.error("Unable to fetch data from SunWEG server") - _LOGGER.debug("Finished updating data for plant %s", self.plant_id) - - def get_api_value( - self, - variable: str, - device_type: DeviceType, - inverter_id: int = 0, - deep_name: str | None = None, - ): - """Retrieve from a Plant the desired variable value.""" - if device_type == DeviceType.TOTAL: - return self.data.__dict__.get(variable) - - inverter_list = [i for i in self.data.inverters if i.id == inverter_id] - if len(inverter_list) == 0: - return None - inverter = inverter_list[0] - - if device_type == DeviceType.INVERTER: - return inverter.__dict__.get(variable) - if device_type == DeviceType.PHASE: - for phase in inverter.phases: - if phase.name == deep_name: - return phase.__dict__.get(variable) - elif device_type == DeviceType.STRING: - for mppt in inverter.mppts: - for string in mppt.strings: - if string.name == deep_name: - return string.__dict__.get(variable) - return None - - def get_data( - self, - *, - api_variable_key: str, - api_variable_unit: str | None, - deep_name: str | None, - device_type: DeviceType, - inverter_id: int, - name: str | UndefinedType | None, - native_unit_of_measurement: str | None, - never_resets: bool, - previous_value_drop_threshold: float | None, - ) -> tuple[StateType | datetime.datetime, str | None]: - """Get the data.""" - _LOGGER.debug( - "Data request for: %s", - name, - ) - variable = api_variable_key - previous_unit = native_unit_of_measurement - api_value = self.get_api_value(variable, device_type, inverter_id, deep_name) - previous_value = self.previous_values.get(variable) - return_value = api_value - if api_variable_unit is not None: - native_unit_of_measurement = self.get_api_value( - api_variable_unit, - device_type, - inverter_id, - deep_name, - ) - - # If we have a 'drop threshold' specified, then check it and correct if needed - if ( - previous_value_drop_threshold is not None - and previous_value is not None - and api_value is not None - and previous_unit == native_unit_of_measurement - ): - _LOGGER.debug( - ( - "%s - Drop threshold specified (%s), checking for drop... API" - " Value: %s, Previous Value: %s" - ), - name, - previous_value_drop_threshold, - api_value, - previous_value, - ) - diff = float(api_value) - float(previous_value) - - # Check if the value has dropped (negative value i.e. < 0) and it has only - # dropped by a small amount, if so, use the previous value. - # Note - The energy dashboard takes care of drops within 10% - # of the current value, however if the value is low e.g. 0.2 - # and drops by 0.1 it classes as a reset. 
- if -(previous_value_drop_threshold) <= diff < 0: - _LOGGER.debug( - ( - "Diff is negative, but only by a small amount therefore not a" - " nightly reset, using previous value (%s) instead of api value" - " (%s)" - ), - previous_value, - api_value, - ) - return_value = previous_value - else: - _LOGGER.debug("%s - No drop detected, using API value", name) - - # Lifetime total values should always be increasing, they will never reset, - # however the API sometimes returns 0 values when the clock turns to 00:00 - # local time in that scenario we should just return the previous value - # Scenarios: - # 1 - System has a genuine 0 value when it it first commissioned: - # - will return 0 until a non-zero value is registered - # 2 - System has been running fine but temporarily resets to 0 briefly - # at midnight: - # - will return the previous value - # 3 - HA is restarted during the midnight 'outage' - Not handled: - # - Previous value will not exist meaning 0 will be returned - # - This is an edge case that would be better handled by looking - # up the previous value of the entity from the recorder - if never_resets and api_value == 0 and previous_value: - _LOGGER.debug( - ( - "API value is 0, but this value should never reset, returning" - " previous value (%s) instead" - ), - previous_value, - ) - return_value = previous_value - - self.previous_values[variable] = return_value - - return (return_value, native_unit_of_measurement) +async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Remove a config entry.""" + if not hass.config_entries.async_loaded_entries(DOMAIN): + ir.async_delete_issue(hass, DOMAIN, DOMAIN) + # Remove any remaining disabled or ignored entries + for _entry in hass.config_entries.async_entries(DOMAIN): + hass.async_create_task(hass.config_entries.async_remove(_entry.entry_id)) diff --git a/homeassistant/components/sunweg/config_flow.py b/homeassistant/components/sunweg/config_flow.py index 24df8c02f55..42535a9ef58 100644 --- a/homeassistant/components/sunweg/config_flow.py +++ b/homeassistant/components/sunweg/config_flow.py @@ -1,129 +1,11 @@ """Config flow for Sun WEG integration.""" -from collections.abc import Mapping -from typing import Any +from homeassistant.config_entries import ConfigFlow -from sunweg.api import APIHelper, SunWegApiError -import voluptuous as vol - -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import callback - -from .const import CONF_PLANT_ID, DOMAIN +from . 
import DOMAIN class SunWEGConfigFlow(ConfigFlow, domain=DOMAIN): """Config flow class.""" VERSION = 1 - - def __init__(self) -> None: - """Initialise sun weg server flow.""" - self.api: APIHelper = None - self.data: dict[str, Any] = {} - - @callback - def _async_show_user_form(self, step_id: str, errors=None) -> ConfigFlowResult: - """Show the form to the user.""" - default_username = "" - if CONF_USERNAME in self.data: - default_username = self.data[CONF_USERNAME] - data_schema = vol.Schema( - { - vol.Required(CONF_USERNAME, default=default_username): str, - vol.Required(CONF_PASSWORD): str, - } - ) - - return self.async_show_form( - step_id=step_id, data_schema=data_schema, errors=errors - ) - - def _set_auth_data( - self, step: str, username: str, password: str - ) -> ConfigFlowResult | None: - """Set username and password.""" - if self.api: - # Set username and password - self.api.username = username - self.api.password = password - else: - # Initialise the library with the username & password - self.api = APIHelper(username, password) - - try: - if not self.api.authenticate(): - return self._async_show_user_form(step, {"base": "invalid_auth"}) - except SunWegApiError: - return self._async_show_user_form(step, {"base": "timeout_connect"}) - - return None - - async def async_step_user(self, user_input=None) -> ConfigFlowResult: - """Handle the start of the config flow.""" - if not user_input: - return self._async_show_user_form("user") - - # Store authentication info - self.data = user_input - - conf_result = await self.hass.async_add_executor_job( - self._set_auth_data, - "user", - user_input[CONF_USERNAME], - user_input[CONF_PASSWORD], - ) - - return await self.async_step_plant() if conf_result is None else conf_result - - async def async_step_plant(self, user_input=None) -> ConfigFlowResult: - """Handle adding a "plant" to Home Assistant.""" - plant_list = await self.hass.async_add_executor_job(self.api.listPlants) - - if len(plant_list) == 0: - return self.async_abort(reason="no_plants") - - plants = {plant.id: plant.name for plant in plant_list} - - if user_input is None and len(plant_list) > 1: - data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)}) - - return self.async_show_form(step_id="plant", data_schema=data_schema) - - if user_input is None and len(plant_list) == 1: - user_input = {CONF_PLANT_ID: plant_list[0].id} - - user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]] - await self.async_set_unique_id(user_input[CONF_PLANT_ID]) - self._abort_if_unique_id_configured() - self.data.update(user_input) - return self.async_create_entry(title=self.data[CONF_NAME], data=self.data) - - async def async_step_reauth( - self, entry_data: Mapping[str, Any] - ) -> ConfigFlowResult: - """Handle reauthorization request from SunWEG.""" - self.data.update(entry_data) - return await self.async_step_reauth_confirm() - - async def async_step_reauth_confirm( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle reauthorization flow.""" - if user_input is None: - return self._async_show_user_form("reauth_confirm") - - self.data.update(user_input) - conf_result = await self.hass.async_add_executor_job( - self._set_auth_data, - "reauth_confirm", - user_input[CONF_USERNAME], - user_input[CONF_PASSWORD], - ) - if conf_result is not None: - return conf_result - - return self.async_update_reload_and_abort( - self._get_reauth_entry(), data=self.data - ) diff --git a/homeassistant/components/sunweg/const.py b/homeassistant/components/sunweg/const.py 
deleted file mode 100644 index 11d24352962..00000000000 --- a/homeassistant/components/sunweg/const.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Define constants for the Sun WEG component.""" - -from enum import Enum - -from homeassistant.const import Platform - - -class DeviceType(Enum): - """Device Type Enum.""" - - TOTAL = 1 - INVERTER = 2 - PHASE = 3 - STRING = 4 - - -CONF_PLANT_ID = "plant_id" - -DEFAULT_PLANT_ID = 0 - -DEFAULT_NAME = "Sun WEG" - -DOMAIN = "sunweg" - -PLATFORMS = [Platform.SENSOR] diff --git a/homeassistant/components/sunweg/manifest.json b/homeassistant/components/sunweg/manifest.json index 3ebe9ef8cb4..3e5c669f37f 100644 --- a/homeassistant/components/sunweg/manifest.json +++ b/homeassistant/components/sunweg/manifest.json @@ -1,10 +1,10 @@ { "domain": "sunweg", "name": "Sun WEG", - "codeowners": ["@rokam"], + "codeowners": [], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sunweg", "iot_class": "cloud_polling", - "loggers": ["sunweg"], - "requirements": ["sunweg==3.0.2"] + "loggers": [], + "requirements": [] } diff --git a/homeassistant/components/sunweg/sensor/__init__.py b/homeassistant/components/sunweg/sensor/__init__.py deleted file mode 100644 index f71d992bea9..00000000000 --- a/homeassistant/components/sunweg/sensor/__init__.py +++ /dev/null @@ -1,178 +0,0 @@ -"""Read status of SunWEG inverters.""" - -from __future__ import annotations - -import logging -from types import MappingProxyType -from typing import Any - -from sunweg.api import APIHelper -from sunweg.device import Inverter -from sunweg.plant import Plant - -from homeassistant.components.sensor import SensorEntity -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_NAME -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback - -from .. import SunWEGData -from ..const import CONF_PLANT_ID, DEFAULT_PLANT_ID, DOMAIN, DeviceType -from .inverter import INVERTER_SENSOR_TYPES -from .phase import PHASE_SENSOR_TYPES -from .sensor_entity_description import SunWEGSensorEntityDescription -from .string import STRING_SENSOR_TYPES -from .total import TOTAL_SENSOR_TYPES - -_LOGGER = logging.getLogger(__name__) - - -def get_device_list( - api: APIHelper, config: MappingProxyType[str, Any] -) -> tuple[list[Inverter], int]: - """Retrieve the device list for the selected plant.""" - plant_id = int(config[CONF_PLANT_ID]) - - if plant_id == DEFAULT_PLANT_ID: - plant_info: list[Plant] = api.listPlants() - plant_id = plant_info[0].id - - devices: list[Inverter] = [] - # Get a list of devices for specified plant to add sensors for. 
- for inverter in api.plant(plant_id).inverters: - api.complete_inverter(inverter) - devices.append(inverter) - return (devices, plant_id) - - -async def async_setup_entry( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddConfigEntryEntitiesCallback, -) -> None: - """Set up the SunWEG sensor.""" - name = config_entry.data[CONF_NAME] - - probe: SunWEGData = hass.data[DOMAIN][config_entry.entry_id] - - devices, plant_id = await hass.async_add_executor_job( - get_device_list, probe.api, config_entry.data - ) - - entities = [ - SunWEGInverter( - probe, - name=f"{name} Total", - unique_id=f"{plant_id}-{description.key}", - description=description, - device_type=DeviceType.TOTAL, - ) - for description in TOTAL_SENSOR_TYPES - ] - - # Add sensors for each device in the specified plant. - entities.extend( - [ - SunWEGInverter( - probe, - name=f"{device.name}", - unique_id=f"{device.sn}-{description.key}", - description=description, - device_type=DeviceType.INVERTER, - inverter_id=device.id, - ) - for device in devices - for description in INVERTER_SENSOR_TYPES - ] - ) - - entities.extend( - [ - SunWEGInverter( - probe, - name=f"{device.name} {phase.name}", - unique_id=f"{device.sn}-{phase.name}-{description.key}", - description=description, - inverter_id=device.id, - device_type=DeviceType.PHASE, - deep_name=phase.name, - ) - for device in devices - for phase in device.phases - for description in PHASE_SENSOR_TYPES - ] - ) - - entities.extend( - [ - SunWEGInverter( - probe, - name=f"{device.name} {string.name}", - unique_id=f"{device.sn}-{string.name}-{description.key}", - description=description, - inverter_id=device.id, - device_type=DeviceType.STRING, - deep_name=string.name, - ) - for device in devices - for mppt in device.mppts - for string in mppt.strings - for description in STRING_SENSOR_TYPES - ] - ) - - async_add_entities(entities, True) - - -class SunWEGInverter(SensorEntity): - """Representation of a SunWEG Sensor.""" - - entity_description: SunWEGSensorEntityDescription - - def __init__( - self, - probe: SunWEGData, - name: str, - unique_id: str, - description: SunWEGSensorEntityDescription, - device_type: DeviceType, - inverter_id: int = 0, - deep_name: str | None = None, - ) -> None: - """Initialize a sensor.""" - self.probe = probe - self.entity_description = description - self.device_type = device_type - self.inverter_id = inverter_id - self.deep_name = deep_name - - self._attr_name = f"{name} {description.name}" - self._attr_unique_id = unique_id - self._attr_icon = ( - description.icon if description.icon is not None else "mdi:solar-power" - ) - - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, str(probe.plant_id))}, - manufacturer="SunWEG", - name=name, - ) - - def update(self) -> None: - """Get the latest data from the Sun WEG API and updates the state.""" - self.probe.update() - ( - self._attr_native_value, - self._attr_native_unit_of_measurement, - ) = self.probe.get_data( - api_variable_key=self.entity_description.api_variable_key, - api_variable_unit=self.entity_description.api_variable_unit, - deep_name=self.deep_name, - device_type=self.device_type, - inverter_id=self.inverter_id, - name=self.entity_description.name, - native_unit_of_measurement=self.native_unit_of_measurement, - never_resets=self.entity_description.never_resets, - previous_value_drop_threshold=self.entity_description.previous_value_drop_threshold, - ) diff --git a/homeassistant/components/sunweg/sensor/inverter.py 
b/homeassistant/components/sunweg/sensor/inverter.py deleted file mode 100644 index 1010488b38a..00000000000 --- a/homeassistant/components/sunweg/sensor/inverter.py +++ /dev/null @@ -1,70 +0,0 @@ -"""SunWEG Sensor definitions for the Inverter type.""" - -from __future__ import annotations - -from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass -from homeassistant.const import ( - UnitOfEnergy, - UnitOfFrequency, - UnitOfPower, - UnitOfTemperature, -) - -from .sensor_entity_description import SunWEGSensorEntityDescription - -INVERTER_SENSOR_TYPES: tuple[SunWEGSensorEntityDescription, ...] = ( - SunWEGSensorEntityDescription( - key="inverter_energy_today", - name="Energy today", - api_variable_key="_today_energy", - api_variable_unit="_today_energy_metric", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - suggested_display_precision=1, - ), - SunWEGSensorEntityDescription( - key="inverter_energy_total", - name="Lifetime energy output", - api_variable_key="_total_energy", - api_variable_unit="_total_energy_metric", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - suggested_display_precision=1, - state_class=SensorStateClass.TOTAL, - never_resets=True, - ), - SunWEGSensorEntityDescription( - key="inverter_frequency", - name="AC frequency", - api_variable_key="_frequency", - native_unit_of_measurement=UnitOfFrequency.HERTZ, - device_class=SensorDeviceClass.FREQUENCY, - suggested_display_precision=1, - ), - SunWEGSensorEntityDescription( - key="inverter_current_wattage", - name="Output power", - api_variable_key="_power", - api_variable_unit="_power_metric", - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - suggested_display_precision=1, - ), - SunWEGSensorEntityDescription( - key="inverter_temperature", - name="Temperature", - api_variable_key="_temperature", - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - device_class=SensorDeviceClass.TEMPERATURE, - icon="mdi:temperature-celsius", - suggested_display_precision=1, - ), - SunWEGSensorEntityDescription( - key="inverter_power_factor", - name="Power Factor", - api_variable_key="_power_factor", - suggested_display_precision=1, - ), -) diff --git a/homeassistant/components/sunweg/sensor/phase.py b/homeassistant/components/sunweg/sensor/phase.py deleted file mode 100644 index d9db6c7c714..00000000000 --- a/homeassistant/components/sunweg/sensor/phase.py +++ /dev/null @@ -1,27 +0,0 @@ -"""SunWEG Sensor definitions for the Phase type.""" - -from __future__ import annotations - -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import UnitOfElectricCurrent, UnitOfElectricPotential - -from .sensor_entity_description import SunWEGSensorEntityDescription - -PHASE_SENSOR_TYPES: tuple[SunWEGSensorEntityDescription, ...] 
= ( - SunWEGSensorEntityDescription( - key="voltage", - name="Voltage", - api_variable_key="_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - suggested_display_precision=2, - ), - SunWEGSensorEntityDescription( - key="amperage", - name="Amperage", - api_variable_key="_amperage", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - suggested_display_precision=1, - ), -) diff --git a/homeassistant/components/sunweg/sensor/sensor_entity_description.py b/homeassistant/components/sunweg/sensor/sensor_entity_description.py deleted file mode 100644 index 8c792ab617f..00000000000 --- a/homeassistant/components/sunweg/sensor/sensor_entity_description.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Sensor Entity Description for the SunWEG integration.""" - -from __future__ import annotations - -from dataclasses import dataclass - -from homeassistant.components.sensor import SensorEntityDescription - - -@dataclass(frozen=True) -class SunWEGRequiredKeysMixin: - """Mixin for required keys.""" - - api_variable_key: str - - -@dataclass(frozen=True) -class SunWEGSensorEntityDescription(SensorEntityDescription, SunWEGRequiredKeysMixin): - """Describes SunWEG sensor entity.""" - - api_variable_unit: str | None = None - previous_value_drop_threshold: float | None = None - never_resets: bool = False - icon: str | None = None diff --git a/homeassistant/components/sunweg/sensor/string.py b/homeassistant/components/sunweg/sensor/string.py deleted file mode 100644 index ec59da5d20d..00000000000 --- a/homeassistant/components/sunweg/sensor/string.py +++ /dev/null @@ -1,27 +0,0 @@ -"""SunWEG Sensor definitions for the String type.""" - -from __future__ import annotations - -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import UnitOfElectricCurrent, UnitOfElectricPotential - -from .sensor_entity_description import SunWEGSensorEntityDescription - -STRING_SENSOR_TYPES: tuple[SunWEGSensorEntityDescription, ...] = ( - SunWEGSensorEntityDescription( - key="voltage", - name="Voltage", - api_variable_key="_voltage", - native_unit_of_measurement=UnitOfElectricPotential.VOLT, - device_class=SensorDeviceClass.VOLTAGE, - suggested_display_precision=2, - ), - SunWEGSensorEntityDescription( - key="amperage", - name="Amperage", - api_variable_key="_amperage", - native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, - device_class=SensorDeviceClass.CURRENT, - suggested_display_precision=1, - ), -) diff --git a/homeassistant/components/sunweg/sensor/total.py b/homeassistant/components/sunweg/sensor/total.py deleted file mode 100644 index 2b94446a165..00000000000 --- a/homeassistant/components/sunweg/sensor/total.py +++ /dev/null @@ -1,50 +0,0 @@ -"""SunWEG Sensor definitions for Totals.""" - -from __future__ import annotations - -from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass -from homeassistant.const import UnitOfEnergy, UnitOfPower - -from .sensor_entity_description import SunWEGSensorEntityDescription - -TOTAL_SENSOR_TYPES: tuple[SunWEGSensorEntityDescription, ...] 
= ( - SunWEGSensorEntityDescription( - key="total_money_total", - name="Money lifetime", - api_variable_key="_saving", - icon="mdi:cash", - native_unit_of_measurement="R$", - suggested_display_precision=2, - ), - SunWEGSensorEntityDescription( - key="total_energy_today", - name="Energy Today", - api_variable_key="_today_energy", - api_variable_unit="_today_energy_metric", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - ), - SunWEGSensorEntityDescription( - key="total_output_power", - name="Output Power", - api_variable_key="_total_power", - native_unit_of_measurement=UnitOfPower.KILO_WATT, - device_class=SensorDeviceClass.POWER, - ), - SunWEGSensorEntityDescription( - key="total_energy_output", - name="Lifetime energy output", - api_variable_key="_total_energy", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL, - never_resets=True, - ), - SunWEGSensorEntityDescription( - key="last_update", - name="Last Update", - api_variable_key="_last_update", - device_class=SensorDeviceClass.DATE, - ), -) diff --git a/homeassistant/components/sunweg/strings.json b/homeassistant/components/sunweg/strings.json index 9ab7be053b1..75abf5d9271 100644 --- a/homeassistant/components/sunweg/strings.json +++ b/homeassistant/components/sunweg/strings.json @@ -1,35 +1,8 @@ { - "config": { - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "no_plants": "No plants have been found on this account", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" - }, - "error": { - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]" - }, - "step": { - "plant": { - "data": { - "plant_id": "Plant" - }, - "title": "Select your plant" - }, - "user": { - "data": { - "password": "[%key:common::config_flow::data::password%]", - "username": "[%key:common::config_flow::data::username%]" - }, - "title": "Enter your Sun WEG information" - }, - "reauth_confirm": { - "data": { - "password": "[%key:common::config_flow::data::password%]", - "username": "[%key:common::config_flow::data::username%]" - }, - "title": "[%key:common::config_flow::title::reauth%]" - } + "issues": { + "integration_removed": { + "title": "The SunWEG integration has been removed", + "description": "The SunWEG integration has been removed from Home Assistant.\n\nThe library that Home Assistant uses to connect with SunWEG services [no longer works as expected and requires daily token renewal]({issue}).\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing SunWEG integration entries]({entries})."
} } } diff --git a/homeassistant/components/swiss_public_transport/config_flow.py b/homeassistant/components/swiss_public_transport/config_flow.py index 4dc6efc2e85..872044097d6 100644 --- a/homeassistant/components/swiss_public_transport/config_flow.py +++ b/homeassistant/components/swiss_public_transport/config_flow.py @@ -190,7 +190,7 @@ class SwissPublicTransportConfigFlow(ConfigFlow, domain=DOMAIN): return "cannot_connect" except OpendataTransportError: return "bad_config" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unknown error") return "unknown" return None diff --git a/homeassistant/components/swiss_public_transport/strings.json b/homeassistant/components/swiss_public_transport/strings.json index 1cdbd527467..f1b28f5ed14 100644 --- a/homeassistant/components/swiss_public_transport/strings.json +++ b/homeassistant/components/swiss_public_transport/strings.json @@ -83,8 +83,8 @@ }, "services": { "fetch_connections": { - "name": "Fetch Connections", - "description": "Fetch a list of connections from the swiss public transport.", + "name": "Fetch connections", + "description": "Fetches a list of connections from Swiss public transport.", "fields": { "config_entry_id": { "name": "Instance", @@ -92,7 +92,7 @@ }, "limit": { "name": "Limit", - "description": "Number of connections to fetch from [1-15]" + "description": "Number of connections to fetch." } } } diff --git a/homeassistant/components/switch/strings.json b/homeassistant/components/switch/strings.json index 0663384fe2c..b73cf8f849d 100644 --- a/homeassistant/components/switch/strings.json +++ b/homeassistant/components/switch/strings.json @@ -25,10 +25,18 @@ } }, "switch": { - "name": "[%key:component::switch::title%]" + "name": "[%key:component::switch::title%]", + "state": { + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" + } }, "outlet": { - "name": "Outlet" + "name": "Outlet", + "state": { + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" + } } }, "services": { diff --git a/homeassistant/components/switchbot/cover.py b/homeassistant/components/switchbot/cover.py index 3ef0f5625c2..5a9613ab2a2 100644 --- a/homeassistant/components/switchbot/cover.py +++ b/homeassistant/components/switchbot/cover.py @@ -154,7 +154,7 @@ class SwitchBotBlindTiltEntity(SwitchbotEntity, CoverEntity, RestoreEntity): ATTR_CURRENT_TILT_POSITION ) self._last_run_success = last_state.attributes.get("last_run_success") - if (_tilt := self._attr_current_cover_position) is not None: + if (_tilt := self._attr_current_cover_tilt_position) is not None: self._attr_is_closed = (_tilt < self.CLOSED_DOWN_THRESHOLD) or ( _tilt > self.CLOSED_UP_THRESHOLD ) diff --git a/homeassistant/components/switchbot/light.py b/homeassistant/components/switchbot/light.py index 0a2c342ecf0..4b9a7e1b988 100644 --- a/homeassistant/components/switchbot/light.py +++ b/homeassistant/components/switchbot/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import Any, cast from switchbot import ColorMode as SwitchBotColorMode, SwitchbotBaseLight @@ -68,7 +68,9 @@ class SwitchbotLightEntity(SwitchbotEntity, LightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the light to turn on.""" - brightness = round(kwargs.get(ATTR_BRIGHTNESS, self.brightness) / 255 * 100) + brightness = round( + cast(int, kwargs.get(ATTR_BRIGHTNESS, self.brightness)) / 255 * 100 + ) if ( self.supported_color_modes diff --git 
a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 85d5bcf6436..d9f6f98d1fd 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.57.1"] + "requirements": ["PySwitchbot==0.58.0"] } diff --git a/homeassistant/components/switchbot/sensor.py b/homeassistant/components/switchbot/sensor.py index 025c40bff9e..d68c913db15 100644 --- a/homeassistant/components/switchbot/sensor.py +++ b/homeassistant/components/switchbot/sensor.py @@ -11,6 +11,7 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, + LIGHT_LUX, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -71,9 +72,14 @@ SENSOR_TYPES: dict[str, SensorEntityDescription] = { state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.HUMIDITY, ), + "illuminance": SensorEntityDescription( + key="illuminance", + native_unit_of_measurement=LIGHT_LUX, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.ILLUMINANCE, + ), "temperature": SensorEntityDescription( key="temperature", - name=None, native_unit_of_measurement=UnitOfTemperature.CELSIUS, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.TEMPERATURE, diff --git a/homeassistant/components/switcher_kis/strings.json b/homeassistant/components/switcher_kis/strings.json index e380711303d..c3cf111199f 100644 --- a/homeassistant/components/switcher_kis/strings.json +++ b/homeassistant/components/switcher_kis/strings.json @@ -9,13 +9,21 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "token": "[%key:common::config_flow::data::access_token%]" + }, + "data_description": { + "username": "The email address used to sign in to the Switcher app.", + "token": "The local control token received from Switcher." 
} }, "reauth_confirm": { - "description": "Found a Switcher device that requires a token\nEnter your username and token\nFor more information see https://www.home-assistant.io/integrations/switcher_kis/#prerequisites", + "description": "[%key:component::switcher_kis::config::step::credentials::description%]", "data": { "username": "[%key:common::config_flow::data::username%]", "token": "[%key:common::config_flow::data::access_token%]" + }, + "data_description": { + "username": "[%key:component::switcher_kis::config::step::credentials::data_description::username%]", + "token": "[%key:component::switcher_kis::config::step::credentials::data_description::token%]" } } }, diff --git a/homeassistant/components/synology_dsm/__init__.py b/homeassistant/components/synology_dsm/__init__.py index 1b26b7df84d..d9319beb595 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -9,7 +9,6 @@ from synology_dsm.api.surveillance_station import SynoSurveillanceStation from synology_dsm.api.surveillance_station.camera import SynoCamera from synology_dsm.exceptions import SynologyDSMNotLoggedInException -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC, CONF_SCAN_INTERVAL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -31,15 +30,16 @@ from .const import ( from .coordinator import ( SynologyDSMCameraUpdateCoordinator, SynologyDSMCentralUpdateCoordinator, + SynologyDSMConfigEntry, + SynologyDSMData, SynologyDSMSwitchUpdateCoordinator, ) -from .models import SynologyDSMData from .service import async_setup_services _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: SynologyDSMConfigEntry) -> bool: """Set up Synology DSM sensors.""" # Migrate device identifiers @@ -120,13 +120,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except SYNOLOGY_CONNECTION_EXCEPTIONS as ex: raise ConfigEntryNotReady from ex - synology_data = SynologyDSMData( + entry.runtime_data = SynologyDSMData( api=api, coordinator_central=coordinator_central, + coordinator_central_old_update_success=True, coordinator_cameras=coordinator_cameras, coordinator_switches=coordinator_switches, ) - hass.data.setdefault(DOMAIN, {})[entry.unique_id] = synology_data await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) @@ -140,28 +140,42 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_state_change(async_notify_backup_listeners) ) + def async_check_last_update_success() -> None: + if ( + last := coordinator_central.last_update_success + ) is not entry.runtime_data.coordinator_central_old_update_success: + entry.runtime_data.coordinator_central_old_update_success = last + async_notify_backup_listeners() + + entry.runtime_data.coordinator_central.async_add_listener( + async_check_last_update_success + ) + return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: SynologyDSMConfigEntry +) -> bool: """Unload Synology DSM sensors.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - entry_data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + entry_data = 
entry.runtime_data await entry_data.api.async_unload() - hass.data[DOMAIN].pop(entry.unique_id) return unload_ok -async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_update_listener( + hass: HomeAssistant, entry: SynologyDSMConfigEntry +) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) async def async_remove_config_entry_device( - hass: HomeAssistant, entry: ConfigEntry, device_entry: dr.DeviceEntry + hass: HomeAssistant, entry: SynologyDSMConfigEntry, device_entry: dr.DeviceEntry ) -> bool: """Remove synology_dsm config entry from a device.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data api = data.api assert api.information is not None serial = api.information.serial diff --git a/homeassistant/components/synology_dsm/backup.py b/homeassistant/components/synology_dsm/backup.py index 670c4c9bef0..46e47ebde16 100644 --- a/homeassistant/components/synology_dsm/backup.py +++ b/homeassistant/components/synology_dsm/backup.py @@ -17,7 +17,6 @@ from homeassistant.components.backup import ( BackupNotFound, suggested_filename, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator from homeassistant.helpers.json import json_dumps @@ -29,7 +28,7 @@ from .const import ( DATA_BACKUP_AGENT_LISTENERS, DOMAIN, ) -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry LOGGER = logging.getLogger(__name__) @@ -47,19 +46,19 @@ async def async_get_backup_agents( hass: HomeAssistant, ) -> list[BackupAgent]: """Return a list of backup agents.""" - if not ( - entries := hass.config_entries.async_loaded_entries(DOMAIN) - ) or not hass.data.get(DOMAIN): + entries: list[SynologyDSMConfigEntry] = hass.config_entries.async_loaded_entries( + DOMAIN + ) + if not entries: LOGGER.debug("No proper config entry found") return [] - syno_datas: dict[str, SynologyDSMData] = hass.data[DOMAIN] return [ SynologyDSMBackupAgent(hass, entry, entry.unique_id) for entry in entries if entry.unique_id is not None - and (syno_data := syno_datas.get(entry.unique_id)) - and syno_data.api.file_station + and entry.runtime_data.api.file_station and entry.options.get(CONF_BACKUP_PATH) + and entry.runtime_data.coordinator_central.last_update_success ] @@ -91,7 +90,9 @@ class SynologyDSMBackupAgent(BackupAgent): domain = DOMAIN - def __init__(self, hass: HomeAssistant, entry: ConfigEntry, unique_id: str) -> None: + def __init__( + self, hass: HomeAssistant, entry: SynologyDSMConfigEntry, unique_id: str + ) -> None: """Initialize the Synology DSM backup agent.""" super().__init__() LOGGER.debug("Initializing Synology DSM backup agent for %s", entry.unique_id) @@ -100,7 +101,7 @@ class SynologyDSMBackupAgent(BackupAgent): self.path = ( f"{entry.options[CONF_BACKUP_SHARE]}/{entry.options[CONF_BACKUP_PATH]}" ) - syno_data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + syno_data = entry.runtime_data self.api = syno_data.api self.backup_base_names: dict[str, str] = {} @@ -120,8 +121,7 @@ class SynologyDSMBackupAgent(BackupAgent): :param backup_id: The ID of the backup that was returned in async_list_backups. 
:return: A tuple of tar_filename and meta_filename """ - if await self.async_get_backup(backup_id) is None: - raise BackupNotFound + await self.async_get_backup(backup_id) base_name = self.backup_base_names[backup_id] return (f"{base_name}.tar", f"{base_name}_meta.json") @@ -195,13 +195,7 @@ class SynologyDSMBackupAgent(BackupAgent): :param backup_id: The ID of the backup that was returned in async_list_backups. """ - try: - (filename_tar, filename_meta) = await self._async_backup_filenames( - backup_id - ) - except BackupAgentError: - # backup meta data could not be found, so we can't delete the backup - return + (filename_tar, filename_meta) = await self._async_backup_filenames(backup_id) for filename in (filename_tar, filename_meta): try: @@ -269,7 +263,9 @@ class SynologyDSMBackupAgent(BackupAgent): self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: + ) -> AgentBackup: """Return a backup.""" backups = await self._async_list_backups() - return backups.get(backup_id) + if backup_id not in backups: + raise BackupNotFound(f"Backup {backup_id} not found") + return backups[backup_id] diff --git a/homeassistant/components/synology_dsm/binary_sensor.py b/homeassistant/components/synology_dsm/binary_sensor.py index 2f7d041cb10..1ae5fa90760 100644 --- a/homeassistant/components/synology_dsm/binary_sensor.py +++ b/homeassistant/components/synology_dsm/binary_sensor.py @@ -12,20 +12,17 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DISKS, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . import SynoApi -from .const import DOMAIN -from .coordinator import SynologyDSMCentralUpdateCoordinator +from .coordinator import SynologyDSMCentralUpdateCoordinator, SynologyDSMConfigEntry from .entity import ( SynologyDSMBaseEntity, SynologyDSMDeviceEntity, SynologyDSMEntityDescription, ) -from .models import SynologyDSMData @dataclass(frozen=True, kw_only=True) @@ -64,11 +61,11 @@ STORAGE_DISK_BINARY_SENSORS: tuple[SynologyDSMBinarySensorEntityDescription, ... async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Synology NAS binary sensor.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data api = data.api coordinator = data.coordinator_central assert api.storage is not None diff --git a/homeassistant/components/synology_dsm/button.py b/homeassistant/components/synology_dsm/button.py index 6512c370334..79297b1f1b4 100644 --- a/homeassistant/components/synology_dsm/button.py +++ b/homeassistant/components/synology_dsm/button.py @@ -12,7 +12,6 @@ from homeassistant.components.button import ( ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo @@ -20,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import SynoApi from .const import DOMAIN -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry LOGGER = logging.getLogger(__name__) @@ -52,11 +51,11 @@ BUTTONS: Final = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set buttons for device.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data async_add_entities(SynologyDSMButton(data.api, button) for button in BUTTONS) diff --git a/homeassistant/components/synology_dsm/camera.py b/homeassistant/components/synology_dsm/camera.py index acbcccb8894..f393b8efb55 100644 --- a/homeassistant/components/synology_dsm/camera.py +++ b/homeassistant/components/synology_dsm/camera.py @@ -16,7 +16,6 @@ from homeassistant.components.camera import ( CameraEntityDescription, CameraEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -29,9 +28,8 @@ from .const import ( DOMAIN, SIGNAL_CAMERA_SOURCE_CHANGED, ) -from .coordinator import SynologyDSMCameraUpdateCoordinator +from .coordinator import SynologyDSMCameraUpdateCoordinator, SynologyDSMConfigEntry from .entity import SynologyDSMBaseEntity, SynologyDSMEntityDescription -from .models import SynologyDSMData _LOGGER = logging.getLogger(__name__) @@ -47,11 +45,11 @@ class SynologyDSMCameraEntityDescription( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Synology NAS cameras.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data if coordinator := data.coordinator_cameras: async_add_entities( SynoDSMCamera(data.api, coordinator, camera_id) diff --git a/homeassistant/components/synology_dsm/config_flow.py b/homeassistant/components/synology_dsm/config_flow.py index 58784862305..f0da6f8fe47 100644 --- a/homeassistant/components/synology_dsm/config_flow.py +++ b/homeassistant/components/synology_dsm/config_flow.py @@ -72,7 +72,7 @@ from .const import ( DOMAIN, SYNOLOGY_CONNECTION_EXCEPTIONS, ) -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry _LOGGER = logging.getLogger(__name__) @@ -131,7 +131,7 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: SynologyDSMConfigEntry, ) -> SynologyDSMOptionsFlowHandler: """Get the options flow for this handler.""" return SynologyDSMOptionsFlowHandler() @@ -444,6 +444,8 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): class SynologyDSMOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" + config_entry: SynologyDSMConfigEntry + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -451,7 +453,7 @@ class SynologyDSMOptionsFlowHandler(OptionsFlow): if user_input is not None: return self.async_create_entry(title="", data=user_input) - syno_data: SynologyDSMData = self.hass.data[DOMAIN][self.config_entry.unique_id] + syno_data = self.config_entry.runtime_data data_schema = vol.Schema( { diff --git a/homeassistant/components/synology_dsm/coordinator.py b/homeassistant/components/synology_dsm/coordinator.py index 
1b3e21090b8..dd97dedf65e 100644 --- a/homeassistant/components/synology_dsm/coordinator.py +++ b/homeassistant/components/synology_dsm/coordinator.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine +from dataclasses import dataclass from datetime import timedelta import logging from typing import Any, Concatenate @@ -28,6 +29,20 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +@dataclass +class SynologyDSMData: + """Data for the synology_dsm integration.""" + + api: SynoApi + coordinator_central: SynologyDSMCentralUpdateCoordinator + coordinator_central_old_update_success: bool + coordinator_cameras: SynologyDSMCameraUpdateCoordinator | None + coordinator_switches: SynologyDSMSwitchUpdateCoordinator | None + + +type SynologyDSMConfigEntry = ConfigEntry[SynologyDSMData] + + def async_re_login_on_expired[_T: SynologyDSMUpdateCoordinator[Any], **_P, _R]( func: Callable[Concatenate[_T, _P], Awaitable[_R]], ) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]]: @@ -57,12 +72,12 @@ def async_re_login_on_expired[_T: SynologyDSMUpdateCoordinator[Any], **_P, _R]( class SynologyDSMUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): """DataUpdateCoordinator base class for synology_dsm.""" - config_entry: ConfigEntry + config_entry: SynologyDSMConfigEntry def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, api: SynoApi, update_interval: timedelta, ) -> None: @@ -85,7 +100,7 @@ class SynologyDSMSwitchUpdateCoordinator( def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, api: SynoApi, ) -> None: """Initialize DataUpdateCoordinator for switch devices.""" @@ -116,7 +131,7 @@ class SynologyDSMCentralUpdateCoordinator(SynologyDSMUpdateCoordinator[None]): def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, api: SynoApi, ) -> None: """Initialize DataUpdateCoordinator for central device.""" @@ -136,7 +151,7 @@ class SynologyDSMCameraUpdateCoordinator( def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, api: SynoApi, ) -> None: """Initialize DataUpdateCoordinator for cameras.""" diff --git a/homeassistant/components/synology_dsm/diagnostics.py b/homeassistant/components/synology_dsm/diagnostics.py index b30955ae682..a673be23096 100644 --- a/homeassistant/components/synology_dsm/diagnostics.py +++ b/homeassistant/components/synology_dsm/diagnostics.py @@ -6,21 +6,20 @@ from typing import Any from homeassistant.components.camera import diagnostics as camera_diagnostics from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from .const import CONF_DEVICE_TOKEN, DOMAIN -from .models import SynologyDSMData +from .const import CONF_DEVICE_TOKEN +from .coordinator import SynologyDSMConfigEntry TO_REDACT = {CONF_USERNAME, CONF_PASSWORD, CONF_DEVICE_TOKEN} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: SynologyDSMConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data syno_api = data.api dsm_info = syno_api.dsm.information diff --git a/homeassistant/components/synology_dsm/media_source.py 
b/homeassistant/components/synology_dsm/media_source.py index d35b262809c..6234f5e8dd0 100644 --- a/homeassistant/components/synology_dsm/media_source.py +++ b/homeassistant/components/synology_dsm/media_source.py @@ -2,6 +2,7 @@ from __future__ import annotations +from logging import getLogger import mimetypes from aiohttp import web @@ -22,7 +23,9 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from .const import DOMAIN, SHARED_SUFFIX -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry, SynologyDSMData + +LOGGER = getLogger(__name__) async def async_get_media_source(hass: HomeAssistant) -> MediaSource: @@ -41,15 +44,13 @@ class SynologyPhotosMediaSourceIdentifier: """Split identifier into parts.""" parts = identifier.split("/") - self.unique_id = None + self.unique_id = parts[0] self.album_id = None self.cache_key = None self.file_name = None self.is_shared = False self.passphrase = "" - self.unique_id = parts[0] - if len(parts) > 1: album_parts = parts[1].split("_") self.album_id = album_parts[0] @@ -82,7 +83,7 @@ class SynologyPhotosMediaSource(MediaSource): item: MediaSourceItem, ) -> BrowseMediaSource: """Return media.""" - if not self.hass.data.get(DOMAIN): + if not self.hass.config_entries.async_loaded_entries(DOMAIN): raise BrowseError("Diskstation not initialized") return BrowseMediaSource( domain=DOMAIN, @@ -116,7 +117,13 @@ class SynologyPhotosMediaSource(MediaSource): for entry in self.entries ] identifier = SynologyPhotosMediaSourceIdentifier(item.identifier) - diskstation: SynologyDSMData = self.hass.data[DOMAIN][identifier.unique_id] + entry: SynologyDSMConfigEntry | None = ( + self.hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, identifier.unique_id + ) + ) + assert entry + diskstation = entry.runtime_data assert diskstation.api.photos is not None if identifier.album_id is None: @@ -244,7 +251,7 @@ class SynologyDsmMediaView(http.HomeAssistantView): self, request: web.Request, source_dir_id: str, location: str ) -> web.Response: """Start a GET request.""" - if not self.hass.data.get(DOMAIN): + if not self.hass.config_entries.async_loaded_entries(DOMAIN): raise web.HTTPNotFound # location: {cache_key}/{filename} cache_key, file_name, passphrase = location.split("/") @@ -257,7 +264,13 @@ class SynologyDsmMediaView(http.HomeAssistantView): if not isinstance(mime_type, str): raise web.HTTPNotFound - diskstation: SynologyDSMData = self.hass.data[DOMAIN][source_dir_id] + entry: SynologyDSMConfigEntry | None = ( + self.hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, source_dir_id + ) + ) + assert entry + diskstation = entry.runtime_data assert diskstation.api.photos is not None item = SynoPhotosItem(image_id, "", "", "", cache_key, "xl", shared, passphrase) try: diff --git a/homeassistant/components/synology_dsm/models.py b/homeassistant/components/synology_dsm/models.py deleted file mode 100644 index 4f51d329ded..00000000000 --- a/homeassistant/components/synology_dsm/models.py +++ /dev/null @@ -1,22 +0,0 @@ -"""The synology_dsm integration models.""" - -from __future__ import annotations - -from dataclasses import dataclass - -from .common import SynoApi -from .coordinator import ( - SynologyDSMCameraUpdateCoordinator, - SynologyDSMCentralUpdateCoordinator, - SynologyDSMSwitchUpdateCoordinator, -) - - -@dataclass -class SynologyDSMData: - """Data for the synology_dsm integration.""" - - api: SynoApi - coordinator_central: SynologyDSMCentralUpdateCoordinator - 
coordinator_cameras: SynologyDSMCameraUpdateCoordinator | None - coordinator_switches: SynologyDSMSwitchUpdateCoordinator | None diff --git a/homeassistant/components/synology_dsm/repairs.py b/homeassistant/components/synology_dsm/repairs.py index 725e77a2593..8a4e47a32b5 100644 --- a/homeassistant/components/synology_dsm/repairs.py +++ b/homeassistant/components/synology_dsm/repairs.py @@ -11,7 +11,6 @@ import voluptuous as vol from homeassistant import data_entry_flow from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.selector import ( @@ -28,7 +27,7 @@ from .const import ( ISSUE_MISSING_BACKUP_SETUP, SYNOLOGY_CONNECTION_EXCEPTIONS, ) -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry LOGGER = logging.getLogger(__name__) @@ -36,7 +35,7 @@ LOGGER = logging.getLogger(__name__) class MissingBackupSetupRepairFlow(RepairsFlow): """Handler for an issue fixing flow.""" - def __init__(self, entry: ConfigEntry, issue_id: str) -> None: + def __init__(self, entry: SynologyDSMConfigEntry, issue_id: str) -> None: """Create flow.""" self.entry = entry self.issue_id = issue_id @@ -59,7 +58,7 @@ class MissingBackupSetupRepairFlow(RepairsFlow): ) -> data_entry_flow.FlowResult: """Handle the confirm step of a fix flow.""" - syno_data: SynologyDSMData = self.hass.data[DOMAIN][self.entry.unique_id] + syno_data = self.entry.runtime_data if user_input is not None: self.hass.config_entries.async_update_entry( diff --git a/homeassistant/components/synology_dsm/sensor.py b/homeassistant/components/synology_dsm/sensor.py index 2987de7a7c7..566885e3989 100644 --- a/homeassistant/components/synology_dsm/sensor.py +++ b/homeassistant/components/synology_dsm/sensor.py @@ -16,7 +16,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_DISKS, PERCENTAGE, @@ -31,14 +30,13 @@ from homeassistant.helpers.typing import StateType from homeassistant.util.dt import utcnow from . import SynoApi -from .const import CONF_VOLUMES, DOMAIN, ENTITY_UNIT_LOAD -from .coordinator import SynologyDSMCentralUpdateCoordinator +from .const import CONF_VOLUMES, ENTITY_UNIT_LOAD +from .coordinator import SynologyDSMCentralUpdateCoordinator, SynologyDSMConfigEntry from .entity import ( SynologyDSMBaseEntity, SynologyDSMDeviceEntity, SynologyDSMEntityDescription, ) -from .models import SynologyDSMData @dataclass(frozen=True, kw_only=True) @@ -287,11 +285,11 @@ INFORMATION_SENSORS: tuple[SynologyDSMSensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Synology NAS Sensor.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data api = data.api coordinator = data.coordinator_central storage = api.storage diff --git a/homeassistant/components/synology_dsm/service.py b/homeassistant/components/synology_dsm/service.py index 366f7d4ba3a..40b6fd4bc30 100644 --- a/homeassistant/components/synology_dsm/service.py +++ b/homeassistant/components/synology_dsm/service.py @@ -3,13 +3,14 @@ from __future__ import annotations import logging +from typing import cast from synology_dsm.exceptions import SynologyDSMException from homeassistant.core import HomeAssistant, ServiceCall from .const import CONF_SERIAL, DOMAIN, SERVICE_REBOOT, SERVICE_SHUTDOWN, SERVICES -from .models import SynologyDSMData +from .coordinator import SynologyDSMConfigEntry LOGGER = logging.getLogger(__name__) @@ -19,11 +20,20 @@ async def async_setup_services(hass: HomeAssistant) -> None: async def service_handler(call: ServiceCall) -> None: """Handle service call.""" - serial = call.data.get(CONF_SERIAL) - dsm_devices = hass.data[DOMAIN] + serial: str | None = call.data.get(CONF_SERIAL) + entries: list[SynologyDSMConfigEntry] = ( + hass.config_entries.async_loaded_entries(DOMAIN) + ) + dsm_devices = { + cast(str, entry.unique_id): entry.runtime_data for entry in entries + } if serial: - dsm_device: SynologyDSMData = hass.data[DOMAIN][serial] + entry: SynologyDSMConfigEntry | None = ( + hass.config_entries.async_entry_for_domain_unique_id(DOMAIN, serial) + ) + assert entry + dsm_device = entry.runtime_data elif len(dsm_devices) == 1: dsm_device = next(iter(dsm_devices.values())) serial = next(iter(dsm_devices)) @@ -39,7 +49,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: return if call.service in [SERVICE_REBOOT, SERVICE_SHUTDOWN]: - if serial not in hass.data[DOMAIN]: + if serial not in dsm_devices: LOGGER.error("DSM with specified serial %s not found", serial) return LOGGER.debug("%s DSM with serial %s", call.service, serial) @@ -50,7 +60,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: ), call.service, ) - dsm_device = hass.data[DOMAIN][serial] + dsm_device = dsm_devices[serial] dsm_api = dsm_device.api try: await getattr(dsm_api, f"async_{call.service}")() diff --git a/homeassistant/components/synology_dsm/switch.py b/homeassistant/components/synology_dsm/switch.py index c4f1572ceea..91863ff3a26 100644 --- a/homeassistant/components/synology_dsm/switch.py +++ b/homeassistant/components/synology_dsm/switch.py @@ -9,16 +9,14 @@ from typing import Any from synology_dsm.api.surveillance_station import SynoSurveillanceStation from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from . 
import SynoApi from .const import DOMAIN -from .coordinator import SynologyDSMSwitchUpdateCoordinator +from .coordinator import SynologyDSMConfigEntry, SynologyDSMSwitchUpdateCoordinator from .entity import SynologyDSMBaseEntity, SynologyDSMEntityDescription -from .models import SynologyDSMData _LOGGER = logging.getLogger(__name__) @@ -41,11 +39,11 @@ SURVEILLANCE_SWITCH: tuple[SynologyDSMSwitchEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up the Synology NAS switch.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data if coordinator := data.coordinator_switches: assert coordinator.version is not None async_add_entities( diff --git a/homeassistant/components/synology_dsm/update.py b/homeassistant/components/synology_dsm/update.py index 71eed2d7f1f..3048a38cb9c 100644 --- a/homeassistant/components/synology_dsm/update.py +++ b/homeassistant/components/synology_dsm/update.py @@ -9,15 +9,12 @@ from synology_dsm.api.core.upgrade import SynoCoreUpgrade from yarl import URL from homeassistant.components.update import UpdateEntity, UpdateEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback -from .const import DOMAIN -from .coordinator import SynologyDSMCentralUpdateCoordinator +from .coordinator import SynologyDSMCentralUpdateCoordinator, SynologyDSMConfigEntry from .entity import SynologyDSMBaseEntity, SynologyDSMEntityDescription -from .models import SynologyDSMData @dataclass(frozen=True, kw_only=True) @@ -39,11 +36,11 @@ UPDATE_ENTITIES: Final = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: SynologyDSMConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up Synology DSM update entities.""" - data: SynologyDSMData = hass.data[DOMAIN][entry.unique_id] + data = entry.runtime_data async_add_entities( SynoDSMUpdateEntity(data.api, data.coordinator_central, description) for description in UPDATE_ENTITIES diff --git a/homeassistant/components/system_bridge/__init__.py b/homeassistant/components/system_bridge/__init__.py index 3bda29867cc..e1ee57e42b2 100644 --- a/homeassistant/components/system_bridge/__init__.py +++ b/homeassistant/components/system_bridge/__init__.py @@ -11,6 +11,7 @@ from systembridgeconnector.exceptions import ( AuthenticationException, ConnectionClosedException, ConnectionErrorException, + DataMissingException, ) from systembridgeconnector.version import Version from systembridgemodels.keyboard_key import KeyboardKey @@ -184,7 +185,7 @@ async def async_setup_entry( "host": entry.data[CONF_HOST], }, ) from exception - except TimeoutError as exception: + except (DataMissingException, TimeoutError) as exception: raise ConfigEntryNotReady( translation_domain=DOMAIN, translation_key="timeout", diff --git a/homeassistant/components/system_bridge/const.py b/homeassistant/components/system_bridge/const.py index 32507f6d84e..235d7e6b986 100644 --- a/homeassistant/components/system_bridge/const.py +++ b/homeassistant/components/system_bridge/const.py @@ -18,4 +18,6 @@ MODULES: Final[list[Module]] = [ Module.SYSTEM, ] -DATA_WAIT_TIMEOUT: Final[int] = 10 +DATA_WAIT_TIMEOUT: Final[int] = 20 + +GET_DATA_WAIT_TIMEOUT: Final[int] = 15 diff --git 
a/homeassistant/components/system_bridge/coordinator.py b/homeassistant/components/system_bridge/coordinator.py index 1690bad4a4d..7e545f39e46 100644 --- a/homeassistant/components/system_bridge/coordinator.py +++ b/homeassistant/components/system_bridge/coordinator.py @@ -33,7 +33,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DOMAIN, MODULES +from .const import DOMAIN, GET_DATA_WAIT_TIMEOUT, MODULES from .data import SystemBridgeData @@ -119,7 +119,10 @@ class SystemBridgeDataUpdateCoordinator(DataUpdateCoordinator[SystemBridgeData]) """Get data from WebSocket.""" await self.check_websocket_connected() - modules_data = await self.websocket_client.get_data(GetData(modules=modules)) + modules_data = await self.websocket_client.get_data( + GetData(modules=modules), + timeout=GET_DATA_WAIT_TIMEOUT, + ) # Merge new data with existing data for module in MODULES: diff --git a/homeassistant/components/system_bridge/strings.json b/homeassistant/components/system_bridge/strings.json index ef7495ef74f..1c079c1ef0c 100644 --- a/homeassistant/components/system_bridge/strings.json +++ b/homeassistant/components/system_bridge/strings.json @@ -109,7 +109,7 @@ "message": "No data received from {host}" }, "process_not_found": { - "message": "Could not find process with id {id}." + "message": "Could not find process with ID {id}." }, "timeout": { "message": "A timeout occurred for {title} ({host})" @@ -120,7 +120,7 @@ }, "issues": { "unsupported_version": { - "title": "System Bridge Upgrade Required", + "title": "System Bridge upgrade required", "description": "Your version of System Bridge for host {host} is not supported.\n\nPlease upgrade to the latest version." 
} }, diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index bd16464b290..9302746aa17 100644 --- a/homeassistant/components/systemmonitor/manifest.json +++ b/homeassistant/components/systemmonitor/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.1.1"], + "requirements": ["psutil-home-assistant==0.0.1", "psutil==7.0.0"], "single_config_entry": true } diff --git a/homeassistant/components/systemmonitor/strings.json b/homeassistant/components/systemmonitor/strings.json index fb8a318ff45..134fe390357 100644 --- a/homeassistant/components/systemmonitor/strings.json +++ b/homeassistant/components/systemmonitor/strings.json @@ -48,13 +48,13 @@ "name": "Last boot" }, "load_15m": { - "name": "Load (15m)" + "name": "Load (15 min)" }, "load_1m": { - "name": "Load (1m)" + "name": "Load (1 min)" }, "load_5m": { - "name": "Load (5m)" + "name": "Load (5 min)" }, "memory_free": { "name": "Memory free" diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py index 4b0203acda3..d1994075f12 100644 --- a/homeassistant/components/tado/__init__.py +++ b/homeassistant/components/tado/__init__.py @@ -10,12 +10,17 @@ from PyTado.interface import Tado from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_DEFAULT, CONST_OVERLAY_TADO_MODE, @@ -56,23 +61,34 @@ type TadoConfigEntry = ConfigEntry[TadoData] async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: """Set up Tado from a config entry.""" + if CONF_REFRESH_TOKEN not in entry.data: + raise ConfigEntryAuthFailed _async_import_options_from_data_if_missing(hass, entry) _LOGGER.debug("Setting up Tado connection") + _LOGGER.debug( + "Creating tado instance with refresh token: %s", + entry.data[CONF_REFRESH_TOKEN], + ) + + def create_tado_instance() -> tuple[Tado, str]: + """Create a Tado instance, this time with a previously obtained refresh token.""" + tado = Tado(saved_refresh_token=entry.data[CONF_REFRESH_TOKEN]) + return tado, tado.device_activation_status() + try: - tado = await hass.async_add_executor_job( - Tado, - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - ) + tado, device_status = await hass.async_add_executor_job(create_tado_instance) except PyTado.exceptions.TadoWrongCredentialsException as err: raise ConfigEntryError(f"Invalid Tado credentials. Error: {err}") from err except PyTado.exceptions.TadoException as err: raise ConfigEntryNotReady(f"Error during Tado setup: {err}") from err - _LOGGER.debug( - "Tado connection established for username: %s", entry.data[CONF_USERNAME] - ) + if device_status != "COMPLETED": + raise ConfigEntryAuthFailed( + f"Device login flow status is {device_status}. Starting re-authentication." 
+ ) + + _LOGGER.debug("Tado connection established") coordinator = TadoDataUpdateCoordinator(hass, entry, tado) await coordinator.async_config_entry_first_refresh() @@ -82,11 +98,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool entry.runtime_data = TadoData(coordinator, mobile_coordinator) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload(entry.add_update_listener(update_listener)) return True +async def async_migrate_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: + """Migrate old entry.""" + + if entry.version < 2: + _LOGGER.debug("Migrating Tado entry to version 2. Current data: %s", entry.data) + data = dict(entry.data) + data.pop(CONF_USERNAME, None) + data.pop(CONF_PASSWORD, None) + hass.config_entries.async_update_entry(entry=entry, data=data, version=2) + _LOGGER.debug("Migration to version 2 successful") + return True + + @callback def _async_import_options_from_data_if_missing( hass: HomeAssistant, entry: TadoConfigEntry @@ -106,11 +134,6 @@ def _async_import_options_from_data_if_missing( hass.config_entries.async_update_entry(entry, options=options) -async def update_listener(hass: HomeAssistant, entry: TadoConfigEntry): - """Handle options update.""" - await hass.config_entries.async_reload(entry.entry_id) - - async def async_unload_entry(hass: HomeAssistant, entry: TadoConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py index e6aa921d428..e6ae623d1fc 100644 --- a/homeassistant/components/tado/climate.py +++ b/homeassistant/components/tado/climate.py @@ -157,8 +157,8 @@ async def create_climate_entity( TADO_TO_HA_HVAC_MODE_MAP[CONST_MODE_OFF], TADO_TO_HA_HVAC_MODE_MAP[CONST_MODE_SMART_SCHEDULE], ] - supported_fan_modes = None - supported_swing_modes = None + supported_fan_modes: list[str] | None = None + supported_swing_modes: list[str] | None = None heat_temperatures = None cool_temperatures = None @@ -477,11 +477,9 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - # If the target temperature will be None - # if the device is performing an action - # that does not affect the temperature or - # the device is switching states - return self._tado_zone_data.target_temp or self._tado_zone_data.current_temp + if self._current_tado_hvac_mode == CONST_MODE_OFF: + return TADO_DEFAULT_MIN_TEMP + return self._tado_zone_data.target_temp async def set_timer( self, diff --git a/homeassistant/components/tado/config_flow.py b/homeassistant/components/tado/config_flow.py index f251a292800..48c3d30cb2b 100644 --- a/homeassistant/components/tado/config_flow.py +++ b/homeassistant/components/tado/config_flow.py @@ -2,160 +2,176 @@ from __future__ import annotations +import asyncio +from collections.abc import Mapping import logging from typing import Any -import PyTado +from PyTado.exceptions import TadoException +from PyTado.http import DeviceActivationStatus from PyTado.interface import Tado -import requests.exceptions import voluptuous as vol +from yarl import URL from homeassistant.config_entries import ( + SOURCE_REAUTH, ConfigEntry, ConfigFlow, ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import 
callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.service_info.zeroconf import ( - ATTR_PROPERTIES_ID, - ZeroconfServiceInfo, -) +from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, CONST_OVERLAY_TADO_OPTIONS, DOMAIN, - UNIQUE_ID, ) _LOGGER = logging.getLogger(__name__) -DATA_SCHEMA = vol.Schema( - { - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } -) - - -async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: - """Validate the user input allows us to connect. - - Data has the keys from DATA_SCHEMA with values provided by the user. - """ - - try: - tado = await hass.async_add_executor_job( - Tado, data[CONF_USERNAME], data[CONF_PASSWORD] - ) - tado_me = await hass.async_add_executor_job(tado.get_me) - except KeyError as ex: - raise InvalidAuth from ex - except RuntimeError as ex: - raise CannotConnect from ex - except requests.exceptions.HTTPError as ex: - if ex.response.status_code > 400 and ex.response.status_code < 500: - raise InvalidAuth from ex - raise CannotConnect from ex - - if "homes" not in tado_me or len(tado_me["homes"]) == 0: - raise NoHomes - - home = tado_me["homes"][0] - unique_id = str(home["id"]) - name = home["name"] - - return {"title": name, UNIQUE_ID: unique_id} - class TadoConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Tado.""" - VERSION = 1 + VERSION = 2 + login_task: asyncio.Task | None = None + refresh_token: str | None = None + tado: Tado | None = None + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle reauth on credential failure.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Prepare reauth.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + + return await self.async_step_user() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the initial step.""" - errors = {} - if user_input is not None: + """Handle users reauth credentials.""" + + if self.tado is None: + _LOGGER.debug("Initiating device activation") try: - validated = await validate_input(self.hass, user_input) - except CannotConnect: - errors["base"] = "cannot_connect" - except InvalidAuth: - errors["base"] = "invalid_auth" - except NoHomes: - errors["base"] = "no_homes" - except Exception: - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" + self.tado = await self.hass.async_add_executor_job(Tado) + except TadoException: + _LOGGER.exception("Error while initiating Tado") + return self.async_abort(reason="cannot_connect") + assert self.tado is not None + tado_device_url = self.tado.device_verification_url() + user_code = URL(tado_device_url).query["user_code"] - if "base" not in errors: - await self.async_set_unique_id(validated[UNIQUE_ID]) - self._abort_if_unique_id_configured() - return self.async_create_entry( - title=validated["title"], data=user_input - ) + async def _wait_for_login() -> None: + """Wait for the user to login.""" + assert self.tado is not None + _LOGGER.debug("Waiting for device activation") + try: + await self.hass.async_add_executor_job(self.tado.device_activation) + except Exception as ex: + _LOGGER.exception("Error while waiting for device activation") + raise 
CannotConnect from ex - return self.async_show_form( - step_id="user", data_schema=DATA_SCHEMA, errors=errors + if ( + self.tado.device_activation_status() + is not DeviceActivationStatus.COMPLETED + ): + raise CannotConnect + + _LOGGER.debug("Checking login task") + if self.login_task is None: + _LOGGER.debug("Creating task for device activation") + self.login_task = self.hass.async_create_task(_wait_for_login()) + + if self.login_task.done(): + _LOGGER.debug("Login task is done, checking results") + if self.login_task.exception(): + return self.async_show_progress_done(next_step_id="timeout") + self.refresh_token = await self.hass.async_add_executor_job( + self.tado.get_refresh_token + ) + return self.async_show_progress_done(next_step_id="finish_login") + + return self.async_show_progress( + step_id="user", + progress_action="wait_for_device", + description_placeholders={ + "url": tado_device_url, + "code": user_code, + }, + progress_task=self.login_task, ) + async def async_step_finish_login( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle the finalization of reauth.""" + _LOGGER.debug("Finalizing reauth") + assert self.tado is not None + tado_me = await self.hass.async_add_executor_job(self.tado.get_me) + + if "homes" not in tado_me or len(tado_me["homes"]) == 0: + return self.async_abort(reason="no_homes") + + home = tado_me["homes"][0] + unique_id = str(home["id"]) + name = home["name"] + + if self.source != SOURCE_REAUTH: + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=name, + data={CONF_REFRESH_TOKEN: self.refresh_token}, + ) + + self._abort_if_unique_id_mismatch(reason="reauth_account_mismatch") + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data={CONF_REFRESH_TOKEN: self.refresh_token}, + ) + + async def async_step_timeout( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Handle issues that need transition await from progress step.""" + if user_input is None: + return self.async_show_form( + step_id="timeout", + ) + del self.login_task + return await self.async_step_user() + async def async_step_homekit( self, discovery_info: ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle HomeKit discovery.""" - self._async_abort_entries_match() - properties = { - key.lower(): value for (key, value) in discovery_info.properties.items() - } - await self.async_set_unique_id(properties[ATTR_PROPERTIES_ID]) - self._abort_if_unique_id_configured() - return await self.async_step_user() + await self._async_handle_discovery_without_unique_id() + return await self.async_step_homekit_confirm() - async def async_step_reconfigure( + async def async_step_homekit_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle a reconfiguration flow initialized by the user.""" - errors: dict[str, str] = {} - reconfigure_entry = self._get_reconfigure_entry() + """Prepare for Homekit.""" + if user_input is None: + return self.async_show_form(step_id="homekit_confirm") - if user_input is not None: - user_input[CONF_USERNAME] = reconfigure_entry.data[CONF_USERNAME] - try: - await validate_input(self.hass, user_input) - except CannotConnect: - errors["base"] = "cannot_connect" - except PyTado.exceptions.TadoWrongCredentialsException: - errors["base"] = "invalid_auth" - except NoHomes: - errors["base"] = "no_homes" - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unexpected 
exception") - errors["base"] = "unknown" - - if not errors: - return self.async_update_reload_and_abort( - reconfigure_entry, data_updates=user_input - ) - - return self.async_show_form( - step_id="reconfigure", - data_schema=vol.Schema( - { - vol.Required(CONF_PASSWORD): str, - } - ), - errors=errors, - description_placeholders={ - CONF_USERNAME: reconfigure_entry.data[CONF_USERNAME] - }, - ) + return await self.async_step_user() @staticmethod @callback @@ -173,8 +189,10 @@ class OptionsFlowHandler(OptionsFlow): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle options flow.""" - if user_input is not None: - return self.async_create_entry(data=user_input) + if user_input: + result = self.async_create_entry(data=user_input) + await self.hass.config_entries.async_reload(self.config_entry.entry_id) + return result data_schema = vol.Schema( { @@ -191,11 +209,3 @@ class OptionsFlowHandler(OptionsFlow): class CannotConnect(HomeAssistantError): """Error to indicate we cannot connect.""" - - -class InvalidAuth(HomeAssistantError): - """Error to indicate there is invalid auth.""" - - -class NoHomes(HomeAssistantError): - """Error to indicate the account has no homes.""" diff --git a/homeassistant/components/tado/const.py b/homeassistant/components/tado/const.py index bdc4bff1943..7720ff09110 100644 --- a/homeassistant/components/tado/const.py +++ b/homeassistant/components/tado/const.py @@ -37,6 +37,7 @@ TADO_HVAC_ACTION_TO_HA_HVAC_ACTION = { # Configuration CONF_FALLBACK = "fallback" CONF_HOME_ID = "home_id" +CONF_REFRESH_TOKEN = "refresh_token" DATA = "data" # Weather diff --git a/homeassistant/components/tado/coordinator.py b/homeassistant/components/tado/coordinator.py index 559bc4a16fb..5f3aa1de1e4 100644 --- a/homeassistant/components/tado/coordinator.py +++ b/homeassistant/components/tado/coordinator.py @@ -10,7 +10,6 @@ from PyTado.interface import Tado from requests import RequestException from homeassistant.components.climate import PRESET_AWAY, PRESET_HOME -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -20,6 +19,7 @@ if TYPE_CHECKING: from .const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, DOMAIN, INSIDE_TEMPERATURE_MEASUREMENT, @@ -58,8 +58,7 @@ class TadoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): update_interval=SCAN_INTERVAL, ) self._tado = tado - self._username = config_entry.data[CONF_USERNAME] - self._password = config_entry.data[CONF_PASSWORD] + self._refresh_token = config_entry.data[CONF_REFRESH_TOKEN] self._fallback = config_entry.options.get( CONF_FALLBACK, CONST_OVERLAY_TADO_DEFAULT ) @@ -108,6 +107,18 @@ class TadoDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict]]): self.data["weather"] = home["weather"] self.data["geofence"] = home["geofence"] + refresh_token = await self.hass.async_add_executor_job( + self._tado.get_refresh_token + ) + + if refresh_token != self._refresh_token: + _LOGGER.debug("New refresh token obtained from Tado: %s", refresh_token) + self._refresh_token = refresh_token + self.hass.config_entries.async_update_entry( + self.config_entry, + data={**self.config_entry.data, CONF_REFRESH_TOKEN: refresh_token}, + ) + return self.data async def _async_update_devices(self) -> dict[str, dict]: diff --git a/homeassistant/components/tado/helper.py 
b/homeassistant/components/tado/helper.py index 571a757a3e8..5c515e00cf0 100644 --- a/homeassistant/components/tado/helper.py +++ b/homeassistant/components/tado/helper.py @@ -53,13 +53,13 @@ def decide_duration( return duration -def generate_supported_fanmodes(tado_to_ha_mapping: dict[str, str], options: list[str]): +def generate_supported_fanmodes( + tado_to_ha_mapping: dict[str, str], options: list[str] +) -> list[str] | None: """Return correct list of fan modes or None.""" supported_fanmodes = [ - tado_to_ha_mapping.get(option) - for option in options - if tado_to_ha_mapping.get(option) is not None + val for option in options if (val := tado_to_ha_mapping.get(option)) is not None ] if not supported_fanmodes: return None diff --git a/homeassistant/components/tado/manifest.json b/homeassistant/components/tado/manifest.json index b83e2695137..75ddbacc585 100644 --- a/homeassistant/components/tado/manifest.json +++ b/homeassistant/components/tado/manifest.json @@ -14,5 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["PyTado"], - "requirements": ["python-tado==0.18.6"] + "requirements": ["python-tado==0.18.9"] } diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json index ff1afc3c03d..53de3969998 100644 --- a/homeassistant/components/tado/strings.json +++ b/homeassistant/components/tado/strings.json @@ -1,33 +1,28 @@ { "config": { + "progress": { + "wait_for_device": "To authenticate, open the following URL and login at Tado:\n{url}\nIf the code is not automatically copied, paste the following code to authorize the integration:\n\n```{code}```\n\n\nThe login attempt will time out after five minutes." + }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "could_not_authenticate": "Could not authenticate with Tado.", + "no_homes": "There are no homes linked to this Tado account.", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "step": { - "user": { - "data": { - "password": "[%key:common::config_flow::data::password%]", - "username": "[%key:common::config_flow::data::username%]" - }, - "title": "Connect to your Tado account" + "reauth_confirm": { + "title": "Authenticate with Tado", + "description": "You need to reauthenticate with Tado. Press `Submit` to start the authentication process." }, - "reconfigure": { - "title": "Reconfigure your Tado", - "description": "Reconfigure the entry for your account: `{username}`.", - "data": { - "password": "[%key:common::config_flow::data::password%]" - }, - "data_description": { - "password": "Enter the (new) password for Tado." - } + "homekit": { + "title": "Authenticate with Tado", + "description": "Your device has been discovered and needs to authenticate with Tado. Press `Submit` to start the authentication process." + }, + "timeout": { + "description": "The authentication process timed out. Please try again." 
} - }, - "error": { - "unknown": "[%key:common::config_flow::error::unknown%]", - "no_homes": "There are no homes linked to this Tado account.", - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "options": { diff --git a/homeassistant/components/tankerkoenig/config_flow.py b/homeassistant/components/tankerkoenig/config_flow.py index 8796ae46ab7..b269eaaaf55 100644 --- a/homeassistant/components/tankerkoenig/config_flow.py +++ b/homeassistant/components/tankerkoenig/config_flow.py @@ -39,7 +39,7 @@ from homeassistant.helpers.selector import ( NumberSelectorConfig, ) -from .const import CONF_FUEL_TYPES, CONF_STATIONS, DEFAULT_RADIUS, DOMAIN, FUEL_TYPES +from .const import CONF_STATIONS, DEFAULT_RADIUS, DOMAIN async def async_get_nearby_stations( @@ -175,10 +175,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): vol.Required( CONF_API_KEY, default=user_input.get(CONF_API_KEY, "") ): cv.string, - vol.Required( - CONF_FUEL_TYPES, - default=user_input.get(CONF_FUEL_TYPES, list(FUEL_TYPES)), - ): cv.multi_select(FUEL_TYPES), vol.Required( CONF_LOCATION, default=user_input.get( diff --git a/homeassistant/components/tankerkoenig/const.py b/homeassistant/components/tankerkoenig/const.py index c2a1dba9b6a..6761d20f4ce 100644 --- a/homeassistant/components/tankerkoenig/const.py +++ b/homeassistant/components/tankerkoenig/const.py @@ -3,14 +3,11 @@ DOMAIN = "tankerkoenig" NAME = "tankerkoenig" -CONF_FUEL_TYPES = "fuel_types" CONF_STATIONS = "stations" DEFAULT_RADIUS = 2 DEFAULT_SCAN_INTERVAL = 30 -FUEL_TYPES = {"e5": "Super", "e10": "Super E10", "diesel": "Diesel"} - ATTR_BRAND = "brand" ATTR_CITY = "city" ATTR_FUEL_TYPE = "fuel_type" diff --git a/homeassistant/components/tankerkoenig/coordinator.py b/homeassistant/components/tankerkoenig/coordinator.py index 1f73d0577b3..f1e6bc8c865 100644 --- a/homeassistant/components/tankerkoenig/coordinator.py +++ b/homeassistant/components/tankerkoenig/coordinator.py @@ -24,7 +24,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_FUEL_TYPES, CONF_STATIONS, DOMAIN +from .const import CONF_STATIONS, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -54,7 +54,6 @@ class TankerkoenigDataUpdateCoordinator(DataUpdateCoordinator[dict[str, PriceInf self._selected_stations: list[str] = self.config_entry.data[CONF_STATIONS] self.stations: dict[str, Station] = {} - self.fuel_types: list[str] = self.config_entry.data[CONF_FUEL_TYPES] self.show_on_map: bool = self.config_entry.options[CONF_SHOW_ON_MAP] self._tankerkoenig = Tankerkoenig( diff --git a/homeassistant/components/tankerkoenig/strings.json b/homeassistant/components/tankerkoenig/strings.json index 29f4f439dd5..db620b2b11c 100644 --- a/homeassistant/components/tankerkoenig/strings.json +++ b/homeassistant/components/tankerkoenig/strings.json @@ -5,7 +5,6 @@ "data": { "name": "Region name", "api_key": "[%key:common::config_flow::data::api_key%]", - "fuel_types": "Fuel types", "location": "[%key:common::config_flow::data::location%]", "stations": "Additional fuel stations", "radius": "Search radius" diff --git a/homeassistant/components/tasmota/manifest.json b/homeassistant/components/tasmota/manifest.json index 783483c6ffd..2e0d8af2338 100644 --- a/homeassistant/components/tasmota/manifest.json +++ 
b/homeassistant/components/tasmota/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["hatasmota"], "mqtt": ["tasmota/discovery/#"], - "requirements": ["HATasmota==0.9.2"] + "requirements": ["HATasmota==0.10.0"] } diff --git a/homeassistant/components/tedee/binary_sensor.py b/homeassistant/components/tedee/binary_sensor.py index a01b889ef8f..6570d9c5428 100644 --- a/homeassistant/components/tedee/binary_sensor.py +++ b/homeassistant/components/tedee/binary_sensor.py @@ -41,7 +41,7 @@ ENTITIES: tuple[TedeeBinarySensorEntityDescription, ...] = ( TedeeBinarySensorEntityDescription( key="semi_locked", translation_key="semi_locked", - is_on_fn=lambda lock: lock.state == TedeeLockState.HALF_OPEN, + is_on_fn=lambda lock: lock.state is TedeeLockState.HALF_OPEN, entity_category=EntityCategory.DIAGNOSTIC, ), TedeeBinarySensorEntityDescription( @@ -53,7 +53,10 @@ ENTITIES: tuple[TedeeBinarySensorEntityDescription, ...] = ( TedeeBinarySensorEntityDescription( key="uncalibrated", translation_key="uncalibrated", - is_on_fn=lambda lock: lock.state == TedeeLockState.UNCALIBRATED, + is_on_fn=( + lambda lock: lock.state is TedeeLockState.UNCALIBRATED + or lock.state is TedeeLockState.UNKNOWN + ), device_class=BinarySensorDeviceClass.PROBLEM, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index b3c09049ae5..15e1f7d4f0e 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -548,7 +548,7 @@ class TelegramNotificationService: """Initialize the service.""" self.allowed_chat_ids = allowed_chat_ids self._default_user = self.allowed_chat_ids[0] - self._last_message_id = {user: None for user in self.allowed_chat_ids} + self._last_message_id = dict.fromkeys(self.allowed_chat_ids) self._parsers = { PARSER_HTML: ParseMode.HTML, PARSER_MD: ParseMode.MARKDOWN, diff --git a/homeassistant/components/template/alarm_control_panel.py b/homeassistant/components/template/alarm_control_panel.py index 0a468994295..40206a5ccbb 100644 --- a/homeassistant/components/template/alarm_control_panel.py +++ b/homeassistant/components/template/alarm_control_panel.py @@ -36,7 +36,6 @@ from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, ) from homeassistant.helpers.restore_state import RestoreEntity -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import slugify @@ -199,70 +198,31 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity, Restore name = self._attr_name assert name is not None self._template = config.get(CONF_VALUE_TEMPLATE) - self._disarm_script = None self._attr_code_arm_required: bool = config[CONF_CODE_ARM_REQUIRED] self._attr_code_format = config[CONF_CODE_FORMAT].value - if (disarm_action := config.get(CONF_DISARM_ACTION)) is not None: - self._disarm_script = Script(hass, disarm_action, name, DOMAIN) - self._arm_away_script = None - if (arm_away_action := config.get(CONF_ARM_AWAY_ACTION)) is not None: - self._arm_away_script = Script(hass, arm_away_action, name, DOMAIN) - self._arm_home_script = None - if (arm_home_action := config.get(CONF_ARM_HOME_ACTION)) is not None: - self._arm_home_script = Script(hass, arm_home_action, name, DOMAIN) - self._arm_night_script = None - if (arm_night_action := config.get(CONF_ARM_NIGHT_ACTION)) is not None: - 
self._arm_night_script = Script(hass, arm_night_action, name, DOMAIN) - self._arm_vacation_script = None - if (arm_vacation_action := config.get(CONF_ARM_VACATION_ACTION)) is not None: - self._arm_vacation_script = Script(hass, arm_vacation_action, name, DOMAIN) - self._arm_custom_bypass_script = None - if ( - arm_custom_bypass_action := config.get(CONF_ARM_CUSTOM_BYPASS_ACTION) - ) is not None: - self._arm_custom_bypass_script = Script( - hass, arm_custom_bypass_action, name, DOMAIN - ) - self._trigger_script = None - if (trigger_action := config.get(CONF_TRIGGER_ACTION)) is not None: - self._trigger_script = Script(hass, trigger_action, name, DOMAIN) + + self._attr_supported_features = AlarmControlPanelEntityFeature(0) + for action_id, supported_feature in ( + (CONF_DISARM_ACTION, 0), + (CONF_ARM_AWAY_ACTION, AlarmControlPanelEntityFeature.ARM_AWAY), + (CONF_ARM_HOME_ACTION, AlarmControlPanelEntityFeature.ARM_HOME), + (CONF_ARM_NIGHT_ACTION, AlarmControlPanelEntityFeature.ARM_NIGHT), + (CONF_ARM_VACATION_ACTION, AlarmControlPanelEntityFeature.ARM_VACATION), + ( + CONF_ARM_CUSTOM_BYPASS_ACTION, + AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS, + ), + (CONF_TRIGGER_ACTION, AlarmControlPanelEntityFeature.TRIGGER), + ): + if action_config := config.get(action_id): + self.add_script(action_id, action_config, name, DOMAIN) + self._attr_supported_features |= supported_feature self._state: AlarmControlPanelState | None = None self._attr_device_info = async_device_info_to_link_from_device_id( hass, config.get(CONF_DEVICE_ID), ) - supported_features = AlarmControlPanelEntityFeature(0) - if self._arm_night_script is not None: - supported_features = ( - supported_features | AlarmControlPanelEntityFeature.ARM_NIGHT - ) - - if self._arm_home_script is not None: - supported_features = ( - supported_features | AlarmControlPanelEntityFeature.ARM_HOME - ) - - if self._arm_away_script is not None: - supported_features = ( - supported_features | AlarmControlPanelEntityFeature.ARM_AWAY - ) - - if self._arm_vacation_script is not None: - supported_features = ( - supported_features | AlarmControlPanelEntityFeature.ARM_VACATION - ) - - if self._arm_custom_bypass_script is not None: - supported_features = ( - supported_features | AlarmControlPanelEntityFeature.ARM_CUSTOM_BYPASS - ) - - if self._trigger_script is not None: - supported_features = ( - supported_features | AlarmControlPanelEntityFeature.TRIGGER - ) - self._attr_supported_features = supported_features async def async_added_to_hass(self) -> None: """Restore last state.""" @@ -330,7 +290,7 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity, Restore """Arm the panel to Away.""" await self._async_alarm_arm( AlarmControlPanelState.ARMED_AWAY, - script=self._arm_away_script, + script=self._action_scripts.get(CONF_ARM_AWAY_ACTION), code=code, ) @@ -338,7 +298,7 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity, Restore """Arm the panel to Home.""" await self._async_alarm_arm( AlarmControlPanelState.ARMED_HOME, - script=self._arm_home_script, + script=self._action_scripts.get(CONF_ARM_HOME_ACTION), code=code, ) @@ -346,7 +306,7 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity, Restore """Arm the panel to Night.""" await self._async_alarm_arm( AlarmControlPanelState.ARMED_NIGHT, - script=self._arm_night_script, + script=self._action_scripts.get(CONF_ARM_NIGHT_ACTION), code=code, ) @@ -354,7 +314,7 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity, Restore 
"""Arm the panel to Vacation.""" await self._async_alarm_arm( AlarmControlPanelState.ARMED_VACATION, - script=self._arm_vacation_script, + script=self._action_scripts.get(CONF_ARM_VACATION_ACTION), code=code, ) @@ -362,20 +322,22 @@ class AlarmControlPanelTemplate(TemplateEntity, AlarmControlPanelEntity, Restore """Arm the panel to Custom Bypass.""" await self._async_alarm_arm( AlarmControlPanelState.ARMED_CUSTOM_BYPASS, - script=self._arm_custom_bypass_script, + script=self._action_scripts.get(CONF_ARM_CUSTOM_BYPASS_ACTION), code=code, ) async def async_alarm_disarm(self, code: str | None = None) -> None: """Disarm the panel.""" await self._async_alarm_arm( - AlarmControlPanelState.DISARMED, script=self._disarm_script, code=code + AlarmControlPanelState.DISARMED, + script=self._action_scripts.get(CONF_DISARM_ACTION), + code=code, ) async def async_alarm_trigger(self, code: str | None = None) -> None: """Trigger the panel.""" await self._async_alarm_arm( AlarmControlPanelState.TRIGGERED, - script=self._trigger_script, + script=self._action_scripts.get(CONF_TRIGGER_ACTION), code=code, ) diff --git a/homeassistant/components/template/button.py b/homeassistant/components/template/button.py index f43fc242bba..7a205446585 100644 --- a/homeassistant/components/template/button.py +++ b/homeassistant/components/template/button.py @@ -23,7 +23,6 @@ from homeassistant.helpers.entity_platform import ( AddConfigEntryEntitiesCallback, AddEntitiesCallback, ) -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import CONF_PRESS, DOMAIN @@ -121,11 +120,8 @@ class TemplateButtonEntity(TemplateEntity, ButtonEntity): """Initialize the button.""" super().__init__(hass, config=config, unique_id=unique_id) assert self._attr_name is not None - self._command_press = ( - Script(hass, config.get(CONF_PRESS), self._attr_name, DOMAIN) - if config.get(CONF_PRESS, None) is not None - else None - ) + if action := config.get(CONF_PRESS): + self.add_script(CONF_PRESS, action, self._attr_name, DOMAIN) self._attr_device_class = config.get(CONF_DEVICE_CLASS) self._attr_state = None self._attr_device_info = async_device_info_to_link_from_device_id( @@ -135,5 +131,5 @@ class TemplateButtonEntity(TemplateEntity, ButtonEntity): async def async_press(self) -> None: """Press the button.""" - if self._command_press: - await self.async_run_script(self._command_press, context=self._context) + if script := self._action_scripts.get(CONF_PRESS): + await self.async_run_script(script, context=self._context) diff --git a/homeassistant/components/template/config.py b/homeassistant/components/template/config.py index e0c5514def9..4e07d67f6e9 100644 --- a/homeassistant/components/template/config.py +++ b/homeassistant/components/template/config.py @@ -1,5 +1,6 @@ """Template config validator.""" +from collections.abc import Callable from contextlib import suppress import logging @@ -12,9 +13,11 @@ from homeassistant.components.blueprint import ( ) from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN from homeassistant.components.select import DOMAIN as SELECT_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.weather import DOMAIN 
as WEATHER_DOMAIN from homeassistant.config import async_log_schema_error, config_without_domain from homeassistant.const import ( @@ -35,9 +38,11 @@ from . import ( binary_sensor as binary_sensor_platform, button as button_platform, image as image_platform, + light as light_platform, number as number_platform, select as select_platform, sensor as sensor_platform, + switch as switch_platform, weather as weather_platform, ) from .const import ( @@ -52,41 +57,71 @@ from .helpers import async_get_blueprints PACKAGE_MERGE_HINT = "list" + +def ensure_domains_do_not_have_trigger_or_action(*keys: str) -> Callable[[dict], dict]: + """Validate that config does not contain trigger and action.""" + domains = set(keys) + + def validate(obj: dict): + options = set(obj.keys()) + if found_domains := domains.intersection(options): + invalid = {CONF_TRIGGER, CONF_ACTION} + if found_invalid := invalid.intersection(set(obj.keys())): + raise vol.Invalid( + f"Unsupported option(s) found for domain {found_domains.pop()}, please remove ({', '.join(found_invalid)}) from your configuration", + ) + + return obj + + return validate + + CONFIG_SECTION_SCHEMA = vol.Schema( - { - vol.Optional(CONF_UNIQUE_ID): cv.string, - vol.Optional(CONF_TRIGGER): cv.TRIGGER_SCHEMA, - vol.Optional(CONF_CONDITION): cv.CONDITIONS_SCHEMA, - vol.Optional(CONF_ACTION): cv.SCRIPT_SCHEMA, - vol.Optional(CONF_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA, - vol.Optional(NUMBER_DOMAIN): vol.All( - cv.ensure_list, [number_platform.NUMBER_SCHEMA] + vol.All( + { + vol.Optional(CONF_UNIQUE_ID): cv.string, + vol.Optional(CONF_TRIGGER): cv.TRIGGER_SCHEMA, + vol.Optional(CONF_CONDITION): cv.CONDITIONS_SCHEMA, + vol.Optional(CONF_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_VARIABLES): cv.SCRIPT_VARIABLES_SCHEMA, + vol.Optional(NUMBER_DOMAIN): vol.All( + cv.ensure_list, [number_platform.NUMBER_SCHEMA] + ), + vol.Optional(SENSOR_DOMAIN): vol.All( + cv.ensure_list, [sensor_platform.SENSOR_SCHEMA] + ), + vol.Optional(CONF_SENSORS): cv.schema_with_slug_keys( + sensor_platform.LEGACY_SENSOR_SCHEMA + ), + vol.Optional(BINARY_SENSOR_DOMAIN): vol.All( + cv.ensure_list, [binary_sensor_platform.BINARY_SENSOR_SCHEMA] + ), + vol.Optional(CONF_BINARY_SENSORS): cv.schema_with_slug_keys( + binary_sensor_platform.LEGACY_BINARY_SENSOR_SCHEMA + ), + vol.Optional(SELECT_DOMAIN): vol.All( + cv.ensure_list, [select_platform.SELECT_SCHEMA] + ), + vol.Optional(BUTTON_DOMAIN): vol.All( + cv.ensure_list, [button_platform.BUTTON_SCHEMA] + ), + vol.Optional(IMAGE_DOMAIN): vol.All( + cv.ensure_list, [image_platform.IMAGE_SCHEMA] + ), + vol.Optional(LIGHT_DOMAIN): vol.All( + cv.ensure_list, [light_platform.LIGHT_SCHEMA] + ), + vol.Optional(WEATHER_DOMAIN): vol.All( + cv.ensure_list, [weather_platform.WEATHER_SCHEMA] + ), + vol.Optional(SWITCH_DOMAIN): vol.All( + cv.ensure_list, [switch_platform.SWITCH_SCHEMA] + ), + }, + ensure_domains_do_not_have_trigger_or_action( + BUTTON_DOMAIN, LIGHT_DOMAIN, SWITCH_DOMAIN ), - vol.Optional(SENSOR_DOMAIN): vol.All( - cv.ensure_list, [sensor_platform.SENSOR_SCHEMA] - ), - vol.Optional(CONF_SENSORS): cv.schema_with_slug_keys( - sensor_platform.LEGACY_SENSOR_SCHEMA - ), - vol.Optional(BINARY_SENSOR_DOMAIN): vol.All( - cv.ensure_list, [binary_sensor_platform.BINARY_SENSOR_SCHEMA] - ), - vol.Optional(CONF_BINARY_SENSORS): cv.schema_with_slug_keys( - binary_sensor_platform.LEGACY_BINARY_SENSOR_SCHEMA - ), - vol.Optional(SELECT_DOMAIN): vol.All( - cv.ensure_list, [select_platform.SELECT_SCHEMA] - ), - vol.Optional(BUTTON_DOMAIN): vol.All( - 
cv.ensure_list, [button_platform.BUTTON_SCHEMA] - ), - vol.Optional(IMAGE_DOMAIN): vol.All( - cv.ensure_list, [image_platform.IMAGE_SCHEMA] - ), - vol.Optional(WEATHER_DOMAIN): vol.All( - cv.ensure_list, [weather_platform.WEATHER_SCHEMA] - ), - }, + ) ) TEMPLATE_BLUEPRINT_INSTANCE_SCHEMA = vol.Schema( @@ -122,9 +157,15 @@ async def _async_resolve_blueprints( raise vol.Invalid("more than one platform defined per blueprint") if len(platforms) == 1: platform = platforms.pop() - for prop in (CONF_NAME, CONF_UNIQUE_ID, CONF_VARIABLES): + for prop in (CONF_NAME, CONF_UNIQUE_ID): if prop in config: config[platform][prop] = config.pop(prop) + # For regular template entities, CONF_VARIABLES should be removed because they just + # house input results for template entities. For Trigger based template entities + # CONF_VARIABLES should not be removed because the variables are always + # executed between the trigger and action. + if CONF_TRIGGER not in config and CONF_VARIABLES in config: + config[platform][CONF_VARIABLES] = config.pop(CONF_VARIABLES) raw_config = dict(config) template_config = TemplateConfig(CONFIG_SECTION_SCHEMA(config)) diff --git a/homeassistant/components/template/coordinator.py b/homeassistant/components/template/coordinator.py index 4d8fe78f2b5..c11e9b6101b 100644 --- a/homeassistant/components/template/coordinator.py +++ b/homeassistant/components/template/coordinator.py @@ -2,12 +2,14 @@ from collections.abc import Callable, Mapping import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, cast -from homeassistant.const import EVENT_HOMEASSISTANT_START +from homeassistant.components.blueprint import CONF_USE_BLUEPRINT +from homeassistant.const import CONF_PATH, CONF_VARIABLES, EVENT_HOMEASSISTANT_START from homeassistant.core import Context, CoreState, Event, HomeAssistant, callback from homeassistant.helpers import condition, discovery, trigger as trigger_helper from homeassistant.helpers.script import Script +from homeassistant.helpers.script_variables import ScriptVariables from homeassistant.helpers.trace import trace_get from homeassistant.helpers.typing import ConfigType, TemplateVarsType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -22,7 +24,7 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): REMOVE_TRIGGER = object() - def __init__(self, hass: HomeAssistant, config: dict[str, Any]) -> None: + def __init__(self, hass: HomeAssistant, config: ConfigType) -> None: """Instantiate trigger data.""" super().__init__( hass, _LOGGER, config_entry=None, name="Trigger Update Coordinator" @@ -32,6 +34,18 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): self._unsub_start: Callable[[], None] | None = None self._unsub_trigger: Callable[[], None] | None = None self._script: Script | None = None + self._run_variables: ScriptVariables | None = None + self._blueprint_inputs: dict | None = None + if config is not None: + self._run_variables = config.get(CONF_VARIABLES) + self._blueprint_inputs = getattr(config, "raw_blueprint_inputs", None) + + @property + def referenced_blueprint(self) -> str | None: + """Return referenced blueprint or None.""" + if self._blueprint_inputs is None: + return None + return cast(str, self._blueprint_inputs[CONF_USE_BLUEPRINT][CONF_PATH]) @property def unique_id(self) -> str | None: @@ -104,6 +118,10 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): async def _handle_triggered_with_script( self, run_variables: TemplateVarsType, context: Context | None = None ) -> 
None: + # Render run variables after the trigger, before checking conditions. + if self._run_variables: + run_variables = self._run_variables.async_render(self.hass, run_variables) + if not self._check_condition(run_variables): return # Create a context referring to the trigger context. @@ -119,6 +137,9 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): async def _handle_triggered( self, run_variables: TemplateVarsType, context: Context | None = None ) -> None: + if self._run_variables: + run_variables = self._run_variables.async_render(self.hass, run_variables) + if not self._check_condition(run_variables): return self._execute_update(run_variables, context) diff --git a/homeassistant/components/template/cover.py b/homeassistant/components/template/cover.py index 306b4405c6a..7a8e347ee8f 100644 --- a/homeassistant/components/template/cover.py +++ b/homeassistant/components/template/cover.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol @@ -30,7 +30,6 @@ from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DOMAIN @@ -103,7 +102,7 @@ PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend( ) -async def _async_create_entities(hass, config): +async def _async_create_entities(hass: HomeAssistant, config): """Create the Template cover.""" covers = [] @@ -141,11 +140,11 @@ class CoverTemplate(TemplateEntity, CoverEntity): def __init__( self, - hass, + hass: HomeAssistant, object_id, - config, + config: dict[str, Any], unique_id, - ): + ) -> None: """Initialize the Template cover.""" super().__init__( hass, config=config, fallback_name=object_id, unique_id=unique_id @@ -153,45 +152,40 @@ class CoverTemplate(TemplateEntity, CoverEntity): self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, object_id, hass=hass ) - friendly_name = self._attr_name + name = self._attr_name + if TYPE_CHECKING: + assert name is not None self._template = config.get(CONF_VALUE_TEMPLATE) self._position_template = config.get(CONF_POSITION_TEMPLATE) self._tilt_template = config.get(CONF_TILT_TEMPLATE) self._attr_device_class = config.get(CONF_DEVICE_CLASS) - self._open_script = None - if (open_action := config.get(OPEN_ACTION)) is not None: - self._open_script = Script(hass, open_action, friendly_name, DOMAIN) - self._close_script = None - if (close_action := config.get(CLOSE_ACTION)) is not None: - self._close_script = Script(hass, close_action, friendly_name, DOMAIN) - self._stop_script = None - if (stop_action := config.get(STOP_ACTION)) is not None: - self._stop_script = Script(hass, stop_action, friendly_name, DOMAIN) - self._position_script = None - if (position_action := config.get(POSITION_ACTION)) is not None: - self._position_script = Script(hass, position_action, friendly_name, DOMAIN) - self._tilt_script = None - if (tilt_action := config.get(TILT_ACTION)) is not None: - self._tilt_script = Script(hass, tilt_action, friendly_name, DOMAIN) + + # The config requires (open and close scripts) or a set position script, + # therefore the base supported features will always include them. 
+ self._attr_supported_features = ( + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + ) + for action_id, supported_feature in ( + (OPEN_ACTION, 0), + (CLOSE_ACTION, 0), + (STOP_ACTION, CoverEntityFeature.STOP), + (POSITION_ACTION, CoverEntityFeature.SET_POSITION), + (TILT_ACTION, TILT_FEATURES), + ): + if action_config := config.get(action_id): + self.add_script(action_id, action_config, name, DOMAIN) + self._attr_supported_features |= supported_feature + optimistic = config.get(CONF_OPTIMISTIC) self._optimistic = optimistic or ( optimistic is None and not self._template and not self._position_template ) tilt_optimistic = config.get(CONF_TILT_OPTIMISTIC) self._tilt_optimistic = tilt_optimistic or not self._tilt_template - self._position = None + self._position: int | None = None self._is_opening = False self._is_closing = False - self._tilt_value = None - - supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE - if self._stop_script is not None: - supported_features |= CoverEntityFeature.STOP - if self._position_script is not None: - supported_features |= CoverEntityFeature.SET_POSITION - if self._tilt_script is not None: - supported_features |= TILT_FEATURES - self._attr_supported_features = supported_features + self._tilt_value: int | None = None @callback def _async_setup_templates(self) -> None: @@ -317,7 +311,7 @@ class CoverTemplate(TemplateEntity, CoverEntity): None is unknown, 0 is closed, 100 is fully open. """ - if self._position_template or self._position_script: + if self._position_template or self._action_scripts.get(POSITION_ACTION): return self._position return None @@ -331,11 +325,11 @@ class CoverTemplate(TemplateEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Move the cover up.""" - if self._open_script: - await self.async_run_script(self._open_script, context=self._context) - elif self._position_script: + if open_script := self._action_scripts.get(OPEN_ACTION): + await self.async_run_script(open_script, context=self._context) + elif position_script := self._action_scripts.get(POSITION_ACTION): await self.async_run_script( - self._position_script, + position_script, run_variables={"position": 100}, context=self._context, ) @@ -345,11 +339,11 @@ class CoverTemplate(TemplateEntity, CoverEntity): async def async_close_cover(self, **kwargs: Any) -> None: """Move the cover down.""" - if self._close_script: - await self.async_run_script(self._close_script, context=self._context) - elif self._position_script: + if close_script := self._action_scripts.get(CLOSE_ACTION): + await self.async_run_script(close_script, context=self._context) + elif position_script := self._action_scripts.get(POSITION_ACTION): await self.async_run_script( - self._position_script, + position_script, run_variables={"position": 0}, context=self._context, ) @@ -359,14 +353,14 @@ class CoverTemplate(TemplateEntity, CoverEntity): async def async_stop_cover(self, **kwargs: Any) -> None: """Fire the stop action.""" - if self._stop_script: - await self.async_run_script(self._stop_script, context=self._context) + if stop_script := self._action_scripts.get(STOP_ACTION): + await self.async_run_script(stop_script, context=self._context) async def async_set_cover_position(self, **kwargs: Any) -> None: """Set cover position.""" self._position = kwargs[ATTR_POSITION] await self.async_run_script( - self._position_script, + self._action_scripts[POSITION_ACTION], run_variables={"position": self._position}, context=self._context, ) @@ -377,7 +371,7 @@ class 
CoverTemplate(TemplateEntity, CoverEntity): """Tilt the cover open.""" self._tilt_value = 100 await self.async_run_script( - self._tilt_script, + self._action_scripts[TILT_ACTION], run_variables={"tilt": self._tilt_value}, context=self._context, ) @@ -388,7 +382,7 @@ class CoverTemplate(TemplateEntity, CoverEntity): """Tilt the cover closed.""" self._tilt_value = 0 await self.async_run_script( - self._tilt_script, + self._action_scripts[TILT_ACTION], run_variables={"tilt": self._tilt_value}, context=self._context, ) @@ -399,7 +393,7 @@ class CoverTemplate(TemplateEntity, CoverEntity): """Move the cover tilt to a specific position.""" self._tilt_value = kwargs[ATTR_TILT_POSITION] await self.async_run_script( - self._tilt_script, + self._action_scripts[TILT_ACTION], run_variables={"tilt": self._tilt_value}, context=self._context, ) diff --git a/homeassistant/components/template/entity.py b/homeassistant/components/template/entity.py new file mode 100644 index 00000000000..3617d9acdee --- /dev/null +++ b/homeassistant/components/template/entity.py @@ -0,0 +1,64 @@ +"""Template entity base class.""" + +from collections.abc import Sequence +from typing import Any + +from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.helpers.entity import Entity +from homeassistant.helpers.script import Script, _VarsType +from homeassistant.helpers.template import TemplateStateFromEntityId + + +class AbstractTemplateEntity(Entity): + """Actions linked to a template entity.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize the entity.""" + + self.hass = hass + self._action_scripts: dict[str, Script] = {} + + @property + def referenced_blueprint(self) -> str | None: + """Return referenced blueprint or None.""" + raise NotImplementedError + + @callback + def _render_script_variables(self) -> dict: + """Render configured variables.""" + raise NotImplementedError + + def add_script( + self, + script_id: str, + config: Sequence[dict[str, Any]], + name: str, + domain: str, + ): + """Add an action script.""" + + self._action_scripts[script_id] = Script( + self.hass, + config, + f"{name} {script_id}", + domain, + ) + + async def async_run_script( + self, + script: Script, + *, + run_variables: _VarsType | None = None, + context: Context | None = None, + ) -> None: + """Run an action script.""" + if run_variables is None: + run_variables = {} + await script.async_run( + run_variables={ + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + **self._render_script_variables(), + **run_variables, + }, + context=context, + ) diff --git a/homeassistant/components/template/fan.py b/homeassistant/components/template/fan.py index 6ed525fd45f..6e0f9fe5e0c 100644 --- a/homeassistant/components/template/fan.py +++ b/homeassistant/components/template/fan.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol @@ -32,7 +32,6 @@ from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DOMAIN @@ -89,7 +88,7 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend( ) -async def _async_create_entities(hass, config): +async def _async_create_entities(hass: 
HomeAssistant, config): """Create the Template Fans.""" fans = [] @@ -127,11 +126,11 @@ class TemplateFan(TemplateEntity, FanEntity): def __init__( self, - hass, + hass: HomeAssistant, object_id, - config, + config: dict[str, Any], unique_id, - ): + ) -> None: """Initialize the fan.""" super().__init__( hass, config=config, fallback_name=object_id, unique_id=unique_id @@ -140,7 +139,9 @@ class TemplateFan(TemplateEntity, FanEntity): self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, object_id, hass=hass ) - friendly_name = self._attr_name + name = self._attr_name + if TYPE_CHECKING: + assert name is not None self._template = config.get(CONF_VALUE_TEMPLATE) self._percentage_template = config.get(CONF_PERCENTAGE_TEMPLATE) @@ -148,44 +149,28 @@ class TemplateFan(TemplateEntity, FanEntity): self._oscillating_template = config.get(CONF_OSCILLATING_TEMPLATE) self._direction_template = config.get(CONF_DIRECTION_TEMPLATE) - self._on_script = Script(hass, config[CONF_ON_ACTION], friendly_name, DOMAIN) - self._off_script = Script(hass, config[CONF_OFF_ACTION], friendly_name, DOMAIN) - - self._set_percentage_script = None - if set_percentage_action := config.get(CONF_SET_PERCENTAGE_ACTION): - self._set_percentage_script = Script( - hass, set_percentage_action, friendly_name, DOMAIN - ) - - self._set_preset_mode_script = None - if set_preset_mode_action := config.get(CONF_SET_PRESET_MODE_ACTION): - self._set_preset_mode_script = Script( - hass, set_preset_mode_action, friendly_name, DOMAIN - ) - - self._set_oscillating_script = None - if set_oscillating_action := config.get(CONF_SET_OSCILLATING_ACTION): - self._set_oscillating_script = Script( - hass, set_oscillating_action, friendly_name, DOMAIN - ) - - self._set_direction_script = None - if set_direction_action := config.get(CONF_SET_DIRECTION_ACTION): - self._set_direction_script = Script( - hass, set_direction_action, friendly_name, DOMAIN - ) + for action_id in ( + CONF_ON_ACTION, + CONF_OFF_ACTION, + CONF_SET_PERCENTAGE_ACTION, + CONF_SET_PRESET_MODE_ACTION, + CONF_SET_OSCILLATING_ACTION, + CONF_SET_DIRECTION_ACTION, + ): + if action_config := config.get(action_id): + self.add_script(action_id, action_config, name, DOMAIN) self._state: bool | None = False - self._percentage = None - self._preset_mode = None - self._oscillating = None - self._direction = None + self._percentage: int | None = None + self._preset_mode: str | None = None + self._oscillating: bool | None = None + self._direction: str | None = None # Number of valid speeds self._speed_count = config.get(CONF_SPEED_COUNT) # List of valid preset modes - self._preset_modes = config.get(CONF_PRESET_MODES) + self._preset_modes: list[str] | None = config.get(CONF_PRESET_MODES) if self._percentage_template: self._attr_supported_features |= FanEntityFeature.SET_SPEED @@ -207,7 +192,7 @@ class TemplateFan(TemplateEntity, FanEntity): return self._speed_count or 100 @property - def preset_modes(self) -> list[str]: + def preset_modes(self) -> list[str] | None: """Get the list of available preset modes.""" return self._preset_modes @@ -244,7 +229,7 @@ class TemplateFan(TemplateEntity, FanEntity): ) -> None: """Turn on the fan.""" await self.async_run_script( - self._on_script, + self._action_scripts[CONF_ON_ACTION], run_variables={ ATTR_PERCENTAGE: percentage, ATTR_PRESET_MODE: preset_mode, @@ -263,7 +248,9 @@ class TemplateFan(TemplateEntity, FanEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the fan.""" - await self.async_run_script(self._off_script, 
context=self._context) + await self.async_run_script( + self._action_scripts[CONF_OFF_ACTION], context=self._context + ) if self._template is None: self._state = False @@ -273,9 +260,9 @@ class TemplateFan(TemplateEntity, FanEntity): """Set the percentage speed of the fan.""" self._percentage = percentage - if self._set_percentage_script: + if script := self._action_scripts.get(CONF_SET_PERCENTAGE_ACTION): await self.async_run_script( - self._set_percentage_script, + script, run_variables={ATTR_PERCENTAGE: self._percentage}, context=self._context, ) @@ -288,9 +275,9 @@ class TemplateFan(TemplateEntity, FanEntity): """Set the preset_mode of the fan.""" self._preset_mode = preset_mode - if self._set_preset_mode_script: + if script := self._action_scripts.get(CONF_SET_PRESET_MODE_ACTION): await self.async_run_script( - self._set_preset_mode_script, + script, run_variables={ATTR_PRESET_MODE: self._preset_mode}, context=self._context, ) @@ -301,25 +288,25 @@ class TemplateFan(TemplateEntity, FanEntity): async def async_oscillate(self, oscillating: bool) -> None: """Set oscillation of the fan.""" - if self._set_oscillating_script is None: + if (script := self._action_scripts.get(CONF_SET_OSCILLATING_ACTION)) is None: return self._oscillating = oscillating await self.async_run_script( - self._set_oscillating_script, + script, run_variables={ATTR_OSCILLATING: self.oscillating}, context=self._context, ) async def async_set_direction(self, direction: str) -> None: """Set the direction of the fan.""" - if self._set_direction_script is None: + if (script := self._action_scripts.get(CONF_SET_DIRECTION_ACTION)) is None: return if direction in _VALID_DIRECTIONS: self._direction = direction await self.async_run_script( - self._set_direction_script, + script, run_variables={ATTR_DIRECTION: direction}, context=self._context, ) diff --git a/homeassistant/components/template/helpers.py b/homeassistant/components/template/helpers.py index b320f2128cd..d74a4a4ed00 100644 --- a/homeassistant/components/template/helpers.py +++ b/homeassistant/components/template/helpers.py @@ -9,7 +9,7 @@ from homeassistant.helpers.entity_platform import async_get_platforms from homeassistant.helpers.singleton import singleton from .const import DOMAIN, TEMPLATE_BLUEPRINT_SCHEMA -from .template_entity import TemplateEntity +from .entity import AbstractTemplateEntity DATA_BLUEPRINTS = "template_blueprints" @@ -23,7 +23,7 @@ def templates_with_blueprint(hass: HomeAssistant, blueprint_path: str) -> list[s entity_id for platform in async_get_platforms(hass, DOMAIN) for entity_id, template_entity in platform.entities.items() - if isinstance(template_entity, TemplateEntity) + if isinstance(template_entity, AbstractTemplateEntity) and template_entity.referenced_blueprint == blueprint_path ] @@ -33,7 +33,8 @@ def blueprint_in_template(hass: HomeAssistant, entity_id: str) -> str | None: """Return the blueprint the template entity is based on or None.""" for platform in async_get_platforms(hass, DOMAIN): if isinstance( - (template_entity := platform.entities.get(entity_id)), TemplateEntity + (template_entity := platform.entities.get(entity_id)), + AbstractTemplateEntity, ): return template_entity.referenced_blueprint return None diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index 206703ddcce..1cc47c74aa0 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing 
import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol @@ -26,9 +26,13 @@ from homeassistant.components.light import ( filter_supported_color_modes, ) from homeassistant.const import ( + CONF_EFFECT, CONF_ENTITY_ID, CONF_FRIENDLY_NAME, CONF_LIGHTS, + CONF_NAME, + CONF_RGB, + CONF_STATE, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, STATE_OFF, @@ -36,16 +40,18 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, template from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import color as color_util -from .const import DOMAIN +from .const import CONF_OBJECT_ID, CONF_PICTURE, DOMAIN from .template_entity import ( + LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS, + TEMPLATE_ENTITY_AVAILABILITY_SCHEMA, TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY, + TEMPLATE_ENTITY_ICON_SCHEMA, TemplateEntity, rewrite_common_legacy_to_modern_conf, ) @@ -57,33 +63,96 @@ _VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"] CONF_COLOR_ACTION = "set_color" CONF_COLOR_TEMPLATE = "color_template" +CONF_HS = "hs" CONF_HS_ACTION = "set_hs" CONF_HS_TEMPLATE = "hs_template" CONF_RGB_ACTION = "set_rgb" CONF_RGB_TEMPLATE = "rgb_template" +CONF_RGBW = "rgbw" CONF_RGBW_ACTION = "set_rgbw" CONF_RGBW_TEMPLATE = "rgbw_template" +CONF_RGBWW = "rgbww" CONF_RGBWW_ACTION = "set_rgbww" CONF_RGBWW_TEMPLATE = "rgbww_template" CONF_EFFECT_ACTION = "set_effect" +CONF_EFFECT_LIST = "effect_list" CONF_EFFECT_LIST_TEMPLATE = "effect_list_template" CONF_EFFECT_TEMPLATE = "effect_template" +CONF_LEVEL = "level" CONF_LEVEL_ACTION = "set_level" CONF_LEVEL_TEMPLATE = "level_template" +CONF_MAX_MIREDS = "max_mireds" CONF_MAX_MIREDS_TEMPLATE = "max_mireds_template" +CONF_MIN_MIREDS = "min_mireds" CONF_MIN_MIREDS_TEMPLATE = "min_mireds_template" CONF_OFF_ACTION = "turn_off" CONF_ON_ACTION = "turn_on" -CONF_SUPPORTS_TRANSITION = "supports_transition_template" +CONF_SUPPORTS_TRANSITION = "supports_transition" +CONF_SUPPORTS_TRANSITION_TEMPLATE = "supports_transition_template" CONF_TEMPERATURE_ACTION = "set_temperature" +CONF_TEMPERATURE = "temperature" CONF_TEMPERATURE_TEMPLATE = "temperature_template" CONF_WHITE_VALUE_ACTION = "set_white_value" +CONF_WHITE_VALUE = "white_value" CONF_WHITE_VALUE_TEMPLATE = "white_value_template" DEFAULT_MIN_MIREDS = 153 DEFAULT_MAX_MIREDS = 500 -LIGHT_SCHEMA = vol.All( +LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | { + CONF_COLOR_ACTION: CONF_HS_ACTION, + CONF_COLOR_TEMPLATE: CONF_HS, + CONF_EFFECT_LIST_TEMPLATE: CONF_EFFECT_LIST, + CONF_EFFECT_TEMPLATE: CONF_EFFECT, + CONF_HS_TEMPLATE: CONF_HS, + CONF_LEVEL_TEMPLATE: CONF_LEVEL, + CONF_MAX_MIREDS_TEMPLATE: CONF_MAX_MIREDS, + CONF_MIN_MIREDS_TEMPLATE: CONF_MIN_MIREDS, + CONF_RGB_TEMPLATE: CONF_RGB, + CONF_RGBW_TEMPLATE: CONF_RGBW, + CONF_RGBWW_TEMPLATE: CONF_RGBWW, + CONF_SUPPORTS_TRANSITION_TEMPLATE: CONF_SUPPORTS_TRANSITION, + CONF_TEMPERATURE_TEMPLATE: CONF_TEMPERATURE, + CONF_VALUE_TEMPLATE: CONF_STATE, + CONF_WHITE_VALUE_TEMPLATE: CONF_WHITE_VALUE, +} + +DEFAULT_NAME = "Template Light" + +LIGHT_SCHEMA = ( + vol.Schema( + { + vol.Inclusive(CONF_EFFECT_ACTION, "effect"): cv.SCRIPT_SCHEMA, + vol.Inclusive(CONF_EFFECT_LIST, 
"effect"): cv.template, + vol.Inclusive(CONF_EFFECT, "effect"): cv.template, + vol.Optional(CONF_HS_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_HS): cv.template, + vol.Optional(CONF_LEVEL_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_LEVEL): cv.template, + vol.Optional(CONF_MAX_MIREDS): cv.template, + vol.Optional(CONF_MIN_MIREDS): cv.template, + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.template, + vol.Optional(CONF_PICTURE): cv.template, + vol.Optional(CONF_RGB_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGB): cv.template, + vol.Optional(CONF_RGBW_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGBW): cv.template, + vol.Optional(CONF_RGBWW_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_RGBWW): cv.template, + vol.Optional(CONF_STATE): cv.template, + vol.Optional(CONF_SUPPORTS_TRANSITION): cv.template, + vol.Optional(CONF_TEMPERATURE_ACTION): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_TEMPERATURE): cv.template, + vol.Optional(CONF_UNIQUE_ID): cv.string, + vol.Required(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA, + vol.Required(CONF_ON_ACTION): cv.SCRIPT_SCHEMA, + } + ) + .extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA.schema) + .extend(TEMPLATE_ENTITY_ICON_SCHEMA.schema) +) + +LEGACY_LIGHT_SCHEMA = vol.All( cv.deprecated(CONF_ENTITY_ID), vol.Schema( { @@ -108,7 +177,7 @@ LIGHT_SCHEMA = vol.All( vol.Optional(CONF_MIN_MIREDS_TEMPLATE): cv.template, vol.Required(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA, vol.Required(CONF_ON_ACTION): cv.SCRIPT_SCHEMA, - vol.Optional(CONF_SUPPORTS_TRANSITION): cv.template, + vol.Optional(CONF_SUPPORTS_TRANSITION_TEMPLATE): cv.template, vol.Optional(CONF_TEMPERATURE_ACTION): cv.SCRIPT_SCHEMA, vol.Optional(CONF_TEMPERATURE_TEMPLATE): cv.template, vol.Optional(CONF_UNIQUE_ID): cv.string, @@ -122,29 +191,50 @@ PLATFORM_SCHEMA = vol.All( cv.removed(CONF_WHITE_VALUE_ACTION), cv.removed(CONF_WHITE_VALUE_TEMPLATE), LIGHT_PLATFORM_SCHEMA.extend( - {vol.Required(CONF_LIGHTS): cv.schema_with_slug_keys(LIGHT_SCHEMA)} + {vol.Required(CONF_LIGHTS): cv.schema_with_slug_keys(LEGACY_LIGHT_SCHEMA)} ), ) -async def _async_create_entities(hass, config): +def rewrite_legacy_to_modern_conf( + hass: HomeAssistant, config: dict[str, dict] +) -> list[dict]: + """Rewrite legacy switch configuration definitions to modern ones.""" + lights = [] + for object_id, entity_conf in config.items(): + entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id} + + entity_conf = rewrite_common_legacy_to_modern_conf( + hass, entity_conf, LEGACY_FIELDS + ) + + if CONF_NAME not in entity_conf: + entity_conf[CONF_NAME] = template.Template(object_id, hass) + + lights.append(entity_conf) + + return lights + + +@callback +def _async_create_template_tracking_entities( + async_add_entities: AddEntitiesCallback, + hass: HomeAssistant, + definitions: list[dict], + unique_id_prefix: str | None, +) -> None: """Create the Template Lights.""" lights = [] - for object_id, entity_config in config[CONF_LIGHTS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) - unique_id = entity_config.get(CONF_UNIQUE_ID) + for entity_conf in definitions: + unique_id = entity_conf.get(CONF_UNIQUE_ID) - lights.append( - LightTemplate( - hass, - object_id, - entity_config, - unique_id, - ) - ) + if unique_id and unique_id_prefix: + unique_id = f"{unique_id_prefix}-{unique_id}" - return lights + lights.append(LightTemplate(hass, entity_conf, unique_id)) + + async_add_entities(lights) async def async_setup_platform( @@ -154,7 +244,21 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = 
None, ) -> None: """Set up the template lights.""" - async_add_entities(await _async_create_entities(hass, config)) + if discovery_info is None: + _async_create_template_tracking_entities( + async_add_entities, + hass, + rewrite_legacy_to_modern_conf(hass, config[CONF_LIGHTS]), + None, + ) + return + + _async_create_template_tracking_entities( + async_add_entities, + hass, + discovery_info["entities"], + discovery_info["unique_id"], + ) class LightTemplate(TemplateEntity, LightEntity): @@ -164,64 +268,40 @@ class LightTemplate(TemplateEntity, LightEntity): def __init__( self, - hass, - object_id, - config, - unique_id, - ): + hass: HomeAssistant, + config: dict[str, Any], + unique_id: str | None, + ) -> None: """Initialize the light.""" - super().__init__( - hass, config=config, fallback_name=object_id, unique_id=unique_id - ) - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, object_id, hass=hass - ) - friendly_name = self._attr_name - self._template = config.get(CONF_VALUE_TEMPLATE) - self._on_script = Script(hass, config[CONF_ON_ACTION], friendly_name, DOMAIN) - self._off_script = Script(hass, config[CONF_OFF_ACTION], friendly_name, DOMAIN) - self._level_script = None - if (level_action := config.get(CONF_LEVEL_ACTION)) is not None: - self._level_script = Script(hass, level_action, friendly_name, DOMAIN) - self._level_template = config.get(CONF_LEVEL_TEMPLATE) - self._temperature_script = None - if (temperature_action := config.get(CONF_TEMPERATURE_ACTION)) is not None: - self._temperature_script = Script( - hass, temperature_action, friendly_name, DOMAIN + super().__init__(hass, config=config, fallback_name=None, unique_id=unique_id) + if (object_id := config.get(CONF_OBJECT_ID)) is not None: + self.entity_id = async_generate_entity_id( + ENTITY_ID_FORMAT, object_id, hass=hass ) - self._temperature_template = config.get(CONF_TEMPERATURE_TEMPLATE) - self._color_script = None - if (color_action := config.get(CONF_COLOR_ACTION)) is not None: - self._color_script = Script(hass, color_action, friendly_name, DOMAIN) - self._color_template = config.get(CONF_COLOR_TEMPLATE) - self._hs_script = None - if (hs_action := config.get(CONF_HS_ACTION)) is not None: - self._hs_script = Script(hass, hs_action, friendly_name, DOMAIN) - self._hs_template = config.get(CONF_HS_TEMPLATE) - self._rgb_script = None - if (rgb_action := config.get(CONF_RGB_ACTION)) is not None: - self._rgb_script = Script(hass, rgb_action, friendly_name, DOMAIN) - self._rgb_template = config.get(CONF_RGB_TEMPLATE) - self._rgbw_script = None - if (rgbw_action := config.get(CONF_RGBW_ACTION)) is not None: - self._rgbw_script = Script(hass, rgbw_action, friendly_name, DOMAIN) - self._rgbw_template = config.get(CONF_RGBW_TEMPLATE) - self._rgbww_script = None - if (rgbww_action := config.get(CONF_RGBWW_ACTION)) is not None: - self._rgbww_script = Script(hass, rgbww_action, friendly_name, DOMAIN) - self._rgbww_template = config.get(CONF_RGBWW_TEMPLATE) - self._effect_script = None - if (effect_action := config.get(CONF_EFFECT_ACTION)) is not None: - self._effect_script = Script(hass, effect_action, friendly_name, DOMAIN) - self._effect_list_template = config.get(CONF_EFFECT_LIST_TEMPLATE) - self._effect_template = config.get(CONF_EFFECT_TEMPLATE) - self._max_mireds_template = config.get(CONF_MAX_MIREDS_TEMPLATE) - self._min_mireds_template = config.get(CONF_MIN_MIREDS_TEMPLATE) + name = self._attr_name + if TYPE_CHECKING: + assert name is not None + + self._template = config.get(CONF_STATE) + self._level_template = 
config.get(CONF_LEVEL) + self._temperature_template = config.get(CONF_TEMPERATURE) + self._hs_template = config.get(CONF_HS) + self._rgb_template = config.get(CONF_RGB) + self._rgbw_template = config.get(CONF_RGBW) + self._rgbww_template = config.get(CONF_RGBWW) + self._effect_list_template = config.get(CONF_EFFECT_LIST) + self._effect_template = config.get(CONF_EFFECT) + self._max_mireds_template = config.get(CONF_MAX_MIREDS) + self._min_mireds_template = config.get(CONF_MIN_MIREDS) self._supports_transition_template = config.get(CONF_SUPPORTS_TRANSITION) + for action_id in (CONF_ON_ACTION, CONF_OFF_ACTION, CONF_EFFECT_ACTION): + if action_config := config.get(action_id): + self.add_script(action_id, action_config, name, DOMAIN) + self._state = False self._brightness = None - self._temperature = None + self._temperature: int | None = None self._hs_color = None self._rgb_color = None self._rgbw_color = None @@ -235,21 +315,17 @@ class LightTemplate(TemplateEntity, LightEntity): self._supported_color_modes = None color_modes = {ColorMode.ONOFF} - if self._level_script is not None: - color_modes.add(ColorMode.BRIGHTNESS) - if self._temperature_script is not None: - color_modes.add(ColorMode.COLOR_TEMP) - if self._hs_script is not None: - color_modes.add(ColorMode.HS) - if self._color_script is not None: - color_modes.add(ColorMode.HS) - if self._rgb_script is not None: - color_modes.add(ColorMode.RGB) - if self._rgbw_script is not None: - color_modes.add(ColorMode.RGBW) - if self._rgbww_script is not None: - color_modes.add(ColorMode.RGBWW) - + for action_id, color_mode in ( + (CONF_TEMPERATURE_ACTION, ColorMode.COLOR_TEMP), + (CONF_LEVEL_ACTION, ColorMode.BRIGHTNESS), + (CONF_HS_ACTION, ColorMode.HS), + (CONF_RGB_ACTION, ColorMode.RGB), + (CONF_RGBW_ACTION, ColorMode.RGBW), + (CONF_RGBWW_ACTION, ColorMode.RGBWW), + ): + if action_config := config.get(action_id): + self.add_script(action_id, action_config, name, DOMAIN) + color_modes.add(color_mode) self._supported_color_modes = filter_supported_color_modes(color_modes) if len(self._supported_color_modes) > 1: self._color_mode = ColorMode.UNKNOWN @@ -257,7 +333,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._color_mode = next(iter(self._supported_color_modes)) self._attr_supported_features = LightEntityFeature(0) - if self._effect_script is not None: + if self._action_scripts.get(CONF_EFFECT_ACTION): self._attr_supported_features |= LightEntityFeature.EFFECT if self._supports_transition is True: self._attr_supported_features |= LightEntityFeature.TRANSITION @@ -321,12 +397,12 @@ class LightTemplate(TemplateEntity, LightEntity): return self._effect_list @property - def color_mode(self): + def color_mode(self) -> ColorMode | None: """Return current color mode.""" return self._color_mode @property - def supported_color_modes(self): + def supported_color_modes(self) -> set[ColorMode] | None: """Flag supported color modes.""" return self._supported_color_modes @@ -374,14 +450,6 @@ class LightTemplate(TemplateEntity, LightEntity): self._update_temperature, none_on_template_error=True, ) - if self._color_template: - self.add_template_attribute( - "_hs_color", - self._color_template, - None, - self._update_hs, - none_on_template_error=True, - ) if self._hs_template: self.add_template_attribute( "_hs_color", @@ -465,7 +533,7 @@ class LightTemplate(TemplateEntity, LightEntity): ) self._color_mode = ColorMode.COLOR_TEMP self._temperature = color_temp - if self._hs_template is None and self._color_template is None: + if self._hs_template is 
None: self._hs_color = None if self._rgb_template is None: self._rgb_color = None @@ -475,11 +543,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._rgbww_color = None optimistic_set = True - if ( - self._hs_template is None - and self._color_template is None - and ATTR_HS_COLOR in kwargs - ): + if self._hs_template is None and ATTR_HS_COLOR in kwargs: _LOGGER.debug( "Optimistically setting hs color to %s", kwargs[ATTR_HS_COLOR], @@ -505,7 +569,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._rgb_color = kwargs[ATTR_RGB_COLOR] if self._temperature_template is None: self._temperature = None - if self._hs_template is None and self._color_template is None: + if self._hs_template is None: self._hs_color = None if self._rgbw_template is None: self._rgbw_color = None @@ -522,7 +586,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._rgbw_color = kwargs[ATTR_RGBW_COLOR] if self._temperature_template is None: self._temperature = None - if self._hs_template is None and self._color_template is None: + if self._hs_template is None: self._hs_color = None if self._rgb_template is None: self._rgb_color = None @@ -539,7 +603,7 @@ class LightTemplate(TemplateEntity, LightEntity): self._rgbww_color = kwargs[ATTR_RGBWW_COLOR] if self._temperature_template is None: self._temperature = None - if self._hs_template is None and self._color_template is None: + if self._hs_template is None: self._hs_color = None if self._rgb_template is None: self._rgb_color = None @@ -555,17 +619,22 @@ class LightTemplate(TemplateEntity, LightEntity): if ATTR_TRANSITION in kwargs and self._supports_transition is True: common_params["transition"] = kwargs[ATTR_TRANSITION] - if ATTR_COLOR_TEMP_KELVIN in kwargs and self._temperature_script: + if ATTR_COLOR_TEMP_KELVIN in kwargs and ( + temperature_script := self._action_scripts.get(CONF_TEMPERATURE_ACTION) + ): common_params["color_temp"] = color_util.color_temperature_kelvin_to_mired( kwargs[ATTR_COLOR_TEMP_KELVIN] ) await self.async_run_script( - self._temperature_script, + temperature_script, run_variables=common_params, context=self._context, ) - elif ATTR_EFFECT in kwargs and self._effect_script: + elif ATTR_EFFECT in kwargs and ( + effect_script := self._action_scripts.get(CONF_EFFECT_ACTION) + ): + assert self._effect_list is not None effect = kwargs[ATTR_EFFECT] if effect not in self._effect_list: _LOGGER.error( @@ -579,27 +648,22 @@ class LightTemplate(TemplateEntity, LightEntity): common_params["effect"] = effect await self.async_run_script( - self._effect_script, run_variables=common_params, context=self._context + effect_script, run_variables=common_params, context=self._context ) - elif ATTR_HS_COLOR in kwargs and self._color_script: + elif ATTR_HS_COLOR in kwargs and ( + hs_script := self._action_scripts.get(CONF_HS_ACTION) + ): hs_value = kwargs[ATTR_HS_COLOR] common_params["hs"] = hs_value common_params["h"] = int(hs_value[0]) common_params["s"] = int(hs_value[1]) await self.async_run_script( - self._color_script, run_variables=common_params, context=self._context + hs_script, run_variables=common_params, context=self._context ) - elif ATTR_HS_COLOR in kwargs and self._hs_script: - hs_value = kwargs[ATTR_HS_COLOR] - common_params["hs"] = hs_value - common_params["h"] = int(hs_value[0]) - common_params["s"] = int(hs_value[1]) - - await self.async_run_script( - self._hs_script, run_variables=common_params, context=self._context - ) - elif ATTR_RGBWW_COLOR in kwargs and self._rgbww_script: + elif ATTR_RGBWW_COLOR in kwargs and ( + 
rgbww_script := self._action_scripts.get(CONF_RGBWW_ACTION) + ): rgbww_value = kwargs[ATTR_RGBWW_COLOR] common_params["rgbww"] = rgbww_value common_params["rgb"] = ( @@ -614,9 +678,11 @@ class LightTemplate(TemplateEntity, LightEntity): common_params["ww"] = int(rgbww_value[4]) await self.async_run_script( - self._rgbww_script, run_variables=common_params, context=self._context + rgbww_script, run_variables=common_params, context=self._context ) - elif ATTR_RGBW_COLOR in kwargs and self._rgbw_script: + elif ATTR_RGBW_COLOR in kwargs and ( + rgbw_script := self._action_scripts.get(CONF_RGBW_ACTION) + ): rgbw_value = kwargs[ATTR_RGBW_COLOR] common_params["rgbw"] = rgbw_value common_params["rgb"] = ( @@ -630,9 +696,11 @@ class LightTemplate(TemplateEntity, LightEntity): common_params["w"] = int(rgbw_value[3]) await self.async_run_script( - self._rgbw_script, run_variables=common_params, context=self._context + rgbw_script, run_variables=common_params, context=self._context ) - elif ATTR_RGB_COLOR in kwargs and self._rgb_script: + elif ATTR_RGB_COLOR in kwargs and ( + rgb_script := self._action_scripts.get(CONF_RGB_ACTION) + ): rgb_value = kwargs[ATTR_RGB_COLOR] common_params["rgb"] = rgb_value common_params["r"] = int(rgb_value[0]) @@ -640,15 +708,19 @@ class LightTemplate(TemplateEntity, LightEntity): common_params["b"] = int(rgb_value[2]) await self.async_run_script( - self._rgb_script, run_variables=common_params, context=self._context + rgb_script, run_variables=common_params, context=self._context ) - elif ATTR_BRIGHTNESS in kwargs and self._level_script: + elif ATTR_BRIGHTNESS in kwargs and ( + level_script := self._action_scripts.get(CONF_LEVEL_ACTION) + ): await self.async_run_script( - self._level_script, run_variables=common_params, context=self._context + level_script, run_variables=common_params, context=self._context ) else: await self.async_run_script( - self._on_script, run_variables=common_params, context=self._context + self._action_scripts[CONF_ON_ACTION], + run_variables=common_params, + context=self._context, ) if optimistic_set: @@ -656,14 +728,15 @@ class LightTemplate(TemplateEntity, LightEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" + off_script = self._action_scripts[CONF_OFF_ACTION] if ATTR_TRANSITION in kwargs and self._supports_transition is True: await self.async_run_script( - self._off_script, + off_script, run_variables={"transition": kwargs[ATTR_TRANSITION]}, context=self._context, ) else: - await self.async_run_script(self._off_script, context=self._context) + await self.async_run_script(off_script, context=self._context) if self._template is None: self._state = False self.async_write_ha_state() diff --git a/homeassistant/components/template/lock.py b/homeassistant/components/template/lock.py index 0804f92e46d..b19cadff26c 100644 --- a/homeassistant/components/template/lock.py +++ b/homeassistant/components/template/lock.py @@ -23,7 +23,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError, TemplateError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DOMAIN @@ -90,13 +89,18 @@ class TemplateLock(TemplateEntity, LockEntity): ) self._state: LockState | None = None name = self._attr_name - assert name + if TYPE_CHECKING: + assert name is 
not None + self._state_template = config.get(CONF_VALUE_TEMPLATE) - self._command_lock = Script(hass, config[CONF_LOCK], name, DOMAIN) - self._command_unlock = Script(hass, config[CONF_UNLOCK], name, DOMAIN) - if CONF_OPEN in config: - self._command_open = Script(hass, config[CONF_OPEN], name, DOMAIN) - self._attr_supported_features |= LockEntityFeature.OPEN + for action_id, supported_feature in ( + (CONF_LOCK, 0), + (CONF_UNLOCK, 0), + (CONF_OPEN, LockEntityFeature.OPEN), + ): + if action_config := config.get(action_id): + self.add_script(action_id, action_config, name, DOMAIN) + self._attr_supported_features |= supported_feature self._code_format_template = config.get(CONF_CODE_FORMAT_TEMPLATE) self._code_format: str | None = None self._code_format_template_error: TemplateError | None = None @@ -210,7 +214,9 @@ class TemplateLock(TemplateEntity, LockEntity): tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} await self.async_run_script( - self._command_lock, run_variables=tpl_vars, context=self._context + self._action_scripts[CONF_LOCK], + run_variables=tpl_vars, + context=self._context, ) async def async_unlock(self, **kwargs: Any) -> None: @@ -226,7 +232,9 @@ class TemplateLock(TemplateEntity, LockEntity): tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} await self.async_run_script( - self._command_unlock, run_variables=tpl_vars, context=self._context + self._action_scripts[CONF_UNLOCK], + run_variables=tpl_vars, + context=self._context, ) async def async_open(self, **kwargs: Any) -> None: @@ -242,7 +250,9 @@ class TemplateLock(TemplateEntity, LockEntity): tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} await self.async_run_script( - self._command_open, run_variables=tpl_vars, context=self._context + self._action_scripts[CONF_OPEN], + run_variables=tpl_vars, + context=self._context, ) def _raise_template_error_if_available(self): diff --git a/homeassistant/components/template/manifest.json b/homeassistant/components/template/manifest.json index f1225f74f06..32bfd8ce02e 100644 --- a/homeassistant/components/template/manifest.json +++ b/homeassistant/components/template/manifest.json @@ -2,7 +2,7 @@ "domain": "template", "name": "Template", "after_dependencies": ["group"], - "codeowners": ["@PhracturedBlue", "@home-assistant/core"], + "codeowners": ["@Petro31", "@PhracturedBlue", "@home-assistant/core"], "config_flow": true, "dependencies": ["blueprint"], "documentation": "https://www.home-assistant.io/integrations/template", diff --git a/homeassistant/components/template/number.py b/homeassistant/components/template/number.py index 661dbb45dc1..3ecf1db565a 100644 --- a/homeassistant/components/template/number.py +++ b/homeassistant/components/template/number.py @@ -31,7 +31,6 @@ from homeassistant.helpers.entity_platform import ( AddConfigEntryEntitiesCallback, AddEntitiesCallback, ) -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import TriggerUpdateCoordinator @@ -157,9 +156,7 @@ class TemplateNumber(TemplateEntity, NumberEntity): super().__init__(hass, config=config, unique_id=unique_id) assert self._attr_name is not None self._value_template = config[CONF_STATE] - self._command_set_value = Script( - hass, config[CONF_SET_VALUE], self._attr_name, DOMAIN - ) + self.add_script(CONF_SET_VALUE, config[CONF_SET_VALUE], self._attr_name, DOMAIN) self._step_template = config[CONF_STEP] self._min_value_template = config[CONF_MIN] @@ -210,9 +207,9 @@ class TemplateNumber(TemplateEntity, NumberEntity): if self._optimistic: self._attr_native_value = value self.async_write_ha_state() - if self._command_set_value: + if set_value := self._action_scripts.get(CONF_SET_VALUE): await self.async_run_script( - self._command_set_value, + set_value, run_variables={ATTR_VALUE: value}, context=self._context, ) @@ -238,12 +235,8 @@ class TriggerNumberEntity(TriggerEntity, NumberEntity): """Initialize the entity.""" super().__init__(hass, coordinator, config) - self._command_set_value = Script( - hass, - config[CONF_SET_VALUE], - self._rendered.get(CONF_NAME, DEFAULT_NAME), - DOMAIN, - ) + name = self._rendered.get(CONF_NAME, DEFAULT_NAME) + self.add_script(CONF_SET_VALUE, config[CONF_SET_VALUE], name, DOMAIN) self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) @@ -278,6 +271,9 @@ class TriggerNumberEntity(TriggerEntity, NumberEntity): if self._config[CONF_OPTIMISTIC]: self._attr_native_value = value self.async_write_ha_state() - await self._command_set_value.async_run( - {ATTR_VALUE: value}, context=self._context - ) + if set_value := self._action_scripts.get(CONF_SET_VALUE): + await self.async_run_script( + set_value, + run_variables={ATTR_VALUE: value}, + context=self._context, + ) diff --git a/homeassistant/components/template/select.py b/homeassistant/components/template/select.py index a42ee3d0612..eb60a3dbfe4 100644 --- a/homeassistant/components/template/select.py +++ b/homeassistant/components/template/select.py @@ -28,7 +28,6 @@ from homeassistant.helpers.entity_platform import ( AddConfigEntryEntitiesCallback, AddEntitiesCallback, ) -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
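The number rewrite above (and the matching select change that follows) keeps the same two-step flow: write the value optimistically, then run the configured action only if one was registered. A rough stand-alone sketch of that flow, with a plain coroutine standing in for the script (CONF_SELECT_OPTION and _action_scripts mirror the diff; the rest is simplified):

    import asyncio

    CONF_SELECT_OPTION = "select_option"


    class SelectSketch:
        def __init__(self, optimistic: bool, actions: dict | None = None) -> None:
            self._optimistic = optimistic
            self._action_scripts = actions or {}
            self.current_option: str | None = None

        def async_write_ha_state(self) -> None:
            print(f"state written: {self.current_option}")

        async def async_select_option(self, option: str) -> None:
            # optimistic entities reflect the change immediately...
            if self._optimistic:
                self.current_option = option
                self.async_write_ha_state()
            # ...and the action only runs when one was configured
            if select_option := self._action_scripts.get(CONF_SELECT_OPTION):
                await select_option(option)


    async def main() -> None:
        async def fake_action(option: str) -> None:
            print(f"action ran with {option}")

        await SelectSketch(True, {CONF_SELECT_OPTION: fake_action}).async_select_option("eco")
        await SelectSketch(True).async_select_option("away")  # no action configured


    asyncio.run(main())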
import TriggerUpdateCoordinator @@ -142,10 +141,8 @@ class TemplateSelect(TemplateEntity, SelectEntity): super().__init__(hass, config=config, unique_id=unique_id) assert self._attr_name is not None self._value_template = config[CONF_STATE] - if (selection_option := config.get(CONF_SELECT_OPTION)) is not None: - self._command_select_option = Script( - hass, selection_option, self._attr_name, DOMAIN - ) + if select_option := config.get(CONF_SELECT_OPTION): + self.add_script(CONF_SELECT_OPTION, select_option, self._attr_name, DOMAIN) self._options_template = config[ATTR_OPTIONS] self._attr_assumed_state = self._optimistic = config.get(CONF_OPTIMISTIC, False) self._attr_options = [] @@ -177,9 +174,9 @@ class TemplateSelect(TemplateEntity, SelectEntity): if self._optimistic: self._attr_current_option = option self.async_write_ha_state() - if self._command_select_option: + if select_option := self._action_scripts.get(CONF_SELECT_OPTION): await self.async_run_script( - self._command_select_option, + select_option, run_variables={ATTR_OPTION: option}, context=self._context, ) @@ -200,12 +197,13 @@ class TriggerSelectEntity(TriggerEntity, SelectEntity): ) -> None: """Initialize the entity.""" super().__init__(hass, coordinator, config) - self._command_select_option = Script( - hass, - config[CONF_SELECT_OPTION], - self._rendered.get(CONF_NAME, DEFAULT_NAME), - DOMAIN, - ) + if select_option := config.get(CONF_SELECT_OPTION): + self.add_script( + CONF_SELECT_OPTION, + select_option, + self._rendered.get(CONF_NAME, DEFAULT_NAME), + DOMAIN, + ) @property def current_option(self) -> str | None: @@ -222,6 +220,9 @@ class TriggerSelectEntity(TriggerEntity, SelectEntity): if self._config[CONF_OPTIMISTIC]: self._attr_current_option = option self.async_write_ha_state() - await self._command_select_option.async_run( - {ATTR_OPTION: option}, context=self._context - ) + if select_option := self._action_scripts.get(CONF_SELECT_OPTION): + await self.async_run_script( + select_option, + run_variables={ATTR_OPTION: option}, + context=self._context, + ) diff --git a/homeassistant/components/template/switch.py b/homeassistant/components/template/switch.py index 756866cfd44..fb3aeb1e42a 100644 --- a/homeassistant/components/template/switch.py +++ b/homeassistant/components/template/switch.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol @@ -17,6 +17,7 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, CONF_DEVICE_ID, CONF_NAME, + CONF_STATE, CONF_SWITCHES, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, @@ -25,7 +26,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import TemplateError -from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers import config_validation as cv, selector, template from homeassistant.helpers.device import async_device_info_to_link_from_device_id from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import ( @@ -33,19 +34,43 @@ from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, ) from homeassistant.helpers.restore_state import RestoreEntity -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import CONF_TURN_OFF, CONF_TURN_ON, DOMAIN +from .const import CONF_OBJECT_ID, CONF_PICTURE, CONF_TURN_OFF, CONF_TURN_ON, DOMAIN from 
.template_entity import ( + LEGACY_FIELDS as TEMPLATE_ENTITY_LEGACY_FIELDS, + TEMPLATE_ENTITY_AVAILABILITY_SCHEMA, TEMPLATE_ENTITY_COMMON_SCHEMA_LEGACY, + TEMPLATE_ENTITY_ICON_SCHEMA, TemplateEntity, rewrite_common_legacy_to_modern_conf, ) _VALID_STATES = [STATE_ON, STATE_OFF, "true", "false"] -SWITCH_SCHEMA = vol.All( +LEGACY_FIELDS = TEMPLATE_ENTITY_LEGACY_FIELDS | { + CONF_VALUE_TEMPLATE: CONF_STATE, +} + +DEFAULT_NAME = "Template Switch" + + +SWITCH_SCHEMA = ( + vol.Schema( + { + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.template, + vol.Optional(CONF_STATE): cv.template, + vol.Required(CONF_TURN_ON): cv.SCRIPT_SCHEMA, + vol.Required(CONF_TURN_OFF): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_UNIQUE_ID): cv.string, + vol.Optional(CONF_PICTURE): cv.template, + } + ) + .extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA.schema) + .extend(TEMPLATE_ENTITY_ICON_SCHEMA.schema) +) + +LEGACY_SWITCH_SCHEMA = vol.All( cv.deprecated(ATTR_ENTITY_ID), vol.Schema( { @@ -60,13 +85,13 @@ SWITCH_SCHEMA = vol.All( ) PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( - {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(SWITCH_SCHEMA)} + {vol.Required(CONF_SWITCHES): cv.schema_with_slug_keys(LEGACY_SWITCH_SCHEMA)} ) SWITCH_CONFIG_SCHEMA = vol.Schema( { vol.Required(CONF_NAME): cv.template, - vol.Optional(CONF_VALUE_TEMPLATE): cv.template, + vol.Optional(CONF_STATE): cv.template, vol.Optional(CONF_TURN_ON): cv.SCRIPT_SCHEMA, vol.Optional(CONF_TURN_OFF): cv.SCRIPT_SCHEMA, vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), @@ -74,24 +99,62 @@ SWITCH_CONFIG_SCHEMA = vol.Schema( ) -async def _async_create_entities(hass, config): - """Create the Template switches.""" +def rewrite_legacy_to_modern_conf( + hass: HomeAssistant, config: dict[str, dict] +) -> list[dict]: + """Rewrite legacy switch configuration definitions to modern ones.""" switches = [] - for object_id, entity_config in config[CONF_SWITCHES].items(): - entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) - unique_id = entity_config.get(CONF_UNIQUE_ID) + for object_id, entity_conf in config.items(): + entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id} + + entity_conf = rewrite_common_legacy_to_modern_conf( + hass, entity_conf, LEGACY_FIELDS + ) + + if CONF_NAME not in entity_conf: + entity_conf[CONF_NAME] = template.Template(object_id, hass) + + switches.append(entity_conf) + + return switches + + +def rewrite_options_to_modern_conf(option_config: dict[str, dict]) -> dict[str, dict]: + """Rewrite option configuration to modern configuration.""" + option_config = {**option_config} + + if CONF_VALUE_TEMPLATE in option_config: + option_config[CONF_STATE] = option_config.pop(CONF_VALUE_TEMPLATE) + + return option_config + + +@callback +def _async_create_template_tracking_entities( + async_add_entities: AddEntitiesCallback, + hass: HomeAssistant, + definitions: list[dict], + unique_id_prefix: str | None, +) -> None: + """Create the template switches.""" + switches = [] + + for entity_conf in definitions: + unique_id = entity_conf.get(CONF_UNIQUE_ID) + + if unique_id and unique_id_prefix: + unique_id = f"{unique_id_prefix}-{unique_id}" switches.append( SwitchTemplate( hass, - object_id, - entity_config, + entity_conf, unique_id, ) ) - return switches + async_add_entities(switches) async def async_setup_platform( @@ -101,7 +164,21 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the template switches.""" - async_add_entities(await _async_create_entities(hass, config)) + if 
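The new rewrite_legacy_to_modern_conf / rewrite_options_to_modern_conf helpers above fold the slug key into object_id, rename value_template to state, and fall back to the slug for the name. A simplified sketch of that mapping on plain dicts (the key names follow the diff; the Template wrapper and schema validation are omitted):

    # Illustrative only: operates on plain dicts, no voluptuous/Template involved.
    CONF_OBJECT_ID = "object_id"
    CONF_NAME = "name"
    CONF_STATE = "state"
    CONF_VALUE_TEMPLATE = "value_template"

    # legacy option name -> modern option name (subset, for illustration)
    LEGACY_FIELDS = {CONF_VALUE_TEMPLATE: CONF_STATE}


    def rewrite_legacy_to_modern_conf(config: dict[str, dict]) -> list[dict]:
        """Rewrite {object_id: legacy_options} into a list of modern configs."""
        switches = []
        for object_id, entity_conf in config.items():
            entity_conf = {**entity_conf, CONF_OBJECT_ID: object_id}
            for legacy_key, modern_key in LEGACY_FIELDS.items():
                if legacy_key in entity_conf:
                    entity_conf[modern_key] = entity_conf.pop(legacy_key)
            # the slug doubles as the fallback friendly name
            entity_conf.setdefault(CONF_NAME, object_id)
            switches.append(entity_conf)
        return switches


    print(
        rewrite_legacy_to_modern_conf(
            {"porch_light": {CONF_VALUE_TEMPLATE: "{{ states('switch.porch') }}"}}
        )
    )
    # -> [{'object_id': 'porch_light', 'state': "{{ states('switch.porch') }}", 'name': 'porch_light'}]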
discovery_info is None: + _async_create_template_tracking_entities( + async_add_entities, + hass, + rewrite_legacy_to_modern_conf(hass, config[CONF_SWITCHES]), + None, + ) + return + + _async_create_template_tracking_entities( + async_add_entities, + hass, + discovery_info["entities"], + discovery_info["unique_id"], + ) async def async_setup_entry( @@ -112,10 +189,9 @@ async def async_setup_entry( """Initialize config entry.""" _options = dict(config_entry.options) _options.pop("template_type") + _options = rewrite_options_to_modern_conf(_options) validated_config = SWITCH_CONFIG_SCHEMA(_options) - async_add_entities( - [SwitchTemplate(hass, None, validated_config, config_entry.entry_id)] - ) + async_add_entities([SwitchTemplate(hass, validated_config, config_entry.entry_id)]) @callback @@ -123,8 +199,9 @@ def async_create_preview_switch( hass: HomeAssistant, name: str, config: dict[str, Any] ) -> SwitchTemplate: """Create a preview switch.""" - validated_config = SWITCH_CONFIG_SCHEMA(config | {CONF_NAME: name}) - return SwitchTemplate(hass, None, validated_config, None) + updated_config = rewrite_options_to_modern_conf(config) + validated_config = SWITCH_CONFIG_SCHEMA(updated_config | {CONF_NAME: name}) + return SwitchTemplate(hass, validated_config, None) class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity): @@ -134,31 +211,26 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity): def __init__( self, - hass, - object_id, - config, - unique_id, - ): + hass: HomeAssistant, + config: ConfigType, + unique_id: str | None, + ) -> None: """Initialize the Template switch.""" - super().__init__( - hass, config=config, fallback_name=object_id, unique_id=unique_id - ) - if object_id is not None: + super().__init__(hass, config=config, fallback_name=None, unique_id=unique_id) + if (object_id := config.get(CONF_OBJECT_ID)) is not None: self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, object_id, hass=hass ) - friendly_name = self._attr_name - self._template = config.get(CONF_VALUE_TEMPLATE) - self._on_script = ( - Script(hass, config.get(CONF_TURN_ON), friendly_name, DOMAIN) - if config.get(CONF_TURN_ON) is not None - else None - ) - self._off_script = ( - Script(hass, config.get(CONF_TURN_OFF), friendly_name, DOMAIN) - if config.get(CONF_TURN_OFF) is not None - else None - ) + name = self._attr_name + if TYPE_CHECKING: + assert name is not None + self._template = config.get(CONF_STATE) + + if on_action := config.get(CONF_TURN_ON): + self.add_script(CONF_TURN_ON, on_action, name, DOMAIN) + if off_action := config.get(CONF_TURN_OFF): + self.add_script(CONF_TURN_OFF, off_action, name, DOMAIN) + self._state: bool | None = False self._attr_assumed_state = self._template is None self._attr_device_info = async_device_info_to_link_from_device_id( @@ -209,16 +281,16 @@ class SwitchTemplate(TemplateEntity, SwitchEntity, RestoreEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Fire the on action.""" - if self._on_script: - await self.async_run_script(self._on_script, context=self._context) + if on_script := self._action_scripts.get(CONF_TURN_ON): + await self.async_run_script(on_script, context=self._context) if self._template is None: self._state = True self.async_write_ha_state() async def async_turn_off(self, **kwargs: Any) -> None: """Fire the off action.""" - if self._off_script: - await self.async_run_script(self._off_script, context=self._context) + if off_script := self._action_scripts.get(CONF_TURN_OFF): + await self.async_run_script(off_script, 
context=self._context) if self._template is None: self._state = False self.async_write_ha_state() diff --git a/homeassistant/components/template/template_entity.py b/homeassistant/components/template/template_entity.py index 8f9edca5976..88708278758 100644 --- a/homeassistant/components/template/template_entity.py +++ b/homeassistant/components/template/template_entity.py @@ -24,7 +24,6 @@ from homeassistant.const import ( ) from homeassistant.core import ( CALLBACK_TYPE, - Context, Event, EventStateChangedData, HomeAssistant, @@ -41,7 +40,7 @@ from homeassistant.helpers.event import ( TrackTemplateResultInfo, async_track_template_result, ) -from homeassistant.helpers.script import Script, _VarsType +from homeassistant.helpers.script_variables import ScriptVariables from homeassistant.helpers.start import async_at_start from homeassistant.helpers.template import ( Template, @@ -61,6 +60,7 @@ from .const import ( CONF_AVAILABILITY_TEMPLATE, CONF_PICTURE, ) +from .entity import AbstractTemplateEntity _LOGGER = logging.getLogger(__name__) @@ -248,7 +248,7 @@ class _TemplateAttribute: return -class TemplateEntity(Entity): # pylint: disable=hass-enforce-class-module +class TemplateEntity(AbstractTemplateEntity): """Entity that uses templates to calculate attributes.""" _attr_available = True @@ -268,6 +268,7 @@ class TemplateEntity(Entity): # pylint: disable=hass-enforce-class-module unique_id: str | None = None, ) -> None: """Template Entity.""" + super().__init__(hass) self._template_attrs: dict[Template, list[_TemplateAttribute]] = {} self._template_result_info: TrackTemplateResultInfo | None = None self._attr_extra_state_attributes = {} @@ -285,6 +286,7 @@ class TemplateEntity(Entity): # pylint: disable=hass-enforce-class-module ] | None ) = None + self._run_variables: ScriptVariables | dict if config is None: self._attribute_templates = attribute_templates self._availability_template = availability_template @@ -339,18 +341,6 @@ class TemplateEntity(Entity): # pylint: disable=hass-enforce-class-module variables=variables, parse_result=False ) - @callback - def _render_variables(self) -> dict: - if isinstance(self._run_variables, dict): - return self._run_variables - - return self._run_variables.async_render( - self.hass, - { - "this": TemplateStateFromEntityId(self.hass, self.entity_id), - }, - ) - @callback def _update_available(self, result: str | TemplateError) -> None: if isinstance(result, TemplateError): @@ -387,6 +377,18 @@ class TemplateEntity(Entity): # pylint: disable=hass-enforce-class-module return None return cast(str, self._blueprint_inputs[CONF_USE_BLUEPRINT][CONF_PATH]) + def _render_script_variables(self) -> dict[str, Any]: + """Render configured variables.""" + if isinstance(self._run_variables, dict): + return self._run_variables + + return self._run_variables.async_render( + self.hass, + { + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + }, + ) + def add_template_attribute( self, attribute: str, @@ -488,7 +490,7 @@ class TemplateEntity(Entity): # pylint: disable=hass-enforce-class-module variables = { "this": TemplateStateFromEntityId(self.hass, self.entity_id), - **self._render_variables(), + **self._render_script_variables(), } for template, attributes in self._template_attrs.items(): @@ -581,22 +583,3 @@ class TemplateEntity(Entity): # pylint: disable=hass-enforce-class-module """Call for forced update.""" assert self._template_result_info self._template_result_info.async_refresh() - - async def async_run_script( - self, - script: Script, - *, - 
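template_entity.py above renames _render_variables to _render_script_variables and moves script execution into the shared AbstractTemplateEntity, which lets the trigger-based entity below override the hook and feed the coordinator's run_variables instead. A stripped-down sketch of that hook, assuming simplified stand-ins for the entity and script types:

    import asyncio


    class AbstractEntitySketch:
        """Base: merges rendered variables into every script run."""

        def _render_script_variables(self) -> dict:
            return {}

        async def async_run_script(self, script, *, run_variables: dict | None = None) -> None:
            await script({**self._render_script_variables(), **(run_variables or {})})


    class StateEntitySketch(AbstractEntitySketch):
        """State-based entity: variables come from its own configuration."""

        def __init__(self, run_variables: dict) -> None:
            self._run_variables = run_variables

        def _render_script_variables(self) -> dict:
            return self._run_variables


    class TriggerEntitySketch(AbstractEntitySketch):
        """Trigger-based entity: variables come from the coordinator's last run."""

        def __init__(self, coordinator_data: dict) -> None:
            self._coordinator_data = coordinator_data

        def _render_script_variables(self) -> dict:
            return self._coordinator_data["run_variables"]


    async def demo_script(variables: dict) -> None:
        print("script variables:", variables)


    async def main() -> None:
        await StateEntitySketch({"room": "kitchen"}).async_run_script(
            demo_script, run_variables={"brightness": 50}
        )
        await TriggerEntitySketch({"run_variables": {"trigger": "sensor.door"}}).async_run_script(demo_script)


    asyncio.run(main())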
run_variables: _VarsType | None = None, - context: Context | None = None, - ) -> None: - """Run an action script.""" - if run_variables is None: - run_variables = {} - await script.async_run( - run_variables={ - "this": TemplateStateFromEntityId(self.hass, self.entity_id), - **self._render_variables(), - **run_variables, - }, - context=context, - ) diff --git a/homeassistant/components/template/trigger_entity.py b/homeassistant/components/template/trigger_entity.py index 5130f332d5b..87c93b6143b 100644 --- a/homeassistant/components/template/trigger_entity.py +++ b/homeassistant/components/template/trigger_entity.py @@ -8,10 +8,13 @@ from homeassistant.helpers.trigger_template_entity import TriggerBaseEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import TriggerUpdateCoordinator +from .entity import AbstractTemplateEntity class TriggerEntity( # pylint: disable=hass-enforce-class-module - TriggerBaseEntity, CoordinatorEntity[TriggerUpdateCoordinator] + TriggerBaseEntity, + CoordinatorEntity[TriggerUpdateCoordinator], + AbstractTemplateEntity, ): """Template entity based on trigger data.""" @@ -24,6 +27,7 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module """Initialize the entity.""" CoordinatorEntity.__init__(self, coordinator) TriggerBaseEntity.__init__(self, hass, config) + AbstractTemplateEntity.__init__(self, hass) async def async_added_to_hass(self) -> None: """Handle being added to Home Assistant.""" @@ -38,6 +42,16 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module else: self._unique_id = unique_id + @property + def referenced_blueprint(self) -> str | None: + """Return referenced blueprint or None.""" + return self.coordinator.referenced_blueprint + + @callback + def _render_script_variables(self) -> dict: + """Render configured variables.""" + return self.coordinator.data["run_variables"] + @callback def _process_data(self) -> None: """Process new data.""" diff --git a/homeassistant/components/template/vacuum.py b/homeassistant/components/template/vacuum.py index b977f4e659a..c4d41b52f31 100644 --- a/homeassistant/components/template/vacuum.py +++ b/homeassistant/components/template/vacuum.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol @@ -33,7 +33,6 @@ from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DOMAIN @@ -90,7 +89,7 @@ PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend( ) -async def _async_create_entities(hass, config): +async def _async_create_entities(hass: HomeAssistant, config: ConfigType): """Create the Template Vacuums.""" vacuums = [] @@ -127,11 +126,11 @@ class TemplateVacuum(TemplateEntity, StateVacuumEntity): def __init__( self, - hass, + hass: HomeAssistant, object_id, - config, + config: ConfigType, unique_id, - ): + ) -> None: """Initialize the vacuum.""" super().__init__( hass, config=config, fallback_name=object_id, unique_id=unique_id @@ -139,7 +138,9 @@ class TemplateVacuum(TemplateEntity, StateVacuumEntity): self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, object_id, hass=hass ) - friendly_name = self._attr_name + name = self._attr_name + if 
TYPE_CHECKING: + assert name is not None self._template = config.get(CONF_VALUE_TEMPLATE) self._battery_level_template = config.get(CONF_BATTERY_LEVEL_TEMPLATE) @@ -148,43 +149,18 @@ class TemplateVacuum(TemplateEntity, StateVacuumEntity): VacuumEntityFeature.START | VacuumEntityFeature.STATE ) - self._start_script = Script(hass, config[SERVICE_START], friendly_name, DOMAIN) - - self._pause_script = None - if pause_action := config.get(SERVICE_PAUSE): - self._pause_script = Script(hass, pause_action, friendly_name, DOMAIN) - self._attr_supported_features |= VacuumEntityFeature.PAUSE - - self._stop_script = None - if stop_action := config.get(SERVICE_STOP): - self._stop_script = Script(hass, stop_action, friendly_name, DOMAIN) - self._attr_supported_features |= VacuumEntityFeature.STOP - - self._return_to_base_script = None - if return_to_base_action := config.get(SERVICE_RETURN_TO_BASE): - self._return_to_base_script = Script( - hass, return_to_base_action, friendly_name, DOMAIN - ) - self._attr_supported_features |= VacuumEntityFeature.RETURN_HOME - - self._clean_spot_script = None - if clean_spot_action := config.get(SERVICE_CLEAN_SPOT): - self._clean_spot_script = Script( - hass, clean_spot_action, friendly_name, DOMAIN - ) - self._attr_supported_features |= VacuumEntityFeature.CLEAN_SPOT - - self._locate_script = None - if locate_action := config.get(SERVICE_LOCATE): - self._locate_script = Script(hass, locate_action, friendly_name, DOMAIN) - self._attr_supported_features |= VacuumEntityFeature.LOCATE - - self._set_fan_speed_script = None - if set_fan_speed_action := config.get(SERVICE_SET_FAN_SPEED): - self._set_fan_speed_script = Script( - hass, set_fan_speed_action, friendly_name, DOMAIN - ) - self._attr_supported_features |= VacuumEntityFeature.FAN_SPEED + for action_id, supported_feature in ( + (SERVICE_START, 0), + (SERVICE_PAUSE, VacuumEntityFeature.PAUSE), + (SERVICE_STOP, VacuumEntityFeature.STOP), + (SERVICE_RETURN_TO_BASE, VacuumEntityFeature.RETURN_HOME), + (SERVICE_CLEAN_SPOT, VacuumEntityFeature.CLEAN_SPOT), + (SERVICE_LOCATE, VacuumEntityFeature.LOCATE), + (SERVICE_SET_FAN_SPEED, VacuumEntityFeature.FAN_SPEED), + ): + if action_config := config.get(action_id): + self.add_script(action_id, action_config, name, DOMAIN) + self._attr_supported_features |= supported_feature self._state = None self._battery_level = None @@ -203,62 +179,50 @@ class TemplateVacuum(TemplateEntity, StateVacuumEntity): async def async_start(self) -> None: """Start or resume the cleaning task.""" - await self.async_run_script(self._start_script, context=self._context) + await self.async_run_script( + self._action_scripts[SERVICE_START], context=self._context + ) async def async_pause(self) -> None: """Pause the cleaning task.""" - if self._pause_script is None: - return - - await self.async_run_script(self._pause_script, context=self._context) + if script := self._action_scripts.get(SERVICE_PAUSE): + await self.async_run_script(script, context=self._context) async def async_stop(self, **kwargs: Any) -> None: """Stop the cleaning task.""" - if self._stop_script is None: - return - - await self.async_run_script(self._stop_script, context=self._context) + if script := self._action_scripts.get(SERVICE_STOP): + await self.async_run_script(script, context=self._context) async def async_return_to_base(self, **kwargs: Any) -> None: """Set the vacuum cleaner to return to the dock.""" - if self._return_to_base_script is None: - return - - await self.async_run_script(self._return_to_base_script, 
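The vacuum above (like the lock earlier) collapses seven near-identical Script blocks into one loop over (action_id, supported_feature) pairs: only configured actions are registered, and each one ORs its feature bit into the entity's bitmask. A compact illustration of the idea, with a local IntFlag standing in for VacuumEntityFeature:

    from enum import IntFlag


    class VacuumFeature(IntFlag):
        PAUSE = 4
        STOP = 8
        RETURN_HOME = 16
        LOCATE = 512


    class VacuumSketch:
        def __init__(self, config: dict[str, list]) -> None:
            self.supported_features = VacuumFeature(0)
            self._action_scripts: dict[str, list] = {}
            for action_id, supported_feature in (
                ("start", VacuumFeature(0)),  # always-available action, no extra flag
                ("pause", VacuumFeature.PAUSE),
                ("stop", VacuumFeature.STOP),
                ("return_to_base", VacuumFeature.RETURN_HOME),
                ("locate", VacuumFeature.LOCATE),
            ):
                if action_config := config.get(action_id):
                    self._action_scripts[action_id] = action_config
                    self.supported_features |= supported_feature


    vac = VacuumSketch({"start": ["do_start"], "stop": ["do_stop"]})
    print(vac.supported_features)       # VacuumFeature.STOP
    print(sorted(vac._action_scripts))  # ['start', 'stop']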
context=self._context) + if script := self._action_scripts.get(SERVICE_RETURN_TO_BASE): + await self.async_run_script(script, context=self._context) async def async_clean_spot(self, **kwargs: Any) -> None: """Perform a spot clean-up.""" - if self._clean_spot_script is None: - return - - await self.async_run_script(self._clean_spot_script, context=self._context) + if script := self._action_scripts.get(SERVICE_CLEAN_SPOT): + await self.async_run_script(script, context=self._context) async def async_locate(self, **kwargs: Any) -> None: """Locate the vacuum cleaner.""" - if self._locate_script is None: - return - - await self.async_run_script(self._locate_script, context=self._context) + if script := self._action_scripts.get(SERVICE_LOCATE): + await self.async_run_script(script, context=self._context) async def async_set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None: """Set fan speed.""" - if self._set_fan_speed_script is None: - return - - if fan_speed in self._attr_fan_speed_list: - self._attr_fan_speed = fan_speed - await self.async_run_script( - self._set_fan_speed_script, - run_variables={ATTR_FAN_SPEED: fan_speed}, - context=self._context, - ) - else: + if fan_speed not in self._attr_fan_speed_list: _LOGGER.error( "Received invalid fan speed: %s for entity %s. Expected: %s", fan_speed, self.entity_id, self._attr_fan_speed_list, ) + return + + if script := self._action_scripts.get(SERVICE_SET_FAN_SPEED): + await self.async_run_script( + script, run_variables={ATTR_FAN_SPEED: fan_speed}, context=self._context + ) @callback def _async_setup_templates(self) -> None: diff --git a/homeassistant/components/template/weather.py b/homeassistant/components/template/weather.py index 7f597f1d9a8..86bab6f5ad1 100644 --- a/homeassistant/components/template/weather.py +++ b/homeassistant/components/template/weather.py @@ -135,6 +135,33 @@ WEATHER_SCHEMA = vol.Schema( PLATFORM_SCHEMA = WEATHER_PLATFORM_SCHEMA.extend(WEATHER_SCHEMA.schema) +@callback +def _async_create_template_tracking_entities( + async_add_entities: AddEntitiesCallback, + hass: HomeAssistant, + definitions: list[dict], + unique_id_prefix: str | None, +) -> None: + """Create the weather entities.""" + entities = [] + + for entity_conf in definitions: + unique_id = entity_conf.get(CONF_UNIQUE_ID) + + if unique_id and unique_id_prefix: + unique_id = f"{unique_id_prefix}-{unique_id}" + + entities.append( + WeatherTemplate( + hass, + entity_conf, + unique_id, + ) + ) + + async_add_entities(entities) + + async def async_setup_platform( hass: HomeAssistant, config: ConfigType, @@ -142,24 +169,32 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Template weather.""" - if discovery_info and "coordinator" in discovery_info: + if discovery_info is None: + config = rewrite_common_legacy_to_modern_conf(hass, config) + unique_id = config.get(CONF_UNIQUE_ID) + async_add_entities( + [ + WeatherTemplate( + hass, + config, + unique_id, + ) + ] + ) + return + + if "coordinator" in discovery_info: async_add_entities( TriggerWeatherEntity(hass, discovery_info["coordinator"], config) for config in discovery_info["entities"] ) return - config = rewrite_common_legacy_to_modern_conf(hass, config) - unique_id = config.get(CONF_UNIQUE_ID) - - async_add_entities( - [ - WeatherTemplate( - hass, - config, - unique_id, - ) - ] + _async_create_template_tracking_entities( + async_add_entities, + hass, + discovery_info["entities"], + discovery_info["unique_id"], ) diff --git 
a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 27bfb9134ab..2642bd2f7d5 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -5,12 +5,7 @@ from typing import Final from aiohttp.client_exceptions import ClientResponseError import jwt -from tesla_fleet_api import ( - EnergySpecific, - TeslaFleetApi, - VehicleSigned, - VehicleSpecific, -) +from tesla_fleet_api import TeslaFleetApi from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( InvalidRegion, @@ -128,7 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - vehicles: list[TeslaFleetVehicleData] = [] energysites: list[TeslaFleetEnergyData] = [] for product in products: - if "vin" in product and hasattr(tesla, "vehicle"): + if "vin" in product and Scope.VEHICLE_DEVICE_DATA in scopes: # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] @@ -136,9 +131,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - if signing: if not tesla.private_key: await tesla.get_private_key(hass.config.path("tesla_fleet.key")) - api = VehicleSigned(tesla.vehicle, vin) + api = tesla.vehicles.createSigned(vin) else: - api = VehicleSpecific(tesla.vehicle, vin) + api = tesla.vehicles.createFleet(vin) coordinator = TeslaFleetVehicleDataCoordinator(hass, entry, api, product) await coordinator.async_config_entry_first_refresh() @@ -160,7 +155,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - signing=signing, ) ) - elif "energy_site_id" in product and hasattr(tesla, "energy"): + elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] if not ( product["components"]["battery"] @@ -173,7 +168,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - ) continue - api = EnergySpecific(tesla.energy, site_id) + api = tesla.energySites.create(site_id) live_coordinator = TeslaFleetEnergySiteLiveCoordinator(hass, entry, api) history_coordinator = TeslaFleetEnergySiteHistoryCoordinator( @@ -227,7 +222,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - # Setup Platforms entry.runtime_data = TeslaFleetData(vehicles, energysites, scopes) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - return True diff --git a/homeassistant/components/tesla_fleet/coordinator.py b/homeassistant/components/tesla_fleet/coordinator.py index 128c15068f6..50a69258a31 100644 --- a/homeassistant/components/tesla_fleet/coordinator.py +++ b/homeassistant/components/tesla_fleet/coordinator.py @@ -7,7 +7,6 @@ from random import randint from time import time from typing import TYPE_CHECKING, Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint from tesla_fleet_api.exceptions import ( InvalidToken, @@ -17,6 +16,7 @@ from tesla_fleet_api.exceptions import ( TeslaFleetError, VehicleOffline, ) +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -70,7 +70,7 @@ class TeslaFleetVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: VehicleSpecific, + api: VehicleFleet, product: dict, 
) -> None: """Initialize TeslaFleet Vehicle Update Coordinator.""" @@ -149,7 +149,7 @@ class TeslaFleetEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize TeslaFleet Energy Site Live coordinator.""" super().__init__( @@ -202,7 +202,7 @@ class TeslaFleetEnergySiteHistoryCoordinator(DataUpdateCoordinator[dict[str, Any self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize Tesla Fleet Energy Site History coordinator.""" super().__init__( @@ -248,7 +248,7 @@ class TeslaFleetEnergySiteHistoryCoordinator(DataUpdateCoordinator[dict[str, Any self.updated_once = True # Add all time periods together - output = {key: 0 for key in ENERGY_HISTORY_FIELDS} + output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0) for period in data.get("time_series", []): for key in ENERGY_HISTORY_FIELDS: output[key] += period.get(key, 0) @@ -266,7 +266,7 @@ class TeslaFleetEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslaFleetConfigEntry, - api: EnergySpecific, + api: EnergySite, product: dict, ) -> None: """Initialize TeslaFleet Energy Info coordinator.""" diff --git a/homeassistant/components/tesla_fleet/entity.py b/homeassistant/components/tesla_fleet/entity.py index 0260acf368e..583e92595d0 100644 --- a/homeassistant/components/tesla_fleet/entity.py +++ b/homeassistant/components/tesla_fleet/entity.py @@ -3,8 +3,9 @@ from abc import abstractmethod from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.tesla.energysite import EnergySite +from tesla_fleet_api.tesla.vehicle.fleet import VehicleFleet from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.device_registry import DeviceInfo @@ -41,7 +42,7 @@ class TeslaFleetEntity( | TeslaFleetEnergySiteLiveCoordinator | TeslaFleetEnergySiteHistoryCoordinator | TeslaFleetEnergySiteInfoCoordinator, - api: VehicleSpecific | EnergySpecific, + api: VehicleFleet | EnergySite, key: str, ) -> None: """Initialize common aspects of a TeslaFleet entity.""" diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index 010197ccbd9..56dc49ad111 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.9.13"] + "requirements": ["tesla-fleet-api==1.0.16"] } diff --git a/homeassistant/components/tesla_fleet/models.py b/homeassistant/components/tesla_fleet/models.py index 469ebdca914..17a2bf50ed1 100644 --- a/homeassistant/components/tesla_fleet/models.py +++ b/homeassistant/components/tesla_fleet/models.py @@ -5,8 +5,8 @@ from __future__ import annotations import asyncio from dataclasses import dataclass -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.helpers.device_registry import DeviceInfo @@ -31,7 +31,7 @@ class TeslaFleetData: class TeslaFleetVehicleData: """Data for a vehicle in the TeslaFleet integration.""" - api: VehicleSpecific + api: VehicleFleet 
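The coordinator change above swaps the {key: 0 for key in ENERGY_HISTORY_FIELDS} comprehension for dict.fromkeys(ENERGY_HISTORY_FIELDS, 0), which builds the same zero-initialised dict (safe here because the shared default is an immutable int). The accumulation loop is unchanged; a standalone sketch with illustrative field names:

    # Field names are illustrative, not the real ENERGY_HISTORY_FIELDS constant.
    ENERGY_HISTORY_FIELDS = ["solar_energy_exported", "battery_energy_exported"]

    data = {
        "time_series": [
            {"solar_energy_exported": 5, "battery_energy_exported": 1},
            {"solar_energy_exported": 3},  # missing keys default to 0
        ]
    }

    # dict.fromkeys(keys, 0) is equivalent to {key: 0 for key in keys}
    output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0)
    for period in data.get("time_series", []):
        for key in ENERGY_HISTORY_FIELDS:
            output[key] += period.get(key, 0)

    print(output)  # {'solar_energy_exported': 8, 'battery_energy_exported': 1}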
coordinator: TeslaFleetVehicleDataCoordinator vin: str device: DeviceInfo @@ -43,7 +43,7 @@ class TeslaFleetVehicleData: class TeslaFleetEnergyData: """Data for a vehicle in the TeslaFleet integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TeslaFleetEnergySiteLiveCoordinator history_coordinator: TeslaFleetEnergySiteHistoryCoordinator info_coordinator: TeslaFleetEnergySiteInfoCoordinator diff --git a/homeassistant/components/tesla_fleet/number.py b/homeassistant/components/tesla_fleet/number.py index a1123ab9553..b4f7e42cafd 100644 --- a/homeassistant/components/tesla_fleet/number.py +++ b/homeassistant/components/tesla_fleet/number.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.tesla import EnergySite, VehicleFleet from homeassistant.components.number import ( NumberDeviceClass, @@ -33,7 +33,7 @@ PARALLEL_UPDATES = 0 class TeslaFleetNumberVehicleEntityDescription(NumberEntityDescription): """Describes TeslaFleet Number entity.""" - func: Callable[[VehicleSpecific, float], Awaitable[Any]] + func: Callable[[VehicleFleet, float], Awaitable[Any]] native_min_value: float native_max_value: float min_key: str | None = None @@ -74,7 +74,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslaFleetNumberVehicleEntityDescription, ...] = ( class TeslaFleetNumberBatteryEntityDescription(NumberEntityDescription): """Describes TeslaFleet Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str | None = None diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index eef974cc5a7..b820d2d1b43 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -4,7 +4,6 @@ import asyncio from collections.abc import Callable from typing import Final -from tesla_fleet_api import EnergySpecific, Teslemetry, VehicleSpecific from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( Forbidden, @@ -12,6 +11,7 @@ from tesla_fleet_api.exceptions import ( SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import Teslemetry from teslemetry_stream import TeslemetryStream from homeassistant.config_entries import ConfigEntry @@ -111,7 +111,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] - api = VehicleSpecific(teslemetry.vehicle, vin) + api = teslemetry.vehicles.create(vin) coordinator = TeslemetryVehicleDataCoordinator(hass, entry, api, product) device = DeviceInfo( identifiers={(DOMAIN, vin)}, @@ -156,7 +156,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - ) continue - api = EnergySpecific(teslemetry.energy, site_id) + api = teslemetry.energySites.create(site_id) device = DeviceInfo( identifiers={(DOMAIN, str(site_id))}, manufacturer="Tesla", diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 86811131ab6..c1c8fcd2f73 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -6,9 +6,11 @@ from itertools import chain from typing import Any, cast from tesla_fleet_api.const import 
CabinOverheatProtectionTemp, Scope +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.climate import ( ATTR_HVAC_MODE, + HVAC_MODES, ClimateEntity, ClimateEntityFeature, HVACMode, @@ -22,15 +24,32 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from . import TeslemetryConfigEntry from .const import DOMAIN, TeslemetryClimateSide -from .entity import TeslemetryVehicleEntity +from .entity import ( + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_vehicle_command from .models import TeslemetryVehicleData DEFAULT_MIN_TEMP = 15 DEFAULT_MAX_TEMP = 28 +COP_TEMPERATURES = { + 30: CabinOverheatProtectionTemp.LOW, + 35: CabinOverheatProtectionTemp.MEDIUM, + 40: CabinOverheatProtectionTemp.HIGH, +} +PRESET_MODES = { + "Off": "off", + "On": "keep", + "Dog": "dog", + "Party": "camp", +} + PARALLEL_UPDATES = 0 @@ -45,13 +64,21 @@ async def async_setup_entry( async_add_entities( chain( ( - TeslemetryClimateEntity( + TeslemetryPollingClimateEntity( + vehicle, TeslemetryClimateSide.DRIVER, entry.runtime_data.scopes + ) + if vehicle.api.pre2021 or vehicle.firmware < "2024.44.25" + else TeslemetryStreamingClimateEntity( vehicle, TeslemetryClimateSide.DRIVER, entry.runtime_data.scopes ) for vehicle in entry.runtime_data.vehicles ), ( - TeslemetryCabinOverheatProtectionEntity( + TeslemetryPollingCabinOverheatProtectionEntity( + vehicle, entry.runtime_data.scopes + ) + if vehicle.api.pre2021 or vehicle.firmware < "2024.44.25" + else TeslemetryStreamingCabinOverheatProtectionEntity( vehicle, entry.runtime_data.scopes ) for vehicle in entry.runtime_data.vehicles @@ -60,66 +87,22 @@ async def async_setup_entry( ) -class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): - """Telemetry vehicle climate entity.""" +class TeslemetryClimateEntity(TeslemetryRootEntity, ClimateEntity): + """Vehicle Climate Control.""" + + api: Vehicle _attr_precision = PRECISION_HALVES - _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = [HVACMode.HEAT_COOL, HVACMode.OFF] - _attr_supported_features = ( - ClimateEntityFeature.TURN_ON - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.PRESET_MODE - ) - _attr_preset_modes = ["off", "keep", "dog", "camp"] - - def __init__( - self, - data: TeslemetryVehicleData, - side: TeslemetryClimateSide, - scopes: Scope, - ) -> None: - """Initialize the climate.""" - self.scoped = Scope.VEHICLE_CMDS in scopes - - if not self.scoped: - self._attr_supported_features = ClimateEntityFeature(0) - self._attr_hvac_modes = [] - - super().__init__( - data, - side, - ) - - def _async_update_attrs(self) -> None: - """Update the attributes of the entity.""" - value = self.get("climate_state_is_climate_on") - if value: - self._attr_hvac_mode = HVACMode.HEAT_COOL - else: - self._attr_hvac_mode = HVACMode.OFF - - # If not scoped, prevent the user from changing the HVAC mode by making it the only option - if self._attr_hvac_mode and not self.scoped: - self._attr_hvac_modes = [self._attr_hvac_mode] - - self._attr_current_temperature = self.get("climate_state_inside_temp") - self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") - self._attr_preset_mode = 
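async_setup_entry above now picks an entity class per vehicle: pre-2021 cars, or firmware older than 2024.44.25, keep the polling entity, while newer ones get the streaming variant. A small sketch of that dispatch with a simplified vehicle record (the pre2021 flag and firmware cutoff come from the diff, which compares the firmware as a plain string):

    from dataclasses import dataclass


    @dataclass
    class VehicleSketch:
        vin: str
        pre2021: bool
        firmware: str


    def pick_climate_class(vehicle: VehicleSketch) -> str:
        # firmware compared as a string, matching the check in the diff
        if vehicle.pre2021 or vehicle.firmware < "2024.44.25":
            return "TeslemetryPollingClimateEntity"
        return "TeslemetryStreamingClimateEntity"


    for v in (
        VehicleSketch("VIN1", pre2021=True, firmware="2025.2.6"),
        VehicleSketch("VIN2", pre2021=False, firmware="2024.39.7"),
        VehicleSketch("VIN3", pre2021=False, firmware="2025.2.6"),
    ):
        print(v.vin, pick_climate_class(v))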
self.get("climate_state_climate_keeper_mode") - self._attr_min_temp = cast( - float, self.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP) - ) - self._attr_max_temp = cast( - float, self.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP) - ) + _attr_preset_modes = list(PRESET_MODES.values()) + _attr_fan_modes = ["off", "bioweapon"] + _enable_turn_on_off_backwards_compatibility = False async def async_turn_on(self) -> None: """Set the climate state to on.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.auto_conditioning_start()) self._attr_hvac_mode = HVACMode.HEAT_COOL @@ -127,19 +110,21 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_off(self) -> None: """Set the climate state to off.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.auto_conditioning_stop()) self._attr_hvac_mode = HVACMode.OFF self._attr_preset_mode = self._attr_preset_modes[0] + self._attr_fan_mode = self._attr_fan_modes[0] self.async_write_ha_state() async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" + if temp := kwargs.get(ATTR_TEMPERATURE): - await self.wake_up_if_asleep() + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command( self.api.set_temps( driver_temp=temp, @@ -163,18 +148,210 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the climate preset mode.""" - await self.wake_up_if_asleep() + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command( self.api.set_climate_keeper_mode( climate_keeper_mode=self._attr_preset_modes.index(preset_mode) ) ) self._attr_preset_mode = preset_mode - if preset_mode != self._attr_preset_modes[0]: - # Changing preset mode will also turn on climate + if preset_mode == self._attr_preset_modes[0]: + self._attr_hvac_mode = HVACMode.OFF + else: self._attr_hvac_mode = HVACMode.HEAT_COOL self.async_write_ha_state() + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set the Bioweapon defense mode.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command( + self.api.set_bioweapon_mode( + on=(fan_mode != "off"), + manual_override=True, + ) + ) + self._attr_fan_mode = fan_mode + if fan_mode == self._attr_fan_modes[1]: + self._attr_hvac_mode = HVACMode.HEAT_COOL + self.async_write_ha_state() + + +class TeslemetryPollingClimateEntity(TeslemetryClimateEntity, TeslemetryVehicleEntity): + """Polling vehicle climate entity.""" + + _attr_supported_features = ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.FAN_MODE + ) + + def __init__( + self, + data: TeslemetryVehicleData, + side: TeslemetryClimateSide, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + super().__init__(data, side) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + value = self.get("climate_state_is_climate_on") + if value is None: + self._attr_hvac_mode = None + if value: + self._attr_hvac_mode = HVACMode.HEAT_COOL + else: + self._attr_hvac_mode = HVACMode.OFF + + self._attr_current_temperature = 
self.get("climate_state_inside_temp") + self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") + self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") + if self.get("climate_state_bioweapon_mode"): + self._attr_fan_mode = "bioweapon" + else: + self._attr_fan_mode = "off" + self._attr_min_temp = cast( + float, self.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP) + ) + self._attr_max_temp = cast( + float, self.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP) + ) + + +class TeslemetryStreamingClimateEntity( + TeslemetryClimateEntity, TeslemetryVehicleStreamEntity, RestoreEntity +): + """Teslemetry steering wheel climate control.""" + + _attr_supported_features = ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + ) + + def __init__( + self, + data: TeslemetryVehicleData, + side: TeslemetryClimateSide, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + # Initialize defaults + self._attr_hvac_mode = None + self._attr_current_temperature = None + self._attr_target_temperature = None + self._attr_fan_mode = None + self._attr_preset_mode = None + + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + self.side = side + super().__init__( + data, + side, + ) + + self._attr_min_temp = cast( + float, + data.coordinator.data.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP), + ) + self._attr_max_temp = cast( + float, + data.coordinator.data.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP), + ) + self.rhd: bool = data.coordinator.data.get("vehicle_config_rhd", False) + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + self._attr_hvac_mode = ( + HVACMode(state.state) if state.state in HVAC_MODES else None + ) + self._attr_current_temperature = state.attributes.get("current_temperature") + self._attr_target_temperature = state.attributes.get("temperature") + self._attr_preset_mode = state.attributes.get("preset_mode") + + self.async_on_remove( + self.vehicle.stream_vehicle.listen_InsideTemp( + self._async_handle_inside_temp + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacACEnabled( + self._async_handle_hvac_ac_enabled + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_ClimateKeeperMode( + self._async_handle_climate_keeper_mode + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_RightHandDrive(self._async_handle_rhd) + ) + + if self.side == TeslemetryClimateSide.DRIVER: + if self.rhd: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacRightTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + else: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacLeftTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + elif self.side == TeslemetryClimateSide.PASSENGER: + if self.rhd: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacLeftTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + else: + self.async_on_remove( + self.vehicle.stream_vehicle.listen_HvacRightTemperatureRequest( + self._async_handle_hvac_temperature_request + ) + ) + + def _async_handle_inside_temp(self, data: float | None): + self._attr_current_temperature = data + self.async_write_ha_state() + 
+ def _async_handle_hvac_ac_enabled(self, data: bool | None): + self._attr_hvac_mode = ( + None if data is None else HVACMode.HEAT_COOL if data else HVACMode.OFF + ) + self.async_write_ha_state() + + def _async_handle_climate_keeper_mode(self, data: str | None): + self._attr_preset_mode = PRESET_MODES.get(data) if data else None + self.async_write_ha_state() + + def _async_handle_hvac_temperature_request(self, data: float | None): + self._attr_target_temperature = data + self.async_write_ha_state() + + def _async_handle_rhd(self, data: bool | None): + if data is not None: + self.rhd = data + COP_MODES = { "Off": HVACMode.OFF, @@ -182,73 +359,27 @@ COP_MODES = { "FanOnly": HVACMode.FAN_ONLY, } -# String to celsius COP_LEVELS = { "Low": 30, "Medium": 35, "High": 40, } -# Celsius to IntEnum -TEMP_LEVELS = { - 30: CabinOverheatProtectionTemp.LOW, - 35: CabinOverheatProtectionTemp.MEDIUM, - 40: CabinOverheatProtectionTemp.HIGH, -} +class TeslemetryCabinOverheatProtectionEntity(TeslemetryRootEntity, ClimateEntity): + """Vehicle Cabin Overheat Protection.""" -class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEntity): - """Telemetry vehicle cabin overheat protection entity.""" + api: Vehicle _attr_precision = PRECISION_WHOLE _attr_target_temperature_step = 5 - _attr_min_temp = COP_LEVELS["Low"] - _attr_max_temp = COP_LEVELS["High"] + _attr_min_temp = 30 + _attr_max_temp = 40 _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = list(COP_MODES.values()) - _attr_entity_registry_enabled_default = False - def __init__( - self, - data: TeslemetryVehicleData, - scopes: Scope, - ) -> None: - """Initialize the climate.""" - - self.scoped = Scope.VEHICLE_CMDS in scopes - if self.scoped: - self._attr_supported_features = ( - ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF - ) - else: - self._attr_supported_features = ClimateEntityFeature(0) - self._attr_hvac_modes = [] - - super().__init__(data, "climate_state_cabin_overheat_protection") - - # Supported Features from data - if self.scoped and self.get("vehicle_config_cop_user_set_temp_supported"): - self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE - - def _async_update_attrs(self) -> None: - """Update the attributes of the entity.""" - - if (state := self.get("climate_state_cabin_overheat_protection")) is None: - self._attr_hvac_mode = None - else: - self._attr_hvac_mode = COP_MODES.get(state) - - # If not scoped, prevent the user from changing the HVAC mode by making it the only option - if self._attr_hvac_mode and not self.scoped: - self._attr_hvac_modes = [self._attr_hvac_mode] - - if (level := self.get("climate_state_cop_activation_temperature")) is None: - self._attr_target_temperature = None - else: - self._attr_target_temperature = COP_LEVELS.get(level) - - self._attr_current_temperature = self.get("climate_state_inside_temp") + _enable_turn_on_off_backwards_compatibility = False async def async_turn_on(self) -> None: """Set the climate state to on.""" @@ -260,26 +391,28 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - if (temp := kwargs.get(ATTR_TEMPERATURE)) is None or ( - cop_mode := TEMP_LEVELS.get(temp) - ) is None: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="invalid_cop_temp", - ) + if temp := kwargs.get(ATTR_TEMPERATURE): + if (cop_mode := 
COP_TEMPERATURES.get(temp)) is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_cop_temp", + ) + self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.set_cop_temp(cop_mode)) - self._attr_target_temperature = temp + await handle_vehicle_command(self.api.set_cop_temp(cop_mode)) + self._attr_target_temperature = temp if mode := kwargs.get(ATTR_HVAC_MODE): - await self._async_set_cop(mode) + # Set HVAC mode will call write_ha_state + await self.async_set_hvac_mode(mode) + else: + self.async_write_ha_state() - self.async_write_ha_state() + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set the climate mode and state.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) - async def _async_set_cop(self, hvac_mode: HVACMode) -> None: if hvac_mode == HVACMode.OFF: await handle_vehicle_command( self.api.set_cabin_overheat_protection(on=False, fan_only=False) @@ -294,10 +427,125 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn ) self._attr_hvac_mode = hvac_mode - - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set the climate mode and state.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await self._async_set_cop(hvac_mode) + self.async_write_ha_state() + + +class TeslemetryPollingCabinOverheatProtectionEntity( + TeslemetryVehicleEntity, TeslemetryCabinOverheatProtectionEntity +): + """Vehicle Cabin Overheat Protection.""" + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + super().__init__( + data, + "climate_state_cabin_overheat_protection", + ) + + # Supported Features + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + if self.get("vehicle_config_cop_user_set_temp_supported"): + self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE + + # Scopes + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + + if (state := self.get("climate_state_cabin_overheat_protection")) is None: + self._attr_hvac_mode = None + else: + self._attr_hvac_mode = COP_MODES.get(state) + + if (level := self.get("climate_state_cop_activation_temperature")) is None: + self._attr_target_temperature = None + else: + self._attr_target_temperature = COP_LEVELS.get(level) + + self._attr_current_temperature = self.get("climate_state_inside_temp") + + +class TeslemetryStreamingCabinOverheatProtectionEntity( + TeslemetryVehicleStreamEntity, + TeslemetryCabinOverheatProtectionEntity, + RestoreEntity, +): + """Vehicle Cabin Overheat Protection.""" + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the climate.""" + + # Initialize defaults + self._attr_hvac_mode = None + self._attr_current_temperature = None + self._attr_target_temperature = None + self._attr_fan_mode = None + self._attr_preset_mode = None + + super().__init__(data, "climate_state_cabin_overheat_protection") + + # Supported Features + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + if data.coordinator.data.get("vehicle_config_cop_user_set_temp_supported"): + self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE + + # Scopes + 
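Cabin overheat protection only accepts three setpoints, so the handler above maps the requested temperature to the API enum through COP_TEMPERATURES and rejects anything else. A reduced sketch of that validation (the 30/35/40 mapping mirrors the diff; the enum and error type here are local stand-ins for CabinOverheatProtectionTemp and ServiceValidationError):

    from enum import IntEnum


    class CopTemp(IntEnum):
        LOW = 0
        MEDIUM = 1
        HIGH = 2


    # requested celsius -> API enum value
    COP_TEMPERATURES = {30: CopTemp.LOW, 35: CopTemp.MEDIUM, 40: CopTemp.HIGH}


    def resolve_cop_temp(temp: float | None) -> CopTemp | None:
        """Return the API value for a setpoint, or None when no temperature was given."""
        if not temp:
            return None
        if (cop_mode := COP_TEMPERATURES.get(temp)) is None:
            raise ValueError(f"invalid cabin overheat protection temperature: {temp}")
        return cop_mode


    print(resolve_cop_temp(35))    # CopTemp.MEDIUM
    print(resolve_cop_temp(None))  # None
    try:
        resolve_cop_temp(37)
    except ValueError as err:
        print(err)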
self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = ClimateEntityFeature(0) + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + self._attr_hvac_mode = ( + HVACMode(state.state) if state.state in HVAC_MODES else None + ) + self._attr_current_temperature = state.attributes.get("temperature") + self._attr_target_temperature = state.attributes.get("target_temperature") + + self.async_on_remove( + self.vehicle.stream_vehicle.listen_InsideTemp( + self._async_handle_inside_temp + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_CabinOverheatProtectionMode( + self._async_handle_protection_mode + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_CabinOverheatProtectionTemperatureLimit( + self._async_handle_temperature_limit + ) + ) + + def _async_handle_inside_temp(self, value: float | None): + self._attr_current_temperature = value + self.async_write_ha_state() + + def _async_handle_protection_mode(self, value: str | None): + self._attr_hvac_mode = COP_MODES.get(value) if value is not None else None + self.async_write_ha_state() + + def _async_handle_temperature_limit(self, value: str | None): + self._attr_target_temperature = ( + COP_LEVELS.get(value) if value is not None else None + ) self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/config_flow.py b/homeassistant/components/teslemetry/config_flow.py index d8cf2bd7945..a25a98d6c68 100644 --- a/homeassistant/components/teslemetry/config_flow.py +++ b/homeassistant/components/teslemetry/config_flow.py @@ -6,12 +6,12 @@ from collections.abc import Mapping from typing import Any from aiohttp import ClientConnectionError -from tesla_fleet_api import Teslemetry from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import Teslemetry import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult diff --git a/homeassistant/components/teslemetry/coordinator.py b/homeassistant/components/teslemetry/coordinator.py index 0cd2a5a62d6..07549008a6c 100644 --- a/homeassistant/components/teslemetry/coordinator.py +++ b/homeassistant/components/teslemetry/coordinator.py @@ -5,13 +5,13 @@ from __future__ import annotations from datetime import datetime, timedelta from typing import TYPE_CHECKING, Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, ) +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -49,7 +49,7 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: VehicleSpecific, + api: Vehicle, product: dict, ) -> None: """Initialize Teslemetry Vehicle Update Coordinator.""" @@ -87,7 +87,7 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, data: dict, ) -> None: """Initialize Teslemetry Energy Site Live coordinator.""" @@ -133,7 +133,7 @@ class 
TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]) self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, product: dict, ) -> None: """Initialize Teslemetry Energy Info coordinator.""" @@ -169,7 +169,7 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): self, hass: HomeAssistant, config_entry: TeslemetryConfigEntry, - api: EnergySpecific, + api: EnergySite, ) -> None: """Initialize Teslemetry Energy Info coordinator.""" super().__init__( @@ -192,7 +192,7 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]): raise UpdateFailed(e.message) from e # Add all time periods together - output = {key: 0 for key in ENERGY_HISTORY_FIELDS} + output = dict.fromkeys(ENERGY_HISTORY_FIELDS, 0) for period in data.get("time_series", []): for key in ENERGY_HISTORY_FIELDS: output[key] += period.get(key, 0) diff --git a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index 82d3db123c3..3d145d24b0c 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -4,8 +4,8 @@ from abc import abstractmethod from typing import Any from propcache.api import cached_property -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import Signal from homeassistant.exceptions import ServiceValidationError @@ -29,7 +29,7 @@ class TeslemetryRootEntity(Entity): _attr_has_entity_name = True scoped: bool - api: VehicleSpecific | EnergySpecific + api: Vehicle | EnergySite def raise_for_scope(self, scope: Scope): """Raise an error if a scope is not available.""" @@ -105,7 +105,7 @@ class TeslemetryVehicleEntity(TeslemetryEntity): """Parent class for Teslemetry Vehicle entities.""" _last_update: int = 0 - api: VehicleSpecific + api: Vehicle vehicle: TeslemetryVehicleData def __init__( @@ -134,7 +134,7 @@ class TeslemetryVehicleEntity(TeslemetryEntity): class TeslemetryEnergyLiveEntity(TeslemetryEntity): """Parent class for Teslemetry Energy Site Live entities.""" - api: EnergySpecific + api: EnergySite def __init__( self, @@ -155,7 +155,7 @@ class TeslemetryEnergyLiveEntity(TeslemetryEntity): class TeslemetryEnergyInfoEntity(TeslemetryEntity): """Parent class for Teslemetry Energy Site Info Entities.""" - api: EnergySpecific + api: EnergySite def __init__( self, @@ -194,7 +194,7 @@ class TeslemetryWallConnectorEntity(TeslemetryEntity): """Parent class for Teslemetry Wall Connector Entities.""" _attr_has_entity_name = True - api: EnergySpecific + api: EnergySite def __init__( self, diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json index 3d37ced8cff..cae5a8f3c01 100644 --- a/homeassistant/components/teslemetry/manifest.json +++ b/homeassistant/components/teslemetry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/teslemetry", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "requirements": ["tesla-fleet-api==0.9.13", "teslemetry-stream==0.6.12"] + "requirements": ["tesla-fleet-api==1.0.16", "teslemetry-stream==0.6.12"] } diff --git a/homeassistant/components/teslemetry/media_player.py b/homeassistant/components/teslemetry/media_player.py index 1bfc9bf66dc..50f15618e66 100644 --- a/homeassistant/components/teslemetry/media_player.py +++ 
b/homeassistant/components/teslemetry/media_player.py @@ -3,6 +3,7 @@ from __future__ import annotations from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.media_player import ( MediaPlayerDeviceClass, @@ -12,9 +13,14 @@ from homeassistant.components.media_player import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from . import TeslemetryConfigEntry -from .entity import TeslemetryVehicleEntity +from .entity import ( + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_vehicle_command from .models import TeslemetryVehicleData @@ -24,8 +30,16 @@ STATES = { "Stopped": MediaPlayerState.IDLE, "Off": MediaPlayerState.OFF, } -VOLUME_MAX = 11.0 -VOLUME_STEP = 1.0 / 3 +DISPLAY_STATES = { + "On": MediaPlayerState.IDLE, + "Accessory": MediaPlayerState.IDLE, + "Charging": MediaPlayerState.OFF, + "Sentry": MediaPlayerState.OFF, + "Off": MediaPlayerState.OFF, +} +# Tesla uses 31 steps, in 0.333 increments up to 10.333 +VOLUME_STEP = 1 / 31 +VOLUME_FACTOR = 31 / 3 # 10.333 PARALLEL_UPDATES = 0 @@ -38,68 +52,99 @@ async def async_setup_entry( """Set up the Teslemetry Media platform from a config entry.""" async_add_entities( - TeslemetryMediaEntity(vehicle, Scope.VEHICLE_CMDS in entry.runtime_data.scopes) + TeslemetryPollingMediaEntity(vehicle, entry.runtime_data.scopes) + if vehicle.api.pre2021 or vehicle.firmware < "2025.2.6" + else TeslemetryStreamingMediaEntity(vehicle, entry.runtime_data.scopes) for vehicle in entry.runtime_data.vehicles ) -class TeslemetryMediaEntity(TeslemetryVehicleEntity, MediaPlayerEntity): - """Vehicle media player class.""" +class TeslemetryMediaEntity(TeslemetryRootEntity, MediaPlayerEntity): + """Base vehicle media player class.""" + + api: Vehicle _attr_device_class = MediaPlayerDeviceClass.SPEAKER - _attr_supported_features = ( - MediaPlayerEntityFeature.NEXT_TRACK - | MediaPlayerEntityFeature.PAUSE - | MediaPlayerEntityFeature.PLAY - | MediaPlayerEntityFeature.PREVIOUS_TRACK - | MediaPlayerEntityFeature.VOLUME_SET - ) - _volume_max: float = VOLUME_MAX + _attr_volume_step = VOLUME_STEP + + async def async_set_volume_level(self, volume: float) -> None: + """Set volume level, range 0..1.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command(self.api.adjust_volume(volume * VOLUME_FACTOR)) + self._attr_volume_level = volume + self.async_write_ha_state() + + async def async_media_play(self) -> None: + """Send play command.""" + if self.state != MediaPlayerState.PLAYING: + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command(self.api.media_toggle_playback()) + self._attr_state = MediaPlayerState.PLAYING + self.async_write_ha_state() + + async def async_media_pause(self) -> None: + """Send pause command.""" + + if self.state == MediaPlayerState.PLAYING: + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command(self.api.media_toggle_playback()) + self._attr_state = MediaPlayerState.PAUSED + self.async_write_ha_state() + + async def async_media_next_track(self) -> None: + """Send next track command.""" + + self.raise_for_scope(Scope.VEHICLE_CMDS) + await handle_vehicle_command(self.api.media_next_track()) + + async def async_media_previous_track(self) -> None: + """Send previous track command.""" + + self.raise_for_scope(Scope.VEHICLE_CMDS) + 
await handle_vehicle_command(self.api.media_prev_track()) + + +class TeslemetryPollingMediaEntity(TeslemetryVehicleEntity, TeslemetryMediaEntity): + """Polling vehicle media player class.""" def __init__( self, data: TeslemetryVehicleData, - scoped: bool, + scopes: list[Scope], ) -> None: """Initialize the media player entity.""" super().__init__(data, "media") - self.scoped = scoped - if not scoped: + + self._attr_supported_features = ( + MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.VOLUME_SET + ) + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: self._attr_supported_features = MediaPlayerEntityFeature(0) def _async_update_attrs(self) -> None: """Update entity attributes.""" - self._volume_max = ( - self.get("vehicle_state_media_info_audio_volume_max") or VOLUME_MAX - ) - self._attr_state = STATES.get( - self.get("vehicle_state_media_info_media_playback_status") or "Off", - ) - self._attr_volume_step = ( - 1.0 - / self._volume_max - / ( - self.get("vehicle_state_media_info_audio_volume_increment") - or VOLUME_STEP - ) - ) + state = self.get("vehicle_state_media_info_media_playback_status") + self._attr_state = STATES.get(state) if state else None + self._attr_volume_level = ( + self.get("vehicle_state_media_info_audio_volume") or 0 + ) / VOLUME_FACTOR - if volume := self.get("vehicle_state_media_info_audio_volume"): - self._attr_volume_level = volume / self._volume_max - else: - self._attr_volume_level = None + duration = self.get("vehicle_state_media_info_now_playing_duration") + self._attr_media_duration = duration / 1000 if duration is not None else None - if duration := self.get("vehicle_state_media_info_now_playing_duration"): - self._attr_media_duration = duration / 1000 - else: - self._attr_media_duration = None - - if duration and ( - position := self.get("vehicle_state_media_info_now_playing_elapsed") - ): - self._attr_media_position = position / 1000 - else: - self._attr_media_position = None + # Return media position only when a media duration is > 0. 
+ elapsed = self.get("vehicle_state_media_info_now_playing_elapsed") + self._attr_media_position = ( + elapsed / 1000 if duration and elapsed is not None else None + ) self._attr_media_title = self.get("vehicle_state_media_info_now_playing_title") self._attr_media_artist = self.get( @@ -113,42 +158,151 @@ class TeslemetryMediaEntity(TeslemetryVehicleEntity, MediaPlayerEntity): ) self._attr_source = self.get("vehicle_state_media_info_now_playing_source") - async def async_set_volume_level(self, volume: float) -> None: - """Set volume level, range 0..1.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command( - self.api.adjust_volume(int(volume * self._volume_max)) + +class TeslemetryStreamingMediaEntity( + TeslemetryVehicleStreamEntity, TeslemetryMediaEntity, RestoreEntity +): + """Streaming vehicle media player class.""" + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], + ) -> None: + """Initialize the media player entity.""" + super().__init__(data, "media") + + self._attr_supported_features = ( + MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.VOLUME_SET ) - self._attr_volume_level = volume + self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: + self._attr_supported_features = MediaPlayerEntityFeature(0) + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + try: + self._attr_state = MediaPlayerState(state.state) + except ValueError: + self._attr_state = None + self._attr_volume_level = state.attributes.get("volume_level") + self._attr_media_title = state.attributes.get("media_title") + self._attr_media_artist = state.attributes.get("media_artist") + self._attr_media_album_name = state.attributes.get("media_album_name") + self._attr_media_playlist = state.attributes.get("media_playlist") + self._attr_media_duration = state.attributes.get("media_duration") + self._attr_media_position = state.attributes.get("media_position") + self._attr_source = state.attributes.get("source") + + self.async_write_ha_state() + + self.async_on_remove( + self.vehicle.stream_vehicle.listen_CenterDisplay( + self._async_handle_center_display + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaPlaybackStatus( + self._async_handle_media_playback_status + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaPlaybackSource( + self._async_handle_media_playback_source + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaAudioVolume( + self._async_handle_media_audio_volume + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingDuration( + self._async_handle_media_now_playing_duration + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingElapsed( + self._async_handle_media_now_playing_elapsed + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingArtist( + self._async_handle_media_now_playing_artist + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingAlbum( + self._async_handle_media_now_playing_album + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_MediaNowPlayingTitle( + self._async_handle_media_now_playing_title + ) + ) + self.async_on_remove( + 
self.vehicle.stream_vehicle.listen_MediaNowPlayingStation( + self._async_handle_media_now_playing_station + ) + ) + + def _async_handle_center_display(self, value: str | None) -> None: + """Update entity attributes.""" + if value is not None: + self._attr_state = DISPLAY_STATES.get(value) + self.async_write_ha_state() + + def _async_handle_media_playback_status(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_state = MediaPlayerState.OFF if value is None else STATES.get(value) self.async_write_ha_state() - async def async_media_play(self) -> None: - """Send play command.""" - if self.state != MediaPlayerState.PLAYING: - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.media_toggle_playback()) - self._attr_state = MediaPlayerState.PLAYING - self.async_write_ha_state() + def _async_handle_media_playback_source(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_source = value + self.async_write_ha_state() - async def async_media_pause(self) -> None: - """Send pause command.""" - if self.state == MediaPlayerState.PLAYING: - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.media_toggle_playback()) - self._attr_state = MediaPlayerState.PAUSED - self.async_write_ha_state() + def _async_handle_media_audio_volume(self, value: float | None) -> None: + """Update entity attributes.""" + self._attr_volume_level = None if value is None else value / VOLUME_FACTOR + self.async_write_ha_state() - async def async_media_next_track(self) -> None: - """Send next track command.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.media_next_track()) + def _async_handle_media_now_playing_duration(self, value: int | None) -> None: + """Update entity attributes.""" + self._attr_media_duration = None if value is None else int(value / 1000) + self.async_write_ha_state() - async def async_media_previous_track(self) -> None: - """Send previous track command.""" - self.raise_for_scope(Scope.VEHICLE_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.media_prev_track()) + def _async_handle_media_now_playing_elapsed(self, value: int | None) -> None: + """Update entity attributes.""" + self._attr_media_position = None if value is None else int(value / 1000) + self.async_write_ha_state() + + def _async_handle_media_now_playing_artist(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_media_artist = value # Check if this is album artist or not + self.async_write_ha_state() + + def _async_handle_media_now_playing_album(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_media_album_name = value + self.async_write_ha_state() + + def _async_handle_media_now_playing_title(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_media_title = value + self.async_write_ha_state() + + def _async_handle_media_now_playing_station(self, value: str | None) -> None: + """Update entity attributes.""" + self._attr_media_channel = ( + value # could also be _attr_media_playlist when Spotify + ) + self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index 5b78386c68a..fd6cf12b5b9 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -6,8 
+6,8 @@ import asyncio from collections.abc import Callable from dataclasses import dataclass -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import TeslemetryStream, TeslemetryStreamVehicle from homeassistant.config_entries import ConfigEntry @@ -34,7 +34,7 @@ class TeslemetryData: class TeslemetryVehicleData: """Data for a vehicle in the Teslemetry integration.""" - api: VehicleSpecific + api: Vehicle config_entry: ConfigEntry coordinator: TeslemetryVehicleDataCoordinator stream: TeslemetryStream @@ -50,7 +50,7 @@ class TeslemetryVehicleData: class TeslemetryEnergyData: """Data for a vehicle in the Teslemetry integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TeslemetryEnergySiteLiveCoordinator | None info_coordinator: TeslemetryEnergySiteInfoCoordinator history_coordinator: TeslemetryEnergyHistoryCoordinator | None diff --git a/homeassistant/components/teslemetry/number.py b/homeassistant/components/teslemetry/number.py index 10c15a68b09..ff25dec59b8 100644 --- a/homeassistant/components/teslemetry/number.py +++ b/homeassistant/components/teslemetry/number.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific, VehicleSpecific from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import EnergySite, Vehicle from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.number import ( @@ -46,7 +46,7 @@ PARALLEL_UPDATES = 0 class TeslemetryNumberVehicleEntityDescription(NumberEntityDescription): """Describes Teslemetry Number entity.""" - func: Callable[[VehicleSpecific, int], Awaitable[Any]] + func: Callable[[Vehicle, int], Awaitable[Any]] min_key: str | None = None max_key: str native_min_value: float @@ -99,7 +99,7 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetryNumberVehicleEntityDescription, ...] 
= ( class TeslemetryNumberBatteryEntityDescription(NumberEntityDescription): """Describes Teslemetry Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str | None = None scopes: list[Scope] diff --git a/homeassistant/components/teslemetry/select.py b/homeassistant/components/teslemetry/select.py index 0d268e302de..9e13d15edc4 100644 --- a/homeassistant/components/teslemetry/select.py +++ b/homeassistant/components/teslemetry/select.py @@ -7,8 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import VehicleSpecific from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode, Scope, Seat +from tesla_fleet_api.teslemetry import Vehicle from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.select import SelectEntity, SelectEntityDescription @@ -40,7 +40,7 @@ LEVEL = {OFF: 0, LOW: 1, MEDIUM: 2, HIGH: 3} class TeslemetrySelectEntityDescription(SelectEntityDescription): """Seat Heater entity description.""" - select_fn: Callable[[VehicleSpecific, int], Awaitable[Any]] + select_fn: Callable[[Vehicle, int], Awaitable[Any]] supported_fn: Callable[[dict], bool] = lambda _: True streaming_listener: ( Callable[ diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index f1859ad39de..b1c6b487bf9 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta from propcache.api import cached_property -from teslemetry_stream import Signal +from teslemetry_stream import Signal, TeslemetryStreamVehicle from teslemetry_stream.const import ShiftState from homeassistant.components.sensor import ( @@ -50,6 +50,7 @@ from .models import TeslemetryEnergyData, TeslemetryVehicleData PARALLEL_UPDATES = 0 + CHARGE_STATES = { "Starting": "starting", "Charging": "charging", @@ -350,21 +351,26 @@ class TeslemetryTimeEntityDescription(SensorEntityDescription): """Describes Teslemetry Sensor entity.""" variance: int - streaming_key: Signal + streaming_listener: Callable[ + [TeslemetryStreamVehicle, Callable[[float | None], None]], + Callable[[], None], + ] streaming_firmware: str = "2024.26" + streaming_value_fn: Callable[[float], float] = lambda x: x VEHICLE_TIME_DESCRIPTIONS: tuple[TeslemetryTimeEntityDescription, ...] 
= ( TeslemetryTimeEntityDescription( key="charge_state_minutes_to_full_charge", - streaming_key=Signal.TIME_TO_FULL_CHARGE, + streaming_value_fn=lambda x: x * 60, + streaming_listener=lambda x, y: x.listen_TimeToFullCharge(y), device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, variance=4, ), TeslemetryTimeEntityDescription( key="drive_state_active_route_minutes_to_arrival", - streaming_key=Signal.MINUTES_TO_ARRIVAL, + streaming_listener=lambda x, y: x.listen_MinutesToArrival(y), device_class=SensorDeviceClass.TIMESTAMP, variance=1, ), @@ -667,18 +673,22 @@ class TeslemetryStreamTimeSensorEntity(TeslemetryVehicleStreamEntity, SensorEnti """Initialize the sensor.""" self.entity_description = description self._get_timestamp = ignore_variance( - func=lambda value: dt_util.now() + timedelta(minutes=value), + func=lambda value: dt_util.now() + + timedelta(minutes=description.streaming_value_fn(value)), ignored_variance=timedelta(minutes=description.variance), ) - assert description.streaming_key - super().__init__(data, description.key, description.streaming_key) + super().__init__(data, description.key) - @cached_property - def available(self) -> bool: - """Return True if entity is available.""" - return self.stream.connected + async def async_added_to_hass(self) -> None: + """When entity is added to hass.""" + await super().async_added_to_hass() + self.async_on_remove( + self.entity_description.streaming_listener( + self.vehicle.stream_vehicle, self._value_callback + ) + ) - def _async_value_from_stream(self, value) -> None: + def _value_callback(self, value: float | None) -> None: """Update the value of the entity.""" if value is None: self._attr_native_value = None diff --git a/homeassistant/components/teslemetry/strings.json b/homeassistant/components/teslemetry/strings.json index 9dc17fd2ef7..ceb8b3c1af9 100644 --- a/homeassistant/components/teslemetry/strings.json +++ b/homeassistant/components/teslemetry/strings.json @@ -132,7 +132,7 @@ "name": "Tire pressure warning rear right" }, "pin_to_drive_enabled": { - "name": "Pin to drive enabled" + "name": "PIN to Drive enabled" }, "drive_rail": { "name": "Drive rail" @@ -226,6 +226,12 @@ "dog": "Dog mode", "camp": "Camp mode" } + }, + "fan_mode": { + "state": { + "off": "[%key:common::state::off%]", + "bioweapon": "Bioweapon defense" + } } } } diff --git a/homeassistant/components/teslemetry/switch.py b/homeassistant/components/teslemetry/switch.py index 83441e6c4f6..516a6f9852f 100644 --- a/homeassistant/components/teslemetry/switch.py +++ b/homeassistant/components/teslemetry/switch.py @@ -7,7 +7,8 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api.const import Scope, Seat +from tesla_fleet_api.const import Scope +from teslemetry_stream import TeslemetryStreamVehicle from homeassistant.components.switch import ( SwitchDeviceClass, @@ -16,10 +17,16 @@ from homeassistant.components.switch import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import StateType from . 
import TeslemetryConfigEntry -from .entity import TeslemetryEnergyInfoEntity, TeslemetryVehicleEntity +from .entity import ( + TeslemetryEnergyInfoEntity, + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_command, handle_vehicle_command from .models import TeslemetryEnergyData, TeslemetryVehicleData @@ -34,36 +41,41 @@ class TeslemetrySwitchEntityDescription(SwitchEntityDescription): off_func: Callable scopes: list[Scope] value_func: Callable[[StateType], bool] = bool + streaming_listener: Callable[ + [TeslemetryStreamVehicle, Callable[[StateType], None]], + Callable[[], None], + ] + streaming_value_fn: Callable[[StateType], bool] = bool + streaming_firmware: str = "2024.26" unique_id: str | None = None VEHICLE_DESCRIPTIONS: tuple[TeslemetrySwitchEntityDescription, ...] = ( TeslemetrySwitchEntityDescription( key="vehicle_state_sentry_mode", + streaming_listener=lambda x, y: x.listen_SentryMode(y), + streaming_value_fn=lambda x: x != "Off", on_func=lambda api: api.set_sentry_mode(on=True), off_func=lambda api: api.set_sentry_mode(on=False), scopes=[Scope.VEHICLE_CMDS], ), TeslemetrySwitchEntityDescription( key="climate_state_auto_seat_climate_left", - on_func=lambda api: api.remote_auto_seat_climate_request(Seat.FRONT_LEFT, True), - off_func=lambda api: api.remote_auto_seat_climate_request( - Seat.FRONT_LEFT, False - ), + streaming_listener=lambda x, y: x.listen_AutoSeatClimateLeft(y), + on_func=lambda api: api.remote_auto_seat_climate_request(1, True), + off_func=lambda api: api.remote_auto_seat_climate_request(1, False), scopes=[Scope.VEHICLE_CMDS], ), TeslemetrySwitchEntityDescription( key="climate_state_auto_seat_climate_right", - on_func=lambda api: api.remote_auto_seat_climate_request( - Seat.FRONT_RIGHT, True - ), - off_func=lambda api: api.remote_auto_seat_climate_request( - Seat.FRONT_RIGHT, False - ), + streaming_listener=lambda x, y: x.listen_AutoSeatClimateRight(y), + on_func=lambda api: api.remote_auto_seat_climate_request(2, True), + off_func=lambda api: api.remote_auto_seat_climate_request(2, False), scopes=[Scope.VEHICLE_CMDS], ), TeslemetrySwitchEntityDescription( key="climate_state_auto_steering_wheel_heat", + streaming_listener=lambda x, y: x.listen_HvacSteeringWheelHeatAuto(y), on_func=lambda api: api.remote_auto_steering_wheel_heat_climate_request( on=True ), @@ -74,6 +86,8 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetrySwitchEntityDescription, ...] = ( ), TeslemetrySwitchEntityDescription( key="climate_state_defrost_mode", + streaming_listener=lambda x, y: x.listen_DefrostMode(y), + streaming_value_fn=lambda x: x != "Off", on_func=lambda api: api.set_preconditioning_max(on=True, manual_override=False), off_func=lambda api: api.set_preconditioning_max( on=False, manual_override=False @@ -83,9 +97,11 @@ VEHICLE_DESCRIPTIONS: tuple[TeslemetrySwitchEntityDescription, ...] 
= ( TeslemetrySwitchEntityDescription( key="charge_state_charging_state", unique_id="charge_state_user_charge_enable_request", + value_func=lambda state: state in {"Starting", "Charging"}, + streaming_listener=lambda x, y: x.listen_DetailedChargeState(y), + streaming_value_fn=lambda x: x in {"Starting", "Charging"}, on_func=lambda api: api.charge_start(), off_func=lambda api: api.charge_stop(), - value_func=lambda state: state in {"Starting", "Charging"}, scopes=[Scope.VEHICLE_CMDS, Scope.VEHICLE_CHARGING_CMDS], ), ) @@ -101,12 +117,16 @@ async def async_setup_entry( async_add_entities( chain( ( - TeslemetryVehicleSwitchEntity( + TeslemetryPollingVehicleSwitchEntity( + vehicle, description, entry.runtime_data.scopes + ) + if vehicle.api.pre2021 + or vehicle.firmware < description.streaming_firmware + else TeslemetryStreamingVehicleSwitchEntity( vehicle, description, entry.runtime_data.scopes ) for vehicle in entry.runtime_data.vehicles for description in VEHICLE_DESCRIPTIONS - if description.key in vehicle.coordinator.data ), ( TeslemetryChargeFromGridSwitchEntity( @@ -126,15 +146,31 @@ async def async_setup_entry( ) -class TeslemetrySwitchEntity(SwitchEntity): +class TeslemetryVehicleSwitchEntity(TeslemetryRootEntity, SwitchEntity): """Base class for all Teslemetry switch entities.""" _attr_device_class = SwitchDeviceClass.SWITCH entity_description: TeslemetrySwitchEntityDescription + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the Switch.""" + self.raise_for_scope(self.entity_description.scopes[0]) + await handle_vehicle_command(self.entity_description.on_func(self.api)) + self._attr_is_on = True + self.async_write_ha_state() -class TeslemetryVehicleSwitchEntity(TeslemetryVehicleEntity, TeslemetrySwitchEntity): - """Base class for Teslemetry vehicle switch entities.""" + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the Switch.""" + self.raise_for_scope(self.entity_description.scopes[0]) + await handle_vehicle_command(self.entity_description.off_func(self.api)) + self._attr_is_on = False + self.async_write_ha_state() + + +class TeslemetryPollingVehicleSwitchEntity( + TeslemetryVehicleEntity, TeslemetryVehicleSwitchEntity +): + """Base class for Teslemetry polling vehicle switch entities.""" def __init__( self, @@ -151,30 +187,63 @@ class TeslemetryVehicleSwitchEntity(TeslemetryVehicleEntity, TeslemetrySwitchEnt def _async_update_attrs(self) -> None: """Update the attributes of the sensor.""" - self._attr_is_on = self.entity_description.value_func(self._value) - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn on the Switch.""" - self.raise_for_scope(self.entity_description.scopes[0]) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.entity_description.on_func(self.api)) - self._attr_is_on = True - self.async_write_ha_state() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn off the Switch.""" - self.raise_for_scope(self.entity_description.scopes[0]) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.entity_description.off_func(self.api)) - self._attr_is_on = False - self.async_write_ha_state() + self._attr_is_on = ( + None + if self._value is None + else self.entity_description.value_func(self._value) + ) -class TeslemetryChargeFromGridSwitchEntity( - TeslemetryEnergyInfoEntity, TeslemetrySwitchEntity +class TeslemetryStreamingVehicleSwitchEntity( + TeslemetryVehicleStreamEntity, TeslemetryVehicleSwitchEntity, RestoreEntity ): + """Base class for Teslemetry streaming 
vehicle switch entities.""" + + def __init__( + self, + data: TeslemetryVehicleData, + description: TeslemetrySwitchEntityDescription, + scopes: list[Scope], + ) -> None: + """Initialize the Switch.""" + + self.entity_description = description + self.scoped = any(scope in scopes for scope in description.scopes) + super().__init__(data, description.key) + if description.unique_id: + self._attr_unique_id = f"{data.vin}-{description.unique_id}" + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + + # Restore previous state + if (state := await self.async_get_last_state()) is not None: + if state.state == "on": + self._attr_is_on = True + elif state.state == "off": + self._attr_is_on = False + + # Add listener + self.async_on_remove( + self.entity_description.streaming_listener( + self.vehicle.stream_vehicle, self._value_callback + ) + ) + + def _value_callback(self, value: StateType) -> None: + """Update the value of the entity.""" + self._attr_is_on = ( + None if value is None else self.entity_description.streaming_value_fn(value) + ) + self.async_write_ha_state() + + +class TeslemetryChargeFromGridSwitchEntity(TeslemetryEnergyInfoEntity, SwitchEntity): """Entity class for Charge From Grid switch.""" + _attr_device_class = SwitchDeviceClass.SWITCH + def __init__( self, data: TeslemetryEnergyData, @@ -215,11 +284,11 @@ class TeslemetryChargeFromGridSwitchEntity( self.async_write_ha_state() -class TeslemetryStormModeSwitchEntity( - TeslemetryEnergyInfoEntity, TeslemetrySwitchEntity -): +class TeslemetryStormModeSwitchEntity(TeslemetryEnergyInfoEntity, SwitchEntity): """Entity class for Storm Mode switch.""" + _attr_device_class = SwitchDeviceClass.SWITCH + def __init__( self, data: TeslemetryEnergyData, diff --git a/homeassistant/components/teslemetry/update.py b/homeassistant/components/teslemetry/update.py index f560f25a8ff..b8d40877de4 100644 --- a/homeassistant/components/teslemetry/update.py +++ b/homeassistant/components/teslemetry/update.py @@ -2,16 +2,22 @@ from __future__ import annotations -from typing import Any, cast +from typing import Any from tesla_fleet_api.const import Scope +from tesla_fleet_api.teslemetry import Vehicle from homeassistant.components.update import UpdateEntity, UpdateEntityFeature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity from . 
import TeslemetryConfigEntry -from .entity import TeslemetryVehicleEntity +from .entity import ( + TeslemetryRootEntity, + TeslemetryVehicleEntity, + TeslemetryVehicleStreamEntity, +) from .helpers import handle_vehicle_command from .models import TeslemetryVehicleData @@ -32,12 +38,31 @@ async def async_setup_entry( """Set up the Teslemetry update platform from a config entry.""" async_add_entities( - TeslemetryUpdateEntity(vehicle, entry.runtime_data.scopes) + TeslemetryPollingUpdateEntity(vehicle, entry.runtime_data.scopes) + if vehicle.api.pre2021 or vehicle.firmware < "2024.44.25" + else TeslemetryStreamingUpdateEntity(vehicle, entry.runtime_data.scopes) for vehicle in entry.runtime_data.vehicles ) -class TeslemetryUpdateEntity(TeslemetryVehicleEntity, UpdateEntity): +class TeslemetryUpdateEntity(TeslemetryRootEntity, UpdateEntity): + """Teslemetry Updates entity.""" + + api: Vehicle + _attr_supported_features = UpdateEntityFeature.PROGRESS + + async def async_install( + self, version: str | None, backup: bool, **kwargs: Any + ) -> None: + """Install an update.""" + self.raise_for_scope(Scope.VEHICLE_CMDS) + + await handle_vehicle_command(self.api.schedule_software_update(offset_sec=0)) + self._attr_in_progress = True + self.async_write_ha_state() + + +class TeslemetryPollingUpdateEntity(TeslemetryVehicleEntity, TeslemetryUpdateEntity): """Teslemetry Updates entity.""" def __init__( @@ -94,18 +119,125 @@ class TeslemetryUpdateEntity(TeslemetryVehicleEntity, UpdateEntity): ): self._attr_in_progress = True if install_perc := self.get("vehicle_state_software_update_install_perc"): - self._attr_update_percentage = cast(int, install_perc) + self._attr_update_percentage = install_perc else: self._attr_in_progress = False self._attr_update_percentage = None - async def async_install( - self, version: str | None, backup: bool, **kwargs: Any + +class TeslemetryStreamingUpdateEntity( + TeslemetryVehicleStreamEntity, TeslemetryUpdateEntity, RestoreEntity +): + """Teslemetry Updates entity.""" + + _download_percentage: int = 0 + _install_percentage: int = 0 + + def __init__( + self, + data: TeslemetryVehicleData, + scopes: list[Scope], ) -> None: - """Install an update.""" - self.raise_for_scope(Scope.ENERGY_CMDS) - await self.wake_up_if_asleep() - await handle_vehicle_command(self.api.schedule_software_update(offset_sec=60)) - self._attr_in_progress = True - self._attr_update_percentage = None + """Initialize the Update.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + super().__init__( + data, + "vehicle_state_software_update_status", + ) + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + if (state := await self.async_get_last_state()) is not None: + self._attr_in_progress = state.attributes.get("in_progress", False) + self._install_percentage = state.attributes.get("install_percentage", False) + self._attr_installed_version = state.attributes.get("installed_version") + self._attr_latest_version = state.attributes.get("latest_version") + self._attr_supported_features = UpdateEntityFeature( + state.attributes.get( + "supported_features", self._attr_supported_features + ) + ) + self.async_write_ha_state() + + self.async_on_remove( + self.vehicle.stream_vehicle.listen_SoftwareUpdateDownloadPercentComplete( + self._async_handle_software_update_download_percent_complete + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_SoftwareUpdateInstallationPercentComplete( + 
self._async_handle_software_update_installation_percent_complete + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_SoftwareUpdateScheduledStartTime( + self._async_handle_software_update_scheduled_start_time + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_SoftwareUpdateVersion( + self._async_handle_software_update_version + ) + ) + self.async_on_remove( + self.vehicle.stream_vehicle.listen_Version(self._async_handle_version) + ) + + def _async_handle_software_update_download_percent_complete( + self, value: float | None + ): + """Handle software update download percent complete.""" + + self._download_percentage = round(value) if value is not None else 0 + if self.scoped and self._download_percentage == 100: + self._attr_supported_features = ( + UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL + ) + else: + self._attr_supported_features = UpdateEntityFeature.PROGRESS + self._async_update_progress() self.async_write_ha_state() + + def _async_handle_software_update_installation_percent_complete( + self, value: float | None + ): + """Handle software update installation percent complete.""" + + self._install_percentage = round(value) if value is not None else 0 + self._async_update_progress() + self.async_write_ha_state() + + def _async_handle_software_update_scheduled_start_time(self, value: str | None): + """Handle software update scheduled start time.""" + + self._attr_in_progress = value is not None + self.async_write_ha_state() + + def _async_handle_software_update_version(self, value: str | None): + """Handle software update version.""" + + self._attr_latest_version = ( + value if value and value != " " else self._attr_installed_version + ) + self.async_write_ha_state() + + def _async_handle_version(self, value: str | None): + """Handle version.""" + + if value is not None: + self._attr_installed_version = value.split(" ")[0] + self.async_write_ha_state() + + def _async_update_progress(self) -> None: + """Update the progress of the update.""" + + if self._download_percentage > 1 and self._download_percentage < 100: + self._attr_in_progress = True + self._attr_update_percentage = self._download_percentage + elif self._install_percentage > 1: + self._attr_in_progress = True + self._attr_update_percentage = self._install_percentage + else: + self._attr_in_progress = False + self._attr_update_percentage = None diff --git a/homeassistant/components/tessie/__init__.py b/homeassistant/components/tessie/__init__.py index f73ecc7a729..e247931e3ba 100644 --- a/homeassistant/components/tessie/__init__.py +++ b/homeassistant/components/tessie/__init__.py @@ -5,9 +5,9 @@ from http import HTTPStatus import logging from aiohttp import ClientError, ClientResponseError -from tesla_fleet_api import EnergySpecific, Tessie from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import TeslaFleetError +from tesla_fleet_api.tessie import Tessie from tessie_api import get_state_of_all_vehicles from homeassistant.config_entries import ConfigEntry @@ -123,7 +123,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo ) continue - api = EnergySpecific(tessie.energy, site_id) + api = tessie.energySites.create(site_id) energysites.append( TessieEnergyData( api=api, diff --git a/homeassistant/components/tessie/coordinator.py b/homeassistant/components/tessie/coordinator.py index b06fe6123a5..2382595b058 100644 --- a/homeassistant/components/tessie/coordinator.py +++ b/homeassistant/components/tessie/coordinator.py @@ -8,8 
+8,8 @@ import logging from typing import TYPE_CHECKING, Any from aiohttp import ClientResponseError -from tesla_fleet_api import EnergySpecific from tesla_fleet_api.exceptions import InvalidToken, MissingToken, TeslaFleetError +from tesla_fleet_api.tessie import EnergySite from tessie_api import get_state, get_status from homeassistant.core import HomeAssistant @@ -102,7 +102,7 @@ class TessieEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]): config_entry: TessieConfigEntry def __init__( - self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySpecific + self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySite ) -> None: """Initialize Tessie Energy Site Live coordinator.""" super().__init__( @@ -138,7 +138,7 @@ class TessieEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]): config_entry: TessieConfigEntry def __init__( - self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySpecific + self, hass: HomeAssistant, config_entry: TessieConfigEntry, api: EnergySite ) -> None: """Initialize Tessie Energy Info coordinator.""" super().__init__( diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index 4ddd63552f0..3f96bb226ab 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", "loggers": ["tessie", "tesla-fleet-api"], - "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.9.13"] + "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.0.16"] } diff --git a/homeassistant/components/tessie/models.py b/homeassistant/components/tessie/models.py index ca670b9650b..03652782cfe 100644 --- a/homeassistant/components/tessie/models.py +++ b/homeassistant/components/tessie/models.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass -from tesla_fleet_api import EnergySpecific +from tesla_fleet_api.tessie import EnergySite from homeassistant.helpers.device_registry import DeviceInfo @@ -27,7 +27,7 @@ class TessieData: class TessieEnergyData: """Data for a Energy Site in the Tessie integration.""" - api: EnergySpecific + api: EnergySite live_coordinator: TessieEnergySiteLiveCoordinator info_coordinator: TessieEnergySiteInfoCoordinator id: int diff --git a/homeassistant/components/tessie/number.py b/homeassistant/components/tessie/number.py index 1e857345278..77d8037fb14 100644 --- a/homeassistant/components/tessie/number.py +++ b/homeassistant/components/tessie/number.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from itertools import chain from typing import Any -from tesla_fleet_api import EnergySpecific +from tesla_fleet_api.tessie import EnergySite from tessie_api import set_charge_limit, set_charging_amps, set_speed_limit from homeassistant.components.number import ( @@ -90,7 +90,7 @@ VEHICLE_DESCRIPTIONS: tuple[TessieNumberEntityDescription, ...] 
= ( class TessieNumberBatteryEntityDescription(NumberEntityDescription): """Describes Tessie Number entity.""" - func: Callable[[EnergySpecific, float], Awaitable[Any]] + func: Callable[[EnergySite, float], Awaitable[Any]] requires: str diff --git a/homeassistant/components/thethingsnetwork/sensor.py b/homeassistant/components/thethingsnetwork/sensor.py index ba512d07f18..5aa851d99ae 100644 --- a/homeassistant/components/thethingsnetwork/sensor.py +++ b/homeassistant/components/thethingsnetwork/sensor.py @@ -40,7 +40,7 @@ async def async_setup_entry( if (device_id, field_id) not in sensors and isinstance(ttn_value, TTNSensorValue) } - if len(new_sensors): + if new_sensors: async_add_entities(new_sensors.values()) sensors.update(new_sensors.keys()) diff --git a/homeassistant/components/tibber/coordinator.py b/homeassistant/components/tibber/coordinator.py index 2de9ebd1ec6..e565fdc7dd8 100644 --- a/homeassistant/components/tibber/coordinator.py +++ b/homeassistant/components/tibber/coordinator.py @@ -9,7 +9,11 @@ from typing import cast import tibber from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -159,7 +163,7 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]): ) metadata = StatisticMetaData( - has_mean=False, + mean_type=StatisticMeanType.NONE, has_sum=True, name=f"{home.name} {sensor_type}", source=TIBBER_DOMAIN, diff --git a/homeassistant/components/tibber/strings.json b/homeassistant/components/tibber/strings.json index 05b98b97995..ec2c005d4e3 100644 --- a/homeassistant/components/tibber/strings.json +++ b/homeassistant/components/tibber/strings.json @@ -87,7 +87,7 @@ "services": { "get_prices": { "name": "Get energy prices", - "description": "Get hourly energy prices from Tibber", + "description": "Fetches hourly energy prices including price level.", "fields": { "start": { "name": "Start", diff --git a/homeassistant/components/todo/intent.py b/homeassistant/components/todo/intent.py index c678408a576..d679a57bf96 100644 --- a/homeassistant/components/todo/intent.py +++ b/homeassistant/components/todo/intent.py @@ -11,11 +11,13 @@ from . 
import TodoItem, TodoItemStatus, TodoListEntity from .const import DATA_COMPONENT, DOMAIN INTENT_LIST_ADD_ITEM = "HassListAddItem" +INTENT_LIST_COMPLETE_ITEM = "HassListCompleteItem" async def async_setup_intents(hass: HomeAssistant) -> None: """Set up the todo intents.""" intent.async_register(hass, ListAddItemIntent()) + intent.async_register(hass, ListCompleteItemIntent()) class ListAddItemIntent(intent.IntentHandler): @@ -53,14 +55,92 @@ class ListAddItemIntent(intent.IntentHandler): match_result.states[0].entity_id ) if target_list is None: - raise intent.IntentHandleError(f"No to-do list: {list_name}") + raise intent.IntentHandleError( + f"No to-do list: {list_name}", "list_not_found" + ) # Add to list await target_list.async_create_todo_item( TodoItem(summary=item, status=TodoItemStatus.NEEDS_ACTION) ) - response = intent_obj.create_response() + response: intent.IntentResponse = intent_obj.create_response() + response.response_type = intent.IntentResponseType.ACTION_DONE + response.async_set_results( + [ + intent.IntentResponseTarget( + type=intent.IntentResponseTargetType.ENTITY, + name=list_name, + id=match_result.states[0].entity_id, + ) + ] + ) + return response + + +class ListCompleteItemIntent(intent.IntentHandler): + """Handle ListCompleteItem intents.""" + + intent_type = INTENT_LIST_COMPLETE_ITEM + description = "Complete item on a todo list" + slot_schema = { + vol.Required("item"): intent.non_empty_string, + vol.Required("name"): intent.non_empty_string, + } + platforms = {DOMAIN} + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Handle the intent.""" + hass = intent_obj.hass + + slots = self.async_validate_slots(intent_obj.slots) + item = slots["item"]["value"] + list_name = slots["name"]["value"] + + target_list: TodoListEntity | None = None + + # Find matching list + match_constraints = intent.MatchTargetsConstraints( + name=list_name, domains=[DOMAIN], assistant=intent_obj.assistant + ) + match_result = intent.async_match_targets(hass, match_constraints) + if not match_result.is_match: + raise intent.MatchFailedError( + result=match_result, constraints=match_constraints + ) + + target_list = hass.data[DATA_COMPONENT].get_entity( + match_result.states[0].entity_id + ) + if target_list is None: + raise intent.IntentHandleError( + f"No to-do list: {list_name}", "list_not_found" + ) + + # Find item in list + matching_item = None + for todo_item in target_list.todo_items or (): + if ( + item in (todo_item.uid, todo_item.summary) + and todo_item.status == TodoItemStatus.NEEDS_ACTION + ): + matching_item = todo_item + break + if not matching_item or not matching_item.uid: + raise intent.IntentHandleError( + f"Item '{item}' not found on list", "item_not_found" + ) + + # Mark as completed + await target_list.async_update_todo_item( + TodoItem( + uid=matching_item.uid, + summary=matching_item.summary, + status=TodoItemStatus.COMPLETED, + ) + ) + + response: intent.IntentResponse = intent_obj.create_response() response.response_type = intent.IntentResponseType.ACTION_DONE response.async_set_results( [ diff --git a/homeassistant/components/totalconnect/diagnostics.py b/homeassistant/components/totalconnect/diagnostics.py index f42ed5e44c3..fc310bf850c 100644 --- a/homeassistant/components/totalconnect/diagnostics.py +++ b/homeassistant/components/totalconnect/diagnostics.py @@ -83,6 +83,7 @@ async def async_get_config_entry_diagnostics( "is_new_partition": partition.is_new_partition, "is_night_stay_enabled": 
partition.is_night_stay_enabled, "exit_delay_timer": partition.exit_delay_timer, + "arming_state": partition.arming_state, } new_location["partitions"].append(new_partition) diff --git a/homeassistant/components/tplink_omada/manifest.json b/homeassistant/components/tplink_omada/manifest.json index af20b54675b..274f2815330 100644 --- a/homeassistant/components/tplink_omada/manifest.json +++ b/homeassistant/components/tplink_omada/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_omada", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["tplink-omada-client==1.4.3"] + "requirements": ["tplink-omada-client==1.4.4"] } diff --git a/homeassistant/components/tractive/device_tracker.py b/homeassistant/components/tractive/device_tracker.py index 73be7216a2f..bd1380ade4c 100644 --- a/homeassistant/components/tractive/device_tracker.py +++ b/homeassistant/components/tractive/device_tracker.py @@ -55,11 +55,9 @@ class TractiveDeviceTracker(TractiveEntity, TrackerEntity): @property def source_type(self) -> SourceType: - """Return the source type, eg gps or router, of the device.""" + """Return the source type of the device.""" if self._source_type == "PHONE": return SourceType.BLUETOOTH - if self._source_type == "KNOWN_WIFI": - return SourceType.ROUTER return SourceType.GPS @property diff --git a/homeassistant/components/tradfri/config_flow.py b/homeassistant/components/tradfri/config_flow.py index 9f5b39a9657..f4adb1cc09e 100644 --- a/homeassistant/components/tradfri/config_flow.py +++ b/homeassistant/components/tradfri/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from typing import Any +from typing import Any, cast from uuid import uuid4 from pytradfri import Gateway, RequestError @@ -54,7 +54,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - host = user_input.get(CONF_HOST, self._host) + host = cast(str, user_input.get(CONF_HOST, self._host)) try: auth = await authenticate( self.hass, host, user_input[KEY_SECURITY_CODE] diff --git a/homeassistant/components/tradfri/strings.json b/homeassistant/components/tradfri/strings.json index 9ed7e167e71..66c46dd482e 100644 --- a/homeassistant/components/tradfri/strings.json +++ b/homeassistant/components/tradfri/strings.json @@ -6,7 +6,7 @@ "description": "You can find the security code on the back of your gateway.", "data": { "host": "[%key:common::config_flow::data::host%]", - "security_code": "Security Code" + "security_code": "Security code" }, "data_description": { "host": "Hostname or IP address of your Trådfri gateway." @@ -14,7 +14,7 @@ } }, "error": { - "invalid_security_code": "Failed to register with provided key. If this keeps happening, try restarting the gateway.", + "invalid_security_code": "Failed to register with provided code. If this keeps happening, try restarting the gateway.", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "timeout": "Timeout validating the code.", "cannot_authenticate": "Cannot authenticate, is Gateway paired with another server like e.g. Homekit?" 
diff --git a/homeassistant/components/trafikverket_camera/strings.json b/homeassistant/components/trafikverket_camera/strings.json index b6e2209fc57..8fdc6357156 100644 --- a/homeassistant/components/trafikverket_camera/strings.json +++ b/homeassistant/components/trafikverket_camera/strings.json @@ -18,7 +18,7 @@ "location": "[%key:common::config_flow::data::location%]" }, "data_description": { - "location": "Equal or part of name, description or camera id. Be as specific as possible to avoid getting multiple cameras as result" + "location": "Equal or part of name, description or camera ID. Be as specific as possible to avoid getting multiple cameras as result" } }, "multiple_cameras": { @@ -60,7 +60,7 @@ "name": "[%key:common::config_flow::data::location%]" }, "photo_url": { - "name": "Photo url" + "name": "Photo URL" }, "status": { "name": "Status" @@ -87,7 +87,7 @@ "name": "Photo time" }, "photo_url": { - "name": "Photo url" + "name": "Photo URL" }, "status": { "name": "Status" diff --git a/homeassistant/components/trafikverket_ferry/config_flow.py b/homeassistant/components/trafikverket_ferry/config_flow.py index 002dc421273..dfa64ed2953 100644 --- a/homeassistant/components/trafikverket_ferry/config_flow.py +++ b/homeassistant/components/trafikverket_ferry/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from pytrafikverket import TrafikverketFerry @@ -17,6 +18,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_FROM, CONF_TIME, CONF_TO, DOMAIN from .util import create_unique_id +_LOGGER = logging.getLogger(__name__) + DATA_SCHEMA = vol.Schema( { vol.Required(CONF_API_KEY): selector.TextSelector( @@ -81,7 +84,8 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except NoFerryFound: errors["base"] = "invalid_route" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( @@ -120,7 +124,8 @@ class TVFerryConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except NoFerryFound: errors["base"] = "invalid_route" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: if not errors: diff --git a/homeassistant/components/trafikverket_train/config_flow.py b/homeassistant/components/trafikverket_train/config_flow.py index f6a58e464a1..fb39e14815e 100644 --- a/homeassistant/components/trafikverket_train/config_flow.py +++ b/homeassistant/components/trafikverket_train/config_flow.py @@ -86,8 +86,8 @@ async def validate_station( except UnknownError as error: _LOGGER.error("Unknown error occurred during validation %s", str(error)) errors["base"] = "cannot_connect" - except Exception as error: # noqa: BLE001 - _LOGGER.error("Unknown exception occurred during validation %s", str(error)) + except Exception: + _LOGGER.exception("Unknown exception occurred during validation") errors["base"] = "cannot_connect" return (stations, errors) @@ -266,7 +266,7 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): { CONF_API_KEY: api_key, CONF_FROM: train_from, - CONF_TO: user_input[CONF_TO], + CONF_TO: train_to, CONF_TIME: train_time, CONF_WEEKDAY: train_days, CONF_FILTER_PRODUCT: filter_product, diff --git a/homeassistant/components/trafikverket_weatherstation/config_flow.py 
b/homeassistant/components/trafikverket_weatherstation/config_flow.py index f4316b887b3..ee9fe264692 100644 --- a/homeassistant/components/trafikverket_weatherstation/config_flow.py +++ b/homeassistant/components/trafikverket_weatherstation/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from pytrafikverket.exceptions import ( @@ -25,6 +26,8 @@ from homeassistant.helpers.selector import ( from .const import CONF_STATION, DOMAIN +_LOGGER = logging.getLogger(__name__) + class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Trafikverket Weatherstation integration.""" @@ -56,7 +59,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected error") errors["base"] = "cannot_connect" else: return self.async_create_entry( @@ -102,7 +106,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( @@ -132,7 +137,8 @@ class TVWeatherConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_station" except MultipleWeatherStationsFound: errors["base"] = "more_stations" - except Exception: # noqa: BLE001 + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "cannot_connect" else: return self.async_update_reload_and_abort( diff --git a/homeassistant/components/trafikverket_weatherstation/sensor.py b/homeassistant/components/trafikverket_weatherstation/sensor.py index cb923037a24..bbc6764e3ef 100644 --- a/homeassistant/components/trafikverket_weatherstation/sensor.py +++ b/homeassistant/components/trafikverket_weatherstation/sensor.py @@ -89,7 +89,8 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] 
= ( translation_key="wind_direction", value_fn=lambda data: data.winddirection, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, ), TrafikverketSensorEntityDescription( key="wind_speed", diff --git a/homeassistant/components/triggercmd/config_flow.py b/homeassistant/components/triggercmd/config_flow.py index fc02dd0b2fc..48c4eacfd5a 100644 --- a/homeassistant/components/triggercmd/config_flow.py +++ b/homeassistant/components/triggercmd/config_flow.py @@ -57,7 +57,7 @@ class TriggerCMDConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_TOKEN] = "invalid_token" except TRIGGERcmdConnectionError: errors["base"] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 6c7e521f3ef..cb207643471 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -3,9 +3,9 @@ from __future__ import annotations import asyncio -from collections.abc import Mapping +from collections.abc import AsyncGenerator +from dataclasses import dataclass from datetime import datetime -from functools import partial import hashlib from http import HTTPStatus import io @@ -16,7 +16,8 @@ import re import secrets import subprocess import tempfile -from typing import Any, Final, TypedDict, final +from time import monotonic +from typing import Any, Final from aiohttp import web import mutagen @@ -26,30 +27,24 @@ import voluptuous as vol from homeassistant.components import ffmpeg, websocket_api from homeassistant.components.http import HomeAssistantView -from homeassistant.components.media_player import ( - ATTR_MEDIA_ANNOUNCE, - ATTR_MEDIA_CONTENT_ID, - ATTR_MEDIA_CONTENT_TYPE, - DOMAIN as DOMAIN_MP, - SERVICE_PLAY_MEDIA, - MediaType, -) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ENTITY_ID, - PLATFORM_FORMAT, - STATE_UNAVAILABLE, - STATE_UNKNOWN, +from homeassistant.const import EVENT_HOMEASSISTANT_STOP, PLATFORM_FORMAT +from homeassistant.core import ( + CALLBACK_TYPE, + Event, + HassJob, + HassJobType, + HomeAssistant, + ServiceCall, + callback, ) -from homeassistant.core import HassJob, HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import async_call_later from homeassistant.helpers.network import get_url -from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import UNDEFINED, ConfigType -from homeassistant.util import dt as dt_util, language as language_util +from homeassistant.util import language as language_util from .const import ( ATTR_CACHE, @@ -67,6 +62,7 @@ from .const import ( DOMAIN, TtsAudioType, ) +from .entity import TextToSpeechEntity, TTSAudioRequest from .helper import get_engine_instance from .legacy import PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE, Provider, async_setup_legacy from .media_source import generate_media_source_id, media_source_id_to_kwargs @@ -83,12 +79,13 @@ __all__ = [ "PLATFORM_SCHEMA", "PLATFORM_SCHEMA_BASE", "Provider", + "ResultStream", "SampleFormat", + "TextToSpeechEntity", "TtsAudioType", "Voice", 
"async_default_engine", "async_get_media_source_audio", - "async_support_options", "generate_media_source_id", ] @@ -126,12 +123,94 @@ KEY_PATTERN = "{0}_{1}_{2}_{3}" SCHEMA_SERVICE_CLEAR_CACHE = vol.Schema({}) -class TTSCache(TypedDict): - """Cached TTS file.""" +class TTSCache: + """Cached bytes of a TTS result.""" - filename: str - voice: bytes - pending: asyncio.Task | None + _result_data: bytes | None = None + """When fully loaded, contains the result data.""" + + _partial_data: list[bytes] | None = None + """While loading, contains the data already received from the generator.""" + + _loading_error: Exception | None = None + """If an error occurred while loading, contains the error.""" + + _consumers: list[asyncio.Queue[bytes | None]] | None = None + """A queue for each current consumer to notify of new data while the generator is loading.""" + + def __init__( + self, + cache_key: str, + extension: str, + data_gen: AsyncGenerator[bytes], + ) -> None: + """Initialize the TTS cache.""" + self.cache_key = cache_key + self.extension = extension + self.last_used = monotonic() + self._data_gen = data_gen + + async def async_load_data(self) -> bytes: + """Load the data from the generator.""" + if self._result_data is not None or self._partial_data is not None: + raise RuntimeError("Data already being loaded") + + self._partial_data = [] + self._consumers = [] + + try: + async for chunk in self._data_gen: + self._partial_data.append(chunk) + for queue in self._consumers: + queue.put_nowait(chunk) + except Exception as err: + self._loading_error = err + raise + finally: + for queue in self._consumers: + queue.put_nowait(None) + self._consumers = None + + self._result_data = b"".join(self._partial_data) + self._partial_data = None + return self._result_data + + async def async_stream_data(self) -> AsyncGenerator[bytes]: + """Stream the data. + + Will return all data already returned from the generator. + Will listen for future data returned from the generator. + Raises error if one occurred. 
+ """ + if self._result_data is not None: + yield self._result_data + return + if self._loading_error: + raise self._loading_error + + if self._partial_data is None: + raise RuntimeError("Data not being loaded") + + queue: asyncio.Queue[bytes | None] | None = None + # Check if generator is still feeding data + if self._consumers is not None: + queue = asyncio.Queue() + self._consumers.append(queue) + + for chunk in list(self._partial_data): + yield chunk + + if self._loading_error: + raise self._loading_error + + if queue is not None: + while (chunk2 := await queue.get()) is not None: + yield chunk2 + + if self._loading_error: + raise self._loading_error + + self.last_used = monotonic() @callback @@ -169,22 +248,25 @@ def async_resolve_engine(hass: HomeAssistant, engine: str | None) -> str | None: return async_default_engine(hass) -async def async_support_options( +@callback +def async_create_stream( hass: HomeAssistant, engine: str, language: str | None = None, options: dict | None = None, -) -> bool: - """Return if an engine supports options.""" - if (engine_instance := get_engine_instance(hass, engine)) is None: - raise HomeAssistantError(f"Provider {engine} not found") +) -> ResultStream: + """Create a streaming URL where the rendered TTS can be retrieved.""" + return hass.data[DATA_TTS_MANAGER].async_create_result_stream( + engine=engine, + language=language, + options=options, + ) - try: - hass.data[DATA_TTS_MANAGER].process_options(engine_instance, language, options) - except HomeAssistantError: - return False - return True +@callback +def async_get_stream(hass: HomeAssistant, token: str) -> ResultStream | None: + """Return a result stream given a token.""" + return hass.data[DATA_TTS_MANAGER].token_to_stream.get(token) async def async_get_media_source_audio( @@ -192,9 +274,12 @@ async def async_get_media_source_audio( media_source_id: str, ) -> tuple[str, bytes]: """Get TTS audio as extension, data.""" - return await hass.data[DATA_TTS_MANAGER].async_get_tts_audio( - **media_source_id_to_kwargs(media_source_id), + manager = hass.data[DATA_TTS_MANAGER] + cache = manager.async_cache_message_in_memory( + **media_source_id_to_kwargs(media_source_id) ) + data = b"".join([chunk async for chunk in cache.async_stream_data()]) + return cache.extension, data @callback @@ -213,18 +298,19 @@ def async_get_text_to_speech_languages(hass: HomeAssistant) -> set[str]: return languages -async def async_convert_audio( +async def _async_convert_audio( hass: HomeAssistant, from_extension: str, - audio_bytes: bytes, + audio_bytes_gen: AsyncGenerator[bytes], to_extension: str, to_sample_rate: int | None = None, to_sample_channels: int | None = None, to_sample_bytes: int | None = None, -) -> bytes: +) -> AsyncGenerator[bytes]: """Convert audio to a preferred format using ffmpeg.""" ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) - return await hass.async_add_executor_job( + audio_bytes = b"".join([chunk async for chunk in audio_bytes_gen]) + data = await hass.async_add_executor_job( lambda: _convert_audio( ffmpeg_manager.binary, from_extension, @@ -235,6 +321,7 @@ async def async_convert_audio( to_sample_bytes=to_sample_bytes, ) ) + yield data def _convert_audio( @@ -306,11 +393,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # Legacy config options conf = config[DOMAIN][0] if config.get(DOMAIN) else {} - use_cache: bool = conf.get(CONF_CACHE, DEFAULT_CACHE) + use_file_cache: bool = conf.get(CONF_CACHE, DEFAULT_CACHE) cache_dir: str = conf.get(CONF_CACHE_DIR, 
DEFAULT_CACHE_DIR) - time_memory: int = conf.get(CONF_TIME_MEMORY, DEFAULT_TIME_MEMORY) + memory_cache_maxage: int = conf.get(CONF_TIME_MEMORY, DEFAULT_TIME_MEMORY) - tts = SpeechManager(hass, use_cache, cache_dir, time_memory) + tts = SpeechManager(hass, use_file_cache, cache_dir, memory_cache_maxage) try: await tts.async_init_cache() @@ -375,140 +462,56 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await hass.data[DATA_COMPONENT].async_unload_entry(entry) -CACHED_PROPERTIES_WITH_ATTR_ = { - "default_language", - "default_options", - "supported_languages", - "supported_options", -} +@dataclass +class ResultStream: + """Class that will stream the result when available.""" + # Streaming/conversion properties + token: str + extension: str + content_type: str -class TextToSpeechEntity(RestoreEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): - """Represent a single TTS engine.""" + # TTS properties + engine: str + use_file_cache: bool + language: str + options: dict - _attr_should_poll = False - __last_tts_loaded: str | None = None - - _attr_default_language: str - _attr_default_options: Mapping[str, Any] | None = None - _attr_supported_languages: list[str] - _attr_supported_options: list[str] | None = None - - @property - @final - def state(self) -> str | None: - """Return the state of the entity.""" - if self.__last_tts_loaded is None: - return None - return self.__last_tts_loaded + _manager: SpeechManager @cached_property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return self._attr_supported_languages + def url(self) -> str: + """Get the URL to stream the result.""" + return f"/api/tts_proxy/{self.token}" @cached_property - def default_language(self) -> str: - """Return the default language.""" - return self._attr_default_language - - @cached_property - def supported_options(self) -> list[str] | None: - """Return a list of supported options like voice, emotions.""" - return self._attr_supported_options - - @cached_property - def default_options(self) -> Mapping[str, Any] | None: - """Return a mapping with the default options.""" - return self._attr_default_options + def _result_cache(self) -> asyncio.Future[TTSCache]: + """Get the future that returns the cache.""" + return asyncio.Future() @callback - def async_get_supported_voices(self, language: str) -> list[Voice] | None: - """Return a list of supported voices for a language.""" - return None + def async_set_message_cache(self, cache: TTSCache) -> None: + """Set cache containing message audio to be streamed.""" + self._result_cache.set_result(cache) - async def async_internal_added_to_hass(self) -> None: - """Call when the entity is added to hass.""" - await super().async_internal_added_to_hass() - try: - _ = self.default_language - except AttributeError as err: - raise AttributeError( - "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" - ) from err - try: - _ = self.supported_languages - except AttributeError as err: - raise AttributeError( - "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" - ) from err - state = await self.async_get_last_state() - if ( - state is not None - and state.state is not None - and state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN) - ): - self.__last_tts_loaded = state.state - - async def async_speak( - self, - media_player_entity_id: list[str], - message: str, - cache: bool, 
- language: str | None = None, - options: dict | None = None, - ) -> None: - """Speak via a Media Player.""" - await self.hass.services.async_call( - DOMAIN_MP, - SERVICE_PLAY_MEDIA, - { - ATTR_ENTITY_ID: media_player_entity_id, - ATTR_MEDIA_CONTENT_ID: generate_media_source_id( - self.hass, - message=message, - engine=self.entity_id, - language=language, - options=options, - cache=cache, - ), - ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, - ATTR_MEDIA_ANNOUNCE: True, - }, - blocking=True, - context=self._context, + @callback + def async_set_message(self, message: str) -> None: + """Set message to be generated.""" + self._result_cache.set_result( + self._manager.async_cache_message_in_memory( + engine=self.engine, + message=message, + use_file_cache=self.use_file_cache, + language=self.language, + options=self.options, + ) ) - @final - async def internal_async_get_tts_audio( - self, message: str, language: str, options: dict[str, Any] - ) -> TtsAudioType: - """Process an audio stream to TTS service. - - Only streaming content is allowed! - """ - self.__last_tts_loaded = dt_util.utcnow().isoformat() - self.async_write_ha_state() - return await self.async_get_tts_audio( - message=message, language=language, options=options - ) - - def get_tts_audio( - self, message: str, language: str, options: dict[str, Any] - ) -> TtsAudioType: - """Load tts audio file from the engine.""" - raise NotImplementedError - - async def async_get_tts_audio( - self, message: str, language: str, options: dict[str, Any] - ) -> TtsAudioType: - """Load tts audio file from the engine. - - Return a tuple of file extension and data as bytes. - """ - return await self.hass.async_add_executor_job( - partial(self.get_tts_audio, message, language, options=options) - ) + async def async_stream_result(self) -> AsyncGenerator[bytes]: + """Get the stream of this result.""" + cache = await self._result_cache + async for chunk in cache.async_stream_data(): + yield chunk def _hash_options(options: dict) -> str: @@ -521,29 +524,82 @@ def _hash_options(options: dict) -> str: return opts_hash.hexdigest() +class MemcacheCleanup: + """Helper to clean up the stale sessions.""" + + unsub: CALLBACK_TYPE | None = None + + def __init__( + self, hass: HomeAssistant, maxage: float, memcache: dict[str, TTSCache] + ) -> None: + """Initialize the cleanup.""" + self.hass = hass + self.maxage = maxage + self.memcache = memcache + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._on_hass_stop) + self.cleanup_job = HassJob( + self._cleanup, "chat_session_cleanup", job_type=HassJobType.Callback + ) + + @callback + def schedule(self) -> None: + """Schedule the cleanup.""" + if self.unsub: + return + self.unsub = async_call_later( + self.hass, + self.maxage + 1, + self.cleanup_job, + ) + + @callback + def _on_hass_stop(self, event: Event) -> None: + """Cancel the cleanup on shutdown.""" + if self.unsub: + self.unsub() + self.unsub = None + + @callback + def _cleanup(self, _now: datetime) -> None: + """Clean up and schedule follow-up if necessary.""" + self.unsub = None + memcache = self.memcache + maxage = self.maxage + now = monotonic() + + for cache_key, info in list(memcache.items()): + if info.last_used + maxage < now: + _LOGGER.debug("Cleaning up %s", cache_key) + del memcache[cache_key] + + # Still items left, check again in timeout time. 
+ if memcache: + self.schedule() + + class SpeechManager: """Representation of a speech store.""" def __init__( self, hass: HomeAssistant, - use_cache: bool, + use_file_cache: bool, cache_dir: str, - time_memory: int, + memory_cache_maxage: int, ) -> None: """Initialize a speech store.""" self.hass = hass self.providers: dict[str, Provider] = {} - self.use_cache = use_cache + self.use_file_cache = use_file_cache self.cache_dir = cache_dir - self.time_memory = time_memory + self.memory_cache_maxage = memory_cache_maxage self.file_cache: dict[str, str] = {} self.mem_cache: dict[str, TTSCache] = {} - - # filename <-> token - self.filename_to_token: dict[str, str] = {} - self.token_to_filename: dict[str, str] = {} + self.token_to_stream: dict[str, ResultStream] = {} + self.memcache_cleanup = MemcacheCleanup( + hass, memory_cache_maxage, self.mem_cache + ) def _init_cache(self) -> dict[str, str]: """Init cache folder and fetch files.""" @@ -563,18 +619,21 @@ class SpeechManager: async def async_clear_cache(self) -> None: """Read file cache and delete files.""" - self.mem_cache = {} + self.mem_cache.clear() - def remove_files() -> None: + def remove_files(files: list[str]) -> None: """Remove files from filesystem.""" - for filename in self.file_cache.values(): + for filename in files: try: os.remove(os.path.join(self.cache_dir, filename)) except OSError as err: _LOGGER.warning("Can't remove cache file '%s': %s", filename, err) - await self.hass.async_add_executor_job(remove_files) - self.file_cache = {} + task = self.hass.async_add_executor_job( + remove_files, list(self.file_cache.values()) + ) + self.file_cache.clear() + await task @callback def async_register_legacy_engine( @@ -629,110 +688,198 @@ class SpeechManager: return language, merged_options - async def async_get_url_path( + @callback + def async_create_result_stream( self, engine: str, - message: str, - cache: bool | None = None, + message: str | None = None, + use_file_cache: bool | None = None, language: str | None = None, options: dict | None = None, - ) -> str: - """Get URL for play message. - - This method is a coroutine. 
- """ + ) -> ResultStream: + """Create a streaming URL where the rendered TTS can be retrieved.""" if (engine_instance := get_engine_instance(self.hass, engine)) is None: raise HomeAssistantError(f"Provider {engine} not found") language, options = self.process_options(engine_instance, language, options) - cache_key = self._generate_cache_key(message, language, options, engine) - use_cache = cache if cache is not None else self.use_cache + if use_file_cache is None: + use_file_cache = self.use_file_cache - # Is speech already in memory - if cache_key in self.mem_cache: - filename = self.mem_cache[cache_key]["filename"] - # Is file store in file cache - elif use_cache and cache_key in self.file_cache: - filename = self.file_cache[cache_key] - self.hass.async_create_task(self._async_file_to_mem(cache_key)) - # Load speech from engine into memory - else: - filename = await self._async_get_tts_audio( - engine_instance, cache_key, message, use_cache, language, options + extension = options.get(ATTR_PREFERRED_FORMAT, _DEFAULT_FORMAT) + token = f"{secrets.token_urlsafe(16)}.{extension}" + content, _ = mimetypes.guess_type(token) + result_stream = ResultStream( + token=token, + extension=extension, + content_type=content or "audio/mpeg", + use_file_cache=use_file_cache, + engine=engine, + language=language, + options=options, + _manager=self, + ) + self.token_to_stream[token] = result_stream + + if message is None: + return result_stream + + # We added this method as an alternative to stream.async_set_message + # to avoid the options being processed twice + result_stream.async_set_message_cache( + self._async_ensure_cached_in_memory( + engine=engine, + engine_instance=engine_instance, + message=message, + use_file_cache=use_file_cache, + language=language, + options=options, ) + ) - # Use a randomly generated token instead of exposing the filename - token = self.filename_to_token.get(filename) - if not token: - # Keep extension (.mp3, etc.) 
- token = secrets.token_urlsafe(16) + os.path.splitext(filename)[1] - - # Map token <-> filename - self.filename_to_token[filename] = token - self.token_to_filename[token] = filename - - return f"/api/tts_proxy/{token}" - - async def async_get_tts_audio( - self, - engine: str, - message: str, - cache: bool | None = None, - language: str | None = None, - options: dict | None = None, - ) -> tuple[str, bytes]: - """Fetch TTS audio.""" - if (engine_instance := get_engine_instance(self.hass, engine)) is None: - raise HomeAssistantError(f"Provider {engine} not found") - - language, options = self.process_options(engine_instance, language, options) - cache_key = self._generate_cache_key(message, language, options, engine) - use_cache = cache if cache is not None else self.use_cache - - # If we have the file, load it into memory if necessary - if cache_key not in self.mem_cache: - if use_cache and cache_key in self.file_cache: - await self._async_file_to_mem(cache_key) - else: - await self._async_get_tts_audio( - engine_instance, cache_key, message, use_cache, language, options - ) - - extension = os.path.splitext(self.mem_cache[cache_key]["filename"])[1][1:] - cached = self.mem_cache[cache_key] - if pending := cached.get("pending"): - await pending - cached = self.mem_cache[cache_key] - return extension, cached["voice"] + return result_stream @callback - def _generate_cache_key( + def async_cache_message_in_memory( self, - message: str, - language: str, - options: dict | None, engine: str, - ) -> str: - """Generate a cache key for a message.""" + message: str, + use_file_cache: bool | None = None, + language: str | None = None, + options: dict | None = None, + ) -> TTSCache: + """Make sure a message is cached in memory and returns cache key.""" + if (engine_instance := get_engine_instance(self.hass, engine)) is None: + raise HomeAssistantError(f"Provider {engine} not found") + + language, options = self.process_options(engine_instance, language, options) + if use_file_cache is None: + use_file_cache = self.use_file_cache + + return self._async_ensure_cached_in_memory( + engine=engine, + engine_instance=engine_instance, + message=message, + use_file_cache=use_file_cache, + language=language, + options=options, + ) + + @callback + def _async_ensure_cached_in_memory( + self, + engine: str, + engine_instance: TextToSpeechEntity | Provider, + message: str, + use_file_cache: bool, + language: str, + options: dict, + ) -> TTSCache: + """Ensure a message is cached. + + Requires options, language to be processed. 
+ """ options_key = _hash_options(options) if options else "-" msg_hash = hashlib.sha1(bytes(message, "utf-8")).hexdigest() - return KEY_PATTERN.format( + cache_key = KEY_PATTERN.format( msg_hash, language.replace("_", "-"), options_key, engine ).lower() - async def _async_get_tts_audio( + # Is speech already in memory + if cache := self.mem_cache.get(cache_key): + _LOGGER.debug("Found audio in cache for %s", message[0:32]) + return cache + + store_to_disk = use_file_cache + + if use_file_cache and (filename := self.file_cache.get(cache_key)): + _LOGGER.debug("Loading audio from disk for %s", message[0:32]) + extension = os.path.splitext(filename)[1][1:] + data_gen = self._async_load_file(cache_key) + store_to_disk = False + else: + _LOGGER.debug("Generating audio for %s", message[0:32]) + extension = options.get(ATTR_PREFERRED_FORMAT, _DEFAULT_FORMAT) + data_gen = self._async_generate_tts_audio( + engine_instance, message, language, options + ) + + cache = TTSCache( + cache_key=cache_key, + extension=extension, + data_gen=data_gen, + ) + + self.mem_cache[cache_key] = cache + self.hass.async_create_background_task( + self._load_data_into_cache( + cache, engine_instance, message, store_to_disk, language, options + ), + f"tts_load_data_into_cache_{engine_instance.name}", + ) + self.memcache_cleanup.schedule() + return cache + + async def _load_data_into_cache( + self, + cache: TTSCache, + engine_instance: TextToSpeechEntity | Provider, + message: str, + store_to_disk: bool, + language: str, + options: dict, + ) -> None: + """Load and process a finished loading TTS Cache.""" + try: + data = await cache.async_load_data() + except Exception as err: # pylint: disable=broad-except # noqa: BLE001 + # Truncate message so we don't flood the logs. Cutting off at 32 chars + # but since we add 3 dots to truncated message, we cut off at 35. + trunc_msg = message if len(message) < 35 else f"{message[0:32]}…" + _LOGGER.error("Error getting audio for %s: %s", trunc_msg, err) + self.mem_cache.pop(cache.cache_key, None) + return + + if not store_to_disk: + return + + filename = f"{cache.cache_key}.{cache.extension}".lower() + + # Validate filename + if not _RE_VOICE_FILE.match(filename) and not _RE_LEGACY_VOICE_FILE.match( + filename + ): + raise HomeAssistantError( + f"TTS filename '{filename}' from {engine_instance.name} is invalid!" + ) + + if cache.extension == "mp3": + name = ( + engine_instance.name if isinstance(engine_instance.name, str) else "-" + ) + data = self.write_tags(filename, data, name, message, language, options) + + voice_file = os.path.join(self.cache_dir, filename) + + def save_speech() -> None: + """Store speech to filesystem.""" + with open(voice_file, "wb") as speech: + speech.write(data) + + try: + await self.hass.async_add_executor_job(save_speech) + except OSError as err: + _LOGGER.error("Can't write %s: %s", filename, err) + else: + self.file_cache[cache.cache_key] = filename + + async def _async_generate_tts_audio( self, engine_instance: TextToSpeechEntity | Provider, - cache_key: str, message: str, - cache: bool, language: str, options: dict[str, Any], - ) -> str: - """Receive TTS, store for view in cache and return filename. - - This method is a coroutine. 
- """ + ) -> AsyncGenerator[bytes]: + """Generate TTS audio from an engine.""" options = dict(options or {}) supported_options = engine_instance.supported_options or [] @@ -773,114 +920,61 @@ class SpeechManager: if sample_bytes is not None: sample_bytes = int(sample_bytes) - async def get_tts_data() -> str: - """Handle data available.""" - if engine_instance.name is None or engine_instance.name is UNDEFINED: - raise HomeAssistantError("TTS engine name is not set.") + if engine_instance.name is None or engine_instance.name is UNDEFINED: + raise HomeAssistantError("TTS engine name is not set.") - if isinstance(engine_instance, Provider): - extension, data = await engine_instance.async_get_tts_audio( - message, language, options - ) - else: - extension, data = await engine_instance.internal_async_get_tts_audio( - message, language, options - ) + if isinstance(engine_instance, Provider): + extension, data = await engine_instance.async_get_tts_audio( + message, language, options + ) if data is None or extension is None: raise HomeAssistantError( f"No TTS from {engine_instance.name} for '{message}'" ) - # Only convert if we have a preferred format different than the - # expected format from the TTS system, or if a specific sample - # rate/format/channel count is requested. - needs_conversion = ( - (final_extension != extension) - or (sample_rate is not None) - or (sample_channels is not None) - or (sample_bytes is not None) + async def make_data_generator(data: bytes) -> AsyncGenerator[bytes]: + yield data + + data_gen = make_data_generator(data) + + else: + + async def message_gen() -> AsyncGenerator[str]: + yield message + + tts_result = await engine_instance.internal_async_stream_tts_audio( + TTSAudioRequest(language, options, message_gen()) + ) + extension = tts_result.extension + data_gen = tts_result.data_gen + + # Only convert if we have a preferred format different than the + # expected format from the TTS system, or if a specific sample + # rate/format/channel count is requested. + needs_conversion = ( + (final_extension != extension) + or (sample_rate is not None) + or (sample_channels is not None) + or (sample_bytes is not None) + ) + + if needs_conversion: + data_gen = _async_convert_audio( + self.hass, + extension, + data_gen, + to_extension=final_extension, + to_sample_rate=sample_rate, + to_sample_channels=sample_channels, + to_sample_bytes=sample_bytes, ) - if needs_conversion: - data = await async_convert_audio( - self.hass, - extension, - data, - to_extension=final_extension, - to_sample_rate=sample_rate, - to_sample_channels=sample_channels, - to_sample_bytes=sample_bytes, - ) + async for chunk in data_gen: + yield chunk - # Create file infos - filename = f"{cache_key}.{final_extension}".lower() - - # Validate filename - if not _RE_VOICE_FILE.match(filename) and not _RE_LEGACY_VOICE_FILE.match( - filename - ): - raise HomeAssistantError( - f"TTS filename '{filename}' from {engine_instance.name} is invalid!" 
- ) - - # Save to memory - if final_extension == "mp3": - data = self.write_tags( - filename, data, engine_instance.name, message, language, options - ) - - self._async_store_to_memcache(cache_key, filename, data) - - if cache: - self.hass.async_create_task( - self._async_save_tts_audio(cache_key, filename, data) - ) - - return filename - - audio_task = self.hass.async_create_task(get_tts_data(), eager_start=False) - - def handle_error(_future: asyncio.Future) -> None: - """Handle error.""" - if audio_task.exception(): - self.mem_cache.pop(cache_key, None) - - audio_task.add_done_callback(handle_error) - - filename = f"{cache_key}.{final_extension}".lower() - self.mem_cache[cache_key] = { - "filename": filename, - "voice": b"", - "pending": audio_task, - } - return filename - - async def _async_save_tts_audio( - self, cache_key: str, filename: str, data: bytes - ) -> None: - """Store voice data to file and file_cache. - - This method is a coroutine. - """ - voice_file = os.path.join(self.cache_dir, filename) - - def save_speech() -> None: - """Store speech to filesystem.""" - with open(voice_file, "wb") as speech: - speech.write(data) - - try: - await self.hass.async_add_executor_job(save_speech) - self.file_cache[cache_key] = filename - except OSError as err: - _LOGGER.error("Can't write %s: %s", filename, err) - - async def _async_file_to_mem(self, cache_key: str) -> None: - """Load voice from file cache into memory. - - This method is a coroutine. - """ + async def _async_load_file(self, cache_key: str) -> AsyncGenerator[bytes]: + """Load TTS audio from disk.""" if not (filename := self.file_cache.get(cache_key)): raise HomeAssistantError(f"Key {cache_key} not in file cache!") @@ -897,64 +991,7 @@ class SpeechManager: del self.file_cache[cache_key] raise HomeAssistantError(f"Can't read {voice_file}") from err - self._async_store_to_memcache(cache_key, filename, data) - - @callback - def _async_store_to_memcache( - self, cache_key: str, filename: str, data: bytes - ) -> None: - """Store data to memcache and set timer to remove it.""" - self.mem_cache[cache_key] = { - "filename": filename, - "voice": data, - "pending": None, - } - - @callback - def async_remove_from_mem(_: datetime) -> None: - """Cleanup memcache.""" - self.mem_cache.pop(cache_key, None) - - async_call_later( - self.hass, - self.time_memory, - HassJob( - async_remove_from_mem, - name="tts remove_from_mem", - cancel_on_shutdown=True, - ), - ) - - async def async_read_tts(self, token: str) -> tuple[str | None, bytes]: - """Read a voice file and return binary. - - This method is a coroutine. 
- """ - filename = self.token_to_filename.get(token) - if not filename: - raise HomeAssistantError(f"{token} was not recognized!") - - if not (record := _RE_VOICE_FILE.match(filename.lower())) and not ( - record := _RE_LEGACY_VOICE_FILE.match(filename.lower()) - ): - raise HomeAssistantError("Wrong tts file format!") - - cache_key = KEY_PATTERN.format( - record.group(1), record.group(2), record.group(3), record.group(4) - ) - - if cache_key not in self.mem_cache: - if cache_key not in self.file_cache: - raise HomeAssistantError(f"{cache_key} not in cache!") - await self._async_file_to_mem(cache_key) - - cached = self.mem_cache[cache_key] - if pending := cached.get("pending"): - await pending - cached = self.mem_cache[cache_key] - - content, _ = mimetypes.guess_type(filename) - return content, cached["voice"] + yield data @staticmethod def write_tags( @@ -1042,9 +1079,9 @@ class TextToSpeechUrlView(HomeAssistantView): url = "/api/tts_get_url" name = "api:tts:geturl" - def __init__(self, tts: SpeechManager) -> None: + def __init__(self, manager: SpeechManager) -> None: """Initialize a tts view.""" - self.tts = tts + self.manager = manager async def post(self, request: web.Request) -> web.Response: """Generate speech and provide url.""" @@ -1061,45 +1098,65 @@ class TextToSpeechUrlView(HomeAssistantView): engine = data.get("engine_id") or data[ATTR_PLATFORM] message = data[ATTR_MESSAGE] - cache = data.get(ATTR_CACHE) + use_file_cache = data.get(ATTR_CACHE) language = data.get(ATTR_LANGUAGE) options = data.get(ATTR_OPTIONS) try: - path = await self.tts.async_get_url_path( - engine, message, cache=cache, language=language, options=options + stream = self.manager.async_create_result_stream( + engine, + message, + use_file_cache=use_file_cache, + language=language, + options=options, ) except HomeAssistantError as err: _LOGGER.error("Error on init tts: %s", err) return self.json({"error": err}, HTTPStatus.BAD_REQUEST) - base = get_url(self.tts.hass) - url = base + path + base = get_url(self.manager.hass) + url = base + stream.url - return self.json({"url": url, "path": path}) + return self.json({"url": url, "path": stream.url}) class TextToSpeechView(HomeAssistantView): """TTS view to serve a speech audio.""" requires_auth = False - url = "/api/tts_proxy/{filename}" + url = "/api/tts_proxy/{token}" name = "api:tts_speech" - def __init__(self, tts: SpeechManager) -> None: + def __init__(self, manager: SpeechManager) -> None: """Initialize a tts view.""" - self.tts = tts + self.manager = manager - async def get(self, request: web.Request, filename: str) -> web.Response: + async def get(self, request: web.Request, token: str) -> web.StreamResponse: """Start a get request.""" - try: - # filename is actually token, but we keep its name for compatibility - content, data = await self.tts.async_read_tts(filename) - except HomeAssistantError as err: - _LOGGER.error("Error on load tts: %s", err) + stream = self.manager.token_to_stream.get(token) + + if stream is None: return web.Response(status=HTTPStatus.NOT_FOUND) - return web.Response(body=data, content_type=content) + response: web.StreamResponse | None = None + try: + async for data in stream.async_stream_result(): + if response is None: + response = web.StreamResponse() + response.content_type = stream.content_type + await response.prepare(request) + + await response.write(data) + # pylint: disable=broad-except + except Exception as err: # noqa: BLE001 + _LOGGER.error("Error streaming tts: %s", err) + + # Empty result or exception happened + if 
response is None: + return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) + + await response.write_eof() + return response @websocket_api.websocket_command( diff --git a/homeassistant/components/tts/entity.py b/homeassistant/components/tts/entity.py new file mode 100644 index 00000000000..199d673398e --- /dev/null +++ b/homeassistant/components/tts/entity.py @@ -0,0 +1,196 @@ +"""Entity for Text-to-Speech.""" + +from collections.abc import AsyncGenerator, Mapping +from dataclasses import dataclass +from functools import partial +from typing import Any, final + +from propcache.api import cached_property + +from homeassistant.components.media_player import ( + ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + DOMAIN as DOMAIN_MP, + SERVICE_PLAY_MEDIA, + MediaType, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.util import dt as dt_util + +from .const import TtsAudioType +from .media_source import generate_media_source_id +from .models import Voice + +CACHED_PROPERTIES_WITH_ATTR_ = { + "default_language", + "default_options", + "supported_languages", + "supported_options", +} + + +@dataclass +class TTSAudioRequest: + """Request to get TTS audio.""" + + language: str + options: dict[str, Any] + message_gen: AsyncGenerator[str] + + +@dataclass +class TTSAudioResponse: + """Response containing TTS audio stream.""" + + extension: str + data_gen: AsyncGenerator[bytes] + + +class TextToSpeechEntity(RestoreEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): + """Represent a single TTS engine.""" + + _attr_should_poll = False + __last_tts_loaded: str | None = None + + _attr_default_language: str + _attr_default_options: Mapping[str, Any] | None = None + _attr_supported_languages: list[str] + _attr_supported_options: list[str] | None = None + + @property + @final + def state(self) -> str | None: + """Return the state of the entity.""" + if self.__last_tts_loaded is None: + return None + return self.__last_tts_loaded + + @cached_property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return self._attr_supported_languages + + @cached_property + def default_language(self) -> str: + """Return the default language.""" + return self._attr_default_language + + @cached_property + def supported_options(self) -> list[str] | None: + """Return a list of supported options like voice, emotions.""" + return self._attr_supported_options + + @cached_property + def default_options(self) -> Mapping[str, Any] | None: + """Return a mapping with the default options.""" + return self._attr_default_options + + @callback + def async_get_supported_voices(self, language: str) -> list[Voice] | None: + """Return a list of supported voices for a language.""" + return None + + async def async_internal_added_to_hass(self) -> None: + """Call when the entity is added to hass.""" + await super().async_internal_added_to_hass() + try: + _ = self.default_language + except AttributeError as err: + raise AttributeError( + "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" + ) from err + try: + _ = self.supported_languages + except AttributeError as err: + raise AttributeError( + "TTS entities must either set the '_attr_supported_languages' attribute or override the 
'supported_languages' property" + ) from err + state = await self.async_get_last_state() + if ( + state is not None + and state.state is not None + and state.state not in (STATE_UNAVAILABLE, STATE_UNKNOWN) + ): + self.__last_tts_loaded = state.state + + async def async_speak( + self, + media_player_entity_id: list[str], + message: str, + cache: bool, + language: str | None = None, + options: dict | None = None, + ) -> None: + """Speak via a Media Player.""" + await self.hass.services.async_call( + DOMAIN_MP, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: media_player_entity_id, + ATTR_MEDIA_CONTENT_ID: generate_media_source_id( + self.hass, + message=message, + engine=self.entity_id, + language=language, + options=options, + cache=cache, + ), + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_ANNOUNCE: True, + }, + blocking=True, + context=self._context, + ) + + @final + async def internal_async_stream_tts_audio( + self, request: TTSAudioRequest + ) -> TTSAudioResponse: + """Process an audio stream to TTS service. + + Only streaming content is allowed! + """ + self.__last_tts_loaded = dt_util.utcnow().isoformat() + self.async_write_ha_state() + return await self.async_stream_tts_audio(request) + + async def async_stream_tts_audio( + self, request: TTSAudioRequest + ) -> TTSAudioResponse: + """Generate speech from an incoming message. + + The default implementation is backwards compatible with async_get_tts_audio. + """ + message = "".join([chunk async for chunk in request.message_gen]) + extension, data = await self.async_get_tts_audio( + message, request.language, request.options + ) + + if extension is None or data is None: + raise HomeAssistantError(f"No TTS from {self.entity_id} for '{message}'") + + async def data_gen() -> AsyncGenerator[bytes]: + yield data + + return TTSAudioResponse(extension, data_gen()) + + def get_tts_audio( + self, message: str, language: str, options: dict[str, Any] + ) -> TtsAudioType: + """Load tts audio file from the engine.""" + raise NotImplementedError + + async def async_get_tts_audio( + self, message: str, language: str, options: dict[str, Any] + ) -> TtsAudioType: + """Load tts audio file from the engine. + + Return a tuple of file extension and data as bytes. 
+ """ + return await self.hass.async_add_executor_job( + partial(self.get_tts_audio, message, language, options=options) + ) diff --git a/homeassistant/components/tts/media_source.py b/homeassistant/components/tts/media_source.py index 4f1fa59f001..aa2cd6e7555 100644 --- a/homeassistant/components/tts/media_source.py +++ b/homeassistant/components/tts/media_source.py @@ -3,7 +3,6 @@ from __future__ import annotations import json -import mimetypes from typing import TypedDict from yarl import URL @@ -73,7 +72,7 @@ class MediaSourceOptions(TypedDict): message: str language: str | None options: dict | None - cache: bool | None + use_file_cache: bool | None @callback @@ -98,10 +97,10 @@ def media_source_id_to_kwargs(media_source_id: str) -> MediaSourceOptions: "message": parsed.query["message"], "language": parsed.query.get("language"), "options": options, - "cache": None, + "use_file_cache": None, } if "cache" in parsed.query: - kwargs["cache"] = parsed.query["cache"] == "true" + kwargs["use_file_cache"] = parsed.query["cache"] == "true" return kwargs @@ -119,7 +118,7 @@ class TTSMediaSource(MediaSource): async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: """Resolve media to a url.""" try: - url = await self.hass.data[DATA_TTS_MANAGER].async_get_url_path( + stream = self.hass.data[DATA_TTS_MANAGER].async_create_result_stream( **media_source_id_to_kwargs(item.identifier) ) except Unresolvable: @@ -127,9 +126,7 @@ class TTSMediaSource(MediaSource): except HomeAssistantError as err: raise Unresolvable(str(err)) from err - mime_type = mimetypes.guess_type(url)[0] or "audio/mpeg" - - return PlayMedia(url, mime_type) + return PlayMedia(stream.url, stream.content_type) async def async_browse_media( self, diff --git a/homeassistant/components/tuya/binary_sensor.py b/homeassistant/components/tuya/binary_sensor.py index 1e13f101110..486dd6e1387 100644 --- a/homeassistant/components/tuya/binary_sensor.py +++ b/homeassistant/components/tuya/binary_sensor.py @@ -291,6 +291,9 @@ BINARY_SENSORS: dict[str, tuple[TuyaBinarySensorEntityDescription, ...]] = { # Temperature and Humidity Sensor # https://developer.tuya.com/en/docs/iot/categorywsdcg?id=Kaiuz3hinij34 "wsdcg": (TAMPER_BINARY_SENSOR,), + # Temperature and Humidity Sensor with External Probe + # New undocumented category qxj, see https://github.com/home-assistant/core/issues/136472 + "qxj": (TAMPER_BINARY_SENSOR,), # Pressure Sensor # https://developer.tuya.com/en/docs/iot/categoryylcg?id=Kaiuz3kc2e4gm "ylcg": ( diff --git a/homeassistant/components/tuya/const.py b/homeassistant/components/tuya/const.py index 08bdef474ef..a40260ed787 100644 --- a/homeassistant/components/tuya/const.py +++ b/homeassistant/components/tuya/const.py @@ -333,6 +333,12 @@ class DPCode(StrEnum): TEMP_CONTROLLER = "temp_controller" TEMP_CURRENT = "temp_current" # Current temperature in °C TEMP_CURRENT_F = "temp_current_f" # Current temperature in °F + TEMP_CURRENT_EXTERNAL = ( + "temp_current_external" # Current external temperature in Celsius + ) + TEMP_CURRENT_EXTERNAL_F = ( + "temp_current_external_f" # Current external temperature in Fahrenheit + ) TEMP_INDOOR = "temp_indoor" # Indoor temperature in °C TEMP_SET = "temp_set" # Set the temperature in °C TEMP_SET_F = "temp_set_f" # Set the temperature in °F diff --git a/homeassistant/components/tuya/light.py b/homeassistant/components/tuya/light.py index d94308ebd33..67a94c4e267 100644 --- a/homeassistant/components/tuya/light.py +++ b/homeassistant/components/tuya/light.py @@ -327,6 +327,18 @@ LIGHTS: 
dict[str, tuple[TuyaLightEntityDescription, ...]] = { brightness_min=DPCode.BRIGHTNESS_MIN_1, ), ), + # Outdoor Flood Light + # Not documented + "tyd": ( + TuyaLightEntityDescription( + key=DPCode.SWITCH_LED, + name=None, + color_mode=DPCode.WORK_MODE, + brightness=DPCode.BRIGHT_VALUE, + color_temp=DPCode.TEMP_VALUE, + color_data=DPCode.COLOUR_DATA, + ), + ), # Solar Light # https://developer.tuya.com/en/docs/iot/tynd?id=Kaof8j02e1t98 "tyndj": ( diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index 073202bed94..9e40bda5d4d 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -454,6 +454,37 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), + TuyaSensorEntityDescription( + key=DPCode.VA_TEMPERATURE, + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.TEMP_CURRENT, + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.VA_HUMIDITY, + translation_key="humidity", + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.HUMIDITY_VALUE, + translation_key="humidity", + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.BRIGHT_VALUE, + translation_key="illuminance", + device_class=SensorDeviceClass.ILLUMINANCE, + state_class=SensorStateClass.MEASUREMENT, + ), + *BATTERY_SENSORS, ), # Luminance Sensor # https://developer.tuya.com/en/docs/iot/categoryldcg?id=Kaiuz3n7u69l8 @@ -715,6 +746,47 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { ), *BATTERY_SENSORS, ), + # Temperature and Humidity Sensor with External Probe + # New undocumented category qxj, see https://github.com/home-assistant/core/issues/136472 + "qxj": ( + TuyaSensorEntityDescription( + key=DPCode.VA_TEMPERATURE, + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.TEMP_CURRENT, + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.TEMP_CURRENT_EXTERNAL, + translation_key="temperature_external", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.VA_HUMIDITY, + translation_key="humidity", + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.HUMIDITY_VALUE, + translation_key="humidity", + device_class=SensorDeviceClass.HUMIDITY, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.BRIGHT_VALUE, + translation_key="illuminance", + device_class=SensorDeviceClass.ILLUMINANCE, + state_class=SensorStateClass.MEASUREMENT, + ), + *BATTERY_SENSORS, + ), # Pressure Sensor # https://developer.tuya.com/en/docs/iot/categoryylcg?id=Kaiuz3kc2e4gm "ylcg": ( @@ -760,7 +832,6 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { translation_key="total_power", device_class=SensorDeviceClass.POWER, 
state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfPower.KILO_WATT, subkey="power", ), TuyaSensorEntityDescription( diff --git a/homeassistant/components/tuya/strings.json b/homeassistant/components/tuya/strings.json index 8ec61cc8aa5..83847d32fb5 100644 --- a/homeassistant/components/tuya/strings.json +++ b/homeassistant/components/tuya/strings.json @@ -469,6 +469,9 @@ "temperature": { "name": "[%key:component::sensor::entity_component::temperature::name%]" }, + "temperature_external": { + "name": "Probe temperature" + }, "humidity": { "name": "[%key:component::sensor::entity_component::humidity::name%]" }, diff --git a/homeassistant/components/tuya/switch.py b/homeassistant/components/tuya/switch.py index 76d8b481a90..4000e8d9b24 100644 --- a/homeassistant/components/tuya/switch.py +++ b/homeassistant/components/tuya/switch.py @@ -612,6 +612,15 @@ SWITCHES: dict[str, tuple[SwitchEntityDescription, ...]] = { device_class=SwitchDeviceClass.OUTLET, ), ), + # SIREN: Siren (switch) with Temperature and Humidity Sensor with External Probe + # New undocumented category qxj, see https://github.com/home-assistant/core/issues/136472 + "qxj": ( + SwitchEntityDescription( + key=DPCode.SWITCH, + translation_key="switch", + device_class=SwitchDeviceClass.OUTLET, + ), + ), # Ceiling Light # https://developer.tuya.com/en/docs/iot/ceiling-light?id=Kaiuz03xxfc4r "xdd": ( diff --git a/homeassistant/components/twilio/strings.json b/homeassistant/components/twilio/strings.json index 871711ff087..f4b7dee707f 100644 --- a/homeassistant/components/twilio/strings.json +++ b/homeassistant/components/twilio/strings.json @@ -12,7 +12,7 @@ "webhook_not_internet_accessible": "[%key:common::config_flow::abort::webhook_not_internet_accessible%]" }, "create_entry": { - "default": "To send events to Home Assistant, you will need to setup [Webhooks with Twilio]({twilio_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/x-www-form-urlencoded\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data." + "default": "To send events to Home Assistant, you will need to set up [webhooks with Twilio]({twilio_url}).\n\nFill in the following info:\n\n- URL: `{webhook_url}`\n- Method: POST\n- Content Type: application/x-www-form-urlencoded\n\nSee [the documentation]({docs_url}) on how to configure automations to handle incoming data." 
} } } diff --git a/homeassistant/components/uk_transport/sensor.py b/homeassistant/components/uk_transport/sensor.py index b06d0e24891..594d46c74ab 100644 --- a/homeassistant/components/uk_transport/sensor.py +++ b/homeassistant/components/uk_transport/sensor.py @@ -32,6 +32,7 @@ ATTR_NEXT_BUSES = "next_buses" ATTR_STATION_CODE = "station_code" ATTR_CALLING_AT = "calling_at" ATTR_NEXT_TRAINS = "next_trains" +ATTR_LAST_UPDATED = "last_updated" CONF_API_APP_KEY = "app_key" CONF_API_APP_ID = "app_id" @@ -199,7 +200,9 @@ class UkTransportLiveBusTimeSensor(UkTransportSensor): def extra_state_attributes(self) -> dict[str, Any] | None: """Return other details about the sensor state.""" if self._data is not None: - attrs = {ATTR_NEXT_BUSES: self._next_buses} + attrs = { + ATTR_NEXT_BUSES: self._next_buses, + } for key in ( ATTR_ATCOCODE, ATTR_LOCALITY, @@ -272,6 +275,7 @@ class UkTransportLiveTrainTimeSensor(UkTransportSensor): attrs = { ATTR_STATION_CODE: self._station_code, ATTR_CALLING_AT: self._calling_at, + ATTR_LAST_UPDATED: self._data[ATTR_REQUEST_TIME], } if self._next_trains: attrs[ATTR_NEXT_TRAINS] = self._next_trains diff --git a/homeassistant/components/ukraine_alarm/coordinator.py b/homeassistant/components/ukraine_alarm/coordinator.py index 267358e4aa6..b4e1decb1a1 100644 --- a/homeassistant/components/ukraine_alarm/coordinator.py +++ b/homeassistant/components/ukraine_alarm/coordinator.py @@ -52,7 +52,7 @@ class UkraineAlarmDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): except aiohttp.ClientError as error: raise UpdateFailed(f"Error fetching alerts from API: {error}") from error - current = {alert_type: False for alert_type in ALERT_TYPES} + current = dict.fromkeys(ALERT_TYPES, False) for alert in res[0]["activeAlerts"]: current[alert["type"]] = True diff --git a/homeassistant/components/update/__init__.py b/homeassistant/components/update/__init__.py index 0ff8c448197..47cc5aa369b 100644 --- a/homeassistant/components/update/__init__.py +++ b/homeassistant/components/update/__init__.py @@ -226,7 +226,7 @@ class UpdateEntity( _attr_installed_version: str | None = None _attr_device_class: UpdateDeviceClass | None _attr_display_precision: int - _attr_in_progress: bool | int = False + _attr_in_progress: bool = False _attr_latest_version: str | None = None _attr_release_summary: str | None = None _attr_release_url: str | None = None @@ -295,7 +295,7 @@ class UpdateEntity( ) @cached_property - def in_progress(self) -> bool | int | None: + def in_progress(self) -> bool | None: """Update installation progress. Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. 
@@ -442,7 +442,7 @@ class UpdateEntity( in_progress = self.in_progress update_percentage = self.update_percentage if in_progress else None if type(in_progress) is not bool and isinstance(in_progress, int): - update_percentage = in_progress + update_percentage = in_progress # type: ignore[unreachable] in_progress = True else: in_progress = self.__in_progress diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index df4daa8782c..62ee4ede7d9 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.43.0", "getmac==0.9.5"], + "requirements": ["async-upnp-client==0.44.0", "getmac==0.9.5"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/usb/__init__.py b/homeassistant/components/usb/__init__.py index d68742522a0..90433b0f728 100644 --- a/homeassistant/components/usb/__init__.py +++ b/homeassistant/components/usb/__init__.py @@ -14,8 +14,6 @@ import sys from typing import Any, overload from aiousbwatcher import AIOUSBWatcher, InotifyNotAvailableError -from serial.tools.list_ports import comports -from serial.tools.list_ports_common import ListPortInfo import voluptuous as vol from homeassistant import config_entries @@ -43,7 +41,10 @@ from homeassistant.loader import USBMatcher, async_get_usb from .const import DOMAIN from .models import USBDevice -from .utils import usb_device_from_port +from .utils import ( + scan_serial_ports, + usb_device_from_port, # noqa: F401 +) _LOGGER = logging.getLogger(__name__) @@ -241,6 +242,13 @@ def _is_matching(device: USBDevice, matcher: USBMatcher | USBCallbackMatcher) -> return True +async def async_request_scan(hass: HomeAssistant) -> None: + """Request a USB scan.""" + usb_discovery: USBDiscovery = hass.data[DOMAIN] + if not usb_discovery.observer_active: + await usb_discovery.async_request_scan() + + class USBDiscovery: """Manage USB Discovery.""" @@ -265,8 +273,15 @@ class USBDiscovery: async def async_setup(self) -> None: """Set up USB Discovery.""" - if self._async_supports_monitoring(): - await self._async_start_monitor() + try: + await self._async_start_aiousbwatcher() + except InotifyNotAvailableError as ex: + _LOGGER.info( + "Falling back to periodic filesystem polling for development, " + "aiousbwatcher is not available on this system: %s", + ex, + ) + self._async_start_monitor_polling() self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STARTED, self.async_start) self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.async_stop) @@ -281,22 +296,6 @@ class USBDiscovery: if self._request_debouncer: self._request_debouncer.async_shutdown() - @hass_callback - def _async_supports_monitoring(self) -> bool: - return sys.platform == "linux" - - async def _async_start_monitor(self) -> None: - """Start monitoring hardware.""" - try: - await self._async_start_aiousbwatcher() - except InotifyNotAvailableError as ex: - _LOGGER.info( - "Falling back to periodic filesystem polling for development, aiousbwatcher " - "is not available on this system: %s", - ex, - ) - self._async_start_monitor_polling() - @hass_callback def _async_start_monitor_polling(self) -> None: """Start monitoring hardware with polling (for development only!).""" @@ -426,14 +425,8 @@ class USBDiscovery: service_info, ) - async def _async_process_ports(self, ports: 
Sequence[ListPortInfo]) -> None: + async def _async_process_ports(self, usb_devices: Sequence[USBDevice]) -> None: """Process each discovered port.""" - _LOGGER.debug("Processing ports: %r", ports) - usb_devices = { - usb_device_from_port(port) - for port in ports - if port.vid is not None or port.pid is not None - } _LOGGER.debug("USB devices: %r", usb_devices) # CP2102N chips create *two* serial ports on macOS: `/dev/cu.usbserial-` and @@ -445,7 +438,7 @@ class USBDiscovery: if dev.device.startswith("/dev/cu.SLAB_USBtoUART") } - usb_devices = { + filtered_usb_devices = { dev for dev in usb_devices if dev.serial_number not in silabs_serials @@ -454,10 +447,12 @@ class USBDiscovery: and dev.device.startswith("/dev/cu.SLAB_USBtoUART") ) } + else: + filtered_usb_devices = set(usb_devices) - added_devices = usb_devices - self._last_processed_devices - removed_devices = self._last_processed_devices - usb_devices - self._last_processed_devices = usb_devices + added_devices = filtered_usb_devices - self._last_processed_devices + removed_devices = self._last_processed_devices - filtered_usb_devices + self._last_processed_devices = filtered_usb_devices _LOGGER.debug( "Added devices: %r, removed devices: %r", added_devices, removed_devices @@ -470,7 +465,7 @@ class USBDiscovery: except Exception: _LOGGER.exception("Error in USB port event callback") - for usb_device in usb_devices: + for usb_device in filtered_usb_devices: await self._async_process_discovered_usb_device(usb_device) @hass_callback @@ -492,7 +487,7 @@ class USBDiscovery: _LOGGER.debug("Executing comports scan") async with self._scan_lock: await self._async_process_ports( - await self.hass.async_add_executor_job(comports) + await self.hass.async_add_executor_job(scan_serial_ports) ) if self.initial_scan_done: return @@ -530,9 +525,7 @@ async def websocket_usb_scan( msg: dict[str, Any], ) -> None: """Scan for new usb devices.""" - usb_discovery: USBDiscovery = hass.data[DOMAIN] - if not usb_discovery.observer_active: - await usb_discovery.async_request_scan() + await async_request_scan(hass) connection.send_result(msg["id"]) diff --git a/homeassistant/components/usb/utils.py b/homeassistant/components/usb/utils.py index d1d6fb17f3c..1bb620ec5f7 100644 --- a/homeassistant/components/usb/utils.py +++ b/homeassistant/components/usb/utils.py @@ -2,6 +2,9 @@ from __future__ import annotations +from collections.abc import Sequence + +from serial.tools.list_ports import comports from serial.tools.list_ports_common import ListPortInfo from .models import USBDevice @@ -17,3 +20,12 @@ def usb_device_from_port(port: ListPortInfo) -> USBDevice: manufacturer=port.manufacturer, description=port.description, ) + + +def scan_serial_ports() -> Sequence[USBDevice]: + """Scan serial ports for USB devices.""" + return [ + usb_device_from_port(port) + for port in comports() + if port.vid is not None or port.pid is not None + ] diff --git a/homeassistant/components/vallox/config_flow.py b/homeassistant/components/vallox/config_flow.py index 30d1d153d9e..c7e6af8891a 100644 --- a/homeassistant/components/vallox/config_flow.py +++ b/homeassistant/components/vallox/config_flow.py @@ -108,7 +108,7 @@ class ValloxConfigFlow(ConfigFlow, domain=DOMAIN): errors[CONF_HOST] = "invalid_host" except ValloxApiException: errors[CONF_HOST] = "cannot_connect" - except Exception: # pylint: disable=broad-except + except Exception: _LOGGER.exception("Unexpected exception") errors[CONF_HOST] = "unknown" else: diff --git a/homeassistant/components/velbus/strings.json 
b/homeassistant/components/velbus/strings.json index a50395af115..35f94e54470 100644 --- a/homeassistant/components/velbus/strings.json +++ b/homeassistant/components/velbus/strings.json @@ -2,10 +2,11 @@ "config": { "step": { "user": { - "title": "Define the Velbus connection type", - "data": { - "name": "The name for this Velbus connection", - "port": "Connection string" + "title": "Define the Velbus connection", + "description": "How do you want to configure the Velbus hub?", + "menu_options": { + "network": "Via network connection", + "usbselect": "Via USB device" } }, "network": { diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 1273ab914f8..4e39fe40f2d 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -30,9 +30,13 @@ VS_HUMIDIFIER_MODE_HUMIDITY = "humidity" VS_HUMIDIFIER_MODE_MANUAL = "manual" VS_HUMIDIFIER_MODE_SLEEP = "sleep" -NIGHT_LIGHT_LEVEL_BRIGHT = "bright" -NIGHT_LIGHT_LEVEL_DIM = "dim" -NIGHT_LIGHT_LEVEL_OFF = "off" +FAN_NIGHT_LIGHT_LEVEL_DIM = "dim" +FAN_NIGHT_LIGHT_LEVEL_OFF = "off" +FAN_NIGHT_LIGHT_LEVEL_ON = "on" + +HUMIDIFIER_NIGHT_LIGHT_LEVEL_BRIGHT = "bright" +HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM = "dim" +HUMIDIFIER_NIGHT_LIGHT_LEVEL_OFF = "off" VeSyncHumidifierDevice = VeSyncHumid200300S | VeSyncSuperior6000S """Humidifier device types""" diff --git a/homeassistant/components/vesync/select.py b/homeassistant/components/vesync/select.py index c266985fc2b..a9d2e1b533a 100644 --- a/homeassistant/components/vesync/select.py +++ b/homeassistant/components/vesync/select.py @@ -15,9 +15,12 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .common import rgetattr from .const import ( DOMAIN, - NIGHT_LIGHT_LEVEL_BRIGHT, - NIGHT_LIGHT_LEVEL_DIM, - NIGHT_LIGHT_LEVEL_OFF, + FAN_NIGHT_LIGHT_LEVEL_DIM, + FAN_NIGHT_LIGHT_LEVEL_OFF, + FAN_NIGHT_LIGHT_LEVEL_ON, + HUMIDIFIER_NIGHT_LIGHT_LEVEL_BRIGHT, + HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM, + HUMIDIFIER_NIGHT_LIGHT_LEVEL_OFF, VS_COORDINATOR, VS_DEVICES, VS_DISCOVERY, @@ -27,14 +30,14 @@ from .entity import VeSyncBaseEntity _LOGGER = logging.getLogger(__name__) -VS_TO_HA_NIGHT_LIGHT_LEVEL_MAP = { - 100: NIGHT_LIGHT_LEVEL_BRIGHT, - 50: NIGHT_LIGHT_LEVEL_DIM, - 0: NIGHT_LIGHT_LEVEL_OFF, +VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP = { + 100: HUMIDIFIER_NIGHT_LIGHT_LEVEL_BRIGHT, + 50: HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM, + 0: HUMIDIFIER_NIGHT_LIGHT_LEVEL_OFF, } -HA_TO_VS_NIGHT_LIGHT_LEVEL_MAP = { - v: k for k, v in VS_TO_HA_NIGHT_LIGHT_LEVEL_MAP.items() +HA_TO_VS_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP = { + v: k for k, v in VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.items() } @@ -48,20 +51,39 @@ class VeSyncSelectEntityDescription(SelectEntityDescription): SELECT_DESCRIPTIONS: list[VeSyncSelectEntityDescription] = [ + # night_light for humidifier VeSyncSelectEntityDescription( key="night_light_level", translation_key="night_light_level", - options=list(VS_TO_HA_NIGHT_LIGHT_LEVEL_MAP.values()), + options=list(VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.values()), icon="mdi:brightness-6", - exists_fn=lambda device: rgetattr(device, "night_light"), + exists_fn=lambda device: rgetattr(device, "set_night_light_brightness"), # The select_option service framework ensures that only options specified are # accepted. ServiceValidationError gets raised for invalid value. 
select_option_fn=lambda device, value: device.set_night_light_brightness( - HA_TO_VS_NIGHT_LIGHT_LEVEL_MAP.get(value, 0) + HA_TO_VS_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.get(value, 0) ), # Reporting "off" as the choice for unhandled level. - current_option_fn=lambda device: VS_TO_HA_NIGHT_LIGHT_LEVEL_MAP.get( - device.details.get("night_light_brightness"), NIGHT_LIGHT_LEVEL_OFF + current_option_fn=lambda device: VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.get( + device.details.get("night_light_brightness"), + HUMIDIFIER_NIGHT_LIGHT_LEVEL_OFF, + ), + ), + # night_light for fan devices based on pyvesync.VeSyncAirBypass + VeSyncSelectEntityDescription( + key="night_light_level", + translation_key="night_light_level", + options=[ + FAN_NIGHT_LIGHT_LEVEL_OFF, + FAN_NIGHT_LIGHT_LEVEL_DIM, + FAN_NIGHT_LIGHT_LEVEL_ON, + ], + icon="mdi:brightness-6", + exists_fn=lambda device: rgetattr(device, "set_night_light"), + select_option_fn=lambda device, value: device.set_night_light(value), + current_option_fn=lambda device: VS_TO_HA_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP.get( + device.details.get("night_light"), + FAN_NIGHT_LIGHT_LEVEL_OFF, ), ), ] diff --git a/homeassistant/components/vesync/strings.json b/homeassistant/components/vesync/strings.json index eabb2969580..9b63bf3e614 100644 --- a/homeassistant/components/vesync/strings.json +++ b/homeassistant/components/vesync/strings.json @@ -71,7 +71,8 @@ "state": { "bright": "Bright", "dim": "Dim", - "off": "[%key:common::state::off%]" + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" } } }, diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 733cda363e5..04049f026bd 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -515,11 +515,11 @@ "services": { "set_vicare_mode": { "name": "Set ViCare mode", - "description": "Set a ViCare mode.", + "description": "Sets the mode of the climate device as defined by Viessmann.", "fields": { "vicare_mode": { "name": "ViCare mode", - "description": "ViCare mode." + "description": "For supported values, see the `vicare_modes` attribute of the climate entity." 
} } } diff --git a/homeassistant/components/vilfo/config_flow.py b/homeassistant/components/vilfo/config_flow.py index cdba7f1b8c2..5612591c595 100644 --- a/homeassistant/components/vilfo/config_flow.py +++ b/homeassistant/components/vilfo/config_flow.py @@ -114,8 +114,8 @@ class DomainConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" - except Exception as err: # noqa: BLE001 - _LOGGER.error("Unexpected exception: %s", err) + except Exception: + _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: await self.async_set_unique_id(info[CONF_ID]) diff --git a/homeassistant/components/vodafone_station/__init__.py b/homeassistant/components/vodafone_station/__init__.py index 871afe09a2e..9f118fe4fbd 100644 --- a/homeassistant/components/vodafone_station/__init__.py +++ b/homeassistant/components/vodafone_station/__init__.py @@ -1,16 +1,15 @@ """Vodafone Station integration.""" -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from .const import DOMAIN -from .coordinator import VodafoneStationRouter +from .coordinator import VodafoneConfigEntry, VodafoneStationRouter PLATFORMS = [Platform.BUTTON, Platform.DEVICE_TRACKER, Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: VodafoneConfigEntry) -> bool: """Set up Vodafone Station platform.""" coordinator = VodafoneStationRouter( hass, @@ -22,7 +21,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator entry.async_on_unload(entry.add_update_listener(update_listener)) @@ -31,10 +30,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: VodafoneConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data await coordinator.api.logout() await coordinator.api.close() hass.data[DOMAIN].pop(entry.entry_id) @@ -42,7 +41,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: VodafoneConfigEntry) -> None: """Update when config_entry options update.""" if entry.options: await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/vodafone_station/button.py b/homeassistant/components/vodafone_station/button.py index 9812cef48d6..8dda4d49c7b 100644 --- a/homeassistant/components/vodafone_station/button.py +++ b/homeassistant/components/vodafone_station/button.py @@ -4,21 +4,32 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from json.decoder import JSONDecodeError from typing import Any, Final +from aiovodafone.exceptions import ( + AlreadyLogged, + CannotAuthenticate, + CannotConnect, + GenericLoginError, +) + from 
homeassistant.components.button import ( ButtonDeviceClass, ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import _LOGGER, DOMAIN -from .coordinator import VodafoneStationRouter +from .coordinator import VodafoneConfigEntry, VodafoneStationRouter + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -68,13 +79,13 @@ BUTTON_TYPES: Final = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VodafoneConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up entry.""" _LOGGER.debug("Setting up Vodafone Station buttons") - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data sensors_data = coordinator.data.sensors @@ -106,4 +117,25 @@ class VodafoneStationSensorEntity( async def async_press(self) -> None: """Triggers the Shelly button press service.""" - await self.entity_description.press_action(self.coordinator) + + try: + await self.entity_description.press_action(self.coordinator) + except CannotAuthenticate as err: + self.coordinator.config_entry.async_start_reauth(self.hass) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="cannot_authenticate", + translation_placeholders={"error": repr(err)}, + ) from err + except ( + CannotConnect, + AlreadyLogged, + GenericLoginError, + JSONDecodeError, + ) as err: + self.coordinator.last_update_success = False + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="cannot_execute_action", + translation_placeholders={"error": repr(err)}, + ) from err diff --git a/homeassistant/components/vodafone_station/config_flow.py b/homeassistant/components/vodafone_station/config_flow.py index 7a80244f8d6..6641f5f5711 100644 --- a/homeassistant/components/vodafone_station/config_flow.py +++ b/homeassistant/components/vodafone_station/config_flow.py @@ -12,16 +12,12 @@ from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, ) -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from .const import _LOGGER, DEFAULT_HOST, DEFAULT_USERNAME, DOMAIN +from .coordinator import VodafoneConfigEntry def user_form_schema(user_input: dict[str, Any] | None) -> vol.Schema: @@ -63,7 +59,7 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: VodafoneConfigEntry, ) -> VodafoneStationOptionsFlowHandler: """Get the options flow for this handler.""" return VodafoneStationOptionsFlowHandler() @@ -143,6 +139,47 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the device.""" + reconfigure_entry = self._get_reconfigure_entry() + if not 
user_input: + return self.async_show_form( + step_id="reconfigure", data_schema=user_form_schema(user_input) + ) + + updated_host = user_input[CONF_HOST] + + if reconfigure_entry.data[CONF_HOST] != updated_host: + self._async_abort_entries_match({CONF_HOST: updated_host}) + + errors: dict[str, str] = {} + + try: + await validate_input(self.hass, user_input) + except aiovodafone_exceptions.AlreadyLogged: + errors["base"] = "already_logged" + except aiovodafone_exceptions.CannotConnect: + errors["base"] = "cannot_connect" + except aiovodafone_exceptions.CannotAuthenticate: + errors["base"] = "invalid_auth" + except Exception: # noqa: BLE001 + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates={CONF_HOST: updated_host} + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=user_form_schema(user_input), + errors=errors, + ) + class VodafoneStationOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" diff --git a/homeassistant/components/vodafone_station/coordinator.py b/homeassistant/components/vodafone_station/coordinator.py index cd640d10cb6..cee66bd2e7c 100644 --- a/homeassistant/components/vodafone_station/coordinator.py +++ b/homeassistant/components/vodafone_station/coordinator.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from datetime import datetime, timedelta from json.decoder import JSONDecodeError -from typing import Any +from typing import Any, cast from aiovodafone import VodafoneStationDevice, VodafoneStationSercommApi, exceptions @@ -21,6 +21,8 @@ from .helpers import cleanup_device_tracker CONSIDER_HOME_SECONDS = DEFAULT_CONSIDER_HOME.total_seconds() +type VodafoneConfigEntry = ConfigEntry[VodafoneStationRouter] + @dataclass(slots=True) class VodafoneStationDeviceInfo: @@ -42,7 +44,7 @@ class UpdateCoordinatorDataType: class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): """Queries router running Vodafone Station firmware.""" - config_entry: ConfigEntry + config_entry: VodafoneConfigEntry def __init__( self, @@ -50,7 +52,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): host: str, username: str, password: str, - config_entry: ConfigEntry, + config_entry: VodafoneConfigEntry, ) -> None: """Initialize the scanner.""" @@ -120,14 +122,22 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): data_sensors = await self.api.get_sensor_data() await self.api.logout() except exceptions.CannotAuthenticate as err: - raise ConfigEntryAuthFailed from err + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="cannot_authenticate", + translation_placeholders={"error": repr(err)}, + ) from err except ( exceptions.CannotConnect, exceptions.AlreadyLogged, exceptions.GenericLoginError, JSONDecodeError, ) as err: - raise UpdateFailed(f"Error fetching data: {err!r}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": repr(err)}, + ) from err except (ConfigEntryAuthFailed, UpdateFailed): await self.api.close() raise @@ -164,7 +174,7 @@ class VodafoneStationRouter(DataUpdateCoordinator[UpdateCoordinatorDataType]): @property def serial_number(self) -> str: """Device serial number.""" - return self.data.sensors["sys_serial_number"] + return cast(str, self.data.sensors["sys_serial_number"]) @property def device_info(self) -> DeviceInfo: diff --git 
a/homeassistant/components/vodafone_station/device_tracker.py b/homeassistant/components/vodafone_station/device_tracker.py index ece4bd05a02..4efa26cda8c 100644 --- a/homeassistant/components/vodafone_station/device_tracker.py +++ b/homeassistant/components/vodafone_station/device_tracker.py @@ -3,25 +3,31 @@ from __future__ import annotations from homeassistant.components.device_tracker import ScannerEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import _LOGGER, DOMAIN -from .coordinator import VodafoneStationDeviceInfo, VodafoneStationRouter +from .const import _LOGGER +from .coordinator import ( + VodafoneConfigEntry, + VodafoneStationDeviceInfo, + VodafoneStationRouter, +) + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VodafoneConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up device tracker for Vodafone Station component.""" _LOGGER.debug("Start device trackers setup") - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data tracked: set = set() diff --git a/homeassistant/components/vodafone_station/diagnostics.py b/homeassistant/components/vodafone_station/diagnostics.py index e306d6caca2..4778e7d5a4e 100644 --- a/homeassistant/components/vodafone_station/diagnostics.py +++ b/homeassistant/components/vodafone_station/diagnostics.py @@ -5,22 +5,20 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import VodafoneStationRouter +from .coordinator import VodafoneConfigEntry TO_REDACT = {CONF_USERNAME, CONF_PASSWORD} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: VodafoneConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data sensors_data = coordinator.data.sensors return { diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index 4acafc8df3a..29cb3c070ab 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -7,5 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiovodafone"], + "quality_scale": "silver", "requirements": ["aiovodafone==0.6.1"] } diff --git a/homeassistant/components/vodafone_station/quality_scale.yaml b/homeassistant/components/vodafone_station/quality_scale.yaml new file mode 100644 index 00000000000..d60020f5e47 --- /dev/null +++ b/homeassistant/components/vodafone_station/quality_scale.yaml @@ -0,0 +1,76 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: no actions + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + 
dependency-transparency: done + docs-actions: + status: exempt + comment: no actions + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: no events + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: device not discoverable + discovery: + status: exempt + comment: device not discoverable + docs-data-update: done + docs-examples: done + docs-known-limitations: + status: exempt + comment: no known limitations, yet + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: no known use case + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: no known use cases for repair issues or flows, yet + stale-devices: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/vodafone_station/sensor.py b/homeassistant/components/vodafone_station/sensor.py index d29fb7f21e9..2573864330d 100644 --- a/homeassistant/components/vodafone_station/sensor.py +++ b/homeassistant/components/vodafone_station/sensor.py @@ -12,14 +12,16 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfDataRate from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import _LOGGER, DOMAIN, LINE_TYPES -from .coordinator import VodafoneStationRouter +from .const import _LOGGER, LINE_TYPES +from .coordinator import VodafoneConfigEntry, VodafoneStationRouter + +# Coordinator is used to centralize the data updates +PARALLEL_UPDATES = 0 NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"] UPTIME_DEVIATION = 60 @@ -166,13 +168,13 @@ SENSOR_TYPES: Final = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: VodafoneConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up entry.""" _LOGGER.debug("Setting up Vodafone Station sensors") - coordinator: VodafoneStationRouter = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data sensors_data = coordinator.data.sensors diff --git a/homeassistant/components/vodafone_station/strings.json b/homeassistant/components/vodafone_station/strings.json index 8910d7178b7..958b774a485 100644 --- a/homeassistant/components/vodafone_station/strings.json +++ b/homeassistant/components/vodafone_station/strings.json @@ -3,9 +3,11 @@ "flow_title": "{host}", "step": { "reauth_confirm": { - "description": "Please enter the correct password for host: {host}", "data": { "password": 
"[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::vodafone_station::config::step::user::data_description::password%]" } }, "user": { @@ -15,7 +17,21 @@ "password": "[%key:common::config_flow::data::password%]" }, "data_description": { - "host": "The hostname or IP address of your Vodafone Station." + "host": "The hostname or IP address of your Vodafone Station.", + "username": "The username for your Vodafone Station.", + "password": "The password for your Vodafone Station." + } + }, + "reconfigure": { + "data": { + "host": "[%key:common::config_flow::data::host%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "host": "[%key:component::vodafone_station::config::step::user::data_description::host%]", + "username": "[%key:component::vodafone_station::config::step::user::data_description::username%]", + "password": "[%key:component::vodafone_station::config::step::user::data_description::password%]" } } }, @@ -23,16 +39,17 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "already_logged": "User already logged-in, please try again later.", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "model_not_supported": "The device model is currently unsupported.", "unknown": "[%key:common::config_flow::error::unknown%]" }, "error": { - "already_logged": "User already logged-in, please try again later.", + "already_logged": "[%key:component::vodafone_station::config::abort::already_logged%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "model_not_supported": "The device model is currently unsupported.", + "model_not_supported": "[%key:component::vodafone_station::config::abort::model_not_supported%]", "unknown": "[%key:common::config_flow::error::unknown%]" } }, @@ -41,20 +58,35 @@ "init": { "data": { "consider_home": "Seconds to consider a device at 'home'" + }, + "data_description": { + "consider_home": "The number of seconds to wait until marking a device as not home after it disconnects from the network." 
} } } }, "entity": { "button": { - "dsl_reconnect": { "name": "DSL reconnect" }, - "fiber_reconnect": { "name": "Fiber reconnect" }, - "internet_key_reconnect": { "name": "Internet key reconnect" } + "dsl_reconnect": { + "name": "DSL reconnect" + }, + "fiber_reconnect": { + "name": "Fiber reconnect" + }, + "internet_key_reconnect": { + "name": "Internet key reconnect" + } }, "sensor": { - "external_ipv4": { "name": "WAN IPv4 address" }, - "external_ipv6": { "name": "WAN IPv6 address" }, - "external_ip_key": { "name": "WAN internet key address" }, + "external_ipv4": { + "name": "WAN IPv4 address" + }, + "external_ipv6": { + "name": "WAN IPv6 address" + }, + "external_ip_key": { + "name": "WAN internet key address" + }, "active_connection": { "name": "Active connection", "state": { @@ -64,15 +96,44 @@ "internet_key": "Internet key" } }, - "down_stream": { "name": "WAN download rate" }, - "up_stream": { "name": "WAN upload rate" }, - "fw_version": { "name": "Firmware version" }, - "phone_num1": { "name": "Phone number (1)" }, - "phone_num2": { "name": "Phone number (2)" }, - "sys_uptime": { "name": "Uptime" }, - "sys_cpu_usage": { "name": "CPU usage" }, - "sys_memory_usage": { "name": "Memory usage" }, - "sys_reboot_cause": { "name": "Reboot cause" } + "down_stream": { + "name": "WAN download rate" + }, + "up_stream": { + "name": "WAN upload rate" + }, + "fw_version": { + "name": "Firmware version" + }, + "phone_num1": { + "name": "Phone number (1)" + }, + "phone_num2": { + "name": "Phone number (2)" + }, + "sys_uptime": { + "name": "Uptime" + }, + "sys_cpu_usage": { + "name": "CPU usage" + }, + "sys_memory_usage": { + "name": "Memory usage" + }, + "sys_reboot_cause": { + "name": "Reboot cause" + } + } + }, + "exceptions": { + "update_failed": { + "message": "Error fetching data: {error}" + }, + "cannot_execute_action": { + "message": "Cannot execute requested action: {error}" + }, + "cannot_authenticate": { + "message": "Error authenticating: {error}" } } } diff --git a/homeassistant/components/voip/assist_satellite.py b/homeassistant/components/voip/assist_satellite.py index a0aeaaf38d3..2c0a3b9641a 100644 --- a/homeassistant/components/voip/assist_satellite.py +++ b/homeassistant/components/voip/assist_satellite.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from datetime import timedelta from enum import IntFlag from functools import partial import io @@ -16,7 +17,7 @@ import wave from voip_utils import SIP_PORT, RtpDatagramProtocol from voip_utils.sip import SipDatagramProtocol, SipEndpoint, get_sip_endpoint -from homeassistant.components import tts +from homeassistant.components import intent, tts from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType from homeassistant.components.assist_satellite import ( AssistSatelliteAnnouncement, @@ -25,9 +26,11 @@ from homeassistant.components.assist_satellite import ( AssistSatelliteEntityDescription, AssistSatelliteEntityFeature, ) +from homeassistant.components.intent import TimerEventType, TimerInfo from homeassistant.components.network import async_get_source_ip from homeassistant.config_entries import ConfigEntry from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from .const import ( @@ -160,6 +163,13 @@ class VoipAssistSatellite(VoIPEntity, AssistSatelliteEntity, RtpDatagramProtocol await super().async_added_to_hass() 
self.voip_device.protocol = self + assert self.device_entry is not None + self.async_on_remove( + intent.async_register_timer_handler( + self.hass, self.device_entry.id, self.async_handle_timer_event + ) + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() @@ -173,6 +183,29 @@ class VoipAssistSatellite(VoIPEntity, AssistSatelliteEntity, RtpDatagramProtocol """Get the current satellite configuration.""" raise NotImplementedError + @callback + def async_handle_timer_event( + self, + event_type: TimerEventType, + timer_info: TimerInfo, + ) -> None: + """Handle timer event.""" + if event_type != TimerEventType.FINISHED: + return + + if timer_info.name: + message = f"{timer_info.name} finished" + else: + message = f"{timedelta(seconds=timer_info.created_seconds)} timer finished" + + async def announce_message(): + announcement = await self._resolve_announcement_media_id(message, None) + await self.async_announce(announcement) + + self.config_entry.async_create_background_task( + self.hass, announce_message(), "voip_announce_timer" + ) + async def async_set_configuration( self, config: AssistSatelliteConfiguration ) -> None: @@ -193,6 +226,12 @@ class VoipAssistSatellite(VoIPEntity, AssistSatelliteEntity, RtpDatagramProtocol Optionally run a voice pipeline after the announcement has finished. """ + if announcement.media_id_source != "tts": + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="non_tts_announcement", + ) + self._announcement_future = asyncio.Future() self._run_pipeline_after_announce = run_pipeline_after diff --git a/homeassistant/components/voip/manifest.json b/homeassistant/components/voip/manifest.json index e3b2861dbe5..dfd397fde14 100644 --- a/homeassistant/components/voip/manifest.json +++ b/homeassistant/components/voip/manifest.json @@ -3,9 +3,10 @@ "name": "Voice over IP", "codeowners": ["@balloob", "@synesthesiam"], "config_flow": true, - "dependencies": ["assist_pipeline", "assist_satellite", "network"], + "dependencies": ["assist_pipeline", "assist_satellite", "intent", "network"], "documentation": "https://www.home-assistant.io/integrations/voip", "iot_class": "local_push", + "loggers": ["voip_utils"], "quality_scale": "internal", "requirements": ["voip-utils==0.3.1"] } diff --git a/homeassistant/components/voip/strings.json b/homeassistant/components/voip/strings.json index 96c902bf39a..4f37ad1d6f7 100644 --- a/homeassistant/components/voip/strings.json +++ b/homeassistant/components/voip/strings.json @@ -58,5 +58,10 @@ } } } + }, + "exceptions": { + "non_tts_announcement": { + "message": "VoIP does not currently support non-TTS announcements" + } } } diff --git a/homeassistant/components/wallbox/manifest.json b/homeassistant/components/wallbox/manifest.json index 63102646508..d217a018303 100644 --- a/homeassistant/components/wallbox/manifest.json +++ b/homeassistant/components/wallbox/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wallbox", "iot_class": "cloud_polling", "loggers": ["wallbox"], - "requirements": ["wallbox==0.7.0"] + "requirements": ["wallbox==0.8.0"] } diff --git a/homeassistant/components/watergate/__init__.py b/homeassistant/components/watergate/__init__.py index c1747af1f11..fd591215d8b 100644 --- a/homeassistant/components/watergate/__init__.py +++ b/homeassistant/components/watergate/__init__.py @@ -18,8 +18,9 @@ from homeassistant.components.webhook import ( ) from 
homeassistant.const import CONF_IP_ADDRESS, CONF_WEBHOOK_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import DOMAIN +from .const import AUTO_SHUT_OFF_EVENT_NAME, DOMAIN from .coordinator import WatergateConfigEntry, WatergateDataCoordinator _LOGGER = logging.getLogger(__name__) @@ -28,8 +29,10 @@ WEBHOOK_TELEMETRY_TYPE = "telemetry" WEBHOOK_VALVE_TYPE = "valve" WEBHOOK_WIFI_CHANGED_TYPE = "wifi-changed" WEBHOOK_POWER_SUPPLY_CHANGED_TYPE = "power-supply-changed" +WEBHOOK_AUTO_SHUT_OFF = "auto-shut-off-report" PLATFORMS: list[Platform] = [ + Platform.EVENT, Platform.SENSOR, Platform.VALVE, ] @@ -120,6 +123,10 @@ def get_webhook_handler( coordinator_data.networking.rssi = data.rssi elif body_type == WEBHOOK_POWER_SUPPLY_CHANGED_TYPE: coordinator_data.state.power_supply = data.supply + elif body_type == WEBHOOK_AUTO_SHUT_OFF: + async_dispatcher_send( + hass, AUTO_SHUT_OFF_EVENT_NAME.format(data.type.lower()), data + ) coordinator.async_set_updated_data(coordinator_data) diff --git a/homeassistant/components/watergate/const.py b/homeassistant/components/watergate/const.py index 22a14330af9..c6726d9185f 100644 --- a/homeassistant/components/watergate/const.py +++ b/homeassistant/components/watergate/const.py @@ -3,3 +3,5 @@ DOMAIN = "watergate" MANUFACTURER = "Watergate" + +AUTO_SHUT_OFF_EVENT_NAME = "watergate_{}" diff --git a/homeassistant/components/watergate/event.py b/homeassistant/components/watergate/event.py new file mode 100644 index 00000000000..cf2447df4b3 --- /dev/null +++ b/homeassistant/components/watergate/event.py @@ -0,0 +1,78 @@ +"""Module contains the AutoShutOffEvent class for handling auto shut off events.""" + +from watergate_local_api.models.auto_shut_off_report import AutoShutOffReport + +from homeassistant.components.event import EventEntity, EventEntityDescription +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback + +from . 
import WatergateConfigEntry +from .const import AUTO_SHUT_OFF_EVENT_NAME +from .coordinator import WatergateDataCoordinator +from .entity import WatergateEntity + +VOLUME_AUTO_SHUT_OFF = "volume_threshold" +DURATION_AUTO_SHUT_OFF = "duration_threshold" + + +DESCRIPTIONS: list[EventEntityDescription] = [ + EventEntityDescription( + translation_key="auto_shut_off_volume", + key="auto_shut_off_volume", + event_types=[VOLUME_AUTO_SHUT_OFF], + ), + EventEntityDescription( + translation_key="auto_shut_off_duration", + key="auto_shut_off_duration", + event_types=[DURATION_AUTO_SHUT_OFF], + ), +] + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: WatergateConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up Event entities from config entry.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + AutoShutOffEvent(coordinator, description) for description in DESCRIPTIONS + ) + + +class AutoShutOffEvent(WatergateEntity, EventEntity): + """Event for Auto Shut Off.""" + + def __init__( + self, + coordinator: WatergateDataCoordinator, + entity_description: EventEntityDescription, + ) -> None: + """Initialize Auto Shut Off Entity.""" + super().__init__(coordinator, entity_description.key) + self.entity_description = entity_description + + async def async_added_to_hass(self): + """Register the callback for event handling when the entity is added.""" + await super().async_added_to_hass() + self.async_on_remove( + async_dispatcher_connect( + self.hass, + AUTO_SHUT_OFF_EVENT_NAME.format(self.event_types[0]), + self._async_handle_event, + ) + ) + + @callback + def _async_handle_event(self, event: AutoShutOffReport) -> None: + self._trigger_event( + event.type.lower(), + {"volume": event.volume, "duration": event.duration}, + ) + self.async_write_ha_state() diff --git a/homeassistant/components/watergate/icons.json b/homeassistant/components/watergate/icons.json new file mode 100644 index 00000000000..28a0bfbc825 --- /dev/null +++ b/homeassistant/components/watergate/icons.json @@ -0,0 +1,12 @@ +{ + "entity": { + "event": { + "auto_shut_off_volume": { + "default": "mdi:water" + }, + "auto_shut_off_duration": { + "default": "mdi:timelapse" + } + } + } +} diff --git a/homeassistant/components/watergate/quality_scale.yaml b/homeassistant/components/watergate/quality_scale.yaml index b116eff970e..73a39bd5264 100644 --- a/homeassistant/components/watergate/quality_scale.yaml +++ b/homeassistant/components/watergate/quality_scale.yaml @@ -17,10 +17,7 @@ rules: docs-high-level-description: done docs-installation-instructions: done docs-removal-instructions: done - entity-event-setup: - status: exempt - comment: | - Entities of this integration does not explicitly subscribe to events. 
+ entity-event-setup: done entity-unique-id: done has-entity-name: done runtime-data: done diff --git a/homeassistant/components/watergate/strings.json b/homeassistant/components/watergate/strings.json index c312525e420..634e05e7973 100644 --- a/homeassistant/components/watergate/strings.json +++ b/homeassistant/components/watergate/strings.json @@ -19,6 +19,42 @@ } }, "entity": { + "event": { + "auto_shut_off_volume": { + "name": "Volume auto shut-off", + "state_attributes": { + "event_type": { + "state": { + "volume_threshold": "Volume", + "duration_threshold": "Duration" + } + }, + "volume": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]" + }, + "duration": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + } + }, + "auto_shut_off_duration": { + "name": "Duration auto shut-off", + "state_attributes": { + "event_type": { + "state": { + "volume_threshold": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]", + "duration_threshold": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + }, + "volume": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::volume_threshold%]" + }, + "duration": { + "name": "[%key:component::watergate::entity::event::auto_shut_off_volume::state_attributes::event_type::state::duration_threshold%]" + } + } + } + }, "sensor": { "water_meter_volume": { "name": "Water meter volume" diff --git a/homeassistant/components/weatherflow/sensor.py b/homeassistant/components/weatherflow/sensor.py index 683413236c1..10c04b3283b 100644 --- a/homeassistant/components/weatherflow/sensor.py +++ b/homeassistant/components/weatherflow/sensor.py @@ -267,16 +267,17 @@ SENSORS: tuple[WeatherFlowSensorEntityDescription, ...] 
= ( WeatherFlowSensorEntityDescription( key="wind_direction", translation_key="wind_direction", + device_class=SensorDeviceClass.WIND_DIRECTION, + state_class=SensorStateClass.MEASUREMENT_ANGLE, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, event_subscriptions=[EVENT_RAPID_WIND, EVENT_OBSERVATION], raw_data_conv_fn=lambda raw_data: raw_data.magnitude, ), WeatherFlowSensorEntityDescription( key="wind_direction_average", translation_key="wind_direction_average", + device_class=SensorDeviceClass.WIND_DIRECTION, native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, raw_data_conv_fn=lambda raw_data: raw_data.magnitude, ), ) diff --git a/homeassistant/components/webdav/backup.py b/homeassistant/components/webdav/backup.py index 11d0a459852..fb2927a58bb 100644 --- a/homeassistant/components/webdav/backup.py +++ b/homeassistant/components/webdav/backup.py @@ -5,10 +5,10 @@ from __future__ import annotations from collections.abc import AsyncIterator, Callable, Coroutine from functools import wraps import logging +from time import time from typing import Any, Concatenate from aiohttp import ClientTimeout -from aiowebdav2 import Property, PropertyRequest from aiowebdav2.exceptions import UnauthorizedError, WebDavError from propcache.api import cached_property @@ -28,9 +28,8 @@ from .const import CONF_BACKUP_PATH, DATA_BACKUP_AGENT_LISTENERS, DOMAIN _LOGGER = logging.getLogger(__name__) -METADATA_VERSION = "1" BACKUP_TIMEOUT = ClientTimeout(connect=10, total=43200) -NAMESPACE = "https://home-assistant.io" +CACHE_TTL = 300 async def async_get_backup_agents( @@ -96,23 +95,6 @@ def suggested_filenames(backup: AgentBackup) -> tuple[str, str]: return f"{base_name}.tar", f"{base_name}.metadata.json" -def _is_current_metadata_version(properties: list[Property]) -> bool: - """Check if any property is of the current metadata version.""" - return any( - prop.value == METADATA_VERSION - for prop in properties - if prop.namespace == NAMESPACE and prop.name == "metadata_version" - ) - - -def _backup_id_from_properties(properties: list[Property]) -> str | None: - """Return the backup ID from properties.""" - for prop in properties: - if prop.namespace == NAMESPACE and prop.name == "backup_id": - return prop.value - return None - - class WebDavBackupAgent(BackupAgent): """Backup agent interface.""" @@ -126,6 +108,8 @@ class WebDavBackupAgent(BackupAgent): self._client = entry.runtime_data self.name = entry.title self.unique_id = entry.entry_id + self._cache_metadata_files: dict[str, AgentBackup] = {} + self._cache_expiration = time() @cached_property def _backup_path(self) -> str: @@ -144,8 +128,6 @@ class WebDavBackupAgent(BackupAgent): :return: An async iterator that yields bytes. 
""" backup = await self._find_backup_by_id(backup_id) - if backup is None: - raise BackupNotFound("Backup not found") return await self._client.download_iter( f"{self._backup_path}/{suggested_filename(backup)}", @@ -184,27 +166,14 @@ class WebDavBackupAgent(BackupAgent): f"{self._backup_path}/{filename_meta}", ) - await self._client.set_property_batch( - f"{self._backup_path}/{filename_meta}", - [ - Property( - namespace=NAMESPACE, - name="backup_id", - value=backup.backup_id, - ), - Property( - namespace=NAMESPACE, - name="metadata_version", - value=METADATA_VERSION, - ), - ], - ) - _LOGGER.debug( "Uploaded metadata file for %s", f"{self._backup_path}/{filename_meta}", ) + # reset cache + self._cache_expiration = time() + @handle_backup_errors async def async_delete_backup( self, @@ -216,8 +185,6 @@ class WebDavBackupAgent(BackupAgent): :param backup_id: The ID of the backup that was returned in async_list_backups. """ backup = await self._find_backup_by_id(backup_id) - if backup is None: - return (filename_tar, filename_meta) = suggested_filenames(backup) backup_path = f"{self._backup_path}/{filename_tar}" @@ -230,56 +197,52 @@ class WebDavBackupAgent(BackupAgent): backup_path, ) + # reset cache + self._cache_expiration = time() + @handle_backup_errors async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: """List backups.""" - metadata_files = await self._list_metadata_files() - return [ - await self._download_metadata(metadata_file) - for metadata_file in metadata_files.values() - ] + return list((await self._list_cached_metadata_files()).values()) @handle_backup_errors async def async_get_backup( self, backup_id: str, **kwargs: Any, - ) -> AgentBackup | None: + ) -> AgentBackup: """Return a backup.""" return await self._find_backup_by_id(backup_id) - async def _list_metadata_files(self) -> dict[str, str]: - """List metadata files.""" - files = await self._client.list_with_properties( - self._backup_path, - [ - PropertyRequest( - namespace=NAMESPACE, - name="metadata_version", - ), - PropertyRequest( - namespace=NAMESPACE, - name="backup_id", - ), - ], - ) - return { - backup_id: file_name - for file_name, properties in files.items() - if file_name.endswith(".json") and _is_current_metadata_version(properties) - if (backup_id := _backup_id_from_properties(properties)) - } + async def _list_cached_metadata_files(self) -> dict[str, AgentBackup]: + """List metadata files with a cache.""" + if time() <= self._cache_expiration: + return self._cache_metadata_files - async def _find_backup_by_id(self, backup_id: str) -> AgentBackup | None: + async def _download_metadata(path: str) -> AgentBackup: + """Download metadata file.""" + iterator = await self._client.download_iter(path) + metadata = await anext(iterator) + return AgentBackup.from_dict(json_loads_object(metadata)) + + async def _list_metadata_files() -> dict[str, AgentBackup]: + """List metadata files.""" + files = await self._client.list_files(self._backup_path) + return { + metadata_content.backup_id: metadata_content + for file_name in files + if file_name.endswith(".json") + if (metadata_content := await _download_metadata(file_name)) + } + + self._cache_metadata_files = await _list_metadata_files() + self._cache_expiration = time() + CACHE_TTL + return self._cache_metadata_files + + async def _find_backup_by_id(self, backup_id: str) -> AgentBackup: """Find a backup by its backup ID on remote.""" - metadata_files = await self._list_metadata_files() + metadata_files = await self._list_cached_metadata_files() if 
metadata_file := metadata_files.get(backup_id): - return await self._download_metadata(metadata_file) + return metadata_file - return None - - async def _download_metadata(self, path: str) -> AgentBackup: - """Download metadata file.""" - iterator = await self._client.download_iter(path) - metadata = await anext(iterator) - return AgentBackup.from_dict(json_loads_object(metadata)) + raise BackupNotFound(f"Backup {backup_id} not found") diff --git a/homeassistant/components/webdav/config_flow.py b/homeassistant/components/webdav/config_flow.py index f75544d25ad..e3e46d2575a 100644 --- a/homeassistant/components/webdav/config_flow.py +++ b/homeassistant/components/webdav/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from typing import Any -from aiowebdav2.exceptions import UnauthorizedError +from aiowebdav2.exceptions import MethodNotSupportedError, UnauthorizedError import voluptuous as vol import yarl @@ -65,7 +65,9 @@ class WebDavConfigFlow(ConfigFlow, domain=DOMAIN): result = await client.check() except UnauthorizedError: errors["base"] = "invalid_auth" - except Exception: # pylint: disable=broad-except + except MethodNotSupportedError: + errors["base"] = "invalid_method" + except Exception: _LOGGER.exception("Unexpected error") errors["base"] = "unknown" else: diff --git a/homeassistant/components/webdav/manifest.json b/homeassistant/components/webdav/manifest.json index 30028cb28c9..63d093745d1 100644 --- a/homeassistant/components/webdav/manifest.json +++ b/homeassistant/components/webdav/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["aiowebdav2"], "quality_scale": "bronze", - "requirements": ["aiowebdav2==0.4.2"] + "requirements": ["aiowebdav2==0.4.5"] } diff --git a/homeassistant/components/webdav/strings.json b/homeassistant/components/webdav/strings.json index b03ffaf2a3d..ac6418f1239 100644 --- a/homeassistant/components/webdav/strings.json +++ b/homeassistant/components/webdav/strings.json @@ -21,6 +21,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "invalid_method": "The server does not support the required methods. Please check whether you have the correct URL. Check with your provider for the correct URL.", "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { diff --git a/homeassistant/components/webmin/strings.json b/homeassistant/components/webmin/strings.json index 9a6d6d4fbe4..b92986f917a 100644 --- a/homeassistant/components/webmin/strings.json +++ b/homeassistant/components/webmin/strings.json @@ -29,13 +29,13 @@ "entity": { "sensor": { "load_1m": { - "name": "Load (1m)" + "name": "Load (1 min)" }, "load_5m": { - "name": "Load (5m)" + "name": "Load (5 min)" }, "load_15m": { - "name": "Load (15m)" + "name": "Load (15 min)" }, "mem_total": { "name": "Memory total" diff --git a/homeassistant/components/websocket_api/const.py b/homeassistant/components/websocket_api/const.py index a0d031834ae..fce85339430 100644 --- a/homeassistant/components/websocket_api/const.py +++ b/homeassistant/components/websocket_api/const.py @@ -21,7 +21,7 @@ type AsyncWebSocketCommandHandler = Callable[ DOMAIN: Final = "websocket_api" URL: Final = "/api/websocket" PENDING_MSG_PEAK: Final = 1024 -PENDING_MSG_PEAK_TIME: Final = 5 +PENDING_MSG_PEAK_TIME: Final = 10 # Maximum number of messages that can be pending at any given time. 
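Note on the webdav backup agent changes above: instead of tagging each uploaded file with WebDAV properties, the agent now downloads the *.metadata.json files on demand and keeps them in a short-lived in-memory cache (CACHE_TTL), which it expires immediately after an upload or a deletion. A minimal, self-contained sketch of that time-based caching pattern, using illustrative names only (TtlCache and refresh are not the integration's actual API):

from collections.abc import Awaitable, Callable
from time import time

CACHE_TTL = 300  # seconds, mirroring the constant introduced in the diff above


class TtlCache:
    """Tiny time-based cache: serve cached data until the TTL elapses."""

    def __init__(self) -> None:
        self._data: dict[str, str] = {}
        # An expiration timestamp in the past forces a refresh on first use.
        self._expires_at: float = time()

    async def get(
        self, refresh: Callable[[], Awaitable[dict[str, str]]]
    ) -> dict[str, str]:
        """Return cached data, calling refresh() only when the cache is stale."""
        if time() <= self._expires_at:
            return self._data
        self._data = await refresh()
        self._expires_at = time() + CACHE_TTL
        return self._data

    def invalidate(self) -> None:
        """Expire the cache immediately, e.g. after an upload or a deletion."""
        self._expires_at = time()

Because these agent calls run on the single event loop, resetting the expiration timestamp after writes is enough to keep backup listings fresh without extra locking.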
# This is effectively the upper limit of the number of entities # that can fire state changes within ~1 second. diff --git a/homeassistant/components/whirlpool/__init__.py b/homeassistant/components/whirlpool/__init__.py index 6231324bb0d..cb073779379 100644 --- a/homeassistant/components/whirlpool/__init__.py +++ b/homeassistant/components/whirlpool/__init__.py @@ -1,6 +1,5 @@ """The Whirlpool Appliances integration.""" -from dataclasses import dataclass import logging from aiohttp import ClientError @@ -20,7 +19,7 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.CLIMATE, Platform.SENSOR] -type WhirlpoolConfigEntry = ConfigEntry[WhirlpoolData] +type WhirlpoolConfigEntry = ConfigEntry[AppliancesManager] async def async_setup_entry(hass: HomeAssistant, entry: WhirlpoolConfigEntry) -> bool: @@ -52,8 +51,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: WhirlpoolConfigEntry) -> if not await appliances_manager.fetch_appliances(): _LOGGER.error("Cannot fetch appliances") return False + await appliances_manager.connect() - entry.runtime_data = WhirlpoolData(appliances_manager, auth, backend_selector) + entry.runtime_data = appliances_manager await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -61,13 +61,5 @@ async def async_setup_entry(hass: HomeAssistant, entry: WhirlpoolConfigEntry) -> async def async_unload_entry(hass: HomeAssistant, entry: WhirlpoolConfigEntry) -> bool: """Unload a config entry.""" + await entry.runtime_data.disconnect() return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - -@dataclass -class WhirlpoolData: - """Whirlpool integaration shared data.""" - - appliances_manager: AppliancesManager - auth: Auth - backend_selector: BackendSelector diff --git a/homeassistant/components/whirlpool/climate.py b/homeassistant/components/whirlpool/climate.py index 6baf738e54e..84a2c0d52ca 100644 --- a/homeassistant/components/whirlpool/climate.py +++ b/homeassistant/components/whirlpool/climate.py @@ -5,10 +5,7 @@ from __future__ import annotations import logging from typing import Any -from aiohttp import ClientSession from whirlpool.aircon import Aircon, FanSpeed as AirconFanSpeed, Mode as AirconMode -from whirlpool.auth import Auth -from whirlpool.backendselector import BackendSelector from homeassistant.components.climate import ( ENTITY_ID_FORMAT, @@ -25,7 +22,6 @@ from homeassistant.components.climate import ( ) from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant -from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import generate_entity_id from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -73,19 +69,8 @@ async def async_setup_entry( async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up entry.""" - whirlpool_data = config_entry.runtime_data - - aircons = [ - AirConEntity( - hass, - ac_data["SAID"], - ac_data["NAME"], - whirlpool_data.backend_selector, - whirlpool_data.auth, - async_get_clientsession(hass), - ) - for ac_data in whirlpool_data.appliances_manager.aircons - ] + appliances_manager = config_entry.runtime_data + aircons = [AirConEntity(hass, aircon) for aircon in appliances_manager.aircons] async_add_entities(aircons, True) @@ -110,36 +95,26 @@ class AirConEntity(ClimateEntity): _attr_target_temperature_step = SUPPORTED_TARGET_TEMPERATURE_STEP _attr_temperature_unit 
= UnitOfTemperature.CELSIUS - def __init__( - self, - hass: HomeAssistant, - said: str, - name: str | None, - backend_selector: BackendSelector, - auth: Auth, - session: ClientSession, - ) -> None: + def __init__(self, hass: HomeAssistant, aircon: Aircon) -> None: """Initialize the entity.""" - self._aircon = Aircon(backend_selector, auth, said, session) - self.entity_id = generate_entity_id(ENTITY_ID_FORMAT, said, hass=hass) - self._attr_unique_id = said + self._aircon = aircon + self.entity_id = generate_entity_id(ENTITY_ID_FORMAT, aircon.said, hass=hass) + self._attr_unique_id = aircon.said self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, said)}, - name=name if name is not None else said, + identifiers={(DOMAIN, aircon.said)}, + name=aircon.name if aircon.name is not None else aircon.said, manufacturer="Whirlpool", model="Sixth Sense", ) async def async_added_to_hass(self) -> None: - """Connect aircon to the cloud.""" + """Register updates callback.""" self._aircon.register_attr_callback(self.async_write_ha_state) - await self._aircon.connect() async def async_will_remove_from_hass(self) -> None: - """Close Whrilpool Appliance sockets before removing.""" + """Unregister updates callback.""" self._aircon.unregister_attr_callback(self.async_write_ha_state) - await self._aircon.disconnect() @property def available(self) -> bool: diff --git a/homeassistant/components/whirlpool/diagnostics.py b/homeassistant/components/whirlpool/diagnostics.py index 87d6ea827e2..09338396de4 100644 --- a/homeassistant/components/whirlpool/diagnostics.py +++ b/homeassistant/components/whirlpool/diagnostics.py @@ -4,6 +4,8 @@ from __future__ import annotations from typing import Any +from whirlpool.appliance import Appliance + from homeassistant.components.diagnostics import async_redact_data from homeassistant.core import HomeAssistant @@ -26,18 +28,25 @@ async def async_get_config_entry_diagnostics( ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - whirlpool = config_entry.runtime_data + def get_appliance_diagnostics(appliance: Appliance) -> dict[str, Any]: + return { + "data_model": appliance.appliance_info.data_model, + "category": appliance.appliance_info.category, + "model_number": appliance.appliance_info.model_number, + } + + appliances_manager = config_entry.runtime_data diagnostics_data = { - "Washer_dryers": { - wd["NAME"]: dict(wd.items()) - for wd in whirlpool.appliances_manager.washer_dryers + "washer_dryers": { + wd.name: get_appliance_diagnostics(wd) + for wd in appliances_manager.washer_dryers }, "aircons": { - ac["NAME"]: dict(ac.items()) for ac in whirlpool.appliances_manager.aircons + ac.name: get_appliance_diagnostics(ac) for ac in appliances_manager.aircons }, "ovens": { - oven["NAME"]: dict(oven.items()) - for oven in whirlpool.appliances_manager.ovens + oven.name: get_appliance_diagnostics(oven) + for oven in appliances_manager.ovens }, } diff --git a/homeassistant/components/whirlpool/manifest.json b/homeassistant/components/whirlpool/manifest.json index 67901eea482..ace2e31791d 100644 --- a/homeassistant/components/whirlpool/manifest.json +++ b/homeassistant/components/whirlpool/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["whirlpool"], - "requirements": ["whirlpool-sixth-sense==0.18.12"] + "requirements": ["whirlpool-sixth-sense==0.19.1"] } diff --git a/homeassistant/components/whirlpool/sensor.py b/homeassistant/components/whirlpool/sensor.py index f4811feb2c9..d0d13a128e2 100644 --- 
a/homeassistant/components/whirlpool/sensor.py +++ b/homeassistant/components/whirlpool/sensor.py @@ -16,7 +16,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.helpers.typing import StateType @@ -134,37 +133,16 @@ async def async_setup_entry( config_entry: WhirlpoolConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: - """Config flow entry for Whrilpool Laundry.""" + """Config flow entry for Whirlpool sensors.""" entities: list = [] - whirlpool_data = config_entry.runtime_data - for appliance in whirlpool_data.appliances_manager.washer_dryers: - _wd = WasherDryer( - whirlpool_data.backend_selector, - whirlpool_data.auth, - appliance["SAID"], - async_get_clientsession(hass), - ) - await _wd.connect() - + appliances_manager = config_entry.runtime_data + for washer_dryer in appliances_manager.washer_dryers: entities.extend( - [ - WasherDryerClass( - appliance["SAID"], - appliance["NAME"], - description, - _wd, - ) - for description in SENSORS - ] + [WasherDryerClass(washer_dryer, description) for description in SENSORS] ) entities.extend( [ - WasherDryerTimeClass( - appliance["SAID"], - appliance["NAME"], - description, - _wd, - ) + WasherDryerTimeClass(washer_dryer, description) for description in SENSOR_TIMER ] ) @@ -178,34 +156,30 @@ class WasherDryerClass(SensorEntity): _attr_has_entity_name = True def __init__( - self, - said: str, - name: str, - description: WhirlpoolSensorEntityDescription, - washdry: WasherDryer, + self, washer_dryer: WasherDryer, description: WhirlpoolSensorEntityDescription ) -> None: """Initialize the washer sensor.""" - self._wd: WasherDryer = washdry + self._wd: WasherDryer = washer_dryer - if name == "dryer": + if washer_dryer.name == "dryer": self._attr_icon = ICON_D else: self._attr_icon = ICON_W self.entity_description: WhirlpoolSensorEntityDescription = description self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, said)}, - name=name.capitalize(), + identifiers={(DOMAIN, washer_dryer.said)}, + name=washer_dryer.name.capitalize(), manufacturer="Whirlpool", ) - self._attr_unique_id = f"{said}-{description.key}" + self._attr_unique_id = f"{washer_dryer.said}-{description.key}" async def async_added_to_hass(self) -> None: - """Connect washer/dryer to the cloud.""" + """Register updates callback.""" self._wd.register_attr_callback(self.async_write_ha_state) async def async_will_remove_from_hass(self) -> None: - """Close Whirlpool Appliance sockets before removing.""" + """Unregister updates callback.""" self._wd.unregister_attr_callback(self.async_write_ha_state) @property @@ -226,16 +200,12 @@ class WasherDryerTimeClass(RestoreSensor): _attr_has_entity_name = True def __init__( - self, - said: str, - name: str, - description: SensorEntityDescription, - washdry: WasherDryer, + self, washer_dryer: WasherDryer, description: SensorEntityDescription ) -> None: """Initialize the washer sensor.""" - self._wd: WasherDryer = washdry + self._wd: WasherDryer = washer_dryer - if name == "dryer": + if washer_dryer.name == "dryer": self._attr_icon = ICON_D else: self._attr_icon = ICON_W @@ -243,11 +213,11 @@ class WasherDryerTimeClass(RestoreSensor): self.entity_description: SensorEntityDescription = description self._running: 
bool | None = None self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, said)}, - name=name.capitalize(), + identifiers={(DOMAIN, washer_dryer.said)}, + name=washer_dryer.name.capitalize(), manufacturer="Whirlpool", ) - self._attr_unique_id = f"{said}-{description.key}" + self._attr_unique_id = f"{washer_dryer.said}-{description.key}" async def async_added_to_hass(self) -> None: """Connect washer/dryer to the cloud.""" @@ -259,7 +229,6 @@ class WasherDryerTimeClass(RestoreSensor): async def async_will_remove_from_hass(self) -> None: """Close Whrilpool Appliance sockets before removing.""" self._wd.unregister_attr_callback(self.update_from_latest_data) - await self._wd.disconnect() @property def available(self) -> bool: diff --git a/homeassistant/components/whois/config_flow.py b/homeassistant/components/whois/config_flow.py index cb4326d996d..a8306be7632 100644 --- a/homeassistant/components/whois/config_flow.py +++ b/homeassistant/components/whois/config_flow.py @@ -11,6 +11,8 @@ from whois.exceptions import ( UnknownDateFormat, UnknownTld, WhoisCommandFailed, + WhoisPrivateRegistry, + WhoisQuotaExceeded, ) from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -48,6 +50,10 @@ class WhoisFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "unexpected_response" except UnknownDateFormat: errors["base"] = "unknown_date_format" + except WhoisPrivateRegistry: + errors["base"] = "private_registry" + except WhoisQuotaExceeded: + errors["base"] = "quota_exceeded" else: return self.async_create_entry( title=self.imported_name or user_input[CONF_DOMAIN], diff --git a/homeassistant/components/whois/strings.json b/homeassistant/components/whois/strings.json index c28c079784d..3b0f9dfd4d1 100644 --- a/homeassistant/components/whois/strings.json +++ b/homeassistant/components/whois/strings.json @@ -11,7 +11,9 @@ "unexpected_response": "Unexpected response from whois server", "unknown_date_format": "Unknown date format in whois server response", "unknown_tld": "The given TLD is unknown or not available to this integration", - "whois_command_failed": "Whois command failed: could not retrieve whois information" + "whois_command_failed": "Whois command failed: could not retrieve whois information", + "private_registry": "The given domain is registered in a private registry and cannot be monitored", + "quota_exceeded": "Your whois quota has been exceeded for this TLD" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" diff --git a/homeassistant/components/withings/sensor.py b/homeassistant/components/withings/sensor.py index 28a0fbd1492..f20145f8bf9 100644 --- a/homeassistant/components/withings/sensor.py +++ b/homeassistant/components/withings/sensor.py @@ -122,7 +122,7 @@ MEASUREMENT_SENSORS: dict[ measurement_type=MeasurementType.HEIGHT, translation_key="height", native_unit_of_measurement=UnitOfLength.METERS, - suggested_display_precision=1, + suggested_display_precision=2, device_class=SensorDeviceClass.DISTANCE, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -326,6 +326,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.deep_sleep_duration, translation_key="deep_sleep", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, ), @@ -334,6 +335,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.sleep_latency, 
translation_key="time_to_sleep", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -343,6 +345,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.wake_up_latency, translation_key="time_to_wakeup", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -376,6 +379,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.light_sleep_duration, translation_key="light_sleep", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -385,6 +389,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.rem_sleep_duration, translation_key="rem_sleep", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -451,6 +456,7 @@ SLEEP_SENSORS = [ value_fn=lambda sleep_summary: sleep_summary.total_time_awake, translation_key="wakeup_time", native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.HOURS, device_class=SensorDeviceClass.DURATION, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, diff --git a/homeassistant/components/wled/select.py b/homeassistant/components/wled/select.py index e340c323151..76837652ae5 100644 --- a/homeassistant/components/wled/select.py +++ b/homeassistant/components/wled/select.py @@ -79,9 +79,10 @@ class WLEDPresetSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_preset" - self._attr_options = [ - preset.name for preset in self.coordinator.data.presets.values() - ] + sorted_values = sorted( + coordinator.data.presets.values(), key=lambda preset: preset.name + ) + self._attr_options = [preset.name for preset in sorted_values] @property def available(self) -> bool: @@ -115,9 +116,10 @@ class WLEDPlaylistSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_playlist" - self._attr_options = [ - playlist.name for playlist in self.coordinator.data.playlists.values() - ] + sorted_values = sorted( + coordinator.data.playlists.values(), key=lambda playlist: playlist.name + ) + self._attr_options = [playlist.name for playlist in sorted_values] @property def available(self) -> bool: @@ -159,9 +161,10 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): self._attr_translation_placeholders = {"segment": str(segment)} self._attr_unique_id = f"{coordinator.data.info.mac_address}_palette_{segment}" - self._attr_options = [ - palette.name for palette in self.coordinator.data.palettes.values() - ] + sorted_values = sorted( + coordinator.data.palettes.values(), key=lambda palette: palette.name + ) + self._attr_options = [palette.name for palette in sorted_values] self._segment = segment @property diff --git a/homeassistant/components/wolflink/manifest.json b/homeassistant/components/wolflink/manifest.json index 964d192d279..5f3a6366fe1 100644 --- 
a/homeassistant/components/wolflink/manifest.json +++ b/homeassistant/components/wolflink/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wolflink", "iot_class": "cloud_polling", "loggers": ["wolf_comm"], - "requirements": ["wolf-comm==0.0.19"] + "requirements": ["wolf-comm==0.0.23"] } diff --git a/homeassistant/components/wolflink/sensor.py b/homeassistant/components/wolflink/sensor.py index cf6d712dd0d..9380c28de89 100644 --- a/homeassistant/components/wolflink/sensor.py +++ b/homeassistant/components/wolflink/sensor.py @@ -2,19 +2,42 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass + from wolf_comm.models import ( + EnergyParameter, + FlowParameter, + FrequencyParameter, HoursParameter, ListItemParameter, Parameter, PercentageParameter, + PowerParameter, Pressure, + RPMParameter, SimpleParameter, Temperature, ) -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfPressure, UnitOfTemperature, UnitOfTime +from homeassistant.const import ( + PERCENTAGE, + REVOLUTIONS_PER_MINUTE, + UnitOfEnergy, + UnitOfFrequency, + UnitOfPower, + UnitOfPressure, + UnitOfTemperature, + UnitOfTime, + UnitOfVolumeFlowRate, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback @@ -23,31 +46,106 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import COORDINATOR, DEVICE_ID, DOMAIN, MANUFACTURER, PARAMETERS, STATES +def get_listitem_resolve_state(wolf_object, state): + """Resolve list item state.""" + resolved_state = [item for item in wolf_object.items if item.value == int(state)] + if resolved_state: + resolved_name = resolved_state[0].name + state = STATES.get(resolved_name, resolved_name) + return state + + +@dataclass(kw_only=True, frozen=True) +class WolflinkSensorEntityDescription(SensorEntityDescription): + """Describes Wolflink sensor entity.""" + + value_fn: Callable[[Parameter, str], str | None] = lambda param, value: value + supported_fn: Callable[[Parameter], bool] + + +SENSOR_DESCRIPTIONS = [ + WolflinkSensorEntityDescription( + key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + supported_fn=lambda param: isinstance(param, Temperature), + ), + WolflinkSensorEntityDescription( + key="pressure", + device_class=SensorDeviceClass.PRESSURE, + native_unit_of_measurement=UnitOfPressure.BAR, + supported_fn=lambda param: isinstance(param, Pressure), + ), + WolflinkSensorEntityDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + supported_fn=lambda param: isinstance(param, EnergyParameter), + ), + WolflinkSensorEntityDescription( + key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + supported_fn=lambda param: isinstance(param, PowerParameter), + ), + WolflinkSensorEntityDescription( + key="percentage", + native_unit_of_measurement=PERCENTAGE, + supported_fn=lambda param: isinstance(param, PercentageParameter), + ), + WolflinkSensorEntityDescription( + key="list_item", + 
translation_key="state", + supported_fn=lambda param: isinstance(param, ListItemParameter), + value_fn=get_listitem_resolve_state, + ), + WolflinkSensorEntityDescription( + key="hours", + icon="mdi:clock", + native_unit_of_measurement=UnitOfTime.HOURS, + supported_fn=lambda param: isinstance(param, HoursParameter), + ), + WolflinkSensorEntityDescription( + key="flow", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + supported_fn=lambda param: isinstance(param, FlowParameter), + ), + WolflinkSensorEntityDescription( + key="frequency", + device_class=SensorDeviceClass.FREQUENCY, + native_unit_of_measurement=UnitOfFrequency.HERTZ, + supported_fn=lambda param: isinstance(param, FrequencyParameter), + ), + WolflinkSensorEntityDescription( + key="rpm", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, + supported_fn=lambda param: isinstance(param, RPMParameter), + ), + WolflinkSensorEntityDescription( + key="default", + supported_fn=lambda param: isinstance(param, SimpleParameter), + ), +] + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities: AddConfigEntryEntitiesCallback, ) -> None: """Set up all entries for Wolf Platform.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR] parameters = hass.data[DOMAIN][config_entry.entry_id][PARAMETERS] device_id = hass.data[DOMAIN][config_entry.entry_id][DEVICE_ID] - entities: list[WolfLinkSensor] = [] - for parameter in parameters: - if isinstance(parameter, Temperature): - entities.append(WolfLinkTemperature(coordinator, parameter, device_id)) - if isinstance(parameter, Pressure): - entities.append(WolfLinkPressure(coordinator, parameter, device_id)) - if isinstance(parameter, PercentageParameter): - entities.append(WolfLinkPercentage(coordinator, parameter, device_id)) - if isinstance(parameter, ListItemParameter): - entities.append(WolfLinkState(coordinator, parameter, device_id)) - if isinstance(parameter, HoursParameter): - entities.append(WolfLinkHours(coordinator, parameter, device_id)) - if isinstance(parameter, SimpleParameter): - entities.append(WolfLinkSensor(coordinator, parameter, device_id)) + entities: list[WolfLinkSensor] = [ + WolfLinkSensor(coordinator, parameter, device_id, description) + for parameter in parameters + for description in SENSOR_DESCRIPTIONS + if description.supported_fn(parameter) + ] async_add_entities(entities, True) @@ -55,9 +153,18 @@ async def async_setup_entry( class WolfLinkSensor(CoordinatorEntity, SensorEntity): """Base class for all Wolf entities.""" - def __init__(self, coordinator, wolf_object: Parameter, device_id) -> None: + entity_description: WolflinkSensorEntityDescription + + def __init__( + self, + coordinator, + wolf_object: Parameter, + device_id: str, + description: WolflinkSensorEntityDescription, + ) -> None: """Initialize.""" super().__init__(coordinator) + self.entity_description = description self.wolf_object = wolf_object self._attr_name = wolf_object.name self._attr_unique_id = f"{device_id}:{wolf_object.parameter_id}" @@ -69,68 +176,26 @@ class WolfLinkSensor(CoordinatorEntity, SensorEntity): ) @property - def native_value(self): + def native_value(self) -> str | None: """Return the state. 
Wolf Client is returning only changed values so we need to store old value here.""" if self.wolf_object.parameter_id in self.coordinator.data: new_state = self.coordinator.data[self.wolf_object.parameter_id] self.wolf_object.value_id = new_state[0] self._state = new_state[1] + if ( + isinstance(self.wolf_object, ListItemParameter) + and self._state is not None + ): + self._state = self.entity_description.value_fn( + self.wolf_object, self._state + ) return self._state @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, str | None]: """Return the state attributes.""" return { "parameter_id": self.wolf_object.parameter_id, "value_id": self.wolf_object.value_id, "parent": self.wolf_object.parent, } - - -class WolfLinkHours(WolfLinkSensor): - """Class for hour based entities.""" - - _attr_icon = "mdi:clock" - _attr_native_unit_of_measurement = UnitOfTime.HOURS - - -class WolfLinkTemperature(WolfLinkSensor): - """Class for temperature based entities.""" - - _attr_device_class = SensorDeviceClass.TEMPERATURE - _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS - - -class WolfLinkPressure(WolfLinkSensor): - """Class for pressure based entities.""" - - _attr_device_class = SensorDeviceClass.PRESSURE - _attr_native_unit_of_measurement = UnitOfPressure.BAR - - -class WolfLinkPercentage(WolfLinkSensor): - """Class for percentage based entities.""" - - @property - def native_unit_of_measurement(self): - """Return the unit the value is expressed in.""" - return self.wolf_object.unit - - -class WolfLinkState(WolfLinkSensor): - """Class for entities which has defined list of state.""" - - _attr_translation_key = "state" - - @property - def native_value(self): - """Return the state converting with supported values.""" - state = super().native_value - if state is not None: - resolved_state = [ - item for item in self.wolf_object.items if item.value == int(state) - ] - if resolved_state: - resolved_name = resolved_state[0].name - return STATES.get(resolved_name, resolved_name) - return state diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index cc6b0f30002..b08a5ed9fff 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.68"] + "requirements": ["holidays==0.69"] } diff --git a/homeassistant/components/workday/strings.json b/homeassistant/components/workday/strings.json index 87fa294dbba..feedc52331b 100644 --- a/homeassistant/components/workday/strings.json +++ b/homeassistant/components/workday/strings.json @@ -2,13 +2,13 @@ "title": "Workday", "config": { "abort": { - "already_configured": "Workday has already been setup with chosen configuration" + "already_configured": "Workday has already been set up with chosen configuration" }, "step": { "user": { "data": { "name": "[%key:common::config_flow::data::name%]", - "country": "Country" + "country": "[%key:common::config_flow::data::country%]" } }, "options": { @@ -18,7 +18,7 @@ "days_offset": "Offset", "workdays": "Days to include", "add_holidays": "Add holidays", - "remove_holidays": "Remove Holidays", + "remove_holidays": "Remove holidays", "province": "Subdivision of country", "language": "Language for named holidays", "category": "Additional category as holiday" @@ -116,14 +116,14 @@ }, "issues": { "bad_country": { - "title": "Configured Country for 
{title} does not exist", + "title": "Configured country for {title} does not exist", "fix_flow": { "step": { "country": { "title": "Select country for {title}", "description": "Select a country to use for your Workday sensor.", "data": { - "country": "[%key:component::workday::config::step::user::data::country%]" + "country": "[%key:common::config_flow::data::country%]" } }, "province": { @@ -133,7 +133,7 @@ "province": "[%key:component::workday::config::step::options::data::province%]" }, "data_description": { - "province": "State, Territory, Province, Region of Country" + "province": "[%key:component::workday::config::step::options::data_description::province%]" } } } @@ -150,7 +150,7 @@ "province": "[%key:component::workday::config::step::options::data::province%]" }, "data_description": { - "province": "[%key:component::workday::issues::bad_country::fix_flow::step::province::data_description::province%]" + "province": "[%key:component::workday::config::step::options::data_description::province%]" } } } @@ -217,7 +217,7 @@ "services": { "check_date": { "name": "Check date", - "description": "Check if date is workday.", + "description": "Checks if a given date is a workday.", "fields": { "check_date": { "name": "Date", diff --git a/homeassistant/components/wyoming/__init__.py b/homeassistant/components/wyoming/__init__.py index d639933ece6..4e76287d8e7 100644 --- a/homeassistant/components/wyoming/__init__.py +++ b/homeassistant/components/wyoming/__init__.py @@ -8,15 +8,19 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers.typing import ConfigType from .const import ATTR_SPEAKER, DOMAIN from .data import WyomingService from .devices import SatelliteDevice from .models import DomainDataItem +from .websocket_api import async_register_websocket_api _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) + SATELLITE_PLATFORMS = [ Platform.ASSIST_SATELLITE, Platform.BINARY_SENSOR, @@ -28,11 +32,19 @@ SATELLITE_PLATFORMS = [ __all__ = [ "ATTR_SPEAKER", "DOMAIN", + "async_setup", "async_setup_entry", "async_unload_entry", ] +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the Wyoming integration.""" + async_register_websocket_api(hass) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Load Wyoming.""" service = await WyomingService.create(entry.data["host"], entry.data["port"]) diff --git a/homeassistant/components/wyoming/websocket_api.py b/homeassistant/components/wyoming/websocket_api.py new file mode 100644 index 00000000000..613238c302a --- /dev/null +++ b/homeassistant/components/wyoming/websocket_api.py @@ -0,0 +1,42 @@ +"""Wyoming Websocket API.""" + +import logging +from typing import Any + +import voluptuous as vol + +from homeassistant.components import websocket_api +from homeassistant.core import HomeAssistant, callback + +from .const import DOMAIN +from .models import DomainDataItem + +_LOGGER = logging.getLogger(__name__) + + +@callback +def async_register_websocket_api(hass: HomeAssistant) -> None: + """Register the websocket API.""" + websocket_api.async_register_command(hass, websocket_info) + + +@callback +@websocket_api.require_admin 
+@websocket_api.websocket_command({vol.Required("type"): "wyoming/info"}) +def websocket_info( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """List service information for all Wyoming config entries.""" + entry_items: dict[str, DomainDataItem] = hass.data.get(DOMAIN, {}) + + connection.send_result( + msg["id"], + { + "info": { + entry_id: item.service.info.to_dict() + for entry_id, item in entry_items.items() + } + }, + ) diff --git a/homeassistant/components/xiaomi_miio/button.py b/homeassistant/components/xiaomi_miio/button.py index a5d1b4b69c6..a7bcb3a12fe 100644 --- a/homeassistant/components/xiaomi_miio/button.py +++ b/homeassistant/components/xiaomi_miio/button.py @@ -117,7 +117,7 @@ MODEL_TO_BUTTON_MAP: dict[str, tuple[str, ...]] = { ATTR_RESET_DUST_FILTER, ATTR_RESET_UPPER_FILTER, ), - **{model: BUTTONS_FOR_VACUUM for model in MODELS_VACUUM}, + **dict.fromkeys(MODELS_VACUUM, BUTTONS_FOR_VACUUM), } diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index cf7bc9c9035..07970cb25ca 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -16,7 +16,7 @@ }, "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], - "requirements": ["yeelight==0.7.16", "async-upnp-client==0.43.0"], + "requirements": ["yeelight==0.7.16", "async-upnp-client==0.44.0"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 52ae8281f59..8c297c68670 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.8"] + "requirements": ["yolink-api==0.4.9"] } diff --git a/homeassistant/components/zamg/sensor.py b/homeassistant/components/zamg/sensor.py index 5846092e555..fdb9d51185c 100644 --- a/homeassistant/components/zamg/sensor.py +++ b/homeassistant/components/zamg/sensor.py @@ -82,7 +82,8 @@ SENSOR_TYPES: tuple[ZamgSensorEntityDescription, ...] 
= ( key="wind_bearing", name="Wind Bearing", native_unit_of_measurement=DEGREE, - state_class=SensorStateClass.MEASUREMENT, + state_class=SensorStateClass.MEASUREMENT_ANGLE, + device_class=SensorDeviceClass.WIND_DIRECTION, para_name="DD", ), ZamgSensorEntityDescription( diff --git a/homeassistant/components/zengge/light.py b/homeassistant/components/zengge/light.py index 2ab46820b56..ccb6733c650 100644 --- a/homeassistant/components/zengge/light.py +++ b/homeassistant/components/zengge/light.py @@ -2,138 +2,38 @@ from __future__ import annotations -import logging -from typing import Any - import voluptuous as vol -from zengge import zengge -from homeassistant.components.light import ( - ATTR_BRIGHTNESS, - ATTR_HS_COLOR, - ATTR_WHITE, - PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA, - ColorMode, - LightEntity, -) +from homeassistant.components.light import PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA from homeassistant.const import CONF_DEVICES, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv +from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import color as color_util - -_LOGGER = logging.getLogger(__name__) DEVICE_SCHEMA = vol.Schema({vol.Optional(CONF_NAME): cv.string}) +DOMAIN = "zengge" PLATFORM_SCHEMA = LIGHT_PLATFORM_SCHEMA.extend( {vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA}} ) -def setup_platform( +def async_setup_platform( hass: HomeAssistant, config: ConfigType, add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Zengge platform.""" - lights = [] - for address, device_config in config[CONF_DEVICES].items(): - light = ZenggeLight(device_config[CONF_NAME], address) - if light.is_valid: - lights.append(light) - - add_entities(lights, True) - - -class ZenggeLight(LightEntity): - """Representation of a Zengge light.""" - - _attr_supported_color_modes = {ColorMode.HS, ColorMode.WHITE} - - def __init__(self, name: str, address: str) -> None: - """Initialize the light.""" - - self._attr_name = name - self._attr_unique_id = address - self.is_valid = True - self._bulb = zengge(address) - self._white = 0 - self._attr_brightness = 0 - self._attr_hs_color = (0, 0) - self._attr_is_on = False - if self._bulb.connect() is False: - self.is_valid = False - _LOGGER.error("Failed to connect to bulb %s, %s", address, name) - return - - @property - def white_value(self) -> int: - """Return the white property.""" - return self._white - - @property - def color_mode(self) -> ColorMode: - """Return the current color mode.""" - if self._white != 0: - return ColorMode.WHITE - return ColorMode.HS - - def _set_rgb(self, red: int, green: int, blue: int) -> None: - """Set the rgb state.""" - self._bulb.set_rgb(red, green, blue) - - def _set_white(self, white): - """Set the white state.""" - return self._bulb.set_white(white) - - def turn_on(self, **kwargs: Any) -> None: - """Turn the specified light on.""" - self._attr_is_on = True - self._bulb.on() - - hs_color = kwargs.get(ATTR_HS_COLOR) - white = kwargs.get(ATTR_WHITE) - brightness = kwargs.get(ATTR_BRIGHTNESS) - - if white is not None: - # Change the bulb to white - self._attr_brightness = white - self._white = white - self._attr_hs_color = (0, 0) - - if hs_color is not None: - # Change the bulb to hs - self._white = 0 - 
self._attr_hs_color = hs_color - - if brightness is not None: - self._attr_brightness = brightness - - if self._white != 0: - self._set_white(self.brightness) - else: - assert self.hs_color is not None - assert self.brightness is not None - rgb = color_util.color_hsv_to_RGB( - self.hs_color[0], self.hs_color[1], self.brightness / 255 * 100 - ) - self._set_rgb(*rgb) - - def turn_off(self, **kwargs: Any) -> None: - """Turn the specified light off.""" - self._attr_is_on = False - self._bulb.off() - - def update(self) -> None: - """Synchronise internal state with the actual light state.""" - rgb = self._bulb.get_colour() - hsv = color_util.color_RGB_to_hsv(*rgb) - self._attr_hs_color = hsv[:2] - self._attr_brightness = int((hsv[2] / 100) * 255) - self._white = self._bulb.get_white() - if self._white: - self._attr_brightness = self._white - self._attr_is_on = self._bulb.get_on() + ir.async_create_issue( + hass, + DOMAIN, + DOMAIN, + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="integration_removed", + translation_placeholders={ + "led_ble_url": "https://www.home-assistant.io/integrations/led_ble/", + }, + ) diff --git a/homeassistant/components/zengge/manifest.json b/homeassistant/components/zengge/manifest.json index 03d989c5f3b..daa63b4de3d 100644 --- a/homeassistant/components/zengge/manifest.json +++ b/homeassistant/components/zengge/manifest.json @@ -5,6 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/zengge", "iot_class": "local_polling", "loggers": ["zengge"], - "quality_scale": "legacy", - "requirements": ["bluepy==1.3.0", "zengge==0.2"] + "quality_scale": "legacy" } diff --git a/homeassistant/components/zengge/strings.json b/homeassistant/components/zengge/strings.json new file mode 100644 index 00000000000..abc3b2450aa --- /dev/null +++ b/homeassistant/components/zengge/strings.json @@ -0,0 +1,8 @@ +{ + "issues": { + "integration_removed": { + "title": "The Zengge integration has been removed", + "description": "The Zengge integration has been removed from Home Assistant. Support for Zengge lights is provided by the `led_ble` integration.\n\nTo resolve this issue, please remove the (now defunct) `zengge` light configuration from your Home Assistant configuration and [configure the `led_ble` integration]({led_ble_url})." 
+ } + } +} diff --git a/homeassistant/components/zeroconf/__init__.py b/homeassistant/components/zeroconf/__init__.py index e80b6b8cfdb..86f8dbca792 100644 --- a/homeassistant/components/zeroconf/__init__.py +++ b/homeassistant/components/zeroconf/__init__.py @@ -145,8 +145,6 @@ def _async_get_instance(hass: HomeAssistant) -> HaAsyncZeroconf: if DOMAIN in hass.data: return cast(HaAsyncZeroconf, hass.data[DOMAIN]) - logging.getLogger("zeroconf").setLevel(logging.NOTSET) - zeroconf = HaZeroconf(**_async_get_zc_args(hass)) aio_zc = HaAsyncZeroconf(zc=zeroconf) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 8abaa4a838e..a7fbfdfeada 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.145.1"] + "requirements": ["zeroconf==0.146.0"] } diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 6ed8b253e75..1c2d6556271 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["zha==0.0.53"], + "requirements": ["zha==0.0.55"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index be1642227bd..a35dd50df54 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -176,7 +176,7 @@ }, "config_panel": { "zha_options": { - "title": "Global Options", + "title": "Global options", "enhanced_light_transition": "Enable enhanced light color/temperature transition from an off-state", "light_transitioning_flag": "Enable enhanced brightness slider during light transition", "group_members_assume_state": "Group members assume state of group", @@ -187,7 +187,7 @@ "consider_unavailable_battery": "Consider battery powered devices unavailable after (seconds)" }, "zha_alarm_options": { - "title": "Alarm Control Panel Options", + "title": "Alarm control panel options", "alarm_master_code": "Master code for the alarm control panel(s)", "alarm_failed_tries": "The number of consecutive failed code entries to trigger an alarm", "alarm_arm_requires_code": "Code required for arming actions" @@ -610,6 +610,12 @@ }, "flow_switch": { "name": "Flow switch" + }, + "water_leak": { + "name": "Water leak" + }, + "water_supply": { + "name": "Water supply" } }, "button": { @@ -1101,6 +1107,27 @@ }, "shutdown_timer": { "name": "Shutdown timer" + }, + "calibration_vertical_run_time_up": { + "name": "Calibration vertical run time up" + }, + "calibration_vertical_run_time_down": { + "name": "Calibration vertical run time down" + }, + "calibration_rotation_run_time_up": { + "name": "Calibration rotation run time up" + }, + "calibration_rotation_run_time_down": { + "name": "Calibration rotation run time down" + }, + "impulse_mode_duration": { + "name": "Impulse mode duration" + }, + "water_duration": { + "name": "Water duration" + }, + "water_interval": { + "name": "Water interval" } }, "select": { @@ -1144,10 +1171,10 @@ "name": "Switch type" }, "led_scaling_mode": { - "name": "Led scaling mode" + "name": "LED scaling mode" }, "smart_fan_led_display_levels": { - "name": "Smart fan led display levels" + "name": "Smart fan LED display levels" }, "increased_non_neutral_output": { "name": "Non neutral 
output" @@ -1319,6 +1346,9 @@ }, "hysteresis_mode": { "name": "Hysteresis mode" + }, + "speed": { + "name": "Speed" } }, "sensor": { @@ -1666,6 +1696,9 @@ }, "last_watering_duration": { "name": "Last watering duration" + }, + "device_status": { + "name": "Device status" } }, "switch": { diff --git a/homeassistant/components/zone/__init__.py b/homeassistant/components/zone/__init__.py index 1c43a79e10e..813425c95f2 100644 --- a/homeassistant/components/zone/__init__.py +++ b/homeassistant/components/zone/__init__.py @@ -363,7 +363,7 @@ class Zone(collection.CollectionEntity): """Return entity instance initialized from storage.""" zone = cls(config) zone.editable = True - zone._generate_attrs() # noqa: SLF001 + zone._generate_attrs() return zone @classmethod @@ -371,7 +371,7 @@ class Zone(collection.CollectionEntity): """Return entity instance initialized from yaml.""" zone = cls(config) zone.editable = False - zone._generate_attrs() # noqa: SLF001 + zone._generate_attrs() return zone @property diff --git a/homeassistant/components/zwave_js/__init__.py b/homeassistant/components/zwave_js/__init__.py index c8503b1f4c6..a7b8f9ed665 100644 --- a/homeassistant/components/zwave_js/__init__.py +++ b/homeassistant/components/zwave_js/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from collections import defaultdict -from contextlib import suppress +import contextlib import logging from typing import Any @@ -12,7 +12,11 @@ from awesomeversion import AwesomeVersion import voluptuous as vol from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import CommandClass, RemoveNodeReason -from zwave_js_server.exceptions import BaseZwaveJSServerError, InvalidServerVersion +from zwave_js_server.exceptions import ( + BaseZwaveJSServerError, + InvalidServerVersion, + NotConnected, +) from zwave_js_server.model.driver import Driver from zwave_js_server.model.node import Node as ZwaveNode from zwave_js_server.model.notification import ( @@ -25,7 +29,7 @@ from zwave_js_server.model.value import Value, ValueNotification from homeassistant.components.hassio import AddonError, AddonManager, AddonState from homeassistant.components.persistent_notification import async_create -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import ( ATTR_DEVICE_ID, ATTR_DOMAIN, @@ -36,7 +40,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import ( config_validation as cv, device_registry as dr, @@ -130,9 +134,8 @@ from .migrate import async_migrate_discovered_value from .services import ZWaveServices CONNECT_TIMEOUT = 10 -DATA_CLIENT_LISTEN_TASK = "client_listen_task" DATA_DRIVER_EVENTS = "driver_events" -DATA_START_CLIENT_TASK = "start_client_task" +DRIVER_READY_TIMEOUT = 60 CONFIG_SCHEMA = vol.Schema( { @@ -145,6 +148,24 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CLIMATE, + Platform.COVER, + Platform.EVENT, + Platform.FAN, + Platform.HUMIDIFIER, + Platform.LIGHT, + Platform.LOCK, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SIREN, + Platform.SWITCH, + Platform.UPDATE, +] + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: 
"""Set up the Z-Wave JS component.""" @@ -196,53 +217,99 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: raise ConfigEntryNotReady(f"Failed to connect: {err}") from err async_delete_issue(hass, DOMAIN, "invalid_server_version") - LOGGER.info("Connected to Zwave JS Server") + LOGGER.debug("Connected to Zwave JS Server") # Set up websocket API async_register_api(hass) - entry.runtime_data = {} - # Create a task to allow the config entry to be unloaded before the driver is ready. - # Unloading the config entry is needed if the client listen task errors. - start_client_task = hass.async_create_task(start_client(hass, entry, client)) - entry.runtime_data[DATA_START_CLIENT_TASK] = start_client_task + driver_ready = asyncio.Event() + listen_task = entry.async_create_background_task( + hass, + client_listen(hass, entry, client, driver_ready), + f"{DOMAIN}_{entry.title}_client_listen", + ) - return True - - -async def start_client( - hass: HomeAssistant, entry: ConfigEntry, client: ZwaveClient -) -> None: - """Start listening with the client.""" - entry.runtime_data[DATA_CLIENT] = client - driver_events = entry.runtime_data[DATA_DRIVER_EVENTS] = DriverEvents(hass, entry) + entry.async_on_unload(client.disconnect) async def handle_ha_shutdown(event: Event) -> None: """Handle HA shutdown.""" - await disconnect_client(hass, entry) + await client.disconnect() - listen_task = asyncio.create_task( - client_listen(hass, entry, client, driver_events.ready) - ) - entry.runtime_data[DATA_CLIENT_LISTEN_TASK] = listen_task entry.async_on_unload( hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, handle_ha_shutdown) ) - try: - await driver_events.ready.wait() - except asyncio.CancelledError: - LOGGER.debug("Cancelling start client") - return - - LOGGER.info("Connection to Zwave JS Server initialized") - - assert client.driver - async_dispatcher_send( - hass, f"{DOMAIN}_{client.driver.controller.home_id}_connected_to_server" + driver_ready_task = entry.async_create_task( + hass, + driver_ready.wait(), + f"{DOMAIN}_{entry.title}_driver_ready", + ) + done, pending = await asyncio.wait( + (driver_ready_task, listen_task), + return_when=asyncio.FIRST_COMPLETED, + timeout=DRIVER_READY_TIMEOUT, ) - await driver_events.setup(client.driver) + if driver_ready_task in pending or listen_task in done: + error_message = "Driver ready timed out" + listen_error: BaseException | None = None + if listen_task.done(): + listen_error, error_message = _get_listen_task_error(listen_task) + else: + listen_task.cancel() + driver_ready_task.cancel() + raise ConfigEntryNotReady(error_message) from listen_error + + LOGGER.debug("Connection to Zwave JS Server initialized") + + entry_runtime_data = entry.runtime_data = { + DATA_CLIENT: client, + } + entry_runtime_data[DATA_DRIVER_EVENTS] = driver_events = DriverEvents(hass, entry) + + driver = client.driver + # When the driver is ready we know it's set on the client. + assert driver is not None + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + with contextlib.suppress(NotConnected): + # If the client isn't connected the listen task may have an exception + # and we'll handle the clean up below. 
+ await driver_events.setup(driver) + + # If the listen task is already failed, we need to raise ConfigEntryNotReady + if listen_task.done(): + listen_error, error_message = _get_listen_task_error(listen_task) + await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + raise ConfigEntryNotReady(error_message) from listen_error + + # Re-attach trigger listeners. + # Schedule this call to make sure the config entry is loaded first. + + @callback + def on_config_entry_loaded() -> None: + """Signal that server connection and driver are ready.""" + if entry.state is ConfigEntryState.LOADED: + async_dispatcher_send( + hass, + f"{DOMAIN}_{driver.controller.home_id}_connected_to_server", + ) + + entry.async_on_unload(entry.async_on_state_change(on_config_entry_loaded)) + + return True + + +def _get_listen_task_error( + listen_task: asyncio.Task, +) -> tuple[BaseException | None, str]: + """Check the listen task for errors.""" + if listen_error := listen_task.exception(): + error_message = f"Client listen failed: {listen_error}" + else: + error_message = "Client connection was closed" + return listen_error, error_message class DriverEvents: @@ -255,8 +322,6 @@ class DriverEvents: self.config_entry = entry self.dev_reg = dr.async_get(hass) self.hass = hass - self.platform_setup_tasks: dict[str, asyncio.Task] = {} - self.ready = asyncio.Event() # Make sure to not pass self to ControllerEvents until all attributes are set. self.controller_events = ControllerEvents(hass, self) @@ -339,16 +404,6 @@ class DriverEvents: controller.on("identify", self.controller_events.async_on_identify) ) - async def async_setup_platform(self, platform: Platform) -> None: - """Set up platform if needed.""" - if platform not in self.platform_setup_tasks: - self.platform_setup_tasks[platform] = self.hass.async_create_task( - self.hass.config_entries.async_forward_entry_setups( - self.config_entry, [platform] - ) - ) - await self.platform_setup_tasks[platform] - class ControllerEvents: """Represent controller events. @@ -380,9 +435,6 @@ class ControllerEvents: async def async_on_node_added(self, node: ZwaveNode) -> None: """Handle node added event.""" - # Every node including the controller will have at least one sensor - await self.driver_events.async_setup_platform(Platform.SENSOR) - # Remove stale entities that may exist from a previous interview when an # interview is started. base_unique_id = get_valueless_base_unique_id(self.driver_events.driver, node) @@ -411,7 +463,6 @@ class ControllerEvents: ) # Create a ping button for each device - await self.driver_events.async_setup_platform(Platform.BUTTON) async_dispatcher_send( self.hass, f"{DOMAIN}_{self.config_entry.entry_id}_add_ping_button_entity", @@ -668,9 +719,6 @@ class NodeEvents: cc.id == CommandClass.FIRMWARE_UPDATE_MD.value for cc in node.command_classes ): - await self.controller_events.driver_events.async_setup_platform( - Platform.UPDATE - ) async_dispatcher_send( self.hass, f"{DOMAIN}_{self.config_entry.entry_id}_add_firmware_update_entity", @@ -701,21 +749,19 @@ class NodeEvents: value_updates_disc_info: dict[str, ZwaveDiscoveryInfo], ) -> None: """Handle discovery info and all dependent tasks.""" + platform = disc_info.platform # This migration logic was added in 2021.3 to handle a breaking change to # the value_id format. Some time in the future, this call (as well as the # helper functions) can be removed. 
async_migrate_discovered_value( self.hass, self.ent_reg, - self.controller_events.registered_unique_ids[device.id][disc_info.platform], + self.controller_events.registered_unique_ids[device.id][platform], device, self.controller_events.driver_events.driver, disc_info, ) - platform = disc_info.platform - await self.controller_events.driver_events.async_setup_platform(platform) - LOGGER.debug("Discovered entity: %s", disc_info) async_dispatcher_send( self.hass, @@ -930,63 +976,37 @@ async def client_listen( driver_ready: asyncio.Event, ) -> None: """Listen with the client.""" - should_reload = True try: await client.listen(driver_ready) - except asyncio.CancelledError: - should_reload = False except BaseZwaveJSServerError as err: - LOGGER.error("Failed to listen: %s", err) - except Exception as err: # noqa: BLE001 + if entry.state is not ConfigEntryState.LOADED: + raise + LOGGER.error("Client listen failed: %s", err) + except Exception as err: # We need to guard against unknown exceptions to not crash this task. LOGGER.exception("Unexpected exception: %s", err) + if entry.state is not ConfigEntryState.LOADED: + raise # The entry needs to be reloaded since a new driver state # will be acquired on reconnect. # All model instances will be replaced when the new state is acquired. - if should_reload: - LOGGER.info("Disconnected from server. Reloading integration") - hass.async_create_task(hass.config_entries.async_reload(entry.entry_id)) - - -async def disconnect_client(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Disconnect client.""" - client: ZwaveClient = entry.runtime_data[DATA_CLIENT] - listen_task: asyncio.Task = entry.runtime_data[DATA_CLIENT_LISTEN_TASK] - start_client_task: asyncio.Task = entry.runtime_data[DATA_START_CLIENT_TASK] - driver_events: DriverEvents = entry.runtime_data[DATA_DRIVER_EVENTS] - listen_task.cancel() - start_client_task.cancel() - platform_setup_tasks = driver_events.platform_setup_tasks.values() - for task in platform_setup_tasks: - task.cancel() - - tasks = (listen_task, start_client_task, *platform_setup_tasks) - await asyncio.gather(*tasks, return_exceptions=True) - for task in tasks: - with suppress(asyncio.CancelledError): - await task - - if client.connected: - await client.disconnect() - LOGGER.info("Disconnected from Zwave JS Server") + if not hass.is_stopping: + if entry.state is not ConfigEntryState.LOADED: + raise HomeAssistantError("Listen task ended unexpectedly") + LOGGER.debug("Disconnected from server. 
Reloading integration") + hass.config_entries.async_schedule_reload(entry.entry_id) async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - client: ZwaveClient = entry.runtime_data[DATA_CLIENT] - driver_events: DriverEvents = entry.runtime_data[DATA_DRIVER_EVENTS] - platforms = [ - platform - for platform, task in driver_events.platform_setup_tasks.items() - if not task.cancel() - ] - unload_ok = await hass.config_entries.async_unload_platforms(entry, platforms) + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if client.connected and client.driver: - await async_disable_server_logging_if_needed(hass, entry, client.driver) - if DATA_CLIENT_LISTEN_TASK in entry.runtime_data: - await disconnect_client(hass, entry) + entry_runtime_data = entry.runtime_data + client: ZwaveClient = entry_runtime_data[DATA_CLIENT] + + if client.connected and (driver := client.driver): + await async_disable_server_logging_if_needed(hass, entry, driver) if entry.data.get(CONF_USE_ADDON) and entry.disabled_by: addon_manager: AddonManager = get_addon_manager(hass) diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index aef23cb73ea..dd698d9ed66 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -405,6 +405,7 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command( hass, websocket_try_parse_dsk_from_qr_code_string ) + websocket_api.async_register_command(hass, websocket_lookup_device) websocket_api.async_register_command(hass, websocket_supports_feature) websocket_api.async_register_command(hass, websocket_stop_inclusion) websocket_api.async_register_command(hass, websocket_stop_exclusion) @@ -454,6 +455,8 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_node_capabilities) websocket_api.async_register_command(hass, websocket_invoke_cc_api) websocket_api.async_register_command(hass, websocket_get_integration_settings) + websocket_api.async_register_command(hass, websocket_backup_nvm) + websocket_api.async_register_command(hass, websocket_restore_nvm) hass.http.register_view(FirmwareUploadView(dr.async_get(hass))) @@ -518,6 +521,7 @@ async def websocket_network_status( "supported_function_types": controller.supported_function_types, "suc_node_id": controller.suc_node_id, "supports_timers": controller.supports_timers, + "supports_long_range": controller.supports_long_range, "is_rebuilding_routes": controller.is_rebuilding_routes, "inclusion_state": controller.inclusion_state, "rf_region": controller.rf_region, @@ -972,13 +976,7 @@ async def websocket_validate_dsk_and_enter_pin( { vol.Required(TYPE): "zwave_js/provision_smart_start_node", vol.Required(ENTRY_ID): str, - vol.Exclusive( - PLANNED_PROVISIONING_ENTRY, "options" - ): PLANNED_PROVISIONING_ENTRY_SCHEMA, - vol.Exclusive( - QR_PROVISIONING_INFORMATION, "options" - ): QR_PROVISIONING_INFORMATION_SCHEMA, - vol.Exclusive(QR_CODE_STRING, "options"): QR_CODE_STRING_SCHEMA, + vol.Required(QR_PROVISIONING_INFORMATION): QR_PROVISIONING_INFORMATION_SCHEMA, } ) @websocket_api.async_response @@ -993,28 +991,10 @@ async def websocket_provision_smart_start_node( driver: Driver, ) -> None: """Pre-provision a smart start node.""" - try: - cv.has_at_least_one_key( - PLANNED_PROVISIONING_ENTRY, QR_PROVISIONING_INFORMATION, QR_CODE_STRING - )(msg) - except vol.Invalid as err: - connection.send_error( - 
msg[ID], - ERR_INVALID_FORMAT, - err.args[0], - ) - return - provisioning_info = ( - msg.get(PLANNED_PROVISIONING_ENTRY) - or msg.get(QR_PROVISIONING_INFORMATION) - or msg[QR_CODE_STRING] - ) + provisioning_info = msg[QR_PROVISIONING_INFORMATION] - if ( - QR_PROVISIONING_INFORMATION in msg - and provisioning_info.version == QRCodeVersion.S2 - ): + if provisioning_info.version == QRCodeVersion.S2: connection.send_error( msg[ID], ERR_INVALID_FORMAT, @@ -1135,6 +1115,41 @@ async def websocket_try_parse_dsk_from_qr_code_string( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/lookup_device", + vol.Required(ENTRY_ID): str, + vol.Required(MANUFACTURER_ID): int, + vol.Required(PRODUCT_TYPE): int, + vol.Required(PRODUCT_ID): int, + vol.Optional(APPLICATION_VERSION): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_lookup_device( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Look up the definition of a given device in the configuration DB.""" + device = await driver.config_manager.lookup_device( + msg[MANUFACTURER_ID], + msg[PRODUCT_TYPE], + msg[PRODUCT_ID], + msg.get(APPLICATION_VERSION), + ) + if device is None: + connection.send_error(msg[ID], ERR_NOT_FOUND, "Device not found") + else: + connection.send_result(msg[ID], device.to_dict()) + + @websocket_api.require_admin @websocket_api.websocket_command( { @@ -2779,3 +2794,126 @@ def websocket_get_integration_settings( CONF_INSTALLER_MODE: hass.data[DOMAIN].get(CONF_INSTALLER_MODE, False), }, ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/backup_nvm", + vol.Required(ENTRY_ID): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_backup_nvm( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Backup NVM data.""" + controller = driver.controller + + @callback + def async_cleanup() -> None: + """Remove signal listeners.""" + for unsub in unsubs: + unsub() + + @callback + def forward_progress(event: dict) -> None: + """Forward progress events to websocket.""" + connection.send_message( + websocket_api.event_message( + msg[ID], + { + "event": event["event"], + "bytesRead": event["bytesRead"], + "total": event["total"], + }, + ) + ) + + # Set up subscription for progress events + connection.subscriptions[msg["id"]] = async_cleanup + msg[DATA_UNSUBSCRIBE] = unsubs = [ + controller.on("nvm backup progress", forward_progress), + ] + + result = await controller.async_backup_nvm_raw_base64() + # Send the finished event with the backup data + connection.send_message( + websocket_api.event_message( + msg[ID], + { + "event": "finished", + "data": result, + }, + ) + ) + connection.send_result(msg[ID]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/restore_nvm", + vol.Required(ENTRY_ID): str, + vol.Required("data"): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_entry +async def websocket_restore_nvm( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + entry: ConfigEntry, + client: Client, + driver: Driver, +) -> None: + """Restore NVM data.""" + controller = driver.controller + + @callback + def 
async_cleanup() -> None: + """Remove signal listeners.""" + for unsub in unsubs: + unsub() + + @callback + def forward_progress(event: dict) -> None: + """Forward progress events to websocket.""" + connection.send_message( + websocket_api.event_message( + msg[ID], + { + "event": event["event"], + "bytesRead": event.get("bytesRead"), + "bytesWritten": event.get("bytesWritten"), + "total": event["total"], + }, + ) + ) + + # Set up subscription for progress events + connection.subscriptions[msg["id"]] = async_cleanup + msg[DATA_UNSUBSCRIBE] = unsubs = [ + controller.on("nvm convert progress", forward_progress), + controller.on("nvm restore progress", forward_progress), + ] + + await controller.async_restore_nvm_base64(msg["data"]) + connection.send_message( + websocket_api.event_message( + msg[ID], + { + "event": "finished", + }, + ) + ) + connection.send_result(msg[ID]) diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index 44adf6a12ab..aed0dd839be 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -42,7 +42,6 @@ from homeassistant.helpers.service_info.usb import UsbServiceInfo from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo from homeassistant.helpers.typing import VolDictType -from . import disconnect_client from .addon import get_addon_manager from .const import ( ADDON_SLUG, @@ -861,7 +860,7 @@ class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow): and self.config_entry.state == ConfigEntryState.LOADED ): # Disconnect integration before restarting add-on. - await disconnect_client(self.hass, self.config_entry) + await self.hass.config_entries.async_unload(self.config_entry.entry_id) return await self.async_step_start_addon() diff --git a/homeassistant/components/zwave_js/helpers.py b/homeassistant/components/zwave_js/helpers.py index 904a26acc78..8a90ebf6f88 100644 --- a/homeassistant/components/zwave_js/helpers.py +++ b/homeassistant/components/zwave_js/helpers.py @@ -187,7 +187,7 @@ async def async_disable_server_logging_if_needed( old_server_log_level, ) await driver.async_update_log_config(LogConfig(level=old_server_log_level)) - await driver.client.disable_server_logging() + driver.client.disable_server_logging() LOGGER.info("Zwave-js-server logging is enabled") diff --git a/homeassistant/components/zwave_js/light.py b/homeassistant/components/zwave_js/light.py index a610bbcb91e..f60e129cc77 100644 --- a/homeassistant/components/zwave_js/light.py +++ b/homeassistant/components/zwave_js/light.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import ( @@ -483,7 +483,7 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): red = multi_color.get(COLOR_SWITCH_COMBINED_RED, red_val.value) green = multi_color.get(COLOR_SWITCH_COMBINED_GREEN, green_val.value) blue = multi_color.get(COLOR_SWITCH_COMBINED_BLUE, blue_val.value) - if None not in (red, green, blue): + if red is not None and green is not None and blue is not None: # convert to HS self._hs_color = color_util.color_RGB_to_hs(red, green, blue) # Light supports color, set color mode to hs @@ -496,7 +496,8 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # Calculate color temps based on whites if cold_white or warm_white: self._color_temp = color_util.color_temperature_mired_to_kelvin( - MAX_MIREDS - ((cold_white / 
255) * (MAX_MIREDS - MIN_MIREDS)) + MAX_MIREDS + - ((cast(int, cold_white) / 255) * (MAX_MIREDS - MIN_MIREDS)) ) # White channels turned on, set color mode to color_temp self._color_mode = ColorMode.COLOR_TEMP @@ -505,6 +506,13 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # only one white channel (warm white) = rgbw support elif red_val and green_val and blue_val and ww_val: white = multi_color.get(COLOR_SWITCH_COMBINED_WARM_WHITE, ww_val.value) + if TYPE_CHECKING: + assert ( + red is not None + and green is not None + and blue is not None + and white is not None + ) self._rgbw_color = (red, green, blue, white) # Light supports rgbw, set color mode to rgbw self._color_mode = ColorMode.RGBW @@ -512,6 +520,13 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): elif cw_val: self._supports_rgbw = True white = multi_color.get(COLOR_SWITCH_COMBINED_COLD_WHITE, cw_val.value) + if TYPE_CHECKING: + assert ( + red is not None + and green is not None + and blue is not None + and white is not None + ) self._rgbw_color = (red, green, blue, white) # Light supports rgbw, set color mode to rgbw self._color_mode = ColorMode.RGBW diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index 3178bdf46ad..7e8b473922f 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ b/homeassistant/components/zwave_js/manifest.json @@ -9,7 +9,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["zwave_js_server"], - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.60.1"], + "requirements": ["pyserial==3.5", "zwave-js-server-python==0.62.0"], "usb": [ { "vid": "0658", diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index 2639c429e71..d3e681ecca1 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -195,8 +195,6 @@ DISCOVERY_SOURCES = { SOURCE_ZEROCONF, } -RECONFIGURE_NOTIFICATION_ID = "config_entry_reconfigure" - EVENT_FLOW_DISCOVERED = "config_entry_discovered" SIGNAL_CONFIG_ENTRY_CHANGED = SignalType["ConfigEntryChange", "ConfigEntry"]( @@ -1628,6 +1626,23 @@ class ConfigEntriesFlowManager( result["handler"], flow.unique_id ) + if existing_entry is not None and flow.handler != "mobile_app": + # This causes the old entry to be removed and replaced, when the flow + # should instead be aborted. + # In case of manual flows, integrations should implement options, reauth, + # reconfigure to allow the user to change settings. + # In case of non user visible flows, the integration should optionally + # update the existing entry before aborting. + # see https://developers.home-assistant.io/blog/2025/03/01/config-flow-unique-id/ + report_usage( + "creates a config entry when another entry with the same unique ID " + "exists", + core_behavior=ReportBehavior.LOG, + core_integration_behavior=ReportBehavior.LOG, + custom_integration_behavior=ReportBehavior.LOG, + integration_domain=flow.handler, + ) + # Unload the entry before setting up the new one. if existing_entry is not None and existing_entry.state.recoverable: await self.config_entries.async_unload(existing_entry.entry_id) @@ -1697,16 +1712,6 @@ class ConfigEntriesFlowManager( # Create notification. if source in DISCOVERY_SOURCES: await self._discovery_debouncer.async_call() - elif source == SOURCE_REAUTH: - persistent_notification.async_create( - self.hass, - title="Integration requires reconfiguration", - message=( - "At least one of your integrations requires reconfiguration to " - "continue functioning. 
[Check it out](/config/integrations)." - ), - notification_id=RECONFIGURE_NOTIFICATION_ID, - ) @callback def _async_discovery(self) -> None: @@ -2969,8 +2974,11 @@ class ConfigFlow(ConfigEntryBaseFlow): return None if raise_on_progress: - if self._async_in_progress( - include_uninitialized=True, match_context={"unique_id": unique_id} + if any( + flow["context"]["source"] != SOURCE_REAUTH + for flow in self._async_in_progress( + include_uninitialized=True, match_context={"unique_id": unique_id} + ) ): raise data_entry_flow.AbortFlow("already_in_progress") @@ -3099,29 +3107,6 @@ class ConfigFlow(ConfigEntryBaseFlow): """Handle a flow initialized by discovery.""" return await self._async_step_discovery_without_unique_id() - @callback - def async_abort( - self, - *, - reason: str, - description_placeholders: Mapping[str, str] | None = None, - ) -> ConfigFlowResult: - """Abort the config flow.""" - # Remove reauth notification if no reauth flows are in progress - if self.source == SOURCE_REAUTH and not any( - ent["flow_id"] != self.flow_id - for ent in self.hass.config_entries.flow.async_progress_by_handler( - self.handler, match_context={"source": SOURCE_REAUTH} - ) - ): - persistent_notification.async_dismiss( - self.hass, RECONFIGURE_NOTIFICATION_ID - ) - - return super().async_abort( - reason=reason, description_placeholders=description_placeholders - ) - async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak ) -> ConfigFlowResult: @@ -3471,18 +3456,14 @@ class ConfigSubentryFlow( return self.async_abort(reason="reconfigure_successful") @property - def _reconfigure_entry_id(self) -> str: - """Return reconfigure entry id.""" - if self.source != SOURCE_RECONFIGURE: - raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}") + def _entry_id(self) -> str: + """Return config entry id.""" return self.handler[0] @callback - def _get_reconfigure_entry(self) -> ConfigEntry: - """Return the reconfigure config entry linked to the current context.""" - return self.hass.config_entries.async_get_known_entry( - self._reconfigure_entry_id - ) + def _get_entry(self) -> ConfigEntry: + """Return the config entry linked to the current context.""" + return self.hass.config_entries.async_get_known_entry(self._entry_id) @property def _reconfigure_subentry_id(self) -> str: @@ -3494,9 +3475,7 @@ class ConfigSubentryFlow( @callback def _get_reconfigure_subentry(self) -> ConfigSubentry: """Return the reconfigure config subentry linked to the current context.""" - entry = self.hass.config_entries.async_get_known_entry( - self._reconfigure_entry_id - ) + entry = self.hass.config_entries.async_get_known_entry(self._entry_id) subentry_id = self._reconfigure_subentry_id if subentry_id not in entry.subentries: raise UnknownSubEntry(subentry_id) diff --git a/homeassistant/const.py b/homeassistant/const.py index bd7a96e0e14..b362b0a61e3 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,8 +24,8 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2025 -MINOR_VERSION: Final = 3 -PATCH_VERSION: Final = "4" +MINOR_VERSION: Final = 4 +PATCH_VERSION: Final = "0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0) diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 251e22e7990..f7be891b61b 100644 --- a/homeassistant/data_entry_flow.py +++ 
b/homeassistant/data_entry_flow.py @@ -657,6 +657,19 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): ): continue + # Process the section schema options + if ( + suggested_values is not None + and isinstance(val, section) + and key in suggested_values + ): + new_section_key = copy.copy(key) + schema[new_section_key] = val + val.schema = self.add_suggested_values_to_schema( + val.schema, suggested_values[key] + ) + continue + new_key = key if ( suggested_values diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index b891e807a7f..68c6de405e6 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -25,6 +25,7 @@ APPLICATION_CREDENTIALS = [ "neato", "nest", "netatmo", + "ondilo_ico", "onedrive", "point", "senz", diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index 587fea8b941..1ff444ca25f 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -356,6 +356,21 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "inkbird", "local_name": "tps", }, + { + "connectable": False, + "domain": "inkbird", + "local_name": "ITH-11-B", + }, + { + "connectable": False, + "domain": "inkbird", + "local_name": "ITH-13-B", + }, + { + "connectable": False, + "domain": "inkbird", + "local_name": "ITH-21-B", + }, { "connectable": True, "domain": "iron_os", diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 8284f77ef94..d192b8fcd13 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -91,6 +91,7 @@ FLOWS = { "bluetooth", "bmw_connected_drive", "bond", + "bosch_alarm", "bosch_shc", "braviatv", "bring", @@ -488,6 +489,7 @@ FLOWS = { "proximity", "prusalink", "ps4", + "pterodactyl", "pure_energie", "purpleair", "pushbullet", @@ -513,6 +515,7 @@ FLOWS = { "rdw", "recollect_waste", "refoss", + "remote_calendar", "renault", "renson", "reolink", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 3dba5a98f3c..8ee1ea270f3 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -498,6 +498,18 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "ring*", "macaddress": "341513*", }, + { + "domain": "roborock", + "macaddress": "249E7D*", + }, + { + "domain": "roborock", + "macaddress": "B04A39*", + }, + { + "domain": "roborock", + "hostname": "roborock-*", + }, { "domain": "roomba", "hostname": "irobot-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 1f5a4d9d279..da4f08f157d 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -611,6 +611,13 @@ "config_flow": true, "iot_class": "local_push" }, + "backup": { + "name": "Backup", + "integration_type": "service", + "config_flow": false, + "iot_class": "calculated", + "single_config_entry": true + }, "baf": { "name": "Big Ass Fans", "integration_type": "hub", @@ -752,11 +759,28 @@ "config_flow": true, "iot_class": "local_push" }, - "bosch_shc": { - "name": "Bosch SHC", - "integration_type": "hub", - "config_flow": true, - "iot_class": "local_push" + "bosch": { + "name": "Bosch", + "integrations": { + "bosch_alarm": { + "integration_type": "device", + "config_flow": true, + "iot_class": "local_push", + "name": "Bosch Alarm" + }, + "bosch_shc": { 
+ "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push", + "name": "Bosch SHC" + }, + "home_connect": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "Home Connect" + } + } }, "brandt": { "name": "Brandt Smart Control", @@ -1791,6 +1815,12 @@ } } }, + "eve": { + "name": "Eve", + "iot_standards": [ + "matter" + ] + }, "evergy": { "name": "Evergy", "integration_type": "virtual", @@ -2051,6 +2081,11 @@ "config_flow": false, "iot_class": "cloud_push" }, + "frankever": { + "name": "FrankEver", + "integration_type": "virtual", + "supported_by": "shelly" + }, "free_mobile": { "name": "Free Mobile", "integration_type": "hub", @@ -2484,6 +2519,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "hardkernel": { + "name": "Hardkernel", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, "harman_kardon_avr": { "name": "Harman Kardon AVR", "integration_type": "hub", @@ -2615,18 +2656,28 @@ "config_flow": true, "iot_class": "local_polling" }, - "home_connect": { - "name": "Home Connect", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push", - "single_config_entry": true - }, "home_plus_control": { "name": "Legrand Home+ Control", "integration_type": "virtual", "supported_by": "netatmo" }, + "homeassistant_green": { + "name": "Home Assistant Green", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, + "homeassistant_sky_connect": { + "name": "Home Assistant Connect ZBT-1", + "integration_type": "hardware", + "config_flow": true + }, + "homeassistant_yellow": { + "name": "Home Assistant Yellow", + "integration_type": "hardware", + "config_flow": false, + "single_config_entry": true + }, "homee": { "name": "Homee", "integration_type": "hub", @@ -3413,6 +3464,11 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "linkedgo": { + "name": "LinkedGo", + "integration_type": "virtual", + "supported_by": "shelly" + }, "linkplay": { "name": "LinkPlay", "integration_type": "hub", @@ -3990,7 +4046,10 @@ "iot_class": "assumed_state", "name": "Motionblinds Bluetooth" } - } + }, + "iot_standards": [ + "matter" + ] }, "motioneye": { "name": "motionEye", @@ -4416,6 +4475,11 @@ "config_flow": false, "iot_class": "local_polling" }, + "ogemray": { + "name": "Ogemray", + "integration_type": "virtual", + "supported_by": "shelly" + }, "ohmconnect": { "name": "OhmConnect", "integration_type": "hub", @@ -4973,6 +5037,12 @@ "integration_type": "virtual", "supported_by": "opower" }, + "pterodactyl": { + "name": "Pterodactyl", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "pulseaudio_loopback": { "name": "PulseAudio Loopback", "integration_type": "hub", @@ -5177,6 +5247,11 @@ "raspberry_pi": { "name": "Raspberry Pi", "integrations": { + "raspberry_pi": { + "integration_type": "hardware", + "config_flow": false, + "name": "Raspberry Pi" + }, "rpi_camera": { "integration_type": "hub", "config_flow": false, @@ -5250,6 +5325,11 @@ "config_flow": false, "iot_class": "cloud_push" }, + "remote_calendar": { + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "renault": { "name": "Renault", "integration_type": "hub", @@ -7675,6 +7755,7 @@ "plant", "proximity", "random", + "remote_calendar", "rpi_power", "schedule", "season", diff --git a/homeassistant/helpers/backup.py b/homeassistant/helpers/backup.py index 4ab302749a1..b3607f6653c 100644 --- 
a/homeassistant/helpers/backup.py +++ b/homeassistant/helpers/backup.py @@ -12,7 +12,11 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.util.hass_dict import HassKey if TYPE_CHECKING: - from homeassistant.components.backup import BackupManager, ManagerStateEvent + from homeassistant.components.backup import ( + BackupManager, + BackupPlatformEvent, + ManagerStateEvent, + ) DATA_BACKUP: HassKey[BackupData] = HassKey("backup_data") DATA_MANAGER: HassKey[BackupManager] = HassKey("backup") @@ -25,6 +29,9 @@ class BackupData: backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = field( default_factory=list ) + backup_platform_event_subscriptions: list[Callable[[BackupPlatformEvent], None]] = ( + field(default_factory=list) + ) manager_ready: asyncio.Future[None] = field(default_factory=asyncio.Future) @@ -68,3 +75,20 @@ def async_subscribe_events( backup_event_subscriptions.append(on_event) return remove_subscription + + +@callback +def async_subscribe_platform_events( + hass: HomeAssistant, + on_event: Callable[[BackupPlatformEvent], None], +) -> Callable[[], None]: + """Subscribe to backup platform events.""" + backup_platform_event_subscriptions = hass.data[ + DATA_BACKUP + ].backup_platform_event_subscriptions + + def remove_subscription() -> None: + backup_platform_event_subscriptions.remove(on_event) + + backup_platform_event_subscriptions.append(on_event) + return remove_subscription diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 0841585e1a1..836536da9ee 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -8,6 +8,7 @@ import os from pathlib import Path from typing import NamedTuple, Self +from annotatedyaml import loader as yaml_loader import voluptuous as vol from homeassistant import loader @@ -29,7 +30,6 @@ from homeassistant.requirements import ( async_clear_install_history, async_get_integration_with_requirements, ) -from homeassistant.util.yaml import loader as yaml_loader from . 
import config_validation as cv from .typing import ConfigType diff --git a/homeassistant/helpers/config_entry_oauth2_flow.py b/homeassistant/helpers/config_entry_oauth2_flow.py index 24a9de5b562..84728978ede 100644 --- a/homeassistant/helpers/config_entry_oauth2_flow.py +++ b/homeassistant/helpers/config_entry_oauth2_flow.py @@ -11,7 +11,9 @@ from __future__ import annotations from abc import ABC, ABCMeta, abstractmethod import asyncio from asyncio import Lock +import base64 from collections.abc import Awaitable, Callable +import hashlib from http import HTTPStatus from json import JSONDecodeError import logging @@ -166,6 +168,11 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): """Extra data that needs to be appended to the authorize url.""" return {} + @property + def extra_token_resolve_data(self) -> dict: + """Extra data for the token resolve request.""" + return {} + async def async_generate_authorize_url(self, flow_id: str) -> str: """Generate a url for the user to authorize.""" redirect_uri = self.redirect_uri @@ -186,13 +193,13 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): async def async_resolve_external_data(self, external_data: Any) -> dict: """Resolve the authorization code to tokens.""" - return await self._token_request( - { - "grant_type": "authorization_code", - "code": external_data["code"], - "redirect_uri": external_data["state"]["redirect_uri"], - } - ) + request_data: dict = { + "grant_type": "authorization_code", + "code": external_data["code"], + "redirect_uri": external_data["state"]["redirect_uri"], + } + request_data.update(self.extra_token_resolve_data) + return await self._token_request(request_data) async def _async_refresh_token(self, token: dict) -> dict: """Refresh tokens.""" @@ -211,7 +218,7 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): data["client_id"] = self.client_id - if self.client_secret is not None: + if self.client_secret: data["client_secret"] = self.client_secret _LOGGER.debug("Sending token request to %s", self.token_url) @@ -233,6 +240,100 @@ class LocalOAuth2Implementation(AbstractOAuth2Implementation): return cast(dict, await resp.json()) +class LocalOAuth2ImplementationWithPkce(LocalOAuth2Implementation): + """Local OAuth2 implementation with PKCE.""" + + def __init__( + self, + hass: HomeAssistant, + domain: str, + client_id: str, + authorize_url: str, + token_url: str, + client_secret: str = "", + code_verifier_length: int = 128, + ) -> None: + """Initialize local auth implementation.""" + super().__init__( + hass, + domain, + client_id, + client_secret, + authorize_url, + token_url, + ) + + # Generate code verifier + self.code_verifier = LocalOAuth2ImplementationWithPkce.generate_code_verifier( + code_verifier_length + ) + + @property + def extra_authorize_data(self) -> dict: + """Extra data that needs to be appended to the authorize url. + + If you want to override this method, + calling super is mandatory (for adding scopes): + ``` + @def extra_authorize_data(self) -> dict: + data: dict = { + "scope": "openid profile email", + } + data.update(super().extra_authorize_data) + return data + ``` + """ + return { + "code_challenge": LocalOAuth2ImplementationWithPkce.compute_code_challenge( + self.code_verifier + ), + "code_challenge_method": "S256", + } + + @property + def extra_token_resolve_data(self) -> dict: + """Extra data that needs to be included in the token resolve request. 
+ + If you want to override this method, + calling super is mandatory (for adding `someKey`): + ``` + @def extra_token_resolve_data(self) -> dict: + data: dict = { + "someKey": "someValue", + } + data.update(super().extra_token_resolve_data) + return data + ``` + """ + + return {"code_verifier": self.code_verifier} + + @staticmethod + def generate_code_verifier(code_verifier_length: int = 128) -> str: + """Generate a code verifier.""" + if not 43 <= code_verifier_length <= 128: + msg = ( + "Parameter `code_verifier_length` must validate" + "`43 <= code_verifier_length <= 128`." + ) + raise ValueError(msg) + return secrets.token_urlsafe(96)[:code_verifier_length] + + @staticmethod + def compute_code_challenge(code_verifier: str) -> str: + """Compute the code challenge.""" + if not 43 <= len(code_verifier) <= 128: + msg = ( + "Parameter `code_verifier` must validate " + "`43 <= len(code_verifier) <= 128`." + ) + raise ValueError(msg) + + hashed = hashlib.sha256(code_verifier.encode("ascii")).digest() + encoded = base64.urlsafe_b64encode(hashed) + return encoded.decode("ascii").replace("=", "") + + class AbstractOAuth2FlowHandler(config_entries.ConfigFlow, metaclass=ABCMeta): """Handle a config flow.""" diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 4978158c0f6..5c1a7c99565 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1153,41 +1153,6 @@ def _custom_serializer(schema: Any, *, allow_section: bool) -> Any: return voluptuous_serialize.UNSUPPORTED -def expand_condition_shorthand(value: Any | None) -> Any: - """Expand boolean condition shorthand notations.""" - - if not isinstance(value, dict) or CONF_CONDITIONS in value: - return value - - for key, schema in ( - ("and", AND_CONDITION_SHORTHAND_SCHEMA), - ("or", OR_CONDITION_SHORTHAND_SCHEMA), - ("not", NOT_CONDITION_SHORTHAND_SCHEMA), - ): - try: - schema(value) - return { - CONF_CONDITION: key, - CONF_CONDITIONS: value[key], - **{k: value[k] for k in value if k != key}, - } - except vol.MultipleInvalid: - pass - - if isinstance(value.get(CONF_CONDITION), list): - try: - CONDITION_SHORTHAND_SCHEMA(value) - return { - CONF_CONDITION: "and", - CONF_CONDITIONS: value[CONF_CONDITION], - **{k: value[k] for k in value if k != CONF_CONDITION}, - } - except vol.MultipleInvalid: - pass - - return value - - # Schemas def empty_config_schema(domain: str) -> Callable[[dict], dict]: """Return a config schema which logs if there are configuration parameters.""" @@ -1683,7 +1648,43 @@ DEVICE_CONDITION_BASE_SCHEMA = vol.Schema( DEVICE_CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) -dynamic_template_condition_action = vol.All( + +def expand_condition_shorthand(value: Any | None) -> Any: + """Expand boolean condition shorthand notations.""" + + if not isinstance(value, dict) or CONF_CONDITIONS in value: + return value + + for key, schema in ( + ("and", AND_CONDITION_SHORTHAND_SCHEMA), + ("or", OR_CONDITION_SHORTHAND_SCHEMA), + ("not", NOT_CONDITION_SHORTHAND_SCHEMA), + ): + try: + schema(value) + return { + CONF_CONDITION: key, + CONF_CONDITIONS: value[key], + **{k: value[k] for k in value if k != key}, + } + except vol.MultipleInvalid: + pass + + if isinstance(value.get(CONF_CONDITION), list): + try: + CONDITION_SHORTHAND_SCHEMA(value) + return { + CONF_CONDITION: "and", + CONF_CONDITIONS: value[CONF_CONDITION], + **{k: value[k] for k in value if k != CONF_CONDITION}, + } + except vol.MultipleInvalid: + pass 
+ + return value + + +dynamic_template_condition = vol.All( # Wrap a shorthand template condition in a template condition dynamic_template, lambda config: { @@ -1724,7 +1725,7 @@ CONDITION_SCHEMA: vol.Schema = vol.Schema( }, ), ), - dynamic_template_condition_action, + dynamic_template_condition, ) ) @@ -1873,12 +1874,8 @@ _SCRIPT_REPEAT_SCHEMA = vol.Schema( vol.Exclusive(CONF_FOR_EACH, "repeat"): vol.Any( dynamic_template, vol.All(list, template_complex) ), - vol.Exclusive(CONF_WHILE, "repeat"): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), - vol.Exclusive(CONF_UNTIL, "repeat"): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), + vol.Exclusive(CONF_WHILE, "repeat"): CONDITIONS_SCHEMA, + vol.Exclusive(CONF_UNTIL, "repeat"): CONDITIONS_SCHEMA, vol.Required(CONF_SEQUENCE): SCRIPT_SCHEMA, }, has_at_least_one_key(CONF_COUNT, CONF_FOR_EACH, CONF_WHILE, CONF_UNTIL), @@ -1894,9 +1891,7 @@ _SCRIPT_CHOOSE_SCHEMA = vol.Schema( [ { vol.Optional(CONF_ALIAS): string, - vol.Required(CONF_CONDITIONS): vol.All( - ensure_list, [CONDITION_SCHEMA] - ), + vol.Required(CONF_CONDITIONS): CONDITIONS_SCHEMA, vol.Required(CONF_SEQUENCE): SCRIPT_SCHEMA, } ], @@ -1917,7 +1912,7 @@ _SCRIPT_WAIT_FOR_TRIGGER_SCHEMA = vol.Schema( _SCRIPT_IF_SCHEMA = vol.Schema( { **SCRIPT_ACTION_BASE_SCHEMA, - vol.Required(CONF_IF): vol.All(ensure_list, [CONDITION_SCHEMA]), + vol.Required(CONF_IF): CONDITIONS_SCHEMA, vol.Required(CONF_THEN): SCRIPT_SCHEMA, vol.Optional(CONF_ELSE): SCRIPT_SCHEMA, } diff --git a/homeassistant/helpers/deprecation.py b/homeassistant/helpers/deprecation.py index 375ec58c26f..101b9731caf 100644 --- a/homeassistant/helpers/deprecation.py +++ b/homeassistant/helpers/deprecation.py @@ -369,7 +369,7 @@ class EnumWithDeprecatedMembers(EnumType): """Enum with deprecated members.""" def __new__( - mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass + mcs, cls: str, bases: tuple[type, ...], classdict: _EnumDict, diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index bed5ce586c5..bdcda58c054 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -281,7 +281,7 @@ class CachedProperties(type): """ def __new__( - mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass + mcs, name: str, bases: tuple[type, ...], namespace: dict[Any, Any], diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index f33f8407e47..ca7b097d90d 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -10,18 +10,20 @@ import functools import linecache import logging import sys +import threading from types import FrameType from typing import Any, cast from propcache.api import cached_property -from homeassistant.core import HomeAssistant, async_get_hass_or_none +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import ( Integration, async_get_issue_integration, async_suggest_report_issue, ) +from homeassistant.util.async_ import run_callback_threadsafe _LOGGER = logging.getLogger(__name__) @@ -29,6 +31,21 @@ _LOGGER = logging.getLogger(__name__) _REPORTED_INTEGRATIONS: set[str] = set() +class _Hass: + """Container which makes a HomeAssistant instance available to frame helper.""" + + hass: HomeAssistant | None = None + + +_hass = _Hass() + + +@callback +def async_setup(hass: HomeAssistant) -> None: + """Set up the frame helper.""" + _hass.hass = hass + + @dataclass(kw_only=True) class IntegrationFrame: 
"""Integration frame container.""" @@ -133,44 +150,6 @@ class MissingIntegrationFrame(HomeAssistantError): """Raised when no integration is found in the frame.""" -def report( - what: str, - *, - exclude_integrations: set[str] | None = None, - error_if_core: bool = True, - error_if_integration: bool = False, - level: int = logging.WARNING, - log_custom_component_only: bool = False, -) -> None: - """Report incorrect usage. - - If error_if_core is True, raise instead of log if an integration is not found - when unwinding the stack frame. - If error_if_integration is True, raise instead of log if an integration is found - when unwinding the stack frame. - """ - core_behavior = ReportBehavior.ERROR if error_if_core else ReportBehavior.LOG - core_integration_behavior = ( - ReportBehavior.ERROR if error_if_integration else ReportBehavior.LOG - ) - custom_integration_behavior = core_integration_behavior - - if log_custom_component_only: - if core_behavior is ReportBehavior.LOG: - core_behavior = ReportBehavior.IGNORE - if core_integration_behavior is ReportBehavior.LOG: - core_integration_behavior = ReportBehavior.IGNORE - - report_usage( - what, - core_behavior=core_behavior, - core_integration_behavior=core_integration_behavior, - custom_integration_behavior=custom_integration_behavior, - exclude_integrations=exclude_integrations, - level=level, - ) - - class ReportBehavior(enum.Enum): """Enum for behavior on code usage.""" @@ -201,18 +180,49 @@ def report_usage( breaking version :param exclude_integrations: skip specified integration when reviewing the stack. If no integration is found, the core behavior will be applied - :param integration_domain: fallback for identifying the integration if the - frame is not found + :param integration_domain: domain of the integration causing the issue. If None, the + stack frame will be searched to identify the integration causing the issue. """ - try: - integration_frame = get_integration_frame( - exclude_integrations=exclude_integrations - ) - except MissingIntegrationFrame as err: - if integration := async_get_issue_integration( - hass := async_get_hass_or_none(), integration_domain - ): - _report_integration_domain( + if (hass := _hass.hass) is None: + raise RuntimeError("Frame helper not set up") + _report_usage_partial = functools.partial( + _report_usage, + hass, + what, + breaks_in_ha_version=breaks_in_ha_version, + core_behavior=core_behavior, + core_integration_behavior=core_integration_behavior, + custom_integration_behavior=custom_integration_behavior, + exclude_integrations=exclude_integrations, + integration_domain=integration_domain, + level=level, + ) + if hass.loop_thread_id != threading.get_ident(): + future = run_callback_threadsafe(hass.loop, _report_usage_partial) + future.result() + return + _report_usage_partial() + + +def _report_usage( + hass: HomeAssistant, + what: str, + *, + breaks_in_ha_version: str | None, + core_behavior: ReportBehavior, + core_integration_behavior: ReportBehavior, + custom_integration_behavior: ReportBehavior, + exclude_integrations: set[str] | None, + integration_domain: str | None, + level: int, +) -> None: + """Report incorrect code usage. + + Must be called from the event loop. + """ + if integration_domain: + if integration := async_get_issue_integration(hass, integration_domain): + _report_usage_integration_domain( hass, what, breaks_in_ha_version, @@ -222,16 +232,15 @@ def report_usage( level, ) return - msg = f"Detected code that {what}. 
Please report this issue" - if core_behavior is ReportBehavior.ERROR: - raise RuntimeError(msg) from err - if core_behavior is ReportBehavior.LOG: - if breaks_in_ha_version: - msg = ( - f"Detected code that {what}. This will stop working in Home " - f"Assistant {breaks_in_ha_version}, please report this issue" - ) - _LOGGER.warning(msg, stack_info=True) + _report_usage_no_integration(what, core_behavior, breaks_in_ha_version, None) + return + + try: + integration_frame = get_integration_frame( + exclude_integrations=exclude_integrations + ) + except MissingIntegrationFrame as err: + _report_usage_no_integration(what, core_behavior, breaks_in_ha_version, err) return integration_behavior = core_integration_behavior @@ -239,7 +248,8 @@ def report_usage( integration_behavior = custom_integration_behavior if integration_behavior is not ReportBehavior.IGNORE: - _report_integration_frame( + _report_usage_integration_frame( + hass, what, breaks_in_ha_version, integration_frame, @@ -248,7 +258,7 @@ def report_usage( ) -def _report_integration_domain( +def _report_usage_integration_domain( hass: HomeAssistant | None, what: str, breaks_in_ha_version: str | None, @@ -298,7 +308,8 @@ def _report_integration_domain( ) -def _report_integration_frame( +def _report_usage_integration_frame( + hass: HomeAssistant, what: str, breaks_in_ha_version: str | None, integration_frame: IntegrationFrame, @@ -316,7 +327,7 @@ def _report_integration_frame( _REPORTED_INTEGRATIONS.add(key) report_issue = async_suggest_report_issue( - async_get_hass_or_none(), + hass, integration_domain=integration_frame.integration, module=integration_frame.module, ) @@ -346,31 +357,55 @@ def _report_integration_frame( ) +def _report_usage_no_integration( + what: str, + core_behavior: ReportBehavior, + breaks_in_ha_version: str | None, + err: MissingIntegrationFrame | None, +) -> None: + """Report incorrect usage without an integration. + + This could happen because the offending call happened outside of an integration, + or because the integration could not be identified. + """ + msg = f"Detected code that {what}. Please report this issue" + if core_behavior is ReportBehavior.ERROR: + raise RuntimeError(msg) from err + if core_behavior is ReportBehavior.LOG: + if breaks_in_ha_version: + msg = ( + f"Detected code that {what}. This will stop working in Home " + f"Assistant {breaks_in_ha_version}, please report this issue" + ) + _LOGGER.warning(msg, stack_info=True) + + def warn_use[_CallableT: Callable](func: _CallableT, what: str) -> _CallableT: """Mock a function to warn when it was about to be used.""" if asyncio.iscoroutinefunction(func): @functools.wraps(func) async def report_use(*args: Any, **kwargs: Any) -> None: - report(what) + report_usage(what) else: @functools.wraps(func) def report_use(*args: Any, **kwargs: Any) -> None: - report(what) + report_usage(what) return cast(_CallableT, report_use) def report_non_thread_safe_operation(what: str) -> None: """Report a non-thread safe operation.""" - report( + report_usage( f"calls {what} from a thread other than the event loop, " "which may cause Home Assistant to crash or data to corrupt. 
" "For more information, see " "https://developers.home-assistant.io/docs/asyncio_thread_safety/" f"#{what.replace('.', '')}", - error_if_core=True, - error_if_integration=True, + core_behavior=ReportBehavior.ERROR, + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.ERROR, ) diff --git a/homeassistant/helpers/httpx_client.py b/homeassistant/helpers/httpx_client.py index ade2ce747d5..49b12e0aa60 100644 --- a/homeassistant/helpers/httpx_client.py +++ b/homeassistant/helpers/httpx_client.py @@ -7,6 +7,9 @@ import sys from types import TracebackType from typing import Any, Self +# httpx dynamically imports httpcore, so we need to import it +# to avoid it being imported later when the event loop is running +import httpcore # noqa: F401 import httpx from homeassistant.const import APPLICATION_NAME, EVENT_HOMEASSISTANT_CLOSE, __version__ diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index cecb84d0373..75572194bb8 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -38,7 +38,7 @@ from .typing import VolSchemaType _LOGGER = logging.getLogger(__name__) type _SlotsType = dict[str, Any] type _IntentSlotsType = dict[ - str | tuple[str, str], VolSchemaType | Callable[[Any], Any] + str | tuple[str, str], IntentSlotInfo | VolSchemaType | Callable[[Any], Any] ] INTENT_TURN_OFF = "HassTurnOff" @@ -507,12 +507,22 @@ def _add_areas( candidate.area = areas.async_get_area(candidate.device.area_id) +def _default_area_candidate_filter( + candidate: MatchTargetsCandidate, possible_area_ids: Collection[str] +) -> bool: + """Keep candidates in the possible areas.""" + return (candidate.area is not None) and (candidate.area.id in possible_area_ids) + + @callback def async_match_targets( # noqa: C901 hass: HomeAssistant, constraints: MatchTargetsConstraints, preferences: MatchTargetsPreferences | None = None, states: list[State] | None = None, + area_candidate_filter: Callable[ + [MatchTargetsCandidate, Collection[str]], bool + ] = _default_area_candidate_filter, ) -> MatchTargetsResult: """Match entities based on constraints in order to handle an intent.""" preferences = preferences or MatchTargetsPreferences() @@ -623,9 +633,7 @@ def async_match_targets( # noqa: C901 } candidates = [ - c - for c in candidates - if (c.area is not None) and (c.area.id in possible_area_ids) + c for c in candidates if area_candidate_filter(c, possible_area_ids) ] if not candidates: return MatchTargetsResult( @@ -649,9 +657,7 @@ def async_match_targets( # noqa: C901 # May be constrained by floors above possible_area_ids.intersection_update(matching_area_ids) candidates = [ - c - for c in candidates - if (c.area is not None) and (c.area.id in possible_area_ids) + c for c in candidates if area_candidate_filter(c, possible_area_ids) ] if not candidates: return MatchTargetsResult( @@ -701,7 +707,7 @@ def async_match_targets( # noqa: C901 group_candidates = [ c for c in group_candidates - if (c.area is not None) and (c.area.id == preferences.area_id) + if area_candidate_filter(c, {preferences.area_id}) ] if len(group_candidates) < 2: # Disambiguated by area @@ -747,7 +753,7 @@ def async_match_targets( # noqa: C901 if preferences.area_id: # Filter by area filtered_candidates = [ - c for c in candidates if c.area and (c.area.id == preferences.area_id) + c for c in candidates if area_candidate_filter(c, {preferences.area_id}) ] if (len(filtered_candidates) > 1) and preferences.floor_id: @@ -868,6 +874,34 @@ def non_empty_string(value: 
Any) -> str: return value_str +@dataclass(kw_only=True) +class IntentSlotInfo: + """Details about how intent slots are processed and validated.""" + + service_data_name: str | None = None + """Optional name of the service data input to map to this slot.""" + + description: str | None = None + """Human readable description of the slot.""" + + value_schema: VolSchemaType | Callable[[Any], Any] = vol.Any + """Validator for the slot.""" + + +def _convert_slot_info( + key: str | tuple[str, str], + value: IntentSlotInfo | VolSchemaType | Callable[[Any], Any], +) -> tuple[str, IntentSlotInfo]: + """Create an IntentSlotInfo from the various supported input arguments.""" + if isinstance(value, IntentSlotInfo): + if not isinstance(key, str): + raise TypeError("Tuple key and IntentSlotDescription value not supported") + return key, value + if isinstance(key, tuple): + return key[0], IntentSlotInfo(service_data_name=key[1], value_schema=value) + return key, IntentSlotInfo(value_schema=value) + + class DynamicServiceIntentHandler(IntentHandler): """Service Intent handler registration (dynamic). @@ -901,23 +935,14 @@ class DynamicServiceIntentHandler(IntentHandler): self.platforms = platforms self.device_classes = device_classes - self.required_slots: _IntentSlotsType = {} - if required_slots: - for key, value_schema in required_slots.items(): - if isinstance(key, str): - # Slot name/service data key - key = (key, key) - - self.required_slots[key] = value_schema - - self.optional_slots: _IntentSlotsType = {} - if optional_slots: - for key, value_schema in optional_slots.items(): - if isinstance(key, str): - # Slot name/service data key - key = (key, key) - - self.optional_slots[key] = value_schema + self.required_slots: dict[str, IntentSlotInfo] = dict( + _convert_slot_info(key, value) + for key, value in (required_slots or {}).items() + ) + self.optional_slots: dict[str, IntentSlotInfo] = dict( + _convert_slot_info(key, value) + for key, value in (optional_slots or {}).items() + ) @cached_property def slot_schema(self) -> dict: @@ -958,16 +983,20 @@ class DynamicServiceIntentHandler(IntentHandler): if self.required_slots: slot_schema.update( { - vol.Required(key[0]): validator - for key, validator in self.required_slots.items() + vol.Required( + key, description=slot_info.description + ): slot_info.value_schema + for key, slot_info in self.required_slots.items() } ) if self.optional_slots: slot_schema.update( { - vol.Optional(key[0]): validator - for key, validator in self.optional_slots.items() + vol.Optional( + key, description=slot_info.description + ): slot_info.value_schema + for key, slot_info in self.optional_slots.items() } ) @@ -1150,18 +1179,15 @@ class DynamicServiceIntentHandler(IntentHandler): service_data: dict[str, Any] = {ATTR_ENTITY_ID: state.entity_id} if self.required_slots: - service_data.update( - { - key[1]: intent_obj.slots[key[0]]["value"] - for key in self.required_slots - } - ) + for key, slot_info in self.required_slots.items(): + service_data[slot_info.service_data_name or key] = intent_obj.slots[ + key + ]["value"] if self.optional_slots: - for key in self.optional_slots: - value = intent_obj.slots.get(key[0]) - if value: - service_data[key[1]] = value["value"] + for key, slot_info in self.optional_slots.items(): + if value := intent_obj.slots.get(key): + service_data[slot_info.service_data_name or key] = value["value"] await self._run_then_background( hass.async_create_task_internal( diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 
4ad2bdd6563..7f6fe22ec70 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -66,6 +66,11 @@ Answer questions about the world truthfully. Answer in plain text. Keep it simple and to the point. """ +NO_ENTITIES_PROMPT = ( + "Only if the user wants to control a device, tell them to expose entities " + "to their voice assistant in Home Assistant." +) + @callback def async_render_no_api_prompt(hass: HomeAssistant) -> str: @@ -311,7 +316,7 @@ class AssistAPI(API): """Return the instance of the API.""" if llm_context.assistant: exposed_entities: dict | None = _get_exposed_entities( - self.hass, llm_context.assistant + self.hass, llm_context.assistant, include_state=False ) else: exposed_entities = None @@ -329,10 +334,7 @@ class AssistAPI(API): self, llm_context: LLMContext, exposed_entities: dict | None ) -> str: if not exposed_entities or not exposed_entities["entities"]: - return ( - "Only if the user wants to control a device, tell them to expose entities " - "to their voice assistant in Home Assistant." - ) + return NO_ENTITIES_PROMPT return "\n".join( [ *self._async_get_preable(llm_context), @@ -454,11 +456,16 @@ class AssistAPI(API): for script_entity_id in exposed_entities[SCRIPT_DOMAIN] ) + if exposed_domains: + tools.append(GetHomeStateTool()) + return tools def _get_exposed_entities( - hass: HomeAssistant, assistant: str + hass: HomeAssistant, + assistant: str, + include_state: bool = True, ) -> dict[str, dict[str, dict[str, Any]]]: """Get exposed entities. @@ -519,24 +526,28 @@ def _get_exposed_entities( info: dict[str, Any] = { "names": ", ".join(names), "domain": state.domain, - "state": state.state, } + if include_state: + info["state"] = state.state + if description: info["description"] = description if area_names: info["areas"] = ", ".join(area_names) - if attributes := { - attr_name: ( - str(attr_value) - if isinstance(attr_value, (Enum, Decimal, int)) - else attr_value - ) - for attr_name, attr_value in state.attributes.items() - if attr_name in interesting_attributes - }: + if include_state and ( + attributes := { + attr_name: ( + str(attr_value) + if isinstance(attr_value, (Enum, Decimal, int)) + else attr_value + ) + for attr_name, attr_value in state.attributes.items() + if attr_name in interesting_attributes + } + ): info["attributes"] = attributes if state.domain in data: @@ -885,3 +896,39 @@ class CalendarGetEventsTool(Tool): ] return {"success": True, "result": events} + + +class GetHomeStateTool(Tool): + """Tool for getting the current state of exposed entities. + + This returns state for all entities that have been exposed to + the assistant. This is different than the GetState intent, which + returns state for entities based on intent parameters. + """ + + name = "get_home_state" + description = "Get the current state of all devices in the home. " + + async def async_call( + self, + hass: HomeAssistant, + tool_input: ToolInput, + llm_context: LLMContext, + ) -> JsonObjectType: + """Get the current state of exposed entities.""" + if llm_context.assistant is None: + # Note this doesn't happen in practice since this tool won't be + # exposed if no assistant is configured. 
+ return {"success": False, "error": "No assistant configured"} + + exposed_entities = _get_exposed_entities(hass, llm_context.assistant) + if not exposed_entities["entities"]: + return {"success": False, "error": NO_ENTITIES_PROMPT} + prompt = [ + "An overview of the areas and the devices in this smart home:", + yaml_util.dump(list(exposed_entities["entities"].values())), + ] + return { + "success": True, + "result": "\n".join(prompt), + } diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index bf7a4a0971c..43429bdb1d2 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -966,12 +966,11 @@ class _ScriptRun: ## Variable actions ## async def _async_step_variables(self) -> None: - """Define a local variable.""" - self._step_log("defining local variables") - for key, value in ( - self._action[CONF_VARIABLES].async_simple_render(self._variables).items() - ): - self._variables.define_local(key, value) + """Assign values to variables.""" + self._step_log("assigning variables") + self._variables.update( + self._action[CONF_VARIABLES].async_simple_render(self._variables) + ) ## External actions ## @@ -1312,7 +1311,7 @@ class _QueuedScriptRun(_ScriptRun): lock_acquired = False - async def async_run(self) -> None: + async def async_run(self) -> ScriptRunResult | None: """Run script.""" # Wait for previous run, if any, to finish by attempting to acquire the script's # shared lock. At the same time monitor if we've been told to stop. @@ -1326,7 +1325,7 @@ class _QueuedScriptRun(_ScriptRun): self.lock_acquired = True # We've acquired the lock so we can go ahead and start the run. - await super().async_run() + return await super().async_run() def _finish(self) -> None: if self.lock_acquired: diff --git a/homeassistant/helpers/selector.py b/homeassistant/helpers/selector.py index 025b8de8896..f2c76d1d019 100644 --- a/homeassistant/helpers/selector.py +++ b/homeassistant/helpers/selector.py @@ -164,6 +164,8 @@ DEVICE_FILTER_SELECTOR_CONFIG_SCHEMA = vol.Schema( vol.Optional("manufacturer"): str, # Model of device vol.Optional("model"): str, + # Model ID of device + vol.Optional("model_id"): str, # Device has to contain entities matching this selector vol.Optional("entity"): vol.All( cv.ensure_list, [ENTITY_FILTER_SELECTOR_CONFIG_SCHEMA] @@ -178,6 +180,7 @@ class DeviceFilterSelectorConfig(TypedDict, total=False): integration: str manufacturer: str model: str + model_id: str class ActionSelectorConfig(TypedDict): @@ -1133,7 +1136,7 @@ class SelectOptionDict(TypedDict): class SelectSelectorMode(StrEnum): - """Possible modes for a number selector.""" + """Possible modes for a select selector.""" LIST = "list" DROPDOWN = "dropdown" diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 7866250d658..70a94cfaaa9 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -6,12 +6,13 @@ from ast import literal_eval import asyncio import base64 import collections.abc -from collections.abc import Callable, Generator, Iterable +from collections.abc import Callable, Generator, Iterable, MutableSequence from contextlib import AbstractContextManager from contextvars import ContextVar from copy import deepcopy from datetime import date, datetime, time, timedelta from functools import cache, lru_cache, partial, wraps +import hashlib import json import logging import math @@ -1525,6 +1526,15 @@ def floor_areas(hass: HomeAssistant, floor_id_or_name: str) -> Iterable[str]: return [entry.id for entry 
in entries if entry.id] +def floor_entities(hass: HomeAssistant, floor_id_or_name: str) -> Iterable[str]: + """Return entity_ids for a given floor ID or name.""" + return [ + entity_id + for area_id in floor_areas(hass, floor_id_or_name) + for entity_id in area_entities(hass, area_id) + ] + + def areas(hass: HomeAssistant) -> Iterable[str | None]: """Return all areas.""" return list(area_registry.async_get(hass).areas) @@ -2727,6 +2737,144 @@ def iif( return if_false +def shuffle(*args: Any, seed: Any = None) -> MutableSequence[Any]: + """Shuffle a list, either with a seed or without.""" + if not args: + raise TypeError("shuffle expected at least 1 argument, got 0") + + # If first argument is iterable and more than 1 argument provided + # but not a named seed, then use 2nd argument as seed. + if isinstance(args[0], Iterable): + items = list(args[0]) + if len(args) > 1 and seed is None: + seed = args[1] + elif len(args) == 1: + raise TypeError(f"'{type(args[0]).__name__}' object is not iterable") + else: + items = list(args) + + if seed: + r = random.Random(seed) + r.shuffle(items) + else: + random.shuffle(items) + return items + + +def typeof(value: Any) -> Any: + """Return the type of value passed to debug types.""" + return value.__class__.__name__ + + +def flatten(value: Iterable[Any], levels: int | None = None) -> list[Any]: + """Flattens list of lists.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError(f"flatten expected a list, got {type(value).__name__}") + + flattened: list[Any] = [] + for item in value: + if isinstance(item, Iterable) and not isinstance(item, str): + if levels is None: + flattened.extend(flatten(item)) + elif levels >= 1: + flattened.extend(flatten(item, levels=(levels - 1))) + else: + flattened.append(item) + else: + flattened.append(item) + return flattened + + +def intersect(value: Iterable[Any], other: Iterable[Any]) -> list[Any]: + """Return the common elements between two lists.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError(f"intersect expected a list, got {type(value).__name__}") + if not isinstance(other, Iterable) or isinstance(other, str): + raise TypeError(f"intersect expected a list, got {type(other).__name__}") + + return list(set(value) & set(other)) + + +def difference(value: Iterable[Any], other: Iterable[Any]) -> list[Any]: + """Return elements in first list that are not in second list.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError(f"difference expected a list, got {type(value).__name__}") + if not isinstance(other, Iterable) or isinstance(other, str): + raise TypeError(f"difference expected a list, got {type(other).__name__}") + + return list(set(value) - set(other)) + + +def union(value: Iterable[Any], other: Iterable[Any]) -> list[Any]: + """Return all unique elements from both lists combined.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError(f"union expected a list, got {type(value).__name__}") + if not isinstance(other, Iterable) or isinstance(other, str): + raise TypeError(f"union expected a list, got {type(other).__name__}") + + return list(set(value) | set(other)) + + +def symmetric_difference(value: Iterable[Any], other: Iterable[Any]) -> list[Any]: + """Return elements that are in either list but not in both.""" + if not isinstance(value, Iterable) or isinstance(value, str): + raise TypeError( + f"symmetric_difference expected a list, got {type(value).__name__}" + ) + if not isinstance(other, 
Iterable) or isinstance(other, str): + raise TypeError( + f"symmetric_difference expected a list, got {type(other).__name__}" + ) + + return list(set(value) ^ set(other)) + + +def combine(*args: Any, recursive: bool = False) -> dict[Any, Any]: + """Combine multiple dictionaries into one.""" + if not args: + raise TypeError("combine expected at least 1 argument, got 0") + + result: dict[Any, Any] = {} + for arg in args: + if not isinstance(arg, dict): + raise TypeError(f"combine expected a dict, got {type(arg).__name__}") + + if recursive: + for key, value in arg.items(): + if ( + key in result + and isinstance(result[key], dict) + and isinstance(value, dict) + ): + result[key] = combine(result[key], value, recursive=True) + else: + result[key] = value + else: + result |= arg + + return result + + +def md5(value: str) -> str: + """Generate md5 hash from a string.""" + return hashlib.md5(value.encode()).hexdigest() + + +def sha1(value: str) -> str: + """Generate sha1 hash from a string.""" + return hashlib.sha1(value.encode()).hexdigest() + + +def sha256(value: str) -> str: + """Generate sha256 hash from a string.""" + return hashlib.sha256(value.encode()).hexdigest() + + +def sha512(value: str) -> str: + """Generate sha512 hash from a string.""" + return hashlib.sha512(value.encode()).hexdigest() + + class TemplateContextManager(AbstractContextManager): """Context manager to store template being parsed or rendered in a ContextVar.""" @@ -2879,100 +3027,127 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): str | jinja2.nodes.Template, CodeType | None ] = weakref.WeakValueDictionary() self.add_extension("jinja2.ext.loopcontrols") - self.filters["round"] = forgiving_round - self.filters["multiply"] = multiply - self.filters["add"] = add - self.filters["log"] = logarithm - self.filters["sin"] = sine - self.filters["cos"] = cosine - self.filters["tan"] = tangent - self.filters["asin"] = arc_sine - self.filters["acos"] = arc_cosine - self.filters["atan"] = arc_tangent - self.filters["atan2"] = arc_tangent2 - self.filters["sqrt"] = square_root - self.filters["as_datetime"] = as_datetime - self.filters["as_timedelta"] = as_timedelta - self.filters["as_timestamp"] = forgiving_as_timestamp - self.filters["as_local"] = dt_util.as_local - self.filters["timestamp_custom"] = timestamp_custom - self.filters["timestamp_local"] = timestamp_local - self.filters["timestamp_utc"] = timestamp_utc - self.filters["to_json"] = to_json - self.filters["from_json"] = from_json - self.filters["is_defined"] = fail_when_undefined - self.filters["average"] = average - self.filters["median"] = median - self.filters["statistical_mode"] = statistical_mode - self.filters["random"] = random_every_time - self.filters["base64_encode"] = base64_encode - self.filters["base64_decode"] = base64_decode - self.filters["ordinal"] = ordinal - self.filters["regex_match"] = regex_match - self.filters["regex_replace"] = regex_replace - self.filters["regex_search"] = regex_search - self.filters["regex_findall"] = regex_findall - self.filters["regex_findall_index"] = regex_findall_index - self.filters["bitwise_and"] = bitwise_and - self.filters["bitwise_or"] = bitwise_or - self.filters["bitwise_xor"] = bitwise_xor - self.filters["pack"] = struct_pack - self.filters["unpack"] = struct_unpack - self.filters["ord"] = ord - self.filters["is_number"] = is_number - self.filters["float"] = forgiving_float_filter - self.filters["int"] = forgiving_int_filter - self.filters["slugify"] = slugify - self.filters["iif"] = iif - 
self.filters["bool"] = forgiving_boolean - self.filters["version"] = version - self.filters["contains"] = contains - self.globals["log"] = logarithm - self.globals["sin"] = sine - self.globals["cos"] = cosine - self.globals["tan"] = tangent - self.globals["sqrt"] = square_root - self.globals["pi"] = math.pi - self.globals["tau"] = math.pi * 2 - self.globals["e"] = math.e - self.globals["asin"] = arc_sine + self.globals["acos"] = arc_cosine - self.globals["atan"] = arc_tangent - self.globals["atan2"] = arc_tangent2 - self.globals["float"] = forgiving_float self.globals["as_datetime"] = as_datetime self.globals["as_local"] = dt_util.as_local self.globals["as_timedelta"] = as_timedelta self.globals["as_timestamp"] = forgiving_as_timestamp - self.globals["timedelta"] = timedelta - self.globals["merge_response"] = merge_response - self.globals["strptime"] = strptime - self.globals["urlencode"] = urlencode + self.globals["asin"] = arc_sine + self.globals["atan"] = arc_tangent + self.globals["atan2"] = arc_tangent2 self.globals["average"] = average - self.globals["median"] = median - self.globals["statistical_mode"] = statistical_mode - self.globals["max"] = min_max_from_filter(self.filters["max"], "max") - self.globals["min"] = min_max_from_filter(self.filters["min"], "min") - self.globals["is_number"] = is_number - self.globals["set"] = _to_set - self.globals["tuple"] = _to_tuple - self.globals["int"] = forgiving_int - self.globals["pack"] = struct_pack - self.globals["unpack"] = struct_unpack - self.globals["slugify"] = slugify - self.globals["iif"] = iif self.globals["bool"] = forgiving_boolean + self.globals["combine"] = combine + self.globals["cos"] = cosine + self.globals["difference"] = difference + self.globals["e"] = math.e + self.globals["flatten"] = flatten + self.globals["float"] = forgiving_float + self.globals["iif"] = iif + self.globals["int"] = forgiving_int + self.globals["intersect"] = intersect + self.globals["is_number"] = is_number + self.globals["log"] = logarithm + self.globals["max"] = min_max_from_filter(self.filters["max"], "max") + self.globals["md5"] = md5 + self.globals["median"] = median + self.globals["merge_response"] = merge_response + self.globals["min"] = min_max_from_filter(self.filters["min"], "min") + self.globals["pack"] = struct_pack + self.globals["pi"] = math.pi + self.globals["set"] = _to_set + self.globals["sha1"] = sha1 + self.globals["sha256"] = sha256 + self.globals["sha512"] = sha512 + self.globals["shuffle"] = shuffle + self.globals["sin"] = sine + self.globals["slugify"] = slugify + self.globals["sqrt"] = square_root + self.globals["statistical_mode"] = statistical_mode + self.globals["strptime"] = strptime + self.globals["symmetric_difference"] = symmetric_difference + self.globals["tan"] = tangent + self.globals["tau"] = math.pi * 2 + self.globals["timedelta"] = timedelta + self.globals["tuple"] = _to_tuple + self.globals["typeof"] = typeof + self.globals["union"] = union + self.globals["unpack"] = struct_unpack + self.globals["urlencode"] = urlencode self.globals["version"] = version self.globals["zip"] = zip + + self.filters["acos"] = arc_cosine + self.filters["add"] = add + self.filters["as_datetime"] = as_datetime + self.filters["as_local"] = dt_util.as_local + self.filters["as_timedelta"] = as_timedelta + self.filters["as_timestamp"] = forgiving_as_timestamp + self.filters["asin"] = arc_sine + self.filters["atan"] = arc_tangent + self.filters["atan2"] = arc_tangent2 + self.filters["average"] = average + self.filters["base64_decode"] = 
base64_decode + self.filters["base64_encode"] = base64_encode + self.filters["bitwise_and"] = bitwise_and + self.filters["bitwise_or"] = bitwise_or + self.filters["bitwise_xor"] = bitwise_xor + self.filters["bool"] = forgiving_boolean + self.filters["combine"] = combine + self.filters["contains"] = contains + self.filters["cos"] = cosine + self.filters["difference"] = difference + self.filters["flatten"] = flatten + self.filters["float"] = forgiving_float_filter + self.filters["from_json"] = from_json + self.filters["iif"] = iif + self.filters["int"] = forgiving_int_filter + self.filters["intersect"] = intersect + self.filters["is_defined"] = fail_when_undefined + self.filters["is_number"] = is_number + self.filters["log"] = logarithm + self.filters["md5"] = md5 + self.filters["median"] = median + self.filters["multiply"] = multiply + self.filters["ord"] = ord + self.filters["ordinal"] = ordinal + self.filters["pack"] = struct_pack + self.filters["random"] = random_every_time + self.filters["regex_findall_index"] = regex_findall_index + self.filters["regex_findall"] = regex_findall + self.filters["regex_match"] = regex_match + self.filters["regex_replace"] = regex_replace + self.filters["regex_search"] = regex_search + self.filters["round"] = forgiving_round + self.filters["sha1"] = sha1 + self.filters["sha256"] = sha256 + self.filters["sha512"] = sha512 + self.filters["shuffle"] = shuffle + self.filters["sin"] = sine + self.filters["slugify"] = slugify + self.filters["sqrt"] = square_root + self.filters["statistical_mode"] = statistical_mode + self.filters["symmetric_difference"] = symmetric_difference + self.filters["tan"] = tangent + self.filters["timestamp_custom"] = timestamp_custom + self.filters["timestamp_local"] = timestamp_local + self.filters["timestamp_utc"] = timestamp_utc + self.filters["to_json"] = to_json + self.filters["typeof"] = typeof + self.filters["union"] = union + self.filters["unpack"] = struct_unpack + self.filters["version"] = version + + self.tests["contains"] = contains + self.tests["datetime"] = _is_datetime self.tests["is_number"] = is_number self.tests["list"] = _is_list - self.tests["set"] = _is_set - self.tests["tuple"] = _is_tuple - self.tests["datetime"] = _is_datetime - self.tests["string_like"] = _is_string_like self.tests["match"] = regex_match self.tests["search"] = regex_search - self.tests["contains"] = contains + self.tests["set"] = _is_set + self.tests["string_like"] = _is_string_like + self.tests["tuple"] = _is_tuple if hass is None: return @@ -2999,28 +3174,7 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): return jinja_context(wrapper) - self.globals["device_entities"] = hassfunction(device_entities) - self.filters["device_entities"] = self.globals["device_entities"] - - self.globals["device_attr"] = hassfunction(device_attr) - self.filters["device_attr"] = self.globals["device_attr"] - - self.globals["config_entry_attr"] = hassfunction(config_entry_attr) - self.filters["config_entry_attr"] = self.globals["config_entry_attr"] - - self.globals["is_device_attr"] = hassfunction(is_device_attr) - self.tests["is_device_attr"] = hassfunction(is_device_attr, pass_eval_context) - - self.globals["config_entry_id"] = hassfunction(config_entry_id) - self.filters["config_entry_id"] = self.globals["config_entry_id"] - - self.globals["device_id"] = hassfunction(device_id) - self.filters["device_id"] = self.globals["device_id"] - - self.globals["issues"] = hassfunction(issues) - - self.globals["issue"] = hassfunction(issue) - 
self.filters["issue"] = self.globals["issue"] + # Area extensions self.globals["areas"] = hassfunction(areas) @@ -3036,6 +3190,8 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["area_devices"] = hassfunction(area_devices) self.filters["area_devices"] = self.globals["area_devices"] + # Floor extensions + self.globals["floors"] = hassfunction(floors) self.filters["floors"] = self.globals["floors"] @@ -3048,9 +3204,38 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["floor_areas"] = hassfunction(floor_areas) self.filters["floor_areas"] = self.globals["floor_areas"] + self.globals["floor_entities"] = hassfunction(floor_entities) + self.filters["floor_entities"] = self.globals["floor_entities"] + + # Integration extensions + self.globals["integration_entities"] = hassfunction(integration_entities) self.filters["integration_entities"] = self.globals["integration_entities"] + # Config entry extensions + + self.globals["config_entry_attr"] = hassfunction(config_entry_attr) + self.filters["config_entry_attr"] = self.globals["config_entry_attr"] + + self.globals["config_entry_id"] = hassfunction(config_entry_id) + self.filters["config_entry_id"] = self.globals["config_entry_id"] + + # Device extensions + + self.globals["device_attr"] = hassfunction(device_attr) + self.filters["device_attr"] = self.globals["device_attr"] + + self.globals["device_entities"] = hassfunction(device_entities) + self.filters["device_entities"] = self.globals["device_entities"] + + self.globals["is_device_attr"] = hassfunction(is_device_attr) + self.tests["is_device_attr"] = hassfunction(is_device_attr, pass_eval_context) + + self.globals["device_id"] = hassfunction(device_id) + self.filters["device_id"] = self.globals["device_id"] + + # Label extensions + self.globals["labels"] = hassfunction(labels) self.filters["labels"] = self.globals["labels"] @@ -3069,6 +3254,12 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["label_entities"] = hassfunction(label_entities) self.filters["label_entities"] = self.globals["label_entities"] + # Issue extensions + + self.globals["issues"] = hassfunction(issues) + self.globals["issue"] = hassfunction(issue) + self.filters["issue"] = self.globals["issue"] + if limited: # Only device_entities is available to limited templates, mark other # functions and filters as unsupported. 
@@ -3081,38 +3272,38 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): return warn_unsupported hass_globals = [ - "closest", - "distance", - "expand", - "is_hidden_entity", - "is_state", - "is_state_attr", - "state_attr", - "states", - "state_translated", - "has_value", - "utcnow", - "now", - "device_attr", - "is_device_attr", - "device_id", "area_id", "area_name", + "closest", + "device_attr", + "device_id", + "distance", + "expand", "floor_id", "floor_name", + "has_value", + "is_device_attr", + "is_hidden_entity", + "is_state_attr", + "is_state", + "label_id", + "label_name", + "now", "relative_time", + "state_attr", + "state_translated", + "states", "time_since", "time_until", "today_at", - "label_id", - "label_name", + "utcnow", ] hass_filters = [ - "closest", - "expand", - "device_id", "area_id", "area_name", + "closest", + "device_id", + "expand", "floor_id", "floor_name", "has_value", @@ -3122,8 +3313,8 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): hass_tests = [ "has_value", "is_hidden_entity", - "is_state", "is_state_attr", + "is_state", ] for glob in hass_globals: self.globals[glob] = unsupported(glob) @@ -3133,38 +3324,46 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.filters[test] = unsupported(test) return - self.globals["expand"] = hassfunction(expand) - self.filters["expand"] = self.globals["expand"] self.globals["closest"] = hassfunction(closest) - self.filters["closest"] = hassfunction(closest_filter) self.globals["distance"] = hassfunction(distance) + self.globals["expand"] = hassfunction(expand) + self.globals["has_value"] = hassfunction(has_value) + self.globals["now"] = hassfunction(now) + self.globals["relative_time"] = hassfunction(relative_time) + self.globals["time_since"] = hassfunction(time_since) + self.globals["time_until"] = hassfunction(time_until) + self.globals["today_at"] = hassfunction(today_at) + self.globals["utcnow"] = hassfunction(utcnow) + + self.filters["closest"] = hassfunction(closest_filter) + self.filters["expand"] = self.globals["expand"] + self.filters["has_value"] = self.globals["has_value"] + self.filters["relative_time"] = self.globals["relative_time"] + self.filters["time_since"] = self.globals["time_since"] + self.filters["time_until"] = self.globals["time_until"] + self.filters["today_at"] = self.globals["today_at"] + + self.tests["has_value"] = hassfunction(has_value, pass_eval_context) + + # Entity extensions + self.globals["is_hidden_entity"] = hassfunction(is_hidden_entity) self.tests["is_hidden_entity"] = hassfunction( is_hidden_entity, pass_eval_context ) - self.globals["is_state"] = hassfunction(is_state) - self.tests["is_state"] = hassfunction(is_state, pass_eval_context) + + # State extensions + self.globals["is_state_attr"] = hassfunction(is_state_attr) - self.tests["is_state_attr"] = hassfunction(is_state_attr, pass_eval_context) + self.globals["is_state"] = hassfunction(is_state) self.globals["state_attr"] = hassfunction(state_attr) - self.filters["state_attr"] = self.globals["state_attr"] - self.globals["states"] = AllStates(hass) - self.filters["states"] = self.globals["states"] self.globals["state_translated"] = StateTranslated(hass) + self.globals["states"] = AllStates(hass) + self.filters["state_attr"] = self.globals["state_attr"] self.filters["state_translated"] = self.globals["state_translated"] - self.globals["has_value"] = hassfunction(has_value) - self.filters["has_value"] = self.globals["has_value"] - self.tests["has_value"] = hassfunction(has_value, pass_eval_context) - 
self.globals["utcnow"] = hassfunction(utcnow) - self.globals["now"] = hassfunction(now) - self.globals["relative_time"] = hassfunction(relative_time) - self.filters["relative_time"] = self.globals["relative_time"] - self.globals["time_since"] = hassfunction(time_since) - self.filters["time_since"] = self.globals["time_since"] - self.globals["time_until"] = hassfunction(time_until) - self.filters["time_until"] = self.globals["time_until"] - self.globals["today_at"] = hassfunction(today_at) - self.filters["today_at"] = self.globals["today_at"] + self.filters["states"] = self.globals["states"] + self.tests["is_state_attr"] = hassfunction(is_state_attr, pass_eval_context) + self.tests["is_state"] = hassfunction(is_state, pass_eval_context) def is_safe_callable(self, obj): """Test if callback is safe.""" diff --git a/homeassistant/helpers/trigger.py b/homeassistant/helpers/trigger.py index 67e9010df79..a27c85a5c58 100644 --- a/homeassistant/helpers/trigger.py +++ b/homeassistant/helpers/trigger.py @@ -265,18 +265,18 @@ def _trigger_action_wrapper( while isinstance(check_func, functools.partial): check_func = check_func.func - wrapper_func: Callable[..., None] | Callable[..., Coroutine[Any, Any, None]] + wrapper_func: Callable[..., Any] | Callable[..., Coroutine[Any, Any, Any]] if asyncio.iscoroutinefunction(check_func): - async_action = cast(Callable[..., Coroutine[Any, Any, None]], action) + async_action = cast(Callable[..., Coroutine[Any, Any, Any]], action) @functools.wraps(async_action) async def async_with_vars( run_variables: dict[str, Any], context: Context | None = None - ) -> None: + ) -> Any: """Wrap action with extra vars.""" trigger_variables = conf[CONF_VARIABLES] run_variables.update(trigger_variables.async_render(hass, run_variables)) - await action(run_variables, context) + return await action(run_variables, context) wrapper_func = async_with_vars @@ -285,11 +285,11 @@ def _trigger_action_wrapper( @functools.wraps(action) async def with_vars( run_variables: dict[str, Any], context: Context | None = None - ) -> None: + ) -> Any: """Wrap action with extra vars.""" trigger_variables = conf[CONF_VARIABLES] run_variables.update(trigger_variables.async_render(hass, run_variables)) - action(run_variables, context) + return action(run_variables, context) if is_callback(check_func): with_vars = callback(with_vars) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 3bc33f8374c..20763dc7b30 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -40,6 +40,8 @@ from .generated.ssdp import SSDP from .generated.usb import USB from .generated.zeroconf import HOMEKIT, ZEROCONF from .helpers.json import json_bytes, json_fragment +from .helpers.typing import UNDEFINED, UndefinedType +from .util.async_ import create_eager_task from .util.hass_dict import HassKey from .util.json import JSON_DECODE_EXCEPTIONS, json_loads @@ -758,10 +760,8 @@ class Integration: manifest["overwrites_built_in"] = self.overwrites_built_in if self.dependencies: - self._all_dependencies_resolved: bool | None = None - self._all_dependencies: set[str] | None = None + self._all_dependencies: set[str] | Exception | None = None else: - self._all_dependencies_resolved = True self._all_dependencies = set() self._platforms_to_preload = hass.data[DATA_PRELOAD_PLATFORMS] @@ -933,47 +933,25 @@ class Integration: """Return all dependencies including sub-dependencies.""" if self._all_dependencies is None: raise RuntimeError("Dependencies not resolved!") + if isinstance(self._all_dependencies, 
Exception): + raise self._all_dependencies return self._all_dependencies @property def all_dependencies_resolved(self) -> bool: """Return if all dependencies have been resolved.""" - return self._all_dependencies_resolved is not None + return self._all_dependencies is not None - async def resolve_dependencies(self) -> bool: + async def resolve_dependencies(self) -> set[str] | None: """Resolve all dependencies.""" - if self._all_dependencies_resolved is not None: - return self._all_dependencies_resolved + if self._all_dependencies is not None: + if isinstance(self._all_dependencies, Exception): + return None + return self._all_dependencies - self._all_dependencies_resolved = False - try: - dependencies = await _async_component_dependencies(self.hass, self) - except IntegrationNotFound as err: - _LOGGER.error( - ( - "Unable to resolve dependencies for %s: unable to resolve" - " (sub)dependency %s" - ), - self.domain, - err.domain, - ) - except CircularDependency as err: - _LOGGER.error( - ( - "Unable to resolve dependencies for %s: it contains a circular" - " dependency: %s -> %s" - ), - self.domain, - err.from_domain, - err.to_domain, - ) - else: - dependencies.discard(self.domain) - self._all_dependencies = dependencies - self._all_dependencies_resolved = True - - return self._all_dependencies_resolved + result = await resolve_integrations_dependencies(self.hass, (self,)) + return result.get(self.domain) async def async_get_component(self) -> ComponentProtocol: """Return the component. @@ -1441,6 +1419,189 @@ async def async_get_integrations( return results +class _ResolveDependenciesCacheProtocol(Protocol): + def get(self, itg: Integration) -> set[str] | Exception | None: ... + + def __setitem__( + self, itg: Integration, all_dependencies: set[str] | Exception + ) -> None: ... + + +class _ResolveDependenciesCache(_ResolveDependenciesCacheProtocol): + """Cache for resolve_integrations_dependencies.""" + + def get(self, itg: Integration) -> set[str] | Exception | None: + return itg._all_dependencies # noqa: SLF001 + + def __setitem__( + self, itg: Integration, all_dependencies: set[str] | Exception + ) -> None: + itg._all_dependencies = all_dependencies # noqa: SLF001 + + +async def resolve_integrations_dependencies( + hass: HomeAssistant, integrations: Iterable[Integration] +) -> dict[str, set[str]]: + """Resolve all dependencies for integrations. + + Detects circular dependencies and missing integrations. + """ + resolved = _ResolveDependenciesCache() + + async def _resolve_deps_catch_exceptions(itg: Integration) -> set[str] | None: + try: + return await _do_resolve_dependencies(itg, cache=resolved) + except Exception as exc: # noqa: BLE001 + _LOGGER.error("Unable to resolve dependencies for %s: %s", itg.domain, exc) + return None + + resolve_dependencies_tasks = { + itg.domain: create_eager_task( + _resolve_deps_catch_exceptions(itg), + name=f"resolve dependencies {itg.domain}", + loop=hass.loop, + ) + for itg in integrations + } + + result = await asyncio.gather(*resolve_dependencies_tasks.values()) + + return { + domain: deps + for domain, deps in zip(resolve_dependencies_tasks, result, strict=True) + if deps is not None + } + + +async def resolve_integrations_after_dependencies( + hass: HomeAssistant, + integrations: Iterable[Integration], + possible_after_dependencies: set[str] | None = None, + *, + ignore_exceptions: bool = False, +) -> dict[str, set[str]]: + """Resolve all dependencies, including after_dependencies, for integrations. 
+ + Detects circular dependencies and missing integrations. + """ + resolved: dict[Integration, set[str] | Exception] = {} + + async def _resolve_deps_catch_exceptions(itg: Integration) -> set[str] | None: + try: + return await _do_resolve_dependencies( + itg, + cache=resolved, + possible_after_dependencies=possible_after_dependencies, + ignore_exceptions=ignore_exceptions, + ) + except Exception as exc: # noqa: BLE001 + _LOGGER.error( + "Unable to resolve (after) dependencies for %s: %s", itg.domain, exc + ) + return None + + resolve_dependencies_tasks = { + itg.domain: create_eager_task( + _resolve_deps_catch_exceptions(itg), + name=f"resolve after dependencies {itg.domain}", + loop=hass.loop, + ) + for itg in integrations + } + + result = await asyncio.gather(*resolve_dependencies_tasks.values()) + + return { + domain: deps + for domain, deps in zip(resolve_dependencies_tasks, result, strict=True) + if deps is not None + } + + +async def _do_resolve_dependencies( + itg: Integration, + *, + cache: _ResolveDependenciesCacheProtocol, + possible_after_dependencies: set[str] | None | UndefinedType = UNDEFINED, + ignore_exceptions: bool = False, +) -> set[str]: + """Recursively resolve all dependencies. + + Uses `cache` to cache the results. + + If `possible_after_dependencies` is not UNDEFINED, + listed after dependencies are also considered. + If `possible_after_dependencies` is None, + all the possible after dependencies are considered. + + If `ignore_exceptions` is True, exceptions are caught and ignored + and the normal resolution algorithm continues. + Otherwise, exceptions are raised. + """ + resolved = cache + resolving: set[str] = set() + + async def do_resolve_dependencies_impl(itg: Integration) -> set[str]: + domain = itg.domain + + # If it's already resolved, no point doing it again. + if (result := resolved.get(itg)) is not None: + if isinstance(result, Exception): + raise result + return result + + # If we are already resolving it, we have a circular dependency. 
+ if domain in resolving: + if ignore_exceptions: + resolved[itg] = set() + return set() + exc = CircularDependency([domain]) + resolved[itg] = exc + raise exc + + resolving.add(domain) + + dependencies_domains = set(itg.dependencies) + if possible_after_dependencies is not UNDEFINED: + if possible_after_dependencies is None: + after_dependencies: Iterable[str] = itg.after_dependencies + else: + after_dependencies = ( + set(itg.after_dependencies) & possible_after_dependencies + ) + dependencies_domains.update(after_dependencies) + dependencies = await async_get_integrations(itg.hass, dependencies_domains) + + all_dependencies: set[str] = set() + for dep_domain, dep_integration in dependencies.items(): + if isinstance(dep_integration, Exception): + if ignore_exceptions: + continue + resolved[itg] = dep_integration + raise dep_integration + + all_dependencies.add(dep_domain) + + try: + dep_dependencies = await do_resolve_dependencies_impl(dep_integration) + except CircularDependency as exc: + exc.extend_cycle(domain) + resolved[itg] = exc + raise + except Exception as exc: + resolved[itg] = exc + raise + + all_dependencies.update(dep_dependencies) + + resolving.remove(domain) + + resolved[itg] = all_dependencies + return all_dependencies + + return await do_resolve_dependencies_impl(itg) + + class LoaderError(Exception): """Loader base error.""" @@ -1466,11 +1627,13 @@ class IntegrationNotLoaded(LoaderError): class CircularDependency(LoaderError): """Raised when a circular dependency is found when resolving components.""" - def __init__(self, from_domain: str | set[str], to_domain: str) -> None: + def __init__(self, domain_cycle: list[str]) -> None: """Initialize circular dependency error.""" - super().__init__(f"Circular dependency detected: {from_domain} -> {to_domain}.") - self.from_domain = from_domain - self.to_domain = to_domain + super().__init__("Circular dependency detected", domain_cycle) + + def extend_cycle(self, domain: str) -> None: + """Extend the cycle with the domain.""" + self.args[1].insert(0, domain) def _load_file( @@ -1624,50 +1787,6 @@ def bind_hass[_CallableT: Callable[..., Any]](func: _CallableT) -> _CallableT: return func -async def _async_component_dependencies( - hass: HomeAssistant, - integration: Integration, -) -> set[str]: - """Get component dependencies.""" - loading: set[str] = set() - loaded: set[str] = set() - - async def component_dependencies_impl(integration: Integration) -> None: - """Recursively get component dependencies.""" - domain = integration.domain - if not (dependencies := integration.dependencies): - loaded.add(domain) - return - - loading.add(domain) - dep_integrations = await async_get_integrations(hass, dependencies) - for dependency_domain, dep_integration in dep_integrations.items(): - if isinstance(dep_integration, Exception): - raise dep_integration - - # If we are already loading it, we have a circular dependency. - # We have to check it here to make sure that every integration that - # depends on us, does not appear in our own after_dependencies. - if conflict := loading.intersection(dep_integration.after_dependencies): - raise CircularDependency(conflict, dependency_domain) - - # If we have already loaded it, no point doing it again. - if dependency_domain in loaded: - continue - - # If we are already loading it, we have a circular dependency. 
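Illustrative aside before the deleted legacy resolver continues below: CircularDependency now carries the whole domain cycle as a list that grows while the exception unwinds through the recursive resolver added above, instead of a single from/to pair. A minimal sketch of that behaviour (the domain names are made up):

from homeassistant.loader import CircularDependency

# The frame that re-encounters a domain already being resolved seeds the cycle...
exc = CircularDependency(["light"])
# ...and each caller prepends its own domain via extend_cycle() while re-raising.
exc.extend_cycle("group")
exc.extend_cycle("light")
assert exc.args[1] == ["light", "group", "light"]

resolve_integrations_dependencies() catches these errors per integration, logs them, and omits the failed domain from the returned dict, so callers only ever see fully resolved dependency sets.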
- if dependency_domain in loading: - raise CircularDependency(dependency_domain, domain) - - await component_dependencies_impl(dep_integration) - loading.remove(domain) - loaded.add(domain) - - await component_dependencies_impl(integration) - - return loaded - - def _async_mount_config_dir(hass: HomeAssistant) -> None: """Mount config dir in order to load custom_component. diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 02d635007a5..658fdc55e21 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -6,13 +6,14 @@ aiodns==3.2.0 aiohasupervisor==0.3.0 aiohttp-asyncmdnsresolver==0.1.1 aiohttp-fast-zlib==0.2.3 -aiohttp==3.11.13 +aiohttp==3.11.16 aiohttp_cors==0.7.0 aiousbwatcher==1.1.1 aiozoneinfo==0.2.3 +annotatedyaml==0.4.5 astral==2.2 async-interrupt==1.2.2 -async-upnp-client==0.43.0 +async-upnp-client==0.44.0 atomicwrites-homeassistant==1.4.1 attrs==25.1.0 audioop-lts==0.2.1 @@ -22,29 +23,30 @@ bcrypt==4.2.0 bleak-retry-connector==3.9.0 bleak==0.22.3 bluetooth-adapters==0.21.4 -bluetooth-auto-recovery==1.4.4 -bluetooth-data-tools==1.23.4 -cached-ipaddress==0.9.2 +bluetooth-auto-recovery==1.4.5 +bluetooth-data-tools==1.26.5 +cached-ipaddress==0.10.0 certifi>=2021.5.30 ciso8601==2.3.2 cronsim==2.6 cryptography==44.0.1 -dbus-fast==2.33.0 -fnv-hash-fast==1.2.6 +dbus-fast==2.43.0 +fnv-hash-fast==1.4.0 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 -habluetooth==3.24.1 -hass-nabucasa==0.92.0 +habluetooth==3.37.0 +hass-nabucasa==0.94.0 hassil==2.2.3 home-assistant-bluetooth==1.13.1 -home-assistant-frontend==20250306.0 -home-assistant-intents==2025.3.5 +home-assistant-frontend==20250401.0 +home-assistant-intents==2025.3.28 httpx==0.28.1 ifaddr==0.2.0 Jinja2==3.1.6 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.12 +numpy==2.2.2 +orjson==3.10.16 packaging>=23.1 paho-mqtt==2.1.0 Pillow==11.1.0 @@ -61,19 +63,19 @@ PyTurboJPEG==1.7.5 PyYAML==6.0.2 requests==2.32.3 securetar==2025.2.1 -SQLAlchemy==2.0.38 +SQLAlchemy==2.0.39 standard-aifc==3.13.0 standard-telnetlib==3.13.0 -typing-extensions>=4.12.2,<5.0 -ulid-transform==1.2.1 +typing-extensions>=4.13.0,<5.0 +ulid-transform==1.4.0 urllib3>=1.26.5,<2 -uv==0.6.1 +uv==0.6.10 voluptuous-openapi==0.0.6 voluptuous-serialize==2.6.0 voluptuous==0.15.2 webrtc-models==0.3.0 yarl==1.18.3 -zeroconf==0.145.1 +zeroconf==0.146.0 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 @@ -108,7 +110,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. 
-anyio==4.8.0 +anyio==4.9.0 h11==0.14.0 httpcore==1.0.7 diff --git a/homeassistant/scripts/check_config.py b/homeassistant/scripts/check_config.py index a24568e9a6f..ca3df5080b5 100644 --- a/homeassistant/scripts/check_config.py +++ b/homeassistant/scripts/check_config.py @@ -12,6 +12,9 @@ import os from typing import Any from unittest.mock import patch +from annotatedyaml import loader as yaml_loader +from annotatedyaml.loader import Secrets + from homeassistant import core, loader from homeassistant.config import get_default_config_dir from homeassistant.config_entries import ConfigEntries @@ -23,7 +26,6 @@ from homeassistant.helpers import ( issue_registry as ir, ) from homeassistant.helpers.check_config import async_check_ha_config_file -from homeassistant.util.yaml import Secrets, loader as yaml_loader # mypy: allow-untyped-calls, allow-untyped-defs @@ -31,9 +33,9 @@ REQUIREMENTS = ("colorlog==6.8.2",) _LOGGER = logging.getLogger(__name__) MOCKS: dict[str, tuple[str, Callable]] = { - "load": ("homeassistant.util.yaml.loader.load_yaml", yaml_loader.load_yaml), + "load": ("annotatedyaml.loader.load_yaml", yaml_loader.load_yaml), "load*": ("homeassistant.config.load_yaml_dict", yaml_loader.load_yaml_dict), - "secrets": ("homeassistant.util.yaml.loader.secret_yaml", yaml_loader.secret_yaml), + "secrets": ("annotatedyaml.loader.secret_yaml", yaml_loader.secret_yaml), } PATCHES: dict[str, Any] = {} diff --git a/homeassistant/setup.py b/homeassistant/setup.py index dc4d0988b91..334e3a9e074 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -323,7 +323,7 @@ async def _async_setup_component( translation.async_load_integrations(hass, integration_set), loop=hass.loop ) # Validate all dependencies exist and there are no circular dependencies - if not await integration.resolve_dependencies(): + if await integration.resolve_dependencies() is None: return False # Process requirements as soon as possible, so we can import the component @@ -387,7 +387,7 @@ async def _async_setup_component( }, ) - _LOGGER.info("Setting up %s", domain) + _LOGGER.debug("Setting up %s", domain) with async_start_setup(hass, integration=domain, phase=SetupPhases.SETUP): if hasattr(component, "PLATFORM_SCHEMA"): @@ -783,7 +783,7 @@ def async_start_setup( # platforms, but we only care about the longest time. 
group_setup_times[phase] = max(group_setup_times[phase], time_taken) if group is None: - _LOGGER.info( + _LOGGER.debug( "Setup of domain %s took %.2f seconds", integration, time_taken ) elif _LOGGER.isEnabledFor(logging.DEBUG): diff --git a/homeassistant/strings.json b/homeassistant/strings.json index 29b7db7a011..dd3caa1ff51 100644 --- a/homeassistant/strings.json +++ b/homeassistant/strings.json @@ -47,6 +47,7 @@ "access_token": "Access token", "api_key": "API key", "api_token": "API token", + "country": "Country", "device": "Device", "elevation": "Elevation", "email": "Email", diff --git a/homeassistant/util/frozen_dataclass_compat.py b/homeassistant/util/frozen_dataclass_compat.py index 81ce9961a0b..518515d4f85 100644 --- a/homeassistant/util/frozen_dataclass_compat.py +++ b/homeassistant/util/frozen_dataclass_compat.py @@ -63,7 +63,7 @@ class FrozenOrThawed(type): ) def __new__( - mcs, # noqa: N804 ruff bug, ruff does not understand this is a metaclass + mcs, name: str, bases: tuple[type, ...], namespace: dict[Any, Any], diff --git a/homeassistant/util/logging.py b/homeassistant/util/logging.py index 2c4eb744614..d5dfab7da6c 100644 --- a/homeassistant/util/logging.py +++ b/homeassistant/util/logging.py @@ -2,14 +2,16 @@ from __future__ import annotations +from collections import defaultdict from collections.abc import Callable, Coroutine from functools import partial, wraps import inspect import logging import logging.handlers -import queue +from queue import SimpleQueue +import time import traceback -from typing import Any, cast, overload +from typing import Any, cast, overload, override from homeassistant.core import ( HassJobType, @@ -18,6 +20,76 @@ from homeassistant.core import ( get_hassjob_callable_job_type, ) +_LOGGER = logging.getLogger(__name__) + + +class HomeAssistantQueueListener(logging.handlers.QueueListener): + """Custom QueueListener to watch for noisy loggers.""" + + LOG_COUNTS_RESET_INTERVAL = 300 + MAX_LOGS_COUNT = 200 + + EXCLUDED_LOG_COUNT_MODULES = [ + "homeassistant.components.automation", + "homeassistant.components.script", + "homeassistant.setup", + "homeassistant.util.logging", + ] + + _last_reset: float + _log_counts: dict[str, int] + + def __init__( + self, queue: SimpleQueue[logging.Handler], *handlers: logging.Handler + ) -> None: + """Initialize the handler.""" + super().__init__(queue, *handlers) + self._module_log_count_skip_flags: dict[str, bool] = {} + self._reset_counters(time.time()) + + @override + def handle(self, record: logging.LogRecord) -> None: + """Handle the record.""" + super().handle(record) + + if record.levelno < logging.INFO: + return + + if (record.created - self._last_reset) > self.LOG_COUNTS_RESET_INTERVAL: + self._reset_counters(record.created) + + module_name = record.name + + if skip_flag := self._module_log_count_skip_flags.get(module_name): + return + + if skip_flag is None and self._update_skip_flags(module_name): + return + + self._log_counts[module_name] += 1 + module_count = self._log_counts[module_name] + if module_count < self.MAX_LOGS_COUNT: + return + + _LOGGER.warning( + "Module %s is logging too frequently. 
%d messages since last count", + module_name, + module_count, + ) + self._module_log_count_skip_flags[module_name] = True + + def _reset_counters(self, time_sec: float) -> None: + _LOGGER.debug("Resetting log counters") + self._last_reset = time_sec + self._log_counts = defaultdict(int) + + def _update_skip_flags(self, module_name: str) -> bool: + excluded = any( + module_name.startswith(prefix) for prefix in self.EXCLUDED_LOG_COUNT_MODULES + ) + self._module_log_count_skip_flags[module_name] = excluded + return excluded + class HomeAssistantQueueHandler(logging.handlers.QueueHandler): """Process the log in another thread.""" @@ -60,7 +132,7 @@ def async_activate_log_queue_handler(hass: HomeAssistant) -> None: This allows us to avoid blocking I/O and formatting messages in the event loop as log messages are written in another thread. """ - simple_queue: queue.SimpleQueue[logging.Handler] = queue.SimpleQueue() + simple_queue: SimpleQueue[logging.Handler] = SimpleQueue() queue_handler = HomeAssistantQueueHandler(simple_queue) logging.root.addHandler(queue_handler) @@ -71,7 +143,7 @@ def async_activate_log_queue_handler(hass: HomeAssistant) -> None: logging.root.removeHandler(handler) migrated_handlers.append(handler) - listener = logging.handlers.QueueListener(simple_queue, *migrated_handlers) + listener = HomeAssistantQueueListener(simple_queue, *migrated_handlers) queue_handler.listener = listener listener.start() diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 67258c9cd09..f2619c5dd61 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable from functools import lru_cache +from math import floor, log10 from homeassistant.const import ( CONCENTRATION_PARTS_PER_BILLION, @@ -144,6 +145,15 @@ class BaseUnitConverter: from_ratio, to_ratio = cls._get_from_to_ratio(from_unit, to_unit) return from_ratio / to_ratio + @classmethod + @lru_cache + def get_unit_floored_log_ratio( + cls, from_unit: str | None, to_unit: str | None + ) -> float: + """Get floored base10 log ratio between units of measurement.""" + from_ratio, to_ratio = cls._get_from_to_ratio(from_unit, to_unit) + return floor(max(0, log10(from_ratio / to_ratio))) + @classmethod @lru_cache def _are_unit_inverses(cls, from_unit: str | None, to_unit: str | None) -> bool: diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index 15993cbae47..055f435503f 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import dataclass from numbers import Number from typing import TYPE_CHECKING, Final @@ -82,9 +83,21 @@ def _is_valid_unit(unit: str, unit_type: str) -> bool: return False +@dataclass(frozen=True, kw_only=True) class UnitSystem: """A container for units of measure.""" + _name: str + accumulated_precipitation_unit: UnitOfPrecipitationDepth + area_unit: UnitOfArea + length_unit: UnitOfLength + mass_unit: UnitOfMass + pressure_unit: UnitOfPressure + temperature_unit: UnitOfTemperature + volume_unit: UnitOfVolume + wind_speed_unit: UnitOfSpeed + _conversions: dict[tuple[SensorDeviceClass | str | None, str | None], str] + def __init__( self, name: str, @@ -118,16 +131,16 @@ class UnitSystem: if errors: raise ValueError(errors) - self._name = name - self.accumulated_precipitation_unit = accumulated_precipitation - self.area_unit = area - 
self.length_unit = length - self.mass_unit = mass - self.pressure_unit = pressure - self.temperature_unit = temperature - self.volume_unit = volume - self.wind_speed_unit = wind_speed - self._conversions = conversions + super().__setattr__("_name", name) + super().__setattr__("accumulated_precipitation_unit", accumulated_precipitation) + super().__setattr__("area_unit", area) + super().__setattr__("length_unit", length) + super().__setattr__("mass_unit", mass) + super().__setattr__("pressure_unit", pressure) + super().__setattr__("temperature_unit", temperature) + super().__setattr__("volume_unit", volume) + super().__setattr__("wind_speed_unit", wind_speed) + super().__setattr__("_conversions", conversions) def temperature(self, temperature: float, from_unit: str) -> float: """Convert the given temperature to this unit system.""" diff --git a/homeassistant/util/yaml/__init__.py b/homeassistant/util/yaml/__init__.py index 3b1f5c4cc0a..323383ef53f 100644 --- a/homeassistant/util/yaml/__init__.py +++ b/homeassistant/util/yaml/__init__.py @@ -1,17 +1,10 @@ """YAML utility functions.""" -from .const import SECRET_YAML +from annotatedyaml import SECRET_YAML, Input, YamlTypeError +from annotatedyaml.input import UndefinedSubstitution, extract_inputs, substitute + from .dumper import dump, save_yaml -from .input import UndefinedSubstitution, extract_inputs, substitute -from .loader import ( - Secrets, - YamlTypeError, - load_yaml, - load_yaml_dict, - parse_yaml, - secret_yaml, -) -from .objects import Input +from .loader import Secrets, load_yaml, load_yaml_dict, parse_yaml, secret_yaml __all__ = [ "SECRET_YAML", diff --git a/homeassistant/util/yaml/const.py b/homeassistant/util/yaml/const.py deleted file mode 100644 index 811c7d149f7..00000000000 --- a/homeassistant/util/yaml/const.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Constants.""" - -SECRET_YAML = "secrets.yaml" diff --git a/homeassistant/util/yaml/dumper.py b/homeassistant/util/yaml/dumper.py index 61772b6989d..059be2c1c5b 100644 --- a/homeassistant/util/yaml/dumper.py +++ b/homeassistant/util/yaml/dumper.py @@ -1,96 +1,5 @@ """Custom dumper and representers.""" -from collections import OrderedDict -from typing import Any +from annotatedyaml.dumper import add_representer, dump, represent_odict, save_yaml -import yaml - -from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass - -# mypy: allow-untyped-calls, no-warn-return-any - - -try: - from yaml import CSafeDumper as FastestAvailableSafeDumper -except ImportError: - from yaml import ( # type: ignore[assignment] - SafeDumper as FastestAvailableSafeDumper, - ) - - -def dump(_dict: dict | list) -> str: - """Dump YAML to a string and remove null.""" - return yaml.dump( - _dict, - default_flow_style=False, - allow_unicode=True, - sort_keys=False, - Dumper=FastestAvailableSafeDumper, - ).replace(": null\n", ":\n") - - -def save_yaml(path: str, data: dict) -> None: - """Save YAML to a file.""" - # Dump before writing to not truncate the file if dumping fails - str_data = dump(data) - with open(path, "w", encoding="utf-8") as outfile: - outfile.write(str_data) - - -# From: https://gist.github.com/miracle2k/3184458 -def represent_odict( # type: ignore[no-untyped-def] - dumper, tag, mapping, flow_style=None -) -> yaml.MappingNode: - """Like BaseRepresenter.represent_mapping but does not issue the sort().""" - value: list = [] - node = yaml.MappingNode(tag, value, flow_style=flow_style) - if dumper.alias_key is not None: - dumper.represented_objects[dumper.alias_key] = node - best_style = 
True - if hasattr(mapping, "items"): - mapping = mapping.items() - for item_key, item_value in mapping: - node_key = dumper.represent_data(item_key) - node_value = dumper.represent_data(item_value) - if not (isinstance(node_key, yaml.ScalarNode) and not node_key.style): - best_style = False - if not (isinstance(node_value, yaml.ScalarNode) and not node_value.style): - best_style = False - value.append((node_key, node_value)) - if flow_style is None: - if dumper.default_flow_style is not None: - node.flow_style = dumper.default_flow_style - else: - node.flow_style = best_style - return node - - -def add_representer(klass: Any, representer: Any) -> None: - """Add to representer to the dumper.""" - FastestAvailableSafeDumper.add_representer(klass, representer) - - -add_representer( - OrderedDict, - lambda dumper, value: represent_odict(dumper, "tag:yaml.org,2002:map", value), -) - -add_representer( - NodeDictClass, - lambda dumper, value: represent_odict(dumper, "tag:yaml.org,2002:map", value), -) - -add_representer( - NodeListClass, - lambda dumper, value: dumper.represent_sequence("tag:yaml.org,2002:seq", value), -) - -add_representer( - NodeStrClass, - lambda dumper, value: dumper.represent_scalar("tag:yaml.org,2002:str", str(value)), -) - -add_representer( - Input, - lambda dumper, value: dumper.represent_scalar("!input", value.name), -) +__all__ = ["add_representer", "dump", "represent_odict", "save_yaml"] diff --git a/homeassistant/util/yaml/input.py b/homeassistant/util/yaml/input.py index ff9b37f18f1..5dad8a63ae5 100644 --- a/homeassistant/util/yaml/input.py +++ b/homeassistant/util/yaml/input.py @@ -2,55 +2,8 @@ from __future__ import annotations -from typing import Any +from annotatedyaml.input import UndefinedSubstitution, extract_inputs, substitute from .objects import Input - -class UndefinedSubstitution(Exception): - """Error raised when we find a substitution that is not defined.""" - - def __init__(self, input_name: str) -> None: - """Initialize the undefined substitution exception.""" - super().__init__(f"No substitution found for input {input_name}") - self.input = input - - -def extract_inputs(obj: Any) -> set[str]: - """Extract input from a structure.""" - found: set[str] = set() - _extract_inputs(obj, found) - return found - - -def _extract_inputs(obj: Any, found: set[str]) -> None: - """Extract input from a structure.""" - if isinstance(obj, Input): - found.add(obj.name) - return - - if isinstance(obj, list): - for val in obj: - _extract_inputs(val, found) - return - - if isinstance(obj, dict): - for val in obj.values(): - _extract_inputs(val, found) - return - - -def substitute(obj: Any, substitutions: dict[str, Any]) -> Any: - """Substitute values.""" - if isinstance(obj, Input): - if obj.name not in substitutions: - raise UndefinedSubstitution(obj.name) - return substitutions[obj.name] - - if isinstance(obj, list): - return [substitute(val, substitutions) for val in obj] - - if isinstance(obj, dict): - return {key: substitute(val, substitutions) for key, val in obj.items()} - - return obj +__all__ = ["Input", "UndefinedSubstitution", "extract_inputs", "substitute"] diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index 3911d62040b..1f8338a1ff7 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -2,157 +2,37 @@ from __future__ import annotations -from collections.abc import Callable, Iterator -import fnmatch -from io import StringIO, TextIOWrapper -import logging +from io import StringIO import os 
-from pathlib import Path -from typing import Any, TextIO, overload +from typing import TextIO +from annotatedyaml import YAMLException, YamlTypeError +from annotatedyaml.loader import ( + HAS_C_LOADER, + JSON_TYPE, + LoaderType, + Secrets, + add_constructor, + load_yaml as load_annotated_yaml, + load_yaml_dict as load_annotated_yaml_dict, + parse_yaml as parse_annotated_yaml, + secret_yaml as annotated_secret_yaml, +) import yaml -try: - from yaml import CSafeLoader as FastestAvailableSafeLoader - - HAS_C_LOADER = True -except ImportError: - HAS_C_LOADER = False - from yaml import ( # type: ignore[assignment] - SafeLoader as FastestAvailableSafeLoader, - ) - -from propcache.api import cached_property - from homeassistant.exceptions import HomeAssistantError -from .const import SECRET_YAML -from .objects import Input, NodeDictClass, NodeListClass, NodeStrClass - -# mypy: allow-untyped-calls, no-warn-return-any - -JSON_TYPE = list | dict | str - -_LOGGER = logging.getLogger(__name__) - - -class YamlTypeError(HomeAssistantError): - """Raised by load_yaml_dict if top level data is not a dict.""" - - -class Secrets: - """Store secrets while loading YAML.""" - - def __init__(self, config_dir: Path) -> None: - """Initialize secrets.""" - self.config_dir = config_dir - self._cache: dict[Path, dict[str, str]] = {} - - def get(self, requester_path: str, secret: str) -> str: - """Return the value of a secret.""" - current_path = Path(requester_path) - - secret_dir = current_path - while True: - secret_dir = secret_dir.parent - - try: - secret_dir.relative_to(self.config_dir) - except ValueError: - # We went above the config dir - break - - secrets = self._load_secret_yaml(secret_dir) - - if secret in secrets: - _LOGGER.debug( - "Secret %s retrieved from secrets.yaml in folder %s", - secret, - secret_dir, - ) - return secrets[secret] - - raise HomeAssistantError(f"Secret {secret} not defined") - - def _load_secret_yaml(self, secret_dir: Path) -> dict[str, str]: - """Load the secrets yaml from path.""" - if (secret_path := secret_dir / SECRET_YAML) in self._cache: - return self._cache[secret_path] - - _LOGGER.debug("Loading %s", secret_path) - try: - secrets = load_yaml(str(secret_path)) - - if not isinstance(secrets, dict): - raise HomeAssistantError("Secrets is not a dictionary") - - if "logger" in secrets: - logger = str(secrets["logger"]).lower() - if logger == "debug": - _LOGGER.setLevel(logging.DEBUG) - else: - _LOGGER.error( - ( - "Error in secrets.yaml: 'logger: debug' expected, but" - " 'logger: %s' found" - ), - logger, - ) - del secrets["logger"] - except FileNotFoundError: - secrets = {} - - self._cache[secret_path] = secrets - - return secrets - - -class _LoaderMixin: - """Mixin class with extensions for YAML loader.""" - - name: str - stream: Any - - @cached_property - def get_name(self) -> str: - """Get the name of the loader.""" - return self.name - - @cached_property - def get_stream_name(self) -> str: - """Get the name of the stream.""" - return getattr(self.stream, "name", "") - - -class FastSafeLoader(FastestAvailableSafeLoader, _LoaderMixin): - """The fastest available safe loader, either C or Python.""" - - def __init__(self, stream: Any, secrets: Secrets | None = None) -> None: - """Initialize a safe line loader.""" - self.stream = stream - - # Set name in same way as the Python loader does in yaml.reader.__init__ - if isinstance(stream, str): - self.name = "" - elif isinstance(stream, bytes): - self.name = "" - else: - self.name = getattr(stream, "name", "") - - 
super().__init__(stream) - self.secrets = secrets - - -class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): - """Python safe loader.""" - - def __init__(self, stream: Any, secrets: Secrets | None = None) -> None: - """Initialize a safe line loader.""" - super().__init__(stream) - self.secrets = secrets - - -type LoaderType = FastSafeLoader | PythonSafeLoader +__all__ = [ + "HAS_C_LOADER", + "JSON_TYPE", + "Secrets", + "YamlTypeError", + "add_constructor", + "load_yaml", + "load_yaml_dict", + "parse_yaml", + "secret_yaml", +] def load_yaml( @@ -164,15 +44,9 @@ def load_yaml( except for FileNotFoundError which will be re-raised. """ try: - with open(fname, encoding="utf-8") as conf_file: - return parse_yaml(conf_file, secrets) - except UnicodeDecodeError as exc: - _LOGGER.error("Unable to read file %s: %s", fname, exc) - raise HomeAssistantError(exc) from exc - except FileNotFoundError: - raise - except OSError as exc: - raise HomeAssistantError(exc) from exc + return load_annotated_yaml(fname, secrets) + except YAMLException as exc: + raise HomeAssistantError(str(exc)) from exc def load_yaml_dict( @@ -183,320 +57,27 @@ def load_yaml_dict( Raise if the top level is not a dict. Return an empty dict if the file is empty. """ - loaded_yaml = load_yaml(fname, secrets) - if loaded_yaml is None: - loaded_yaml = {} - if not isinstance(loaded_yaml, dict): - raise YamlTypeError(f"YAML file {fname} does not contain a dict") - return loaded_yaml + try: + return load_annotated_yaml_dict(fname, secrets) + except YamlTypeError: + raise + except YAMLException as exc: + raise HomeAssistantError(str(exc)) from exc def parse_yaml( content: str | TextIO | StringIO, secrets: Secrets | None = None ) -> JSON_TYPE: """Parse YAML with the fastest available loader.""" - if not HAS_C_LOADER: - return _parse_yaml_python(content, secrets) try: - return _parse_yaml(FastSafeLoader, content, secrets) - except yaml.YAMLError: - # Loading failed, so we now load with the Python loader which has more - # readable exceptions - if isinstance(content, (StringIO, TextIO, TextIOWrapper)): - # Rewind the stream so we can try again - content.seek(0, 0) - return _parse_yaml_python(content, secrets) - - -def _parse_yaml_python( - content: str | TextIO | StringIO, secrets: Secrets | None = None -) -> JSON_TYPE: - """Parse YAML with the python loader (this is very slow).""" - try: - return _parse_yaml(PythonSafeLoader, content, secrets) - except yaml.YAMLError as exc: - _LOGGER.error(str(exc)) - raise HomeAssistantError(exc) from exc - - -def _parse_yaml( - loader: type[FastSafeLoader | PythonSafeLoader], - content: str | TextIO, - secrets: Secrets | None = None, -) -> JSON_TYPE: - """Load a YAML file.""" - return yaml.load(content, Loader=lambda stream: loader(stream, secrets)) # type: ignore[arg-type] - - -@overload -def _add_reference( - obj: list | NodeListClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeListClass: ... - - -@overload -def _add_reference( - obj: str | NodeStrClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeStrClass: ... - - -@overload -def _add_reference( - obj: dict | NodeDictClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeDictClass: ... 
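Illustrative aside (the removed legacy helpers continue below): load_yaml and load_yaml_dict, and further below parse_yaml and secret_yaml, are now thin wrappers around annotatedyaml that translate its YAMLException into HomeAssistantError, while YamlTypeError is re-raised unchanged and FileNotFoundError is expected to keep propagating for missing files, as before. A hedged usage sketch with a hypothetical path:

from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.yaml import load_yaml_dict

try:
    config = load_yaml_dict("/config/configuration.yaml")  # hypothetical path
except FileNotFoundError:
    config = {}  # missing files still surface as FileNotFoundError
except HomeAssistantError as err:
    # any annotatedyaml YAMLException (syntax or structure errors) arrives here
    raise SystemExit(f"Invalid YAML: {err}") from err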
- - -def _add_reference( - obj: dict | list | str | NodeDictClass | NodeListClass | NodeStrClass, - loader: LoaderType, - node: yaml.nodes.Node, -) -> NodeDictClass | NodeListClass | NodeStrClass: - """Add file reference information to an object.""" - if isinstance(obj, list): - obj = NodeListClass(obj) - elif isinstance(obj, str): - obj = NodeStrClass(obj) - elif isinstance(obj, dict): - obj = NodeDictClass(obj) - return _add_reference_to_node_class(obj, loader, node) - - -@overload -def _add_reference_to_node_class( - obj: NodeListClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeListClass: ... - - -@overload -def _add_reference_to_node_class( - obj: NodeStrClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeStrClass: ... - - -@overload -def _add_reference_to_node_class( - obj: NodeDictClass, loader: LoaderType, node: yaml.nodes.Node -) -> NodeDictClass: ... - - -def _add_reference_to_node_class( - obj: NodeDictClass | NodeListClass | NodeStrClass, - loader: LoaderType, - node: yaml.nodes.Node, -) -> NodeDictClass | NodeListClass | NodeStrClass: - """Add file reference information to a node class object.""" - try: # suppress is much slower - obj.__config_file__ = loader.get_name - obj.__line__ = node.start_mark.line + 1 - except AttributeError: - pass - return obj - - -def _raise_if_no_value[NodeT: yaml.nodes.Node, _R]( - func: Callable[[LoaderType, NodeT], _R], -) -> Callable[[LoaderType, NodeT], _R]: - def wrapper(loader: LoaderType, node: NodeT) -> _R: - if not node.value: - raise HomeAssistantError( - f"{node.start_mark}: {node.tag} needs an argument." - ) - return func(loader, node) - - return wrapper - - -@_raise_if_no_value -def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: - """Load another YAML file and embed it using the !include tag. 
- - Example: - device_tracker: !include device_tracker.yaml - - """ - fname = os.path.join(os.path.dirname(loader.get_name), node.value) - try: - loaded_yaml = load_yaml(fname, loader.secrets) - if loaded_yaml is None: - loaded_yaml = NodeDictClass() - return _add_reference(loaded_yaml, loader, node) - except FileNotFoundError as exc: - raise HomeAssistantError( - f"{node.start_mark}: Unable to read file {fname}" - ) from exc - - -def _is_file_valid(name: str) -> bool: - """Decide if a file is valid.""" - return not name.startswith(".") - - -def _find_files(directory: str, pattern: str) -> Iterator[str]: - """Recursively load files in a directory.""" - for root, dirs, files in os.walk(directory, topdown=True): - dirs[:] = [d for d in dirs if _is_file_valid(d)] - for basename in sorted(files): - if _is_file_valid(basename) and fnmatch.fnmatch(basename, pattern): - filename = os.path.join(root, basename) - yield filename - - -@_raise_if_no_value -def _include_dir_named_yaml(loader: LoaderType, node: yaml.nodes.Node) -> NodeDictClass: - """Load multiple files from directory as a dictionary.""" - mapping = NodeDictClass() - loc = os.path.join(os.path.dirname(loader.get_name), node.value) - for fname in _find_files(loc, "*.yaml"): - filename = os.path.splitext(os.path.basename(fname))[0] - if os.path.basename(fname) == SECRET_YAML: - continue - loaded_yaml = load_yaml(fname, loader.secrets) - if loaded_yaml is None: - # Special case, an empty file included by !include_dir_named is treated - # as an empty dictionary - loaded_yaml = NodeDictClass() - mapping[filename] = loaded_yaml - return _add_reference_to_node_class(mapping, loader, node) - - -@_raise_if_no_value -def _include_dir_merge_named_yaml( - loader: LoaderType, node: yaml.nodes.Node -) -> NodeDictClass: - """Load multiple files from directory as a merged dictionary.""" - mapping = NodeDictClass() - loc = os.path.join(os.path.dirname(loader.get_name), node.value) - for fname in _find_files(loc, "*.yaml"): - if os.path.basename(fname) == SECRET_YAML: - continue - loaded_yaml = load_yaml(fname, loader.secrets) - if isinstance(loaded_yaml, dict): - mapping.update(loaded_yaml) - return _add_reference_to_node_class(mapping, loader, node) - - -@_raise_if_no_value -def _include_dir_list_yaml( - loader: LoaderType, node: yaml.nodes.Node -) -> list[JSON_TYPE]: - """Load multiple files from directory as a list.""" - loc = os.path.join(os.path.dirname(loader.get_name), node.value) - return [ - loaded_yaml - for f in _find_files(loc, "*.yaml") - if os.path.basename(f) != SECRET_YAML - and (loaded_yaml := load_yaml(f, loader.secrets)) is not None - ] - - -@_raise_if_no_value -def _include_dir_merge_list_yaml( - loader: LoaderType, node: yaml.nodes.Node -) -> JSON_TYPE: - """Load multiple files from directory as a merged list.""" - loc: str = os.path.join(os.path.dirname(loader.get_name), node.value) - merged_list: list[JSON_TYPE] = [] - for fname in _find_files(loc, "*.yaml"): - if os.path.basename(fname) == SECRET_YAML: - continue - loaded_yaml = load_yaml(fname, loader.secrets) - if isinstance(loaded_yaml, list): - merged_list.extend(loaded_yaml) - return _add_reference(merged_list, loader, node) - - -def _handle_mapping_tag( - loader: LoaderType, node: yaml.nodes.MappingNode -) -> NodeDictClass: - """Load YAML mappings into an ordered dictionary to preserve key order.""" - loader.flatten_mapping(node) - nodes = loader.construct_pairs(node) - - seen: dict = {} - for (key, _), (child_node, _) in zip(nodes, node.value, strict=False): - line = 
child_node.start_mark.line - - try: - hash(key) - except TypeError as exc: - fname = loader.get_stream_name - raise yaml.MarkedYAMLError( - context=f'invalid key: "{key}"', - context_mark=yaml.Mark( - fname, - 0, - line, - -1, - None, - None, # type: ignore[arg-type] - ), - ) from exc - - if key in seen: - fname = loader.get_stream_name - _LOGGER.warning( - 'YAML file %s contains duplicate key "%s". Check lines %d and %d', - fname, - key, - seen[key], - line, - ) - seen[key] = line - - return _add_reference_to_node_class(NodeDictClass(nodes), loader, node) - - -def _construct_seq(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: - """Add line number and file name to Load YAML sequence.""" - (obj,) = loader.construct_yaml_seq(node) - return _add_reference(obj, loader, node) - - -def _handle_scalar_tag( - loader: LoaderType, node: yaml.nodes.ScalarNode -) -> str | int | float | None: - """Add line number and file name to Load YAML sequence.""" - obj = node.value - if not isinstance(obj, str): - return obj - return _add_reference_to_node_class(NodeStrClass(obj), loader, node) - - -def _env_var_yaml(loader: LoaderType, node: yaml.nodes.Node) -> str: - """Load environment variables and embed it into the configuration YAML.""" - args = node.value.split() - - # Check for a default value - if len(args) > 1: - return os.getenv(args[0], " ".join(args[1:])) - if args[0] in os.environ: - return os.environ[args[0]] - _LOGGER.error("Environment variable %s not defined", node.value) - raise HomeAssistantError(node.value) + return parse_annotated_yaml(content, secrets) + except YAMLException as exc: + raise HomeAssistantError(str(exc)) from exc def secret_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: """Load secrets and embed it into the configuration YAML.""" - if loader.secrets is None: - raise HomeAssistantError("Secrets not supported in this YAML file") - - return loader.secrets.get(loader.get_name, node.value) - - -def add_constructor(tag: Any, constructor: Any) -> None: - """Add to constructor to all loaders.""" - for yaml_loader in (FastSafeLoader, PythonSafeLoader): - yaml_loader.add_constructor(tag, constructor) - - -add_constructor("!include", _include_yaml) -add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _handle_mapping_tag) -add_constructor(yaml.resolver.BaseResolver.DEFAULT_SCALAR_TAG, _handle_scalar_tag) -add_constructor(yaml.resolver.BaseResolver.DEFAULT_SEQUENCE_TAG, _construct_seq) -add_constructor("!env_var", _env_var_yaml) -add_constructor("!secret", secret_yaml) -add_constructor("!include_dir_list", _include_dir_list_yaml) -add_constructor("!include_dir_merge_list", _include_dir_merge_list_yaml) -add_constructor("!include_dir_named", _include_dir_named_yaml) -add_constructor("!include_dir_merge_named", _include_dir_merge_named_yaml) -add_constructor("!input", Input.from_node) + try: + return annotated_secret_yaml(loader, node) + except YAMLException as exc: + raise HomeAssistantError(str(exc)) from exc diff --git a/homeassistant/util/yaml/objects.py b/homeassistant/util/yaml/objects.py index 7e4019331c6..26714b0fdd4 100644 --- a/homeassistant/util/yaml/objects.py +++ b/homeassistant/util/yaml/objects.py @@ -2,52 +2,6 @@ from __future__ import annotations -from dataclasses import dataclass -from typing import Any +from annotatedyaml import Input, NodeDictClass, NodeListClass, NodeStrClass -import voluptuous as vol -from voluptuous.schema_builder import _compile_scalar -import yaml - - -class NodeListClass(list): - """Wrapper class to be able to 
add attributes on a list.""" - - __slots__ = ("__config_file__", "__line__") - - __config_file__: str - __line__: int | str - - -class NodeStrClass(str): - """Wrapper class to be able to add attributes on a string.""" - - __slots__ = ("__config_file__", "__line__") - - __config_file__: str - __line__: int | str - - def __voluptuous_compile__(self, schema: vol.Schema) -> Any: - """Needed because vol.Schema.compile does not handle str subclasses.""" - return _compile_scalar(self) # type: ignore[no-untyped-call] - - -class NodeDictClass(dict): - """Wrapper class to be able to add attributes on a dict.""" - - __slots__ = ("__config_file__", "__line__") - - __config_file__: str - __line__: int | str - - -@dataclass(slots=True, frozen=True) -class Input: - """Input that should be substituted.""" - - name: str - - @classmethod - def from_node(cls, loader: yaml.Loader, node: yaml.nodes.Node) -> Input: - """Create a new placeholder from a node.""" - return cls(node.value) +__all__ = ["Input", "NodeDictClass", "NodeListClass", "NodeStrClass"] diff --git a/mypy.ini b/mypy.ini index a6203993c87..9831a183ec4 100644 --- a/mypy.ini +++ b/mypy.ini @@ -945,6 +945,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.bosch_alarm.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.braviatv.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1115,6 +1125,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.comelit.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.command_line.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3716,6 +3736,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.pyload.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.python_script.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3856,6 +3886,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.remote_calendar.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.renault.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -5039,6 +5079,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.vodafone_station.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.wake_on_lan.*] 
check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index a4590207294..ca7777da959 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -2568,7 +2568,7 @@ _INHERITANCE_MATCH: dict[str, list[ClassTypeHintMatch]] = { ), TypeHintMatch( function_name="in_progress", - return_type=["bool", "int", None], + return_type=["bool", None], ), TypeHintMatch( function_name="latest_version", @@ -2590,6 +2590,10 @@ _INHERITANCE_MATCH: dict[str, list[ClassTypeHintMatch]] = { function_name="title", return_type=["str", None], ), + TypeHintMatch( + function_name="update_percentage", + return_type=["int", "float", None], + ), TypeHintMatch( function_name="install", arg_types={1: "str | None", 2: "bool"}, diff --git a/pyproject.toml b/pyproject.toml index 9c7508e2ebb..8966c2fd9b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,12 @@ [build-system] -requires = ["setuptools==75.1.0"] +requires = ["setuptools==77.0.3"] build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2025.3.4" -license = {text = "Apache-2.0"} +version = "2025.4.0" +license = "Apache-2.0" +license-files = ["LICENSE*", "homeassistant/backports/LICENSE*"] description = "Open-source home automation platform running on Python 3." readme = "README.rst" authors = [ @@ -16,7 +17,6 @@ classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: End Users/Desktop", "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.13", "Topic :: Home Automation", @@ -28,11 +28,12 @@ dependencies = [ # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.3.0", - "aiohttp==3.11.13", + "aiohttp==3.11.16", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.2.3", "aiohttp-asyncmdnsresolver==0.1.1", "aiozoneinfo==0.2.3", + "annotatedyaml==0.4.5", "astral==2.2", "async-interrupt==1.2.2", "attrs==25.1.0", @@ -43,46 +44,86 @@ dependencies = [ "certifi>=2021.5.30", "ciso8601==2.3.2", "cronsim==2.6", - "fnv-hash-fast==1.2.6", + "fnv-hash-fast==1.4.0", + # ha-ffmpeg is indirectly imported from onboarding via the import chain + # onboarding->cloud->assist_pipeline->tts->ffmpeg. Onboarding needs + # to be setup in stage 0, but we don't want to also promote cloud with all its + # dependencies to stage 0. + "ha-ffmpeg==3.2.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.92.0", + "hass-nabucasa==0.94.0", + # hassil is indirectly imported from onboarding via the import chain + # onboarding->cloud->assist_pipeline->conversation->hassil. Onboarding needs + # to be setup in stage 0, but we don't want to also promote cloud with all its + # dependencies to stage 0. + "hassil==2.2.3", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.28.1", "home-assistant-bluetooth==1.13.1", + # home_assistant_intents is indirectly imported from onboarding via the import chain + # onboarding->cloud->assist_pipeline->conversation->home_assistant_intents. Onboarding needs + # to be setup in stage 0, but we don't want to also promote cloud with all its + # dependencies to stage 0. 
+ "home-assistant-intents==2025.3.28", "ifaddr==0.2.0", "Jinja2==3.1.6", "lru-dict==1.3.0", + # mutagen is indirectly imported from onboarding via the import chain + # onboarding->cloud->assist_pipeline->tts->mutagen. Onboarding needs + # to be setup in stage 0, but we don't want to also promote cloud with all its + # dependencies to stage 0. + "mutagen==1.47.0", + # numpy is indirectly imported from onboarding via the import chain + # onboarding->cloud->alexa->camera->stream->numpy. Onboarding needs + # to be setup in stage 0, but we don't want to also promote cloud with all its + # dependencies to stage 0. + "numpy==2.2.2", "PyJWT==2.10.1", # PyJWT has loose dependency. We want the latest one. "cryptography==44.0.1", "Pillow==11.1.0", "propcache==0.3.0", "pyOpenSSL==25.0.0", - "orjson==3.10.12", + "orjson==3.10.16", "packaging>=23.1", "psutil-home-assistant==0.0.1", + # pymicro_vad is indirectly imported from onboarding via the import chain + # onboarding->cloud->assist_pipeline->pymicro_vad. Onboarding needs + # to be setup in stage 0, but we don't want to also promote cloud with all its + # dependencies to stage 0. + "pymicro-vad==1.0.1", + # pyspeex-noise is indirectly imported from onboarding via the import chain + # onboarding->cloud->assist_pipeline->pyspeex_noise. Onboarding needs + # to be setup in stage 0, but we don't want to also promote cloud with all its + # dependencies to stage 0. + "pyspeex-noise==1.0.2", "python-slugify==8.0.4", + # PyTurboJPEG is indirectly imported from onboarding via the import chain + # onboarding->cloud->camera->pyturbojpeg. Onboarding needs + # to be setup in stage 0, but we don't want to also promote cloud with all its + # dependencies to stage 0. + "PyTurboJPEG==1.7.5", "PyYAML==6.0.2", "requests==2.32.3", "securetar==2025.2.1", - "SQLAlchemy==2.0.38", + "SQLAlchemy==2.0.39", "standard-aifc==3.13.0", "standard-telnetlib==3.13.0", - "typing-extensions>=4.12.2,<5.0", - "ulid-transform==1.2.1", + "typing-extensions>=4.13.0,<5.0", + "ulid-transform==1.4.0", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "uv==0.6.1", + "uv==0.6.10", "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.6", "yarl==1.18.3", "webrtc-models==0.3.0", - "zeroconf==0.145.1" + "zeroconf==0.146.0" ] [project.urls] @@ -471,8 +512,6 @@ filterwarnings = [ # Ignore custom pytest marks "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", "ignore:Unknown pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", - # https://github.com/rokam/sunweg/blob/3.1.0/sunweg/plant.py#L96 - v3.1.0 - 2024-10-02 - "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", # -- design choice 3rd party # https://github.com/gwww/elkm1/blob/2.2.10/elkm1_lib/util.py#L8-L19 @@ -700,17 +739,12 @@ exclude_lines = [ ] [tool.ruff] -required-version = ">=0.9.1" +required-version = ">=0.11.0" [tool.ruff.lint] select = [ "A001", # Variable {name} is shadowing a Python builtin - "ASYNC210", # Async functions should not call blocking HTTP methods - "ASYNC220", # Async functions should not create subprocesses with blocking methods - "ASYNC221", # Async functions should not run processes with blocking methods - "ASYNC222", # Async functions should 
not wait on processes with blocking methods - "ASYNC230", # Async functions should not open files with blocking methods like open - "ASYNC251", # Async functions should not call time.sleep + "ASYNC", # flake8-async "B002", # Python does not support the unary prefix increment "B005", # Using .strip() with multi-character strings is misleading "B007", # Loop control variable {name} not used within loop body @@ -789,7 +823,6 @@ select = [ "S317", # suspicious-xml-sax-usage "S318", # suspicious-xml-mini-dom-usage "S319", # suspicious-xml-pull-dom-usage - "S320", # suspicious-xmle-tree-usage "S601", # paramiko-call "S602", # subprocess-popen-with-shell-equals-true "S604", # call-with-shell-equals-true @@ -810,6 +843,8 @@ select = [ ] ignore = [ + "ASYNC109", # Async function definition with a `timeout` parameter Use `asyncio.timeout` instead + "ASYNC110", # Use `asyncio.Event` instead of awaiting `asyncio.sleep` in a `while` loop "D202", # No blank lines allowed after function docstring "D203", # 1 blank line required before class docstring "D213", # Multi-line docstring summary should start at the second line @@ -839,6 +874,8 @@ ignore = [ "TC001", # Move application import {} into a type-checking block "TC002", # Move third-party import {} into a type-checking block "TC003", # Move standard library import {} into a type-checking block + # Quotes for typing.cast generally not necessary, only for performance critical paths + "TC006", # Add quotes to type expression in typing.cast() "TRY003", # Avoid specifying long messages outside the exception class "TRY400", # Use `logging.exception` instead of `logging.error` @@ -857,7 +894,7 @@ ignore = [ "COM819", # Disabled because ruff does not understand type of __all__ generated by a function - "PLE0605" + "PLE0605", ] [tool.ruff.lint.flake8-import-conventions.extend-aliases] diff --git a/requirements.txt b/requirements.txt index 20fd6f3dfb8..7095fccc964 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,11 +5,12 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.3.0 -aiohttp==3.11.13 +aiohttp==3.11.16 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.2.3 aiohttp-asyncmdnsresolver==0.1.1 aiozoneinfo==0.2.3 +annotatedyaml==0.4.5 astral==2.2 async-interrupt==1.2.2 attrs==25.1.0 @@ -20,35 +21,43 @@ bcrypt==4.2.0 certifi>=2021.5.30 ciso8601==2.3.2 cronsim==2.6 -fnv-hash-fast==1.2.6 -hass-nabucasa==0.92.0 +fnv-hash-fast==1.4.0 +ha-ffmpeg==3.2.2 +hass-nabucasa==0.94.0 +hassil==2.2.3 httpx==0.28.1 home-assistant-bluetooth==1.13.1 +home-assistant-intents==2025.3.28 ifaddr==0.2.0 Jinja2==3.1.6 lru-dict==1.3.0 +mutagen==1.47.0 +numpy==2.2.2 PyJWT==2.10.1 cryptography==44.0.1 Pillow==11.1.0 propcache==0.3.0 pyOpenSSL==25.0.0 -orjson==3.10.12 +orjson==3.10.16 packaging>=23.1 psutil-home-assistant==0.0.1 +pymicro-vad==1.0.1 +pyspeex-noise==1.0.2 python-slugify==8.0.4 +PyTurboJPEG==1.7.5 PyYAML==6.0.2 requests==2.32.3 securetar==2025.2.1 -SQLAlchemy==2.0.38 +SQLAlchemy==2.0.39 standard-aifc==3.13.0 standard-telnetlib==3.13.0 -typing-extensions>=4.12.2,<5.0 -ulid-transform==1.2.1 +typing-extensions>=4.13.0,<5.0 +ulid-transform==1.4.0 urllib3>=1.26.5,<2 -uv==0.6.1 +uv==0.6.10 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.6 yarl==1.18.3 webrtc-models==0.3.0 -zeroconf==0.145.1 +zeroconf==0.146.0 diff --git a/requirements_all.txt b/requirements_all.txt index 0ed5f7ccb03..fb3777b5cfa 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -19,10 +19,10 @@ DoorBirdPy==3.0.8 HAP-python==4.9.2 # homeassistant.components.tasmota 
-HATasmota==0.9.2 +HATasmota==0.10.0 # homeassistant.components.mastodon -Mastodon.py==1.8.1 +Mastodon.py==2.0.1 # homeassistant.components.doods # homeassistant.components.generic @@ -45,7 +45,7 @@ ProgettiHWSW==0.1.3 # PyBluez==0.22 # homeassistant.components.cast -PyChromecast==14.0.5 +PyChromecast==14.0.7 # homeassistant.components.flick_electric PyFlick==1.1.3 @@ -54,7 +54,7 @@ PyFlick==1.1.3 PyFlume==0.6.5 # homeassistant.components.fronius -PyFronius==0.7.3 +PyFronius==0.7.7 # homeassistant.components.pyload PyLoadAPI==1.4.2 @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.57.1 +PySwitchbot==0.58.0 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -116,7 +116,7 @@ RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.38 +SQLAlchemy==2.0.39 # homeassistant.components.tami4 Tami4EdgeAPI==3.0 @@ -131,7 +131,7 @@ TwitterAPI==2.7.12 WSDiscovery==2.1.2 # homeassistant.components.accuweather -accuweather==4.1.0 +accuweather==4.2.0 # homeassistant.components.adax adax==0.4.0 @@ -179,7 +179,7 @@ aioacaia==0.1.14 aioairq==0.4.4 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.10 +aioairzone-cloud==0.6.11 # homeassistant.components.airzone aioairzone==0.9.9 @@ -201,7 +201,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2025.1.1 +aioautomower==2025.3.2 # homeassistant.components.azure_devops aioazuredevops==2.2.1 @@ -213,7 +213,7 @@ aiobafi6==0.9.0 aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.10.1 +aiocomelit==0.11.3 # homeassistant.components.dhcp aiodhcpwatcher==1.1.1 @@ -225,7 +225,7 @@ aiodiscover==2.6.1 aiodns==3.2.0 # homeassistant.components.duke_energy -aiodukeenergy==0.2.2 +aiodukeenergy==0.3.0 # homeassistant.components.eafm aioeafm==0.1.2 @@ -243,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==29.2.0 +aioesphomeapi==29.8.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -258,7 +258,7 @@ aiogithubapi==24.6.0 aioguardian==2022.07.0 # homeassistant.components.harmony -aioharmony==0.4.1 +aioharmony==0.5.2 # homeassistant.components.hassio aiohasupervisor==0.3.0 @@ -267,7 +267,7 @@ aiohasupervisor==0.3.0 aiohomeconnect==0.16.3 # homeassistant.components.homekit_controller -aiohomekit==3.2.8 +aiohomekit==3.2.13 # homeassistant.components.mcp_server aiohttp_sse==2.2.0 @@ -291,7 +291,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.6.4 # homeassistant.components.lifx -aiolifx==1.1.2 +aiolifx==1.1.4 # homeassistant.components.lookin aiolookin==1.0.0 @@ -371,7 +371,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==13.1.0 +aioshelly==13.4.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -422,7 +422,7 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webdav -aiowebdav2==0.4.2 +aiowebdav2==0.4.5 # homeassistant.components.webostv aiowebostv==0.7.3 @@ -464,7 +464,7 @@ amcrest==1.9.8 androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote -androidtvremote2==0.2.0 +androidtvremote2==0.2.1 # homeassistant.components.anel_pwrctrl anel-pwrctrl-homeassistant==0.0.1.dev2 @@ -479,7 +479,7 @@ anthemav==1.4.1 anthropic==0.47.2 # homeassistant.components.mcp_server -anyio==4.8.0 +anyio==4.9.0 # homeassistant.components.weatherkit apple_weatherkit==1.1.3 @@ -514,7 +514,7 @@ asmog==0.0.6 # homeassistant.components.ssdp # homeassistant.components.upnp # 
homeassistant.components.yeelight -async-upnp-client==0.43.0 +async-upnp-client==0.44.0 # homeassistant.components.arve asyncarve==0.1.1 @@ -603,7 +603,7 @@ bizkaibus==0.1.1 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==2.8.0 +bleak-esphome==2.12.0 # homeassistant.components.bluetooth bleak-retry-connector==3.9.0 @@ -627,24 +627,26 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.decora -# homeassistant.components.zengge # bluepy==1.3.0 # homeassistant.components.bluetooth bluetooth-adapters==0.21.4 # homeassistant.components.bluetooth -bluetooth-auto-recovery==1.4.4 +bluetooth-auto-recovery==1.4.5 # homeassistant.components.bluetooth # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.23.4 +bluetooth-data-tools==1.26.5 # homeassistant.components.bond bond-async==0.2.1 +# homeassistant.components.bosch_alarm +bosch-alarm-mode2==0.4.3 + # homeassistant.components.bosch_shc boschshcpy==0.2.91 @@ -656,7 +658,7 @@ boto3==1.34.131 botocore==1.34.131 # homeassistant.components.bring -bring-api==1.0.2 +bring-api==1.1.0 # homeassistant.components.broadlink broadlink==0.19.0 @@ -686,7 +688,7 @@ btsmarthub-devicelist==0.2.3 buienradar==1.0.6 # homeassistant.components.dhcp -cached-ipaddress==0.9.2 +cached-ipaddress==0.10.0 # homeassistant.components.caldav caldav==1.3.9 @@ -744,10 +746,10 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.33.0 +dbus-fast==2.43.0 # homeassistant.components.debugpy -debugpy==1.8.11 +debugpy==1.8.13 # homeassistant.components.decora_wifi # decora-wifi==1.4 @@ -756,7 +758,7 @@ debugpy==1.8.11 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==12.3.1 +deebot-client==12.5.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -946,7 +948,7 @@ flux-led==1.1.3 # homeassistant.components.homekit # homeassistant.components.recorder -fnv-hash-fast==1.2.6 +fnv-hash-fast==1.4.0 # homeassistant.components.foobot foobot_async==1.0.0 @@ -968,16 +970,16 @@ freesms==0.2.0 fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.7.0 +fyta_cli==0.7.2 # homeassistant.components.google_translate gTTS==2.5.3 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.5.0 +gardena-bluetooth==1.6.0 # homeassistant.components.google_assistant_sdk -gassist-text==0.0.11 +gassist-text==0.0.12 # homeassistant.components.google gcal-sync==7.0.0 @@ -986,7 +988,7 @@ gcal-sync==7.0.0 geniushub-client==0.7.1 # homeassistant.components.geocaching -geocachingapi==0.2.1 +geocachingapi==0.3.0 # homeassistant.components.aprs geopy==2.3.0 @@ -1008,7 +1010,7 @@ georss-qld-bushfire-alert-client==0.8 getmac==0.9.5 # homeassistant.components.gios -gios==5.0.0 +gios==6.0.0 # homeassistant.components.gitter gitterpy==0.1.7 @@ -1030,7 +1032,7 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.28.0 +google-cloud-pubsub==2.29.0 # homeassistant.components.google_cloud google-cloud-speech==2.27.0 @@ -1039,7 +1041,7 @@ google-cloud-speech==2.27.0 google-cloud-texttospeech==2.17.2 # homeassistant.components.google_generative_ai_conversation -google-genai==1.1.0 +google-genai==1.7.0 # homeassistant.components.nest google-nest-sdm==7.1.4 @@ -1061,7 +1063,7 @@ gotailwind==0.3.0 govee-ble==0.43.1 # homeassistant.components.govee_light_local -govee-local-api==2.0.1 +govee-local-api==2.1.0 # 
homeassistant.components.remote_rpi_gpio gpiozero==1.6.2 @@ -1105,14 +1107,17 @@ ha-iotawattpy==0.1.2 # homeassistant.components.philips_js ha-philipsjs==3.2.2 +# homeassistant.components.homeassistant_hardware +ha-silabs-firmware-client==0.2.0 + # homeassistant.components.habitica habiticalib==0.3.7 # homeassistant.components.bluetooth -habluetooth==3.24.1 +habluetooth==3.37.0 # homeassistant.components.cloud -hass-nabucasa==0.92.0 +hass-nabucasa==0.94.0 # homeassistant.components.splunk hass-splunk==0.1.1 @@ -1121,7 +1126,7 @@ hass-splunk==0.1.1 hassil==2.2.3 # homeassistant.components.jewish_calendar -hdate==0.11.1 +hdate[astral]==1.0.3 # homeassistant.components.heatmiser heatmiserV3==2.0.3 @@ -1149,13 +1154,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.68 +holidays==0.69 # homeassistant.components.frontend -home-assistant-frontend==20250306.0 +home-assistant-frontend==20250401.0 # homeassistant.components.conversation -home-assistant-intents==2025.3.5 +home-assistant-intents==2025.3.28 # homeassistant.components.homematicip_cloud homematicip==1.1.7 @@ -1179,7 +1184,7 @@ hyperion-py==0.7.5 iammeter==0.2.1 # homeassistant.components.iaqualink -iaqualink==0.5.0 +iaqualink==0.5.3 # homeassistant.components.ibeacon ibeacon-ble==1.2.0 @@ -1190,7 +1195,8 @@ ibmiotf==0.3.4 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.3.0 +# homeassistant.components.remote_calendar +ical==9.0.3 # homeassistant.components.caldav icalendar==6.1.0 @@ -1214,7 +1220,7 @@ igloohome-api==0.1.0 ihcsdk==2.8.5 # homeassistant.components.imgw_pib -imgw_pib==1.0.9 +imgw_pib==1.0.10 # homeassistant.components.incomfort incomfort-client==0.6.7 @@ -1226,7 +1232,7 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.7.1 +inkbird-ble==0.9.0 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 @@ -1281,7 +1287,7 @@ kiwiki-client==0.1.1 knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2025.1.30.194235 +knx-frontend==2025.3.8.214559 # homeassistant.components.konnected konnected==1.2.0 @@ -1378,7 +1384,7 @@ mbddns==0.1.2 # homeassistant.components.mcp # homeassistant.components.mcp_server -mcp==1.1.2 +mcp==1.5.0 # homeassistant.components.minecraft_server mcstatus==11.1.1 @@ -1399,7 +1405,7 @@ messagebird==1.2.0 meteoalertapi==0.3.1 # homeassistant.components.meteo_france -meteofrance-api==1.3.0 +meteofrance-api==1.4.0 # homeassistant.components.mfi mficlient==0.5.0 @@ -1423,7 +1429,7 @@ minio==7.1.12 moat-ble==0.1.1 # homeassistant.components.moehlenhoff_alpha2 -moehlenhoff-alpha2==1.3.1 +moehlenhoff-alpha2==1.4.0 # homeassistant.components.monzo monzopy==1.4.2 @@ -1432,7 +1438,7 @@ monzopy==1.4.2 mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.25 +motionblinds==0.6.26 # homeassistant.components.motionblinds_ble motionblindsble==0.1.3 @@ -1447,7 +1453,7 @@ mozart-api==4.1.1.116.4 mullvad-api==1.0.0 # homeassistant.components.music_assistant -music-assistant-client==1.1.1 +music-assistant-client==1.2.0 # homeassistant.components.tts mutagen==1.47.0 @@ -1477,13 +1483,13 @@ netdata==1.3.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==4.0.0 +nettigo-air-monitor==4.1.0 # homeassistant.components.neurio_energy neurio==0.3.1 # homeassistant.components.nexia -nexia==2.2.2 +nexia==2.4.0 # homeassistant.components.nextcloud nextcloudmonitor==1.5.1 @@ -1498,7 +1504,7 @@ 
nextdns==4.0.0 nhc==0.4.10 # homeassistant.components.nibe_heatpump -nibe==2.14.0 +nibe==2.17.0 # homeassistant.components.nice_go nice-go==1.0.1 @@ -1553,7 +1559,7 @@ odp-amsterdam==6.0.2 oemthermostat==1.1.1 # homeassistant.components.ohme -ohme==1.3.2 +ohme==1.5.1 # homeassistant.components.ollama ollama==0.4.7 @@ -1577,7 +1583,7 @@ open-garage==0.2.0 open-meteo==0.3.2 # homeassistant.components.openai_conversation -openai==1.61.0 +openai==1.68.2 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1704,7 +1710,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.1 +psutil==7.0.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1716,7 +1722,7 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.2.0 +pvo==2.2.1 # homeassistant.components.aosmith py-aosmith==1.0.12 @@ -1730,6 +1736,9 @@ py-ccm15==0.0.9 # homeassistant.components.cpuspeed py-cpuinfo==9.0.0 +# homeassistant.components.pterodactyl +py-dactyl==2.0.4 + # homeassistant.components.dormakaba_dkey py-dormakaba-dkey==1.0.5 @@ -1779,7 +1788,7 @@ pyEmby==1.10 pyHik==0.3.2 # homeassistant.components.homee -pyHomee==1.2.7 +pyHomee==1.2.8 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 @@ -1813,7 +1822,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.7 +pyaprilaire==0.8.1 # homeassistant.components.asuswrt pyasuswrt==0.1.21 @@ -1882,7 +1891,7 @@ pycsspeechtts==1.0.8 # pycups==2.0.4 # homeassistant.components.daikin -pydaikin==2.13.8 +pydaikin==2.14.1 # homeassistant.components.danfoss_air pydanfossair==0.1.0 @@ -1891,7 +1900,7 @@ pydanfossair==0.1.0 pydeako==0.6.0 # homeassistant.components.deconz -pydeconz==118 +pydeconz==120 # homeassistant.components.delijn pydelijn==1.1.0 @@ -1957,7 +1966,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.8.0 +pyfibaro==0.8.2 # homeassistant.components.fido pyfido==2.1.2 @@ -1996,7 +2005,7 @@ pygti==0.9.4 pyhaversion==22.8.0 # homeassistant.components.heos -pyheos==1.0.3 +pyheos==1.0.4 # homeassistant.components.hive pyhive-integration==1.0.2 @@ -2038,7 +2047,7 @@ pyiskra==0.1.15 pyiss==1.0.1 # homeassistant.components.isy994 -pyisy==3.1.14 +pyisy==3.1.15 # homeassistant.components.itach pyitachip2ir==0.0.7 @@ -2098,7 +2107,7 @@ pylitejet==0.6.3 pylitterbot==2024.0.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.23.0 +pylutron-caseta==0.24.0 # homeassistant.components.lutron pylutron==0.2.16 @@ -2140,7 +2149,7 @@ pymsteams==0.1.12 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==4.0.1 +pynecil==4.1.0 # homeassistant.components.netgear pynetgear==0.10.10 @@ -2179,7 +2188,7 @@ pyombi==0.1.10 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.2.1 +pyopenweathermap==0.2.2 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -2199,7 +2208,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.16.0 +pyoverkiz==1.16.5 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -2244,7 +2253,7 @@ pyqvrpro==0.52 pyqwikswitch==0.93 # homeassistant.components.nmbs -pyrail==0.0.3 +pyrail==0.4.1 # homeassistant.components.rainbird pyrainbird==6.0.1 @@ -2310,7 +2319,7 @@ pysma==0.7.5 pysmappee==0.2.29 # homeassistant.components.smartthings -pysmartthings==2.7.4 +pysmartthings==3.0.1 # homeassistant.components.smarty pysmarty2==0.10.2 @@ -2322,7 +2331,7 @@ pysmhi==1.0.0 pysml==0.0.12 # homeassistant.components.smlight 
-pysmlight==0.2.3 +pysmlight==0.2.4 # homeassistant.components.snmp pysnmp==6.2.6 @@ -2421,7 +2430,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.10.2 # homeassistant.components.linkplay -python-linkplay==0.1.3 +python-linkplay==0.2.2 # homeassistant.components.lirc # python-lirc==1.2.3 @@ -2452,7 +2461,7 @@ python-otbr-api==2.7.0 python-overseerr==0.7.1 # homeassistant.components.picnic -python-picnic-api2==1.2.2 +python-picnic-api2==1.2.4 # homeassistant.components.rabbitair python-rabbitair==0.0.8 @@ -2461,19 +2470,19 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.12.2 +python-roborock==2.16.1 # homeassistant.components.smarttub python-smarttub==0.0.39 # homeassistant.components.snoo -python-snoo==0.6.4 +python-snoo==0.6.5 # homeassistant.components.songpal python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.18.6 +python-tado==0.18.9 # homeassistant.components.technove python-technove==2.0.0 @@ -2618,7 +2627,7 @@ renault-api==0.2.9 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.12.1 +reolink-aio==0.13.0 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2821,9 +2830,6 @@ stringcase==1.2.0 # homeassistant.components.subaru subarulink==0.7.13 -# homeassistant.components.sunweg -sunweg==3.0.2 - # homeassistant.components.surepetcare surepy==0.9.0 @@ -2872,7 +2878,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.9.13 +tesla-fleet-api==1.0.16 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2899,7 +2905,7 @@ thermopro-ble==0.11.0 thingspeak==1.0.0 # homeassistant.components.lg_thinq -thinqconnect==1.0.4 +thinqconnect==1.0.5 # homeassistant.components.tikteck tikteck==0.4 @@ -2926,7 +2932,7 @@ total-connect-client==2025.1.4 tp-connected==0.0.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.3 +tplink-omada-client==1.4.4 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2974,7 +2980,7 @@ unifi_ap==0.0.2 unifiled==0.11 # homeassistant.components.homeassistant_hardware -universal-silabs-flasher==0.0.29 +universal-silabs-flasher==0.0.30 # homeassistant.components.upb upb-lib==0.6.1 @@ -3034,7 +3040,7 @@ vultr==0.1.2 wakeonlan==2.1.0 # homeassistant.components.wallbox -wallbox==0.7.0 +wallbox==0.8.0 # homeassistant.components.folder_watcher watchdog==6.0.0 @@ -3061,7 +3067,7 @@ webmin-xmlrpc==0.0.2 weheat==2025.2.26 # homeassistant.components.whirlpool -whirlpool-sixth-sense==0.18.12 +whirlpool-sixth-sense==0.19.1 # homeassistant.components.whois whois==0.9.27 @@ -3076,7 +3082,7 @@ wirelesstagpy==0.8.1 wled==0.21.0 # homeassistant.components.wolflink -wolf-comm==0.0.19 +wolf-comm==0.0.23 # homeassistant.components.wyoming wyoming==1.5.4 @@ -3122,7 +3128,7 @@ yeelight==0.7.16 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.8 +yolink-api==0.4.9 # homeassistant.components.youless youless-api==2.2.0 @@ -3131,7 +3137,7 @@ youless-api==2.2.0 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2025.02.19 +yt-dlp[default]==2025.03.26 # homeassistant.components.zabbix zabbix-utils==2.0.2 @@ -3139,17 +3145,14 @@ zabbix-utils==2.0.2 # homeassistant.components.zamg zamg==0.3.6 -# homeassistant.components.zengge -zengge==0.2 - # homeassistant.components.zeroconf -zeroconf==0.145.1 +zeroconf==0.146.0 # homeassistant.components.zeversolar zeversolar==0.3.2 # 
homeassistant.components.zha -zha==0.0.53 +zha==0.0.55 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 @@ -3161,7 +3164,7 @@ ziggo-mediabox-xl==1.1.0 zm-py==0.5.4 # homeassistant.components.zwave_js -zwave-js-server-python==0.60.1 +zwave-js-server-python==0.62.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test.txt b/requirements_test.txt index 0a7a3bb18e5..c7bb9b11b87 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,18 +7,18 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt -astroid==3.3.8 -coverage==7.6.10 +astroid==3.3.9 +coverage==7.6.12 freezegun==1.5.1 license-expression==30.4.1 mock-open==1.4.0 -mypy-dev==1.16.0a3 +mypy-dev==1.16.0a7 pre-commit==4.0.0 pydantic==2.10.6 -pylint==3.3.4 +pylint==3.3.6 pylint-per-file-ignores==1.4.0 -pipdeptree==2.25.0 -pytest-asyncio==0.25.3 +pipdeptree==2.25.1 +pytest-asyncio==0.26.0 pytest-aiohttp==1.1.0 pytest-cov==6.0.0 pytest-freezer==0.4.9 @@ -29,7 +29,7 @@ pytest-timeout==2.3.1 pytest-unordered==0.6.1 pytest-picked==0.5.1 pytest-xdist==3.6.1 -pytest==8.3.4 +pytest==8.3.5 requests-mock==1.12.1 respx==0.22.0 syrupy==4.8.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 830fc17c6e0..b7563d0d82a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -19,10 +19,10 @@ DoorBirdPy==3.0.8 HAP-python==4.9.2 # homeassistant.components.tasmota -HATasmota==0.9.2 +HATasmota==0.10.0 # homeassistant.components.mastodon -Mastodon.py==1.8.1 +Mastodon.py==2.0.1 # homeassistant.components.doods # homeassistant.components.generic @@ -42,7 +42,7 @@ PlexAPI==4.15.16 ProgettiHWSW==0.1.3 # homeassistant.components.cast -PyChromecast==14.0.5 +PyChromecast==14.0.7 # homeassistant.components.flick_electric PyFlick==1.1.3 @@ -51,7 +51,7 @@ PyFlick==1.1.3 PyFlume==0.6.5 # homeassistant.components.fronius -PyFronius==0.7.3 +PyFronius==0.7.7 # homeassistant.components.pyload PyLoadAPI==1.4.2 @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.57.1 +PySwitchbot==0.58.0 # homeassistant.components.syncthru PySyncThru==0.8.0 @@ -110,7 +110,7 @@ RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.38 +SQLAlchemy==2.0.39 # homeassistant.components.tami4 Tami4EdgeAPI==3.0 @@ -119,7 +119,7 @@ Tami4EdgeAPI==3.0 WSDiscovery==2.1.2 # homeassistant.components.accuweather -accuweather==4.1.0 +accuweather==4.2.0 # homeassistant.components.adax adax==0.4.0 @@ -167,7 +167,7 @@ aioacaia==0.1.14 aioairq==0.4.4 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.10 +aioairzone-cloud==0.6.11 # homeassistant.components.airzone aioairzone==0.9.9 @@ -189,7 +189,7 @@ aioaseko==1.0.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2025.1.1 +aioautomower==2025.3.2 # homeassistant.components.azure_devops aioazuredevops==2.2.1 @@ -201,7 +201,7 @@ aiobafi6==0.9.0 aiobotocore==2.13.1 # homeassistant.components.comelit -aiocomelit==0.10.1 +aiocomelit==0.11.3 # homeassistant.components.dhcp aiodhcpwatcher==1.1.1 @@ -213,7 +213,7 @@ aiodiscover==2.6.1 aiodns==3.2.0 # homeassistant.components.duke_energy -aiodukeenergy==0.2.2 +aiodukeenergy==0.3.0 # homeassistant.components.eafm aioeafm==0.1.2 @@ -231,7 +231,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==29.2.0 +aioesphomeapi==29.8.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -243,7 +243,7 @@ aiogithubapi==24.6.0 
aioguardian==2022.07.0 # homeassistant.components.harmony -aioharmony==0.4.1 +aioharmony==0.5.2 # homeassistant.components.hassio aiohasupervisor==0.3.0 @@ -252,7 +252,7 @@ aiohasupervisor==0.3.0 aiohomeconnect==0.16.3 # homeassistant.components.homekit_controller -aiohomekit==3.2.8 +aiohomekit==3.2.13 # homeassistant.components.mcp_server aiohttp_sse==2.2.0 @@ -273,7 +273,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.6.4 # homeassistant.components.lifx -aiolifx==1.1.2 +aiolifx==1.1.4 # homeassistant.components.lookin aiolookin==1.0.0 @@ -353,7 +353,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==13.1.0 +aioshelly==13.4.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -404,7 +404,7 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webdav -aiowebdav2==0.4.2 +aiowebdav2==0.4.5 # homeassistant.components.webostv aiowebostv==0.7.3 @@ -440,7 +440,7 @@ amberelectric==2.0.12 androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote -androidtvremote2==0.2.0 +androidtvremote2==0.2.1 # homeassistant.components.anova anova-wifi==0.17.0 @@ -452,7 +452,7 @@ anthemav==1.4.1 anthropic==0.47.2 # homeassistant.components.mcp_server -anyio==4.8.0 +anyio==4.9.0 # homeassistant.components.weatherkit apple_weatherkit==1.1.3 @@ -478,7 +478,7 @@ arcam-fmj==1.8.1 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.43.0 +async-upnp-client==0.44.0 # homeassistant.components.arve asyncarve==0.1.1 @@ -534,7 +534,7 @@ bimmer-connected[china]==0.17.2 # homeassistant.components.eq3btsmart # homeassistant.components.esphome -bleak-esphome==2.8.0 +bleak-esphome==2.12.0 # homeassistant.components.bluetooth bleak-retry-connector==3.9.0 @@ -558,17 +558,20 @@ bluemaestro-ble==0.2.3 bluetooth-adapters==0.21.4 # homeassistant.components.bluetooth -bluetooth-auto-recovery==1.4.4 +bluetooth-auto-recovery==1.4.5 # homeassistant.components.bluetooth # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.23.4 +bluetooth-data-tools==1.26.5 # homeassistant.components.bond bond-async==0.2.1 +# homeassistant.components.bosch_alarm +bosch-alarm-mode2==0.4.3 + # homeassistant.components.bosch_shc boschshcpy==0.2.91 @@ -576,7 +579,7 @@ boschshcpy==0.2.91 botocore==1.34.131 # homeassistant.components.bring -bring-api==1.0.2 +bring-api==1.1.0 # homeassistant.components.broadlink broadlink==0.19.0 @@ -597,7 +600,7 @@ bthome-ble==3.12.4 buienradar==1.0.6 # homeassistant.components.dhcp -cached-ipaddress==0.9.2 +cached-ipaddress==0.10.0 # homeassistant.components.caldav caldav==1.3.9 @@ -640,13 +643,13 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.33.0 +dbus-fast==2.43.0 # homeassistant.components.debugpy -debugpy==1.8.11 +debugpy==1.8.13 # homeassistant.components.ecovacs -deebot-client==12.3.1 +deebot-client==12.5.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -805,7 +808,7 @@ flux-led==1.1.3 # homeassistant.components.homekit # homeassistant.components.recorder -fnv-hash-fast==1.2.6 +fnv-hash-fast==1.4.0 # homeassistant.components.foobot foobot_async==1.0.0 @@ -821,16 +824,16 @@ freebox-api==1.2.2 fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.7.0 +fyta_cli==0.7.2 # homeassistant.components.google_translate gTTS==2.5.3 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.5.0 +gardena-bluetooth==1.6.0 # 
homeassistant.components.google_assistant_sdk -gassist-text==0.0.11 +gassist-text==0.0.12 # homeassistant.components.google gcal-sync==7.0.0 @@ -839,7 +842,7 @@ gcal-sync==7.0.0 geniushub-client==0.7.1 # homeassistant.components.geocaching -geocachingapi==0.2.1 +geocachingapi==0.3.0 # homeassistant.components.aprs geopy==2.3.0 @@ -861,7 +864,7 @@ georss-qld-bushfire-alert-client==0.8 getmac==0.9.5 # homeassistant.components.gios -gios==5.0.0 +gios==6.0.0 # homeassistant.components.glances glances-api==0.8.0 @@ -880,7 +883,7 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.28.0 +google-cloud-pubsub==2.29.0 # homeassistant.components.google_cloud google-cloud-speech==2.27.0 @@ -889,7 +892,7 @@ google-cloud-speech==2.27.0 google-cloud-texttospeech==2.17.2 # homeassistant.components.google_generative_ai_conversation -google-genai==1.1.0 +google-genai==1.7.0 # homeassistant.components.nest google-nest-sdm==7.1.4 @@ -911,7 +914,7 @@ gotailwind==0.3.0 govee-ble==0.43.1 # homeassistant.components.govee_light_local -govee-local-api==2.0.1 +govee-local-api==2.1.0 # homeassistant.components.gpsd gps3==0.33.3 @@ -950,16 +953,16 @@ ha-philipsjs==3.2.2 habiticalib==0.3.7 # homeassistant.components.bluetooth -habluetooth==3.24.1 +habluetooth==3.37.0 # homeassistant.components.cloud -hass-nabucasa==0.92.0 +hass-nabucasa==0.94.0 # homeassistant.components.conversation hassil==2.2.3 # homeassistant.components.jewish_calendar -hdate==0.11.1 +hdate[astral]==1.0.3 # homeassistant.components.here_travel_time here-routing==1.0.1 @@ -978,13 +981,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.68 +holidays==0.69 # homeassistant.components.frontend -home-assistant-frontend==20250306.0 +home-assistant-frontend==20250401.0 # homeassistant.components.conversation -home-assistant-intents==2025.3.5 +home-assistant-intents==2025.3.28 # homeassistant.components.homematicip_cloud homematicip==1.1.7 @@ -1002,7 +1005,7 @@ huum==0.7.12 hyperion-py==0.7.5 # homeassistant.components.iaqualink -iaqualink==0.5.0 +iaqualink==0.5.3 # homeassistant.components.ibeacon ibeacon-ble==1.2.0 @@ -1010,7 +1013,8 @@ ibeacon-ble==1.2.0 # homeassistant.components.google # homeassistant.components.local_calendar # homeassistant.components.local_todo -ical==8.3.0 +# homeassistant.components.remote_calendar +ical==9.0.3 # homeassistant.components.caldav icalendar==6.1.0 @@ -1028,7 +1032,7 @@ ifaddr==0.2.0 igloohome-api==0.1.0 # homeassistant.components.imgw_pib -imgw_pib==1.0.9 +imgw_pib==1.0.10 # homeassistant.components.incomfort incomfort-client==0.6.7 @@ -1040,7 +1044,7 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.7.1 +inkbird-ble==0.9.0 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 @@ -1083,7 +1087,7 @@ kegtron-ble==0.4.0 knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2025.1.30.194235 +knx-frontend==2025.3.8.214559 # homeassistant.components.konnected konnected==1.2.0 @@ -1156,7 +1160,7 @@ mbddns==0.1.2 # homeassistant.components.mcp # homeassistant.components.mcp_server -mcp==1.1.2 +mcp==1.5.0 # homeassistant.components.minecraft_server mcstatus==11.1.1 @@ -1171,7 +1175,7 @@ medcom-ble==0.1.1 melnor-bluetooth==0.0.25 # homeassistant.components.meteo_france -meteofrance-api==1.3.0 +meteofrance-api==1.4.0 # homeassistant.components.mfi mficlient==0.5.0 @@ -1195,7 +1199,7 @@ minio==7.1.12 moat-ble==0.1.1 # 
homeassistant.components.moehlenhoff_alpha2 -moehlenhoff-alpha2==1.3.1 +moehlenhoff-alpha2==1.4.0 # homeassistant.components.monzo monzopy==1.4.2 @@ -1204,7 +1208,7 @@ monzopy==1.4.2 mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.25 +motionblinds==0.6.26 # homeassistant.components.motionblinds_ble motionblindsble==0.1.3 @@ -1219,7 +1223,7 @@ mozart-api==4.1.1.116.4 mullvad-api==1.0.0 # homeassistant.components.music_assistant -music-assistant-client==1.1.1 +music-assistant-client==1.2.0 # homeassistant.components.tts mutagen==1.47.0 @@ -1243,10 +1247,10 @@ nessclient==1.1.2 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==4.0.0 +nettigo-air-monitor==4.1.0 # homeassistant.components.nexia -nexia==2.2.2 +nexia==2.4.0 # homeassistant.components.nextcloud nextcloudmonitor==1.5.1 @@ -1261,7 +1265,7 @@ nextdns==4.0.0 nhc==0.4.10 # homeassistant.components.nibe_heatpump -nibe==2.14.0 +nibe==2.17.0 # homeassistant.components.nice_go nice-go==1.0.1 @@ -1301,7 +1305,7 @@ objgraph==3.5.0 odp-amsterdam==6.0.2 # homeassistant.components.ohme -ohme==1.3.2 +ohme==1.5.1 # homeassistant.components.ollama ollama==0.4.7 @@ -1325,7 +1329,7 @@ open-garage==0.2.0 open-meteo==0.3.2 # homeassistant.components.openai_conversation -openai==1.61.0 +openai==1.68.2 # homeassistant.components.openerz openerz-api==0.3.0 @@ -1408,7 +1412,7 @@ prometheus-client==0.21.0 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.1.1 +psutil==7.0.0 # homeassistant.components.pushbullet pushbullet.py==0.11.0 @@ -1417,7 +1421,7 @@ pushbullet.py==0.11.0 pushover_complete==1.1.1 # homeassistant.components.pvoutput -pvo==2.2.0 +pvo==2.2.1 # homeassistant.components.aosmith py-aosmith==1.0.12 @@ -1431,6 +1435,9 @@ py-ccm15==0.0.9 # homeassistant.components.cpuspeed py-cpuinfo==9.0.0 +# homeassistant.components.pterodactyl +py-dactyl==2.0.4 + # homeassistant.components.dormakaba_dkey py-dormakaba-dkey==1.0.5 @@ -1468,7 +1475,7 @@ pyDuotecno==2024.10.1 pyElectra==1.2.4 # homeassistant.components.homee -pyHomee==1.2.7 +pyHomee==1.2.8 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 @@ -1493,7 +1500,7 @@ pyairnow==1.2.1 pyairvisual==2023.08.1 # homeassistant.components.aprilaire -pyaprilaire==0.7.7 +pyaprilaire==0.8.1 # homeassistant.components.asuswrt pyasuswrt==0.1.21 @@ -1541,13 +1548,13 @@ pycountry==24.6.1 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.8 +pydaikin==2.14.1 # homeassistant.components.deako pydeako==0.6.0 # homeassistant.components.deconz -pydeconz==118 +pydeconz==120 # homeassistant.components.dexcom pydexcom==0.2.3 @@ -1595,7 +1602,7 @@ pyevilgenius==2.0.0 pyezviz==0.2.1.2 # homeassistant.components.fibaro -pyfibaro==0.8.0 +pyfibaro==0.8.2 # homeassistant.components.fido pyfido==2.1.2 @@ -1625,7 +1632,7 @@ pygti==0.9.4 pyhaversion==22.8.0 # homeassistant.components.heos -pyheos==1.0.3 +pyheos==1.0.4 # homeassistant.components.hive pyhive-integration==1.0.2 @@ -1661,7 +1668,7 @@ pyiskra==0.1.15 pyiss==1.0.1 # homeassistant.components.isy994 -pyisy==3.1.14 +pyisy==3.1.15 # homeassistant.components.ituran pyituran==0.1.4 @@ -1712,7 +1719,7 @@ pylitejet==0.6.3 pylitterbot==2024.0.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.23.0 +pylutron-caseta==0.24.0 # homeassistant.components.lutron pylutron==0.2.16 @@ -1745,7 +1752,7 @@ pymonoprice==0.4 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==4.0.1 +pynecil==4.1.0 # homeassistant.components.netgear pynetgear==0.10.10 @@ -1778,7 +1785,7 
@@ pyoctoprintapi==0.1.12 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.2.1 +pyopenweathermap==0.2.2 # homeassistant.components.opnsense pyopnsense==0.4.0 @@ -1795,7 +1802,7 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.16.0 +pyoverkiz==1.16.5 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -1834,7 +1841,7 @@ pyps4-2ndscreen==1.3.1 pyqwikswitch==0.93 # homeassistant.components.nmbs -pyrail==0.0.3 +pyrail==0.4.1 # homeassistant.components.rainbird pyrainbird==6.0.1 @@ -1882,7 +1889,7 @@ pysma==0.7.5 pysmappee==0.2.29 # homeassistant.components.smartthings -pysmartthings==2.7.4 +pysmartthings==3.0.1 # homeassistant.components.smarty pysmarty2==0.10.2 @@ -1894,7 +1901,7 @@ pysmhi==1.0.0 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.2.3 +pysmlight==0.2.4 # homeassistant.components.snmp pysnmp==6.2.6 @@ -1960,7 +1967,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.10.2 # homeassistant.components.linkplay -python-linkplay==0.1.3 +python-linkplay==0.2.2 # homeassistant.components.matter python-matter-server==7.0.0 @@ -1988,25 +1995,25 @@ python-otbr-api==2.7.0 python-overseerr==0.7.1 # homeassistant.components.picnic -python-picnic-api2==1.2.2 +python-picnic-api2==1.2.4 # homeassistant.components.rabbitair python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.12.2 +python-roborock==2.16.1 # homeassistant.components.smarttub python-smarttub==0.0.39 # homeassistant.components.snoo -python-snoo==0.6.4 +python-snoo==0.6.5 # homeassistant.components.songpal python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.18.6 +python-tado==0.18.9 # homeassistant.components.technove python-technove==2.0.0 @@ -2121,7 +2128,7 @@ renault-api==0.2.9 renson-endura-delta==1.7.2 # homeassistant.components.reolink -reolink-aio==0.12.1 +reolink-aio==0.13.0 # homeassistant.components.rflink rflink==0.0.66 @@ -2282,9 +2289,6 @@ stringcase==1.2.0 # homeassistant.components.subaru subarulink==0.7.13 -# homeassistant.components.sunweg -sunweg==3.0.2 - # homeassistant.components.surepetcare surepy==0.9.0 @@ -2312,7 +2316,7 @@ temperusb==1.6.1 # homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.9.13 +tesla-fleet-api==1.0.16 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2333,7 +2337,7 @@ thermobeacon-ble==0.8.1 thermopro-ble==0.11.0 # homeassistant.components.lg_thinq -thinqconnect==1.0.4 +thinqconnect==1.0.5 # homeassistant.components.tilt_ble tilt-ble==0.2.3 @@ -2351,7 +2355,7 @@ toonapi==0.3.0 total-connect-client==2025.1.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.3 +tplink-omada-client==1.4.4 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2444,7 +2448,7 @@ vultr==0.1.2 wakeonlan==2.1.0 # homeassistant.components.wallbox -wallbox==0.7.0 +wallbox==0.8.0 # homeassistant.components.folder_watcher watchdog==6.0.0 @@ -2465,7 +2469,7 @@ webmin-xmlrpc==0.0.2 weheat==2025.2.26 # homeassistant.components.whirlpool -whirlpool-sixth-sense==0.18.12 +whirlpool-sixth-sense==0.19.1 # homeassistant.components.whois whois==0.9.27 @@ -2477,7 +2481,7 @@ wiffi==1.1.2 wled==0.21.0 # homeassistant.components.wolflink -wolf-comm==0.0.19 +wolf-comm==0.0.23 # homeassistant.components.wyoming wyoming==1.5.4 @@ -2517,7 +2521,7 @@ yalexs==8.10.0 yeelight==0.7.16 # homeassistant.components.yolink -yolink-api==0.4.8 +yolink-api==0.4.9 # homeassistant.components.youless 
youless-api==2.2.0 @@ -2526,22 +2530,22 @@ youless-api==2.2.0 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2025.02.19 +yt-dlp[default]==2025.03.26 # homeassistant.components.zamg zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.145.1 +zeroconf==0.146.0 # homeassistant.components.zeversolar zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.53 +zha==0.0.55 # homeassistant.components.zwave_js -zwave-js-server-python==0.60.1 +zwave-js-server-python==0.62.0 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 8c9308e739b..ff86915bbf3 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.4.1 -ruff==0.9.7 +ruff==0.11.0 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index fa823fa4834..1be6286d30c 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -139,7 +139,7 @@ uuid==1000000000.0.0 # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these # requirements so we can directly link HA versions to these library versions. -anyio==4.8.0 +anyio==4.9.0 h11==0.14.0 httpcore==1.0.7 diff --git a/script/hassfest/config_flow.py b/script/hassfest/config_flow.py index f842ec61b97..1f8b7d1139b 100644 --- a/script/hassfest/config_flow.py +++ b/script/hassfest/config_flow.py @@ -95,7 +95,6 @@ def _populate_brand_integrations( integration = integrations.get(domain) if not integration or integration.integration_type in ( "entity", - "hardware", "system", ): continue @@ -171,7 +170,7 @@ def _generate_integrations( result["integration"][domain] = metadata else: # integration integration = integrations[domain] - if integration.integration_type in ("entity", "system", "hardware"): + if integration.integration_type in ("entity", "system"): continue if integration.translated_name: diff --git a/script/hassfest/dependencies.py b/script/hassfest/dependencies.py index b22027500dd..52ea79d32fe 100644 --- a/script/hassfest/dependencies.py +++ b/script/hassfest/dependencies.py @@ -173,10 +173,11 @@ IGNORE_VIOLATIONS = { "logbook", # Temporary needed for migration until 2024.10 ("conversation", "assist_pipeline"), - # The onboarding integration provides a limited backup API used during - # onboarding. The onboarding integration waits for the backup manager - # to be ready before calling any backup functionality. + # The onboarding integration provides limited backup and cloud APIs for use + # during onboarding. The onboarding integration waits for the backup manager + # and cloud to be ready before calling any backup or cloud functionality. ("onboarding", "backup"), + ("onboarding", "cloud"), } diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 37de7857915..bfdb61096b6 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -14,7 +14,7 @@ WORKDIR "/github/workspace" COPY . 
/usr/src/homeassistant # Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.6.1,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv:0.6.10,source=/uv,target=/bin/uv \ # Uv creates a lock file in /tmp --mount=type=tmpfs,target=/tmp \ # Required for PyTurboJPEG @@ -24,8 +24,8 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.6.1,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.25.0 tqdm==4.67.1 ruff==0.9.7 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.3.5 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + stdlib-list==0.10.0 pipdeptree==2.25.1 tqdm==4.67.1 ruff==0.11.0 \ + PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.3.28 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py index 5f90fff81d5..ea6e657ec50 100644 --- a/script/hassfest/quality_scale.py +++ b/script/hassfest/quality_scale.py @@ -515,7 +515,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "ihc", "imgw_pib", "improv_ble", - "incomfort", "influxdb", "inkbird", "insteon", @@ -812,7 +811,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "pushsafer", "pvoutput", "pvpc_hourly_pricing", - "pyload", "qbittorrent", "qingping", "qld_bushfire", @@ -856,7 +854,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "risco", "rituals_perfume_genie", "rmvtransport", - "roborock", "rocketchat", "roku", "romy", @@ -923,7 +920,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "sma", "smappee", "smart_meter_texas", - "smartthings", "smarttub", "smarty", "smhi", @@ -1090,7 +1086,6 @@ INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [ "vizio", "vlc", "vlc_telnet", - "vodafone_station", "voicerss", "voip", "volkszaehler", @@ -1581,7 +1576,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "imap", "imgw_pib", "improv_ble", - "incomfort", "influxdb", "inkbird", "insteon", @@ -1589,7 +1583,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "intellifire", "intesishome", "ios", - "iron_os", "iotawatt", "iotty", "iperf3", @@ -1890,7 +1883,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "pushsafer", "pvoutput", "pvpc_hourly_pricing", - "pyload", "qbittorrent", "qingping", "qld_bushfire", @@ -1935,7 +1927,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "risco", "rituals_perfume_genie", "rmvtransport", - "roborock", "rocketchat", "roku", "romy", @@ -1965,7 +1956,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "season", "sendgrid", "sense", - "sensibo", "sensirion_ble", "sensorpro", "sensorpush", @@ -2004,7 +1994,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "sma", "smappee", "smart_meter_texas", - "smartthings", "smarttub", "smarty", "smhi", @@ -2177,7 +2166,6 @@ INTEGRATIONS_WITHOUT_SCALE = [ "vizio", "vlc", "vlc_telnet", - "vodafone_station", "voicerss", "voip", "volkszaehler", diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index c257f185f51..f4c05f504ca 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -29,6 +29,7 @@ ALLOW_NAME_TRANSLATION = { "cert_expiry", "cpuspeed", "emulated_roku", + "energenie_power_sockets", "faa_delays", "garages_amsterdam", "generic", @@ -40,6 +41,7 @@ ALLOW_NAME_TRANSLATION = { "local_ip", "local_todo", "nmap_tracker", + "remote_calendar", "rpi_power", "swiss_public_transport", "waze_travel_time", diff --git a/tests/common.py 
b/tests/common.py index df674d1824c..f426d2aebd2 100644 --- a/tests/common.py +++ b/tests/common.py @@ -29,6 +29,7 @@ from typing import Any, Literal, NoReturn from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import unused_port as get_test_instance_port # noqa: F401 +from annotatedyaml import load_yaml_dict, loader as yaml_loader import pytest from syrupy import SnapshotAssertion import voluptuous as vol @@ -109,7 +110,6 @@ from homeassistant.util.json import ( ) from homeassistant.util.signal_type import SignalType from homeassistant.util.unit_system import METRIC_SYSTEM -from homeassistant.util.yaml import load_yaml_dict, loader as yaml_loader from .testing_config.custom_components.test_constant_deprecation import ( import_deprecated_constant, diff --git a/tests/components/accuweather/snapshots/test_sensor.ambr b/tests/components/accuweather/snapshots/test_sensor.ambr index 257d29ae844..cbd2e14207e 100644 --- a/tests/components/accuweather/snapshots/test_sensor.ambr +++ b/tests/components/accuweather/snapshots/test_sensor.ambr @@ -7,11 +7,10 @@ 'capabilities': dict({ 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'config_entry_id': , @@ -50,11 +49,10 @@ 'friendly_name': 'Home Air quality day 0', 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'context': , @@ -73,11 +71,10 @@ 'capabilities': dict({ 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'config_entry_id': , @@ -116,11 +113,10 @@ 'friendly_name': 'Home Air quality day 1', 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'context': , @@ -139,11 +135,10 @@ 'capabilities': dict({ 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'config_entry_id': , @@ -182,11 +177,10 @@ 'friendly_name': 'Home Air quality day 2', 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'context': , @@ -205,11 +199,10 @@ 'capabilities': dict({ 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'config_entry_id': , @@ -248,11 +241,10 @@ 'friendly_name': 'Home Air quality day 3', 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'context': , @@ -271,11 +263,10 @@ 'capabilities': dict({ 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'config_entry_id': , @@ -314,11 +305,10 @@ 'friendly_name': 'Home Air quality day 4', 'options': list([ 'good', - 'hazardous', - 'high', - 'low', 'moderate', 'unhealthy', + 'very_unhealthy', + 'hazardous', ]), }), 'context': , diff --git a/tests/components/advantage_air/snapshots/test_climate.ambr b/tests/components/advantage_air/snapshots/test_climate.ambr index bd1fb431ae1..b2559b5bdfd 100644 --- a/tests/components/advantage_air/snapshots/test_climate.ambr +++ b/tests/components/advantage_air/snapshots/test_climate.ambr @@ -30,6 +30,7 @@ 'auto', ]), 'friendly_name': 'myauto', + 'hvac_action': , 'hvac_modes': list([ , , diff --git a/tests/components/airthings_ble/test_config_flow.py 
b/tests/components/airthings_ble/test_config_flow.py index 314594c612f..2adc5498e7b 100644 --- a/tests/components/airthings_ble/test_config_flow.py +++ b/tests/components/airthings_ble/test_config_flow.py @@ -159,7 +159,6 @@ async def test_user_setup_replaces_ignored_device(hass: HomeAssistant) -> None: domain=DOMAIN, unique_id="cc:cc:cc:cc:cc:cc", source=SOURCE_IGNORE, - data={CONF_ADDRESS: "cc:cc:cc:cc:cc:cc"}, ) entry.add_to_hass(hass) with ( diff --git a/tests/components/alarm_control_panel/test_init.py b/tests/components/alarm_control_panel/test_init.py index 168d7ecc269..01d103d01aa 100644 --- a/tests/components/alarm_control_panel/test_init.py +++ b/tests/components/alarm_control_panel/test_init.py @@ -1,7 +1,6 @@ """Test for the alarm control panel const module.""" from typing import Any -from unittest.mock import patch import pytest @@ -23,7 +22,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er, frame +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.typing import UNDEFINED, UndefinedType from . import help_async_setup_entry_init, help_async_unload_entry @@ -222,7 +221,6 @@ async def test_alarm_control_panel_with_default_code( mock_alarm_control_panel_entity.calls_disarm.assert_called_with("1234") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_alarm_control_panel_not_log_deprecated_state_warning( hass: HomeAssistant, mock_alarm_control_panel_entity: MockAlarmControlPanel, @@ -238,7 +236,6 @@ async def test_alarm_control_panel_not_log_deprecated_state_warning( @pytest.mark.usefixtures("mock_as_custom_component") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_alarm_control_panel_log_deprecated_state_warning_using_state_prop( hass: HomeAssistant, code_format: CodeFormat | None, @@ -292,18 +289,17 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_state_prop assert state is not None assert ( - "Detected that custom integration 'alarm_control_panel' is setting state" - " directly. Entity None (.MockLegacyAlarmControlPanel'>) should implement" " the 'alarm_state' property and return its state using the AlarmControlPanelState" - " enum at test_init.py, line 123: yield. This will stop working in Home Assistant" - " 2025.11, please create a bug report at" in caplog.text + " enum. This will stop working in Home Assistant 2025.11, please report it to" + " the author of the 'test' custom integration" in caplog.text ) @pytest.mark.usefixtures("mock_as_custom_component") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state_attr( hass: HomeAssistant, code_format: CodeFormat | None, @@ -345,6 +341,7 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state async_setup_entry=help_async_setup_entry_init, async_unload_entry=help_async_unload_entry, ), + built_in=False, ) setup_test_component_platform( hass, ALARM_CONTROL_PANEL_DOMAIN, [entity], from_config_entry=True @@ -355,7 +352,7 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state assert state is not None assert ( - "Detected that custom integration 'alarm_control_panel' is setting state directly." + "Detected that custom integration 'test' is setting state directly." 
not in caplog.text ) @@ -364,14 +361,14 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state ) assert ( - "Detected that custom integration 'alarm_control_panel' is setting state directly." + "Detected that custom integration 'test' is setting state directly." " Entity alarm_control_panel.test_alarm_control_panel" " (.MockLegacyAlarmControlPanel'>) should implement the 'alarm_state' property" - " and return its state using the AlarmControlPanelState enum at test_init.py, line 123:" - " yield. This will stop working in Home Assistant 2025.11," - " please create a bug report at" in caplog.text + " and return its state using the AlarmControlPanelState enum. " + "This will stop working in Home Assistant 2025.11, please report " + "it to the author of the 'test' custom integration" in caplog.text ) caplog.clear() await help_test_async_alarm_control_panel_service( @@ -379,13 +376,12 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state ) # Test we only log once assert ( - "Detected that custom integration 'alarm_control_panel' is setting state directly." + "Detected that custom integration 'test' is setting state directly." not in caplog.text ) @pytest.mark.usefixtures("mock_as_custom_component") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_alarm_control_panel_deprecated_state_does_not_break_state( hass: HomeAssistant, code_format: CodeFormat | None, @@ -428,6 +424,7 @@ async def test_alarm_control_panel_deprecated_state_does_not_break_state( async_setup_entry=help_async_setup_entry_init, async_unload_entry=help_async_unload_entry, ), + built_in=False, ) setup_test_component_platform( hass, ALARM_CONTROL_PANEL_DOMAIN, [entity], from_config_entry=True diff --git a/tests/components/ambient_network/snapshots/test_sensor.ambr b/tests/components/ambient_network/snapshots/test_sensor.ambr index 8637471cc60..ddf05c99b88 100644 --- a/tests/components/ambient_network/snapshots/test_sensor.ambr +++ b/tests/components/ambient_network/snapshots/test_sensor.ambr @@ -815,7 +815,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -854,6 +856,7 @@ 'device_class': 'wind_direction', 'friendly_name': 'Station A Wind direction', 'last_measured': HAFakeDatetime(2023, 11, 8, 12, 12, 0, 914000, tzinfo=zoneinfo.ZoneInfo(key='US/Pacific')), + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -1800,7 +1803,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -1839,6 +1844,7 @@ 'device_class': 'wind_direction', 'friendly_name': 'Station C Wind direction', 'last_measured': HAFakeDatetime(2024, 6, 6, 8, 28, 3, tzinfo=zoneinfo.ZoneInfo(key='US/Pacific')), + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2722,7 +2728,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'config_subentry_id': , 'device_class': None, @@ -2760,6 +2768,7 @@ 'attribution': 'Data provided by ambientnetwork.net', 'device_class': 'wind_direction', 'friendly_name': 'Station D Wind direction', + 'state_class': , 'unit_of_measurement': '°', }), 'context': , diff --git a/tests/components/analytics_insights/fixtures/current_data.json 
b/tests/components/analytics_insights/fixtures/current_data.json index c652a8c0154..ff1baca49ed 100644 --- a/tests/components/analytics_insights/fixtures/current_data.json +++ b/tests/components/analytics_insights/fixtures/current_data.json @@ -1050,7 +1050,6 @@ "melnor": 42, "plaato": 45, "freedompro": 26, - "sunweg": 3, "logi_circle": 18, "proxy": 16, "statsd": 4, diff --git a/tests/components/anthropic/conftest.py b/tests/components/anthropic/conftest.py index ce6b98c480c..7419ea6c28f 100644 --- a/tests/components/anthropic/conftest.py +++ b/tests/components/anthropic/conftest.py @@ -1,10 +1,11 @@ """Tests helpers.""" from collections.abc import AsyncGenerator -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest +from homeassistant.components.anthropic import CONF_CHAT_MODEL from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant from homeassistant.helpers import llm @@ -38,14 +39,27 @@ def mock_config_entry_with_assist( return mock_config_entry +@pytest.fixture +def mock_config_entry_with_extended_thinking( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Mock a config entry with assist.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + CONF_LLM_HASS_API: llm.LLM_API_ASSIST, + CONF_CHAT_MODEL: "claude-3-7-sonnet-latest", + }, + ) + return mock_config_entry + + @pytest.fixture async def mock_init_component( hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> AsyncGenerator[None]: """Initialize integration.""" - with patch( - "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock - ): + with patch("anthropic.resources.models.AsyncModels.retrieve"): assert await async_setup_component(hass, "anthropic", {}) await hass.async_block_till_done() yield diff --git a/tests/components/anthropic/snapshots/test_conversation.ambr b/tests/components/anthropic/snapshots/test_conversation.ambr index 93f3b03d9af..c0ed986f002 100644 --- a/tests/components/anthropic/snapshots/test_conversation.ambr +++ b/tests/components/anthropic/snapshots/test_conversation.ambr @@ -1,6 +1,324 @@ # serializer version: 1 +# name: test_extended_thinking_tool_call + list([ + dict({ + 'content': ''' + Current time is 16:00:00. Today's date is 2024-06-03. + You are a voice assistant for Home Assistant. + Answer questions about the world truthfully. + Answer in plain text. Keep it simple and to the point. + Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant. 
+ ''', + 'role': 'system', + }), + dict({ + 'content': 'Please call the test function', + 'role': 'user', + }), + dict({ + 'agent_id': 'conversation.claude', + 'content': 'Certainly, calling it now!', + 'role': 'assistant', + 'tool_calls': list([ + dict({ + 'id': 'toolu_0123456789AbCdEfGhIjKlM', + 'tool_args': dict({ + 'param1': 'test_value', + }), + 'tool_name': 'test_tool', + }), + ]), + }), + dict({ + 'agent_id': 'conversation.claude', + 'role': 'tool_result', + 'tool_call_id': 'toolu_0123456789AbCdEfGhIjKlM', + 'tool_name': 'test_tool', + 'tool_result': 'Test response', + }), + dict({ + 'agent_id': 'conversation.claude', + 'content': 'I have successfully called the function', + 'role': 'assistant', + 'tool_calls': None, + }), + ]) +# --- +# name: test_extended_thinking_tool_call.1 + list([ + dict({ + 'content': 'Please call the test function', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==', + 'thinking': 'The user asked me to call a test function.Is it a test? What would the function do? Would it violate any privacy or security policies?', + 'type': 'thinking', + }), + dict({ + 'data': 'EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9KWPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeVsJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOKiKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny', + 'type': 'redacted_thinking', + }), + dict({ + 'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==', + 'thinking': "Okay, let's give it a shot. 
Will I pass the test?", + 'type': 'thinking', + }), + dict({ + 'text': 'Certainly, calling it now!', + 'type': 'text', + }), + dict({ + 'id': 'toolu_0123456789AbCdEfGhIjKlM', + 'input': dict({ + 'param1': 'test_value', + }), + 'name': 'test_tool', + 'type': 'tool_use', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': list([ + dict({ + 'content': '"Test response"', + 'tool_use_id': 'toolu_0123456789AbCdEfGhIjKlM', + 'type': 'tool_result', + }), + ]), + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'I have successfully called the function', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content0] + list([ + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content1] + list([ + dict({ + 'content': 'What shape is a donut?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'A donut is a torus.', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content2] + list([ + dict({ + 'content': list([ + dict({ + 'text': 'What shape is a donut?', + 'type': 'text', + }), + dict({ + 'text': 'Can you tell me?', + 'type': 'text', + }), + ]), + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'A donut is a torus.', + 'type': 'text', + }), + dict({ + 'text': 'Hope this helps.', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content3] + list([ + dict({ + 'content': list([ + dict({ + 'text': 'What shape is a donut?', + 'type': 'text', + }), + dict({ + 'text': 'Can you tell me?', + 'type': 'text', + }), + dict({ + 'text': 'Please?', + 'type': 'text', + }), + ]), + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'A donut is a torus.', + 'type': 'text', + }), + dict({ + 'text': 'Hope this helps.', + 'type': 'text', + }), + dict({ + 'text': 'You are welcome.', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- +# name: test_history_conversion[content4] + list([ + dict({ + 'content': 'Turn off the lights and make me coffee', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Sure.', + 'type': 'text', + }), + dict({ + 'id': 'mock-tool-call-id', + 'input': dict({ + 'domain': 'light', + }), + 'name': 'HassTurnOff', + 'type': 'tool_use', + }), + dict({ + 'id': 'mock-tool-call-id-2', + 'input': dict({ + }), + 'name': 'MakeCoffee', + 'type': 'tool_use', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Thank you', + 'type': 'text', + }), + dict({ + 'content': '{"success": true, "response": "Lights are off."}', + 'tool_use_id': 'mock-tool-call-id', + 'type': 'tool_result', + }), + dict({ + 'content': '{"success": false, "response": "Not enough 
milk."}', + 'tool_use_id': 'mock-tool-call-id-2', + 'type': 'tool_result', + }), + ]), + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Should I add milk to the shopping list?', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + dict({ + 'content': 'Are you sure?', + 'role': 'user', + }), + dict({ + 'content': list([ + dict({ + 'text': 'Yes, I am sure!', + 'type': 'text', + }), + ]), + 'role': 'assistant', + }), + ]) +# --- # name: test_unknown_hass_api dict({ + 'continue_conversation': False, 'conversation_id': '1234', 'response': IntentResponse( card=dict({ diff --git a/tests/components/anthropic/test_config_flow.py b/tests/components/anthropic/test_config_flow.py index a5a025b00d0..30aba6e1b1f 100644 --- a/tests/components/anthropic/test_config_flow.py +++ b/tests/components/anthropic/test_config_flow.py @@ -21,9 +21,11 @@ from homeassistant.components.anthropic.const import ( CONF_PROMPT, CONF_RECOMMENDED, CONF_TEMPERATURE, + CONF_THINKING_BUDGET, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, + RECOMMENDED_THINKING_BUDGET, ) from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant @@ -49,7 +51,7 @@ async def test_form(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", + "homeassistant.components.anthropic.config_flow.anthropic.resources.models.AsyncModels.list", new_callable=AsyncMock, ), patch( @@ -94,6 +96,28 @@ async def test_options( assert options["data"][CONF_CHAT_MODEL] == RECOMMENDED_CHAT_MODEL +async def test_options_thinking_budget_more_than_max( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test error about thinking budget being more than max tokens.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + "prompt": "Speak like a pirate", + "max_tokens": 8192, + "chat_model": "claude-3-7-sonnet-latest", + "temperature": 1, + "thinking_budget": 16384, + }, + ) + await hass.async_block_till_done() + assert options["type"] is FlowResultType.FORM + assert options["errors"] == {"thinking_budget": "thinking_budget_too_large"} + + @pytest.mark.parametrize( ("side_effect", "error"), [ @@ -151,7 +175,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non ) with patch( - "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", + "homeassistant.components.anthropic.config_flow.anthropic.resources.models.AsyncModels.list", new_callable=AsyncMock, side_effect=side_effect, ): @@ -186,6 +210,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TEMPERATURE: 0.3, CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_THINKING_BUDGET: RECOMMENDED_THINKING_BUDGET, }, ), ( @@ -195,6 +220,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TEMPERATURE: 0.3, CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_THINKING_BUDGET: RECOMMENDED_THINKING_BUDGET, }, { CONF_RECOMMENDED: True, diff --git a/tests/components/anthropic/test_conversation.py b/tests/components/anthropic/test_conversation.py index a35df281fb6..67a4434a664 100644 --- a/tests/components/anthropic/test_conversation.py +++ b/tests/components/anthropic/test_conversation.py @@ 
-14,13 +14,18 @@ from anthropic.types import ( RawMessageStartEvent, RawMessageStopEvent, RawMessageStreamEvent, + RedactedThinkingBlock, + SignatureDelta, TextBlock, TextDelta, + ThinkingBlock, + ThinkingDelta, ToolUseBlock, Usage, ) from freezegun import freeze_time from httpx import URL, Request, Response +import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol @@ -28,7 +33,7 @@ from homeassistant.components import conversation from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import intent, llm +from homeassistant.helpers import chat_session, intent, llm from homeassistant.setup import async_setup_component from homeassistant.util import ulid as ulid_util @@ -86,6 +91,57 @@ def create_content_block( ] +def create_thinking_block( + index: int, thinking_parts: list[str] +) -> list[RawMessageStreamEvent]: + """Create a thinking block with the specified deltas.""" + return [ + RawContentBlockStartEvent( + type="content_block_start", + content_block=ThinkingBlock(signature="", thinking="", type="thinking"), + index=index, + ), + *[ + RawContentBlockDeltaEvent( + delta=ThinkingDelta(thinking=thinking_part, type="thinking_delta"), + index=index, + type="content_block_delta", + ) + for thinking_part in thinking_parts + ], + RawContentBlockDeltaEvent( + delta=SignatureDelta( + signature="ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/N" + "oB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ" + "4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo" + "21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==", + type="signature_delta", + ), + index=index, + type="content_block_delta", + ), + RawContentBlockStopEvent(index=index, type="content_block_stop"), + ] + + +def create_redacted_thinking_block(index: int) -> list[RawMessageStreamEvent]: + """Create a redacted thinking block.""" + return [ + RawContentBlockStartEvent( + type="content_block_start", + content_block=RedactedThinkingBlock( + data="EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9K" + "WPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeV" + "sJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOK" + "iKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny", + type="redacted_thinking", + ), + index=index, + ), + RawContentBlockStopEvent(index=index, type="content_block_stop"), + ] + + def create_tool_use_block( index: int, tool_id: str, tool_name: str, json_parts: list[str] ) -> list[RawMessageStreamEvent]: @@ -127,9 +183,7 @@ async def test_entity( CONF_LLM_HASS_API: "assist", }, ) - with patch( - "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock - ): + with patch("anthropic.resources.models.AsyncModels.retrieve"): await hass.config_entries.async_reload(mock_config_entry.entry_id) state = hass.states.get("conversation.claude") @@ -173,8 +227,11 @@ async def test_template_error( "prompt": "talk like a {% if True %}smarthome{% else %}pirate please.", }, ) - with patch( - "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + with ( + patch("anthropic.resources.models.AsyncModels.retrieve"), + patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ), ): await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -205,6 +262,7 @@ async def test_template_variables( }, 
) with ( + patch("anthropic.resources.models.AsyncModels.retrieve"), patch( "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock ) as mock_create, @@ -230,8 +288,8 @@ async def test_template_variables( result.response.speech["plain"]["speech"] == "Okay, let me take care of that for you." ) - assert "The user name is Test User." in mock_create.mock_calls[1][2]["system"] - assert "The user id is 12345." in mock_create.mock_calls[1][2]["system"] + assert "The user name is Test User." in mock_create.call_args.kwargs["system"] + assert "The user id is 12345." in mock_create.call_args.kwargs["system"] async def test_conversation_agent( @@ -379,7 +437,7 @@ async def test_function_exception( return stream_generator( create_messages( [ - *create_content_block(0, "Certainly, calling it now!"), + *create_content_block(0, ["Certainly, calling it now!"]), *create_tool_use_block( 1, "toolu_0123456789AbCdEfGhIjKlM", @@ -462,7 +520,7 @@ async def test_assist_api_tools_conversion( new_callable=AsyncMock, return_value=stream_generator( create_messages( - create_content_block(0, "Hello, how can I help you?"), + create_content_block(0, ["Hello, how can I help you?"]), ), ), ) as mock_create: @@ -497,9 +555,7 @@ async def test_unknown_hass_api( assert result == snapshot -@patch("anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock) async def test_conversation_id( - mock_create, hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component, @@ -509,7 +565,7 @@ async def test_conversation_id( def create_stream_generator(*args, **kwargs) -> Any: return stream_generator( create_messages( - create_content_block(0, "Hello, how can I help you?"), + create_content_block(0, ["Hello, how can I help you?"]), ), ) @@ -547,3 +603,283 @@ async def test_conversation_id( ) assert result.conversation_id == "koala" + + +async def test_extended_thinking( + hass: HomeAssistant, + mock_config_entry_with_extended_thinking: MockConfigEntry, + mock_init_component, +) -> None: + """Test extended thinking support.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=stream_generator( + create_messages( + [ + *create_thinking_block( + 0, + [ + "The user has just", + ' greeted me with "Hi".', + " This is a simple greeting an", + "d doesn't require any Home Assistant function", + " calls. I should respond with", + " a friendly greeting and let them know I'm available", + " to help with their smart home.", + ], + ), + *create_content_block(1, ["Hello, how can I help you today?"]), + ] + ), + ), + ): + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get( + result.conversation_id + ) + assert len(chat_log.content) == 3 + assert chat_log.content[1].content == "hello" + assert chat_log.content[2].content == "Hello, how can I help you today?" 
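For reference, the streamed responses mocked in these tests are assembled from anthropic.types events. Below is a minimal sketch of how one plain text content block could be built with the same constructors that create_thinking_block uses above; the helper name create_text_block_sketch is hypothetical, and the TextBlock/TextDelta keyword fields are assumed to mirror the ThinkingBlock/ThinkingDelta usage shown in this patch.

from anthropic.types import (
    RawContentBlockDeltaEvent,
    RawContentBlockStartEvent,
    RawContentBlockStopEvent,
    RawMessageStreamEvent,
    TextBlock,
    TextDelta,
)


def create_text_block_sketch(
    index: int, text_parts: list[str]
) -> list[RawMessageStreamEvent]:
    """Sketch: build the events for one streamed text content block."""
    return [
        # Open content block `index` with an empty text block.
        RawContentBlockStartEvent(
            type="content_block_start",
            content_block=TextBlock(text="", type="text"),
            index=index,
        ),
        # Stream the text one chunk at a time, mirroring how thinking deltas are emitted above.
        *[
            RawContentBlockDeltaEvent(
                delta=TextDelta(text=text_part, type="text_delta"),
                index=index,
                type="content_block_delta",
            )
            for text_part in text_parts
        ],
        # Close the block.
        RawContentBlockStopEvent(index=index, type="content_block_stop"),
    ]

A block built this way could then be wrapped with create_messages() and stream_generator(), as the surrounding tests do with create_content_block.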
+ + +async def test_redacted_thinking( + hass: HomeAssistant, + mock_config_entry_with_extended_thinking: MockConfigEntry, + mock_init_component, +) -> None: + """Test extended thinking with redacted thinking blocks.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=stream_generator( + create_messages( + [ + *create_redacted_thinking_block(0), + *create_redacted_thinking_block(1), + *create_redacted_thinking_block(2), + *create_content_block(3, ["How can I help you today?"]), + ] + ), + ), + ): + result = await conversation.async_converse( + hass, + "ANTHROPIC_MAGIC_STRING_TRIGGER_REDACTED_THINKING_46C9A13E193C177646C7398A9" + "8432ECCCE4C1253D5E2D82641AC0E52CC2876CB", + None, + Context(), + agent_id="conversation.claude", + ) + + chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get( + result.conversation_id + ) + assert len(chat_log.content) == 3 + assert chat_log.content[2].content == "How can I help you today?" + + +@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") +async def test_extended_thinking_tool_call( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_extended_thinking: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, +) -> None: + """Test that thinking blocks and their order are preserved in with tool calls.""" + agent_id = "conversation.claude" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.return_value = "Test response" + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + for content in message["content"]: + if not isinstance(content, str) and content["type"] == "tool_use": + return stream_generator( + create_messages( + create_content_block( + 0, ["I have ", "successfully called ", "the function"] + ), + ) + ) + + return stream_generator( + create_messages( + [ + *create_thinking_block( + 0, + [ + "The user asked me to", + " call a test function.", + "Is it a test? What", + " would the function", + " do? 
Would it violate", + " any privacy or security", + " policies?", + ], + ), + *create_redacted_thinking_block(1), + *create_thinking_block( + 2, ["Okay, let's give it a shot.", " Will I pass the test?"] + ), + *create_content_block(3, ["Certainly, calling it now!"]), + *create_tool_use_block( + 1, + "toolu_0123456789AbCdEfGhIjKlM", + "test_tool", + ['{"para', 'm1": "test_valu', 'e"}'], + ), + ] + ) + ) + + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create, + freeze_time("2024-06-03 23:00:00"), + ): + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get( + result.conversation_id + ) + + assert chat_log.content == snapshot + assert mock_create.mock_calls[1][2]["messages"] == snapshot + + +@pytest.mark.parametrize( + "content", + [ + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + ], + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + conversation.chat_log.UserContent("What shape is a donut?"), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="A donut is a torus." + ), + ], + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + conversation.chat_log.UserContent("What shape is a donut?"), + conversation.chat_log.UserContent("Can you tell me?"), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="A donut is a torus." + ), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="Hope this helps." + ), + ], + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + conversation.chat_log.UserContent("What shape is a donut?"), + conversation.chat_log.UserContent("Can you tell me?"), + conversation.chat_log.UserContent("Please?"), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="A donut is a torus." + ), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="Hope this helps." + ), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", content="You are welcome." 
+ ), + ], + [ + conversation.chat_log.SystemContent("You are a helpful assistant."), + conversation.chat_log.UserContent("Turn off the lights and make me coffee"), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", + content="Sure.", + tool_calls=[ + llm.ToolInput( + id="mock-tool-call-id", + tool_name="HassTurnOff", + tool_args={"domain": "light"}, + ), + llm.ToolInput( + id="mock-tool-call-id-2", + tool_name="MakeCoffee", + tool_args={}, + ), + ], + ), + conversation.chat_log.UserContent("Thank you"), + conversation.chat_log.ToolResultContent( + agent_id="conversation.claude", + tool_call_id="mock-tool-call-id", + tool_name="HassTurnOff", + tool_result={"success": True, "response": "Lights are off."}, + ), + conversation.chat_log.ToolResultContent( + agent_id="conversation.claude", + tool_call_id="mock-tool-call-id-2", + tool_name="MakeCoffee", + tool_result={"success": False, "response": "Not enough milk."}, + ), + conversation.chat_log.AssistantContent( + agent_id="conversation.claude", + content="Should I add milk to the shopping list?", + ), + ], + ], +) +async def test_history_conversion( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + snapshot: SnapshotAssertion, + content: list[conversation.chat_log.Content], +) -> None: + """Test conversion of chat_log entries into API parameters.""" + conversation_id = "conversation_id" + with ( + chat_session.async_get_chat_session(hass, conversation_id) as session, + conversation.async_get_chat_log(hass, session) as chat_log, + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=stream_generator( + create_messages( + [ + *create_content_block(0, ["Yes, I am sure!"]), + ] + ), + ), + ) as mock_create, + ): + chat_log.content = content + + await conversation.async_converse( + hass, + "Are you sure?", + conversation_id, + Context(), + agent_id="conversation.claude", + ) + + assert mock_create.mock_calls[0][2]["messages"] == snapshot diff --git a/tests/components/anthropic/test_init.py b/tests/components/anthropic/test_init.py index ee87bb708d0..305e442f52d 100644 --- a/tests/components/anthropic/test_init.py +++ b/tests/components/anthropic/test_init.py @@ -1,6 +1,6 @@ """Tests for the Anthropic integration.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from anthropic import ( APIConnectionError, @@ -55,8 +55,7 @@ async def test_init_error( ) -> None: """Test initialization errors.""" with patch( - "anthropic.resources.messages.AsyncMessages.create", - new_callable=AsyncMock, + "anthropic.resources.models.AsyncModels.retrieve", side_effect=side_effect, ): assert await async_setup_component(hass, "anthropic", {}) diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index 02ec7c04607..a0549f27f05 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import AsyncIterable, Generator from pathlib import Path from typing import Any -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch import pytest @@ -24,7 +24,7 @@ from homeassistant.components.assist_pipeline.pipeline import ( from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from 
homeassistant.helpers import chat_session, device_registry as dr from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.setup import async_setup_component @@ -379,3 +379,14 @@ def pipeline_storage(pipeline_data) -> PipelineStorageCollection: def make_10ms_chunk(header: bytes) -> bytes: """Return 10ms of zeros with the given header.""" return header + bytes(BYTES_PER_CHUNK - len(header)) + + +@pytest.fixture +def mock_chat_session(hass: HomeAssistant) -> Generator[chat_session.ChatSession]: + """Mock the ulid of chat sessions.""" + # pylint: disable-next=contextmanager-generator-missing-cleanup + with ( + patch("homeassistant.helpers.chat_session.ulid_now", return_value="mock-ulid"), + chat_session.async_get_chat_session(hass) as session, + ): + yield session diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index 11e6bc2339a..f772f877d3a 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -6,6 +6,11 @@ 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }), 'type': , }), @@ -45,6 +50,7 @@ dict({ 'data': dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -80,6 +86,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -98,6 +105,11 @@ 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }), 'type': , }), @@ -137,6 +149,7 @@ dict({ 'data': dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -172,6 +185,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -190,6 +204,11 @@ 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }), 'type': , }), @@ -229,6 +248,7 @@ dict({ 'data': dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -264,6 +284,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -282,6 +303,11 @@ 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }), 'type': , }), @@ -345,6 +371,7 @@ dict({ 'data': dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -380,6 
+407,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }), @@ -398,6 +426,11 @@ 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }), 'type': , }), @@ -432,7 +465,7 @@ list([ dict({ 'data': dict({ - 'conversation_id': 'mock-conversation-id', + 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , }), @@ -440,7 +473,7 @@ }), dict({ 'data': dict({ - 'conversation_id': 'mock-conversation-id', + 'conversation_id': 'mock-ulid', 'device_id': None, 'engine': 'conversation.home_assistant', 'intent_input': 'test input', @@ -452,6 +485,7 @@ dict({ 'data': dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -484,7 +518,7 @@ list([ dict({ 'data': dict({ - 'conversation_id': 'mock-conversation-id', + 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , }), @@ -492,7 +526,7 @@ }), dict({ 'data': dict({ - 'conversation_id': 'mock-conversation-id', + 'conversation_id': 'mock-ulid', 'device_id': None, 'engine': 'conversation.home_assistant', 'intent_input': 'test input', @@ -504,6 +538,7 @@ dict({ 'data': dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -536,7 +571,7 @@ list([ dict({ 'data': dict({ - 'conversation_id': 'mock-conversation-id', + 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , }), @@ -544,7 +579,7 @@ }), dict({ 'data': dict({ - 'conversation_id': 'mock-conversation-id', + 'conversation_id': 'mock-ulid', 'device_id': None, 'engine': 'conversation.home_assistant', 'intent_input': 'test input', @@ -556,6 +591,7 @@ dict({ 'data': dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -588,9 +624,14 @@ list([ dict({ 'data': dict({ - 'conversation_id': 'mock-conversation-id', + 'conversation_id': 'mock-ulid', 'language': 'en', 'pipeline': , + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }), 'type': , }), diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index f677fa6d8cf..57ae0095236 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -8,6 +8,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }) # --- # name: test_audio_pipeline.1 @@ -43,6 +48,7 @@ # name: test_audio_pipeline.4 dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -76,6 +82,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -92,6 +99,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 
'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }) # --- # name: test_audio_pipeline_debug.1 @@ -127,6 +139,7 @@ # name: test_audio_pipeline_debug.4 dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -160,6 +173,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -188,6 +202,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }) # --- # name: test_audio_pipeline_with_enhancements.1 @@ -223,6 +242,7 @@ # name: test_audio_pipeline_with_enhancements.4 dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -256,6 +276,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -272,6 +293,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }) # --- # name: test_audio_pipeline_with_wake_word_no_timeout.1 @@ -329,6 +355,7 @@ # name: test_audio_pipeline_with_wake_word_no_timeout.6 dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -362,6 +389,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', 'url': '/api/tts_proxy/test_token.mp3', }), }) @@ -378,6 +406,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'test_token.mp3', + 'url': '/api/tts_proxy/test_token.mp3', + }), }) # --- # name: test_audio_pipeline_with_wake_word_timeout.1 @@ -581,6 +614,11 @@ 'stt_binary_handler_id': None, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_pipeline_empty_tts_output.1 @@ -596,6 +634,7 @@ # name: test_pipeline_empty_tts_output.2 dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -629,6 +668,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_stt_cooldown_different_ids.1 @@ -640,6 +684,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_stt_cooldown_same_id @@ -651,6 +700,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_stt_cooldown_same_id.1 @@ -662,6 +716,11 @@ 
'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_stt_stream_failed @@ -673,6 +732,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_stt_stream_failed.1 @@ -715,6 +779,7 @@ # name: test_text_only_pipeline[extra_msg0].2 dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -762,6 +827,7 @@ # name: test_text_only_pipeline[extra_msg1].2 dict({ 'intent_output': dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -791,28 +857,6 @@ 'message': 'Timeout running pipeline', }) # --- -# name: test_tts_failed - dict({ - 'conversation_id': 'mock-ulid', - 'language': 'en', - 'pipeline': , - 'runner_data': dict({ - 'stt_binary_handler_id': None, - 'timeout': 300, - }), - }) -# --- -# name: test_tts_failed.1 - dict({ - 'engine': 'test', - 'language': 'en-US', - 'tts_input': 'Lights are on.', - 'voice': 'james_earl_jones', - }) -# --- -# name: test_tts_failed.2 - None -# --- # name: test_wake_word_cooldown_different_entities dict({ 'conversation_id': 'mock-ulid', @@ -822,6 +866,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_wake_word_cooldown_different_entities.1 @@ -833,6 +882,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_wake_word_cooldown_different_entities.2 @@ -885,6 +939,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_wake_word_cooldown_different_ids.1 @@ -896,6 +955,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_wake_word_cooldown_different_ids.2 @@ -951,6 +1015,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_wake_word_cooldown_same_id.1 @@ -962,6 +1031,11 @@ 'stt_binary_handler_id': 1, 'timeout': 300, }), + 'tts_output': dict({ + 'mime_type': 'audio/mpeg', + 'token': 'mocked-token.mp3', + 'url': '/api/tts_proxy/mocked-token.mp3', + }), }) # --- # name: test_wake_word_cooldown_same_id.2 diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 1651950c173..0e04d1f0cd2 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -27,7 +27,7 @@ from homeassistant.components.assist_pipeline.const import ( ) from homeassistant.const import MATCH_ALL from homeassistant.core import Context, HomeAssistant -from homeassistant.helpers import intent +from homeassistant.helpers import chat_session, intent from homeassistant.setup import async_setup_component from .conftest import ( @@ -43,13 
+43,21 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator @pytest.fixture(autouse=True) -def mock_ulid() -> Generator[Mock]: - """Mock the ulid of chat sessions.""" - with patch("homeassistant.helpers.chat_session.ulid_now") as mock_ulid_now: - mock_ulid_now.return_value = "mock-ulid" +def mock_chat_session_id() -> Generator[Mock]: + """Mock the conversation ID of chat sessions.""" + with patch( + "homeassistant.helpers.chat_session.ulid_now", return_value="mock-ulid" + ) as mock_ulid_now: yield mock_ulid_now +@pytest.fixture(autouse=True) +def mock_tts_token() -> Generator[None]: + """Mock the TTS token for URLs.""" + with patch("secrets.token_urlsafe", return_value="mocked-token"): + yield + + def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: """Process events to remove dynamic values.""" processed = [] @@ -675,6 +683,7 @@ async def test_wake_word_detection_aborted( mock_wake_word_provider_entity: MockWakeWordEntity, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, + mock_chat_session: chat_session.ChatSession, snapshot: SnapshotAssertion, ) -> None: """Test creating a pipeline from an audio stream with wake word.""" @@ -693,7 +702,7 @@ async def test_wake_word_detection_aborted( pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) pipeline_input = assist_pipeline.pipeline.PipelineInput( - conversation_id="mock-conversation-id", + session=mock_chat_session, device_id=None, stt_metadata=stt.SpeechMetadata( language="", @@ -766,6 +775,7 @@ async def test_tts_audio_output( mock_tts_provider: MockTTSProvider, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, + mock_chat_session: chat_session.ChatSession, snapshot: SnapshotAssertion, ) -> None: """Test using tts_audio_output with wav sets options correctly.""" @@ -780,7 +790,7 @@ async def test_tts_audio_output( pipeline_input = assist_pipeline.pipeline.PipelineInput( tts_input="This is a test.", - conversation_id="mock-conversation-id", + session=mock_chat_session, device_id=None, run=assist_pipeline.pipeline.PipelineRun( hass, @@ -795,10 +805,16 @@ async def test_tts_audio_output( await pipeline_input.validate() # Verify TTS audio settings - assert pipeline_input.run.tts_options is not None - assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_FORMAT) == "wav" - assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_SAMPLE_RATE) == 16000 - assert pipeline_input.run.tts_options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS) == 1 + assert pipeline_input.run.tts_stream.options is not None + assert pipeline_input.run.tts_stream.options.get(tts.ATTR_PREFERRED_FORMAT) == "wav" + assert ( + pipeline_input.run.tts_stream.options.get(tts.ATTR_PREFERRED_SAMPLE_RATE) + == 16000 + ) + assert ( + pipeline_input.run.tts_stream.options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS) + == 1 + ) with patch.object(mock_tts_provider, "get_tts_audio") as mock_get_tts_audio: await pipeline_input.execute() @@ -807,9 +823,7 @@ async def test_tts_audio_output( if event.type == assist_pipeline.PipelineEventType.TTS_END: # We must fetch the media URL to trigger the TTS assert event.data - media_id = event.data["tts_output"]["media_id"] - resolved = await media_source.async_resolve_media(hass, media_id, None) - await client.get(resolved.url) + await client.get(event.data["tts_output"]["url"]) # Ensure that no unsupported options were passed in assert mock_get_tts_audio.called @@ -823,6 +837,7 @@ async def test_tts_wav_preferred_format( hass_client: 
ClientSessionGenerator, mock_tts_provider: MockTTSProvider, init_components, + mock_chat_session: chat_session.ChatSession, pipeline_data: assist_pipeline.pipeline.PipelineData, ) -> None: """Test that preferred format options are given to the TTS system if supported.""" @@ -837,7 +852,7 @@ async def test_tts_wav_preferred_format( pipeline_input = assist_pipeline.pipeline.PipelineInput( tts_input="This is a test.", - conversation_id="mock-conversation-id", + session=mock_chat_session, device_id=None, run=assist_pipeline.pipeline.PipelineRun( hass, @@ -872,9 +887,7 @@ async def test_tts_wav_preferred_format( if event.type == assist_pipeline.PipelineEventType.TTS_END: # We must fetch the media URL to trigger the TTS assert event.data - media_id = event.data["tts_output"]["media_id"] - resolved = await media_source.async_resolve_media(hass, media_id, None) - await client.get(resolved.url) + await client.get(event.data["tts_output"]["url"]) assert mock_get_tts_audio.called options = mock_get_tts_audio.call_args_list[0].kwargs["options"] @@ -891,6 +904,7 @@ async def test_tts_dict_preferred_format( hass_client: ClientSessionGenerator, mock_tts_provider: MockTTSProvider, init_components, + mock_chat_session: chat_session.ChatSession, pipeline_data: assist_pipeline.pipeline.PipelineData, ) -> None: """Test that preferred format options are given to the TTS system if supported.""" @@ -905,7 +919,7 @@ async def test_tts_dict_preferred_format( pipeline_input = assist_pipeline.pipeline.PipelineInput( tts_input="This is a test.", - conversation_id="mock-conversation-id", + session=mock_chat_session, device_id=None, run=assist_pipeline.pipeline.PipelineRun( hass, @@ -945,9 +959,7 @@ async def test_tts_dict_preferred_format( if event.type == assist_pipeline.PipelineEventType.TTS_END: # We must fetch the media URL to trigger the TTS assert event.data - media_id = event.data["tts_output"]["media_id"] - resolved = await media_source.async_resolve_media(hass, media_id, None) - await client.get(resolved.url) + await client.get(event.data["tts_output"]["url"]) assert mock_get_tts_audio.called options = mock_get_tts_audio.call_args_list[0].kwargs["options"] @@ -962,6 +974,7 @@ async def test_tts_dict_preferred_format( async def test_sentence_trigger_overrides_conversation_agent( hass: HomeAssistant, init_components, + mock_chat_session: chat_session.ChatSession, pipeline_data: assist_pipeline.pipeline.PipelineData, ) -> None: """Test that sentence triggers are checked before a non-default conversation agent.""" @@ -991,7 +1004,7 @@ async def test_sentence_trigger_overrides_conversation_agent( pipeline_input = assist_pipeline.pipeline.PipelineInput( intent_input="test trigger sentence", - conversation_id="mock-conversation-id", + session=mock_chat_session, run=assist_pipeline.pipeline.PipelineRun( hass, context=Context(), @@ -1039,6 +1052,7 @@ async def test_sentence_trigger_overrides_conversation_agent( async def test_prefer_local_intents( hass: HomeAssistant, init_components, + mock_chat_session: chat_session.ChatSession, pipeline_data: assist_pipeline.pipeline.PipelineData, ) -> None: """Test that the default agent is checked first when local intents are preferred.""" @@ -1069,7 +1083,7 @@ async def test_prefer_local_intents( pipeline_input = assist_pipeline.pipeline.PipelineInput( intent_input="I'd like to order a stout please", - conversation_id="mock-conversation-id", + session=mock_chat_session, run=assist_pipeline.pipeline.PipelineRun( hass, context=Context(), @@ -1113,10 +1127,150 @@ async def 
test_prefer_local_intents( ) +async def test_intent_continue_conversation( + hass: HomeAssistant, + init_components, + mock_chat_session: chat_session.ChatSession, + pipeline_data: assist_pipeline.pipeline.PipelineData, +) -> None: + """Test that a conversation agent flagging continue conversation gets response.""" + events: list[assist_pipeline.PipelineEvent] = [] + + # Fake a test agent and prefer local intents + pipeline_store = pipeline_data.pipeline_store + pipeline_id = pipeline_store.async_get_preferred_item() + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + await assist_pipeline.pipeline.async_update_pipeline( + hass, pipeline, conversation_engine="test-agent" + ) + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="Set a timer", + session=mock_chat_session, + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + ), + ) + + # Ensure prepare succeeds + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_get_agent_info", + return_value=conversation.AgentInfo(id="test-agent", name="Test Agent"), + ): + await pipeline_input.validate() + + response = intent.IntentResponse("en") + response.async_set_speech("For how long?") + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse", + return_value=conversation.ConversationResult( + response=response, + conversation_id=mock_chat_session.conversation_id, + continue_conversation=True, + ), + ) as mock_async_converse: + await pipeline_input.execute() + + mock_async_converse.assert_called() + + results = [ + event.data + for event in events + if event.type + in ( + assist_pipeline.PipelineEventType.INTENT_START, + assist_pipeline.PipelineEventType.INTENT_END, + ) + ] + assert results[1]["intent_output"]["continue_conversation"] is True + + # Change conversation agent to default one and register sentence trigger that should not be called + await assist_pipeline.pipeline.async_update_pipeline( + hass, pipeline, conversation_engine=None + ) + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": { + "platform": "conversation", + "command": ["Hello"], + }, + "action": { + "set_conversation_response": "test trigger response", + }, + } + }, + ) + + # Because we did continue conversation, it should respond to the test agent again. + events.clear() + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="Hello", + session=mock_chat_session, + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + ), + ) + + # Ensure prepare succeeds + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_get_agent_info", + return_value=conversation.AgentInfo(id="test-agent", name="Test Agent"), + ) as mock_prepare: + await pipeline_input.validate() + + # It requested test agent even if that was not default agent. 
+ assert mock_prepare.mock_calls[0][1][1] == "test-agent" + + response = intent.IntentResponse("en") + response.async_set_speech("Timer set for 20 minutes") + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse", + return_value=conversation.ConversationResult( + response=response, + conversation_id=mock_chat_session.conversation_id, + ), + ) as mock_async_converse: + await pipeline_input.execute() + + mock_async_converse.assert_called() + + # Snapshot will show it was still handled by the test agent and not default agent + results = [ + event.data + for event in events + if event.type + in ( + assist_pipeline.PipelineEventType.INTENT_START, + assist_pipeline.PipelineEventType.INTENT_END, + ) + ] + assert results[0]["engine"] == "test-agent" + assert results[1]["intent_output"]["continue_conversation"] is False + + async def test_stt_language_used_instead_of_conversation_language( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, + mock_chat_session: chat_session.ChatSession, snapshot: SnapshotAssertion, ) -> None: """Test that the STT language is used first when the conversation language is '*' (all languages).""" @@ -1147,7 +1301,7 @@ async def test_stt_language_used_instead_of_conversation_language( pipeline_input = assist_pipeline.pipeline.PipelineInput( intent_input="test input", - conversation_id="mock-conversation-id", + session=mock_chat_session, run=assist_pipeline.pipeline.PipelineRun( hass, context=Context(), @@ -1192,6 +1346,7 @@ async def test_tts_language_used_instead_of_conversation_language( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, + mock_chat_session: chat_session.ChatSession, snapshot: SnapshotAssertion, ) -> None: """Test that the TTS language is used after STT when the conversation language is '*' (all languages).""" @@ -1222,7 +1377,7 @@ async def test_tts_language_used_instead_of_conversation_language( pipeline_input = assist_pipeline.pipeline.PipelineInput( intent_input="test input", - conversation_id="mock-conversation-id", + session=mock_chat_session, run=assist_pipeline.pipeline.PipelineRun( hass, context=Context(), @@ -1267,6 +1422,7 @@ async def test_pipeline_language_used_instead_of_conversation_language( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, + mock_chat_session: chat_session.ChatSession, snapshot: SnapshotAssertion, ) -> None: """Test that the pipeline language is used last when the conversation language is '*' (all languages).""" @@ -1297,7 +1453,7 @@ async def test_pipeline_language_used_instead_of_conversation_language( pipeline_input = assist_pipeline.pipeline.PipelineInput( intent_input="test input", - conversation_id="mock-conversation-id", + session=mock_chat_session, run=assist_pipeline.pipeline.PipelineRun( hass, context=Context(), diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index a7f6fbf7553..d67a0fd1726 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -684,7 +684,7 @@ def test_fallback_intent_filter() -> None: entities_list=[], ) ) - is True + is False ) assert ( _async_local_fallback_intent_filter( diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index f856bbe7f61..060c0dce660 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -20,6 
+20,8 @@ from homeassistant.components.assist_pipeline.pipeline import ( DeviceAudioQueue, Pipeline, PipelineData, + async_get_pipelines, + async_update_pipeline, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -38,13 +40,21 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -def mock_ulid() -> Generator[Mock]: - """Mock the ulid of chat sessions.""" - with patch("homeassistant.helpers.chat_session.ulid_now") as mock_ulid_now: - mock_ulid_now.return_value = "mock-ulid" +def mock_chat_session_id() -> Generator[Mock]: + """Mock the conversation ID of chat sessions.""" + with patch( + "homeassistant.helpers.chat_session.ulid_now", return_value="mock-ulid" + ) as mock_ulid_now: yield mock_ulid_now +@pytest.fixture(autouse=True) +def mock_tts_token() -> Generator[None]: + """Mock the TTS token for URLs.""" + with patch("secrets.token_urlsafe", return_value="mocked-token"): + yield + + @pytest.mark.parametrize( "extra_msg", [ @@ -825,74 +835,6 @@ async def test_stt_stream_failed( assert msg["result"] == {"events": events} -async def test_tts_failed( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - init_components, - snapshot: SnapshotAssertion, -) -> None: - """Test pipeline run with text-to-speech error.""" - events = [] - client = await hass_ws_client(hass) - - with patch( - "homeassistant.components.media_source.async_resolve_media", - side_effect=RuntimeError, - ): - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "tts", - "end_stage": "tts", - "input": {"text": "Lights are on."}, - } - ) - - # result - msg = await client.receive_json() - assert msg["success"] - - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) - - # tts start - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) - - # tts error - msg = await client.receive_json() - assert msg["event"]["type"] == "error" - assert msg["event"]["data"]["code"] == "tts-failed" - events.append(msg["event"]) - - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) - - pipeline_data: PipelineData = hass.data[DOMAIN] - pipeline_id = list(pipeline_data.pipeline_debug)[0] - pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] - - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} - - async def test_tts_provider_missing( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -903,23 +845,22 @@ async def test_tts_provider_missing( """Test pipeline run with text-to-speech error.""" client = await hass_ws_client(hass) - with patch( - "homeassistant.components.tts.async_support_options", - side_effect=HomeAssistantError, - ): - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "tts", - "end_stage": "tts", - "input": {"text": "Lights are on."}, - } - ) + pipelines = async_get_pipelines(hass) + await async_update_pipeline(hass, pipelines[0], tts_engine="unavailable") - # result - msg = await 
client.receive_json() - assert not msg["success"] - assert msg["error"]["code"] == "tts-not-supported" + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "tts", + "end_stage": "tts", + "input": {"text": "Lights are on."}, + } + ) + + # result + msg = await client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "tts-not-supported" async def test_tts_provider_bad_options( @@ -933,8 +874,8 @@ async def test_tts_provider_bad_options( client = await hass_ws_client(hass) with patch( - "homeassistant.components.tts.async_support_options", - return_value=False, + "homeassistant.components.tts.SpeechManager.process_options", + side_effect=HomeAssistantError("Language not supported"), ): await client.send_json_auto_id( { diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py index 42b4adf742c..8050b23f5ff 100644 --- a/tests/components/assist_satellite/test_entity.py +++ b/tests/components/assist_satellite/test_entity.py @@ -22,6 +22,7 @@ from homeassistant.components.assist_satellite import ( AssistSatelliteAnnouncement, SatelliteBusyError, ) +from homeassistant.components.assist_satellite.const import PREANNOUNCE_URL from homeassistant.components.assist_satellite.entity import AssistSatelliteState from homeassistant.components.media_source import PlayMedia from homeassistant.config_entries import ConfigEntry @@ -31,6 +32,8 @@ from homeassistant.exceptions import HomeAssistantError from . import ENTITY_ID from .conftest import MockAssistSatellite +from tests.components.tts.common import MockResultStream + @pytest.fixture def mock_chat_session_conversation_id() -> Generator[Mock]: @@ -183,11 +186,12 @@ async def test_new_pipeline_cancels_pipeline( ("service_data", "expected_params"), [ ( - {"message": "Hello"}, + {"message": "Hello", "preannounce": False}, AssistSatelliteAnnouncement( message="Hello", - media_id="https://www.home-assistant.io/resolved.mp3", + media_id="http://10.10.10.10:8123/api/tts_proxy/test-token", original_media_id="media-source://bla", + tts_token="test-token", media_id_source="tts", ), ), @@ -195,23 +199,40 @@ async def test_new_pipeline_cancels_pipeline( { "message": "Hello", "media_id": "media-source://given", + "preannounce": False, }, AssistSatelliteAnnouncement( message="Hello", media_id="https://www.home-assistant.io/resolved.mp3", original_media_id="media-source://given", + tts_token=None, media_id_source="media_id", ), ), ( - {"media_id": "http://example.com/bla.mp3"}, + {"media_id": "http://example.com/bla.mp3", "preannounce": False}, AssistSatelliteAnnouncement( message="", media_id="http://example.com/bla.mp3", original_media_id="http://example.com/bla.mp3", + tts_token=None, media_id_source="url", ), ), + ( + { + "media_id": "http://example.com/bla.mp3", + "preannounce_media_id": "http://example.com/preannounce.mp3", + }, + AssistSatelliteAnnouncement( + message="", + media_id="http://example.com/bla.mp3", + original_media_id="http://example.com/bla.mp3", + tts_token=None, + media_id_source="url", + preannounce_media_id="http://example.com/preannounce.mp3", + ), + ), ], ) async def test_announce( @@ -243,9 +264,17 @@ async def test_announce( with ( patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + "homeassistant.components.tts.generate_media_source_id", new=tts_generate_media_source_id, ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud", + ), + patch( + 
"homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), patch( "homeassistant.components.media_source.async_resolve_media", return_value=PlayMedia( @@ -341,6 +370,24 @@ async def test_announce_cancels_pipeline( mock_async_announce.assert_called_once() +async def test_announce_default_preannounce( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test announcing on a device with the default preannouncement sound.""" + + async def async_announce(announcement): + assert announcement.preannounce_media_id.endswith(PREANNOUNCE_URL) + + with patch.object(entity, "async_announce", new=async_announce): + await hass.services.async_call( + "assist_satellite", + "announce", + {"media_id": "test-media-id"}, + target={"entity_id": "assist_satellite.test_entity"}, + blocking=True, + ) + + async def test_context_refresh( hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite ) -> None: @@ -494,13 +541,15 @@ async def test_vad_sensitivity_entity_not_found( { "start_message": "Hello", "extra_system_prompt": "Better system prompt", + "preannounce": False, }, ( "mock-conversation-id", "Better system prompt", AssistSatelliteAnnouncement( message="Hello", - media_id="https://www.home-assistant.io/resolved.mp3", + media_id="http://10.10.10.10:8123/api/tts_proxy/test-token", + tts_token="test-token", original_media_id="media-source://generated", media_id_source="tts", ), @@ -510,6 +559,7 @@ async def test_vad_sensitivity_entity_not_found( { "start_message": "Hello", "start_media_id": "media-source://given", + "preannounce": False, }, ( "mock-conversation-id", @@ -517,24 +567,47 @@ async def test_vad_sensitivity_entity_not_found( AssistSatelliteAnnouncement( message="Hello", media_id="https://www.home-assistant.io/resolved.mp3", + tts_token=None, original_media_id="media-source://given", media_id_source="media_id", ), ), ), ( - {"start_media_id": "http://example.com/given.mp3"}, + { + "start_media_id": "http://example.com/given.mp3", + "preannounce": False, + }, ( "mock-conversation-id", None, AssistSatelliteAnnouncement( message="", media_id="http://example.com/given.mp3", + tts_token=None, original_media_id="http://example.com/given.mp3", media_id_source="url", ), ), ), + ( + { + "start_media_id": "http://example.com/given.mp3", + "preannounce_media_id": "http://example.com/preannounce.mp3", + }, + ( + "mock-conversation-id", + None, + AssistSatelliteAnnouncement( + message="", + media_id="http://example.com/given.mp3", + tts_token=None, + original_media_id="http://example.com/given.mp3", + media_id_source="url", + preannounce_media_id="http://example.com/preannounce.mp3", + ), + ), + ), ], ) @pytest.mark.usefixtures("mock_chat_session_conversation_id") @@ -546,6 +619,13 @@ async def test_start_conversation( expected_params: tuple[str, str], ) -> None: """Test starting a conversation on a device.""" + original_start_conversation = entity.async_start_conversation + + async def async_start_conversation(start_announcement): + # Verify state change + assert entity.state == AssistSatelliteState.RESPONDING + await original_start_conversation(start_announcement) + await async_update_pipeline( hass, async_get_pipeline(hass), @@ -554,9 +634,17 @@ async def test_start_conversation( with ( patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + "homeassistant.components.tts.generate_media_source_id", return_value="media-source://generated", ), + patch( + 
"homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), patch( "homeassistant.components.media_source.async_resolve_media", return_value=PlayMedia( @@ -564,6 +652,7 @@ async def test_start_conversation( mime_type="audio/mp3", ), ), + patch.object(entity, "async_start_conversation", new=async_start_conversation), ): await hass.services.async_call( "assist_satellite", @@ -572,6 +661,7 @@ async def test_start_conversation( target={"entity_id": "assist_satellite.test_entity"}, blocking=True, ) + assert entity.state == AssistSatelliteState.IDLE assert entity.start_conversations[0] == expected_params @@ -592,6 +682,32 @@ async def test_start_conversation_reject_builtin_agent( ) +async def test_start_conversation_default_preannounce( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test starting a conversation on a device with the default preannouncement sound.""" + + async def async_start_conversation(start_announcement): + assert PREANNOUNCE_URL in start_announcement.preannounce_media_id + + await async_update_pipeline( + hass, + async_get_pipeline(hass), + conversation_engine="conversation.some_llm", + ) + + with ( + patch.object(entity, "async_start_conversation", new=async_start_conversation), + ): + await hass.services.async_call( + "assist_satellite", + "start_conversation", + {"start_media_id": "test-media-id"}, + target={"entity_id": "assist_satellite.test_entity"}, + blocking=True, + ) + + async def test_wake_word_start_keeps_responding( hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite ) -> None: diff --git a/tests/components/assist_satellite/test_intent.py b/tests/components/assist_satellite/test_intent.py index 9304229dbe3..0e531811adc 100644 --- a/tests/components/assist_satellite/test_intent.py +++ b/tests/components/assist_satellite/test_intent.py @@ -4,28 +4,28 @@ from unittest.mock import patch import pytest -from homeassistant.components.media_source import PlayMedia from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import intent +from homeassistant.setup import async_setup_component from .conftest import TEST_DOMAIN, MockAssistSatellite +from tests.components.tts.common import MockResultStream + @pytest.fixture -def mock_tts(): +async def mock_tts(hass: HomeAssistant): """Mock TTS service.""" + assert await async_setup_component(hass, "tts", {}) with ( patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + "homeassistant.components.tts.generate_media_source_id", return_value="media-source://bla", ), patch( - "homeassistant.components.media_source.async_resolve_media", - return_value=PlayMedia( - url="https://www.home-assistant.io/resolved.mp3", - mime_type="audio/mp3", - ), + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), ), ): yield @@ -41,9 +41,13 @@ async def test_broadcast_intent( ) -> None: """Test we can invoke a broadcast intent.""" - result = await intent.async_handle( - hass, "test", intent.INTENT_BROADCAST, {"message": {"value": "Hello"}} - ) + with patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud", + ): + result = await intent.async_handle( + hass, "test", intent.INTENT_BROADCAST, {"message": {"value": "Hello"}} + ) assert result.as_dict() 
== { "card": {}, @@ -71,13 +75,17 @@ async def test_broadcast_intent( assert len(entity2.announcements) == 1 assert len(entity_no_features.announcements) == 0 - result = await intent.async_handle( - hass, - "test", - intent.INTENT_BROADCAST, - {"message": {"value": "Hello"}}, - device_id=entity.device_entry.id, - ) + with patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud", + ): + result = await intent.async_handle( + hass, + "test", + intent.INTENT_BROADCAST, + {"message": {"value": "Hello"}}, + device_id=entity.device_entry.id, + ) # Broadcast doesn't target the device that triggered it. assert result.as_dict() == { "card": {}, diff --git a/tests/components/assist_satellite/test_websocket_api.py b/tests/components/assist_satellite/test_websocket_api.py index f0a8f02fc50..23eec7e8461 100644 --- a/tests/components/assist_satellite/test_websocket_api.py +++ b/tests/components/assist_satellite/test_websocket_api.py @@ -445,6 +445,7 @@ async def test_connection_test( assert len(entity.announcements) == 1 assert entity.announcements[0].message == "" + assert entity.announcements[0].preannounce_media_id is None announcement_media_id = entity.announcements[0].media_id hass_url = "http://10.10.10.10:8123" assert announcement_media_id.startswith( diff --git a/tests/components/awair/test_sensor.py b/tests/components/awair/test_sensor.py index 8c9cd6e3a24..040deaf8f80 100644 --- a/tests/components/awair/test_sensor.py +++ b/tests/components/awair/test_sensor.py @@ -127,7 +127,7 @@ async def test_awair_gen1_sensors( assert_expected_properties( hass, entity_registry, - "sensor.living_room_vocs", + "sensor.living_room_volatile_organic_compounds_parts", f"{AWAIR_UUID}_{SENSOR_TYPES_MAP[API_VOC].unique_id_tag}", "366", { diff --git a/tests/components/azure_devops/snapshots/test_sensor.ambr b/tests/components/azure_devops/snapshots/test_sensor.ambr index 0b8f35497c6..3fe4d470a63 100644 --- a/tests/components/azure_devops/snapshots/test_sensor.ambr +++ b/tests/components/azure_devops/snapshots/test_sensor.ambr @@ -131,7 +131,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CI latest build id', + 'original_name': 'CI latest build ID', 'platform': 'azure_devops', 'previous_unique_id': None, 'supported_features': 0, @@ -143,7 +143,7 @@ # name: test_sensors[sensor.testproject_ci_latest_build_id-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build id', + 'friendly_name': 'testproject CI latest build ID', }), 'context': , 'entity_id': 'sensor.testproject_ci_latest_build_id', @@ -462,7 +462,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CI latest build url', + 'original_name': 'CI latest build URL', 'platform': 'azure_devops', 'previous_unique_id': None, 'supported_features': 0, @@ -474,7 +474,7 @@ # name: test_sensors[sensor.testproject_ci_latest_build_url-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build url', + 'friendly_name': 'testproject CI latest build URL', }), 'context': , 'entity_id': 'sensor.testproject_ci_latest_build_url', @@ -526,7 +526,7 @@ # name: test_sensors_missing_data[sensor.testproject_ci_latest_build_id-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build id', + 'friendly_name': 'testproject CI latest build ID', }), 'context': , 'entity_id': 'sensor.testproject_ci_latest_build_id', @@ -619,7 +619,7 @@ # name: 
test_sensors_missing_data[sensor.testproject_ci_latest_build_url-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build url', + 'friendly_name': 'testproject CI latest build URL', }), 'context': , 'entity_id': 'sensor.testproject_ci_latest_build_url', diff --git a/tests/components/azure_storage/test_config_flow.py b/tests/components/azure_storage/test_config_flow.py index ed8bbed0718..67dc44f9f2c 100644 --- a/tests/components/azure_storage/test_config_flow.py +++ b/tests/components/azure_storage/test_config_flow.py @@ -15,6 +15,7 @@ from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . import setup_integration from .const import USER_INPUT from tests.common import MockConfigEntry @@ -111,3 +112,87 @@ async def test_abort_if_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the reauth flow works.""" + + await setup_integration(hass, mock_config_entry) + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_STORAGE_ACCOUNT_KEY: "new_key"} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_config_entry.data == { + **USER_INPUT, + CONF_STORAGE_ACCOUNT_KEY: "new_key", + } + + +async def test_reauth_flow_errors( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the reauth flow works with errors.""" + + await setup_integration(hass, mock_config_entry) + + mock_client.exists.side_effect = Exception() + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_STORAGE_ACCOUNT_KEY: "new_key"} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + # Fix the error and finish the flow successfully + mock_client.exists.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_STORAGE_ACCOUNT_KEY: "new_key"} + ) + assert result["reason"] == "reauth_successful" + assert mock_config_entry.data == { + **USER_INPUT, + CONF_STORAGE_ACCOUNT_KEY: "new_key", + } + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the reconfigure flow works.""" + + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_CONTAINER_NAME: "new_container"} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config_entry.data == { + **USER_INPUT, + CONF_CONTAINER_NAME: "new_container", + } diff --git 
a/tests/components/backup/common.py b/tests/components/backup/common.py index e41da5c1bad..3197cbfadeb 100644 --- a/tests/components/backup/common.py +++ b/tests/components/backup/common.py @@ -2,9 +2,9 @@ from __future__ import annotations -from collections.abc import AsyncIterator, Callable, Coroutine, Iterable +from collections.abc import AsyncIterator, Buffer, Callable, Coroutine, Iterable from pathlib import Path -from typing import Any +from typing import Any, cast from unittest.mock import AsyncMock, Mock, patch from homeassistant.components.backup import ( @@ -16,6 +16,7 @@ from homeassistant.components.backup import ( BackupNotFound, Folder, ) +from homeassistant.components.backup.backup import CoreLocalBackupAgent from homeassistant.components.backup.const import DATA_MANAGER from homeassistant.core import HomeAssistant from homeassistant.helpers.backup import async_initialize_backup @@ -67,15 +68,20 @@ async def aiter_from_iter(iterable: Iterable) -> AsyncIterator: def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mock: """Create a mock backup agent.""" + async def delete_backup(backup_id: str, **kwargs: Any) -> None: + """Mock delete.""" + await get_backup(backup_id) + async def download_backup(backup_id: str, **kwargs: Any) -> AsyncIterator[bytes]: """Mock download.""" - if not await get_backup(backup_id): - raise BackupNotFound return aiter_from_iter((backups_data.get(backup_id, b"backup data"),)) - async def get_backup(backup_id: str, **kwargs: Any) -> AgentBackup | None: + async def get_backup(backup_id: str, **kwargs: Any) -> AgentBackup: """Get a backup.""" - return next((b for b in backups if b.backup_id == backup_id), None) + backup = next((b for b in _backups if b.backup_id == backup_id), None) + if backup is None: + raise BackupNotFound + return backup async def upload_backup( *, @@ -84,22 +90,22 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo **kwargs: Any, ) -> None: """Upload a backup.""" - backups.append(backup) + _backups.append(backup) backup_stream = await open_stream() backup_data = bytearray() async for chunk in backup_stream: backup_data += chunk backups_data[backup.backup_id] = backup_data - backups = backups or [] - backups_data: dict[str, bytes] = {} + _backups = backups or [] + backups_data: dict[str, Buffer] = {} mock_agent = Mock(spec=BackupAgent) mock_agent.domain = TEST_DOMAIN mock_agent.name = name mock_agent.unique_id = name type(mock_agent).agent_id = BackupAgent.agent_id mock_agent.async_delete_backup = AsyncMock( - spec_set=[BackupAgent.async_delete_backup] + side_effect=delete_backup, spec_set=[BackupAgent.async_delete_backup] ) mock_agent.async_download_backup = AsyncMock( side_effect=download_backup, spec_set=[BackupAgent.async_download_backup] @@ -108,7 +114,7 @@ def mock_backup_agent(name: str, backups: list[AgentBackup] | None = None) -> Mo side_effect=get_backup, spec_set=[BackupAgent.async_get_backup] ) mock_agent.async_list_backups = AsyncMock( - return_value=backups, spec_set=[BackupAgent.async_list_backups] + return_value=_backups, spec_set=[BackupAgent.async_list_backups] ) mock_agent.async_upload_backup = AsyncMock( side_effect=upload_backup, @@ -155,11 +161,18 @@ async def setup_backup_integration( if LOCAL_AGENT_ID not in backups or with_hassio: return remote_agents_dict - agent = hass.data[DATA_MANAGER].backup_agents[LOCAL_AGENT_ID] + local_agent = cast( + CoreLocalBackupAgent, hass.data[DATA_MANAGER].backup_agents[LOCAL_AGENT_ID] + ) for backup in 
backups[LOCAL_AGENT_ID]: - await agent.async_upload_backup(open_stream=None, backup=backup) - agent._loaded_backups = True + await local_agent.async_upload_backup( + open_stream=AsyncMock( + side_effect=RuntimeError("Local agent does not open stream") + ), + backup=backup, + ) + local_agent._loaded_backups = True return remote_agents_dict diff --git a/tests/components/backup/conftest.py b/tests/components/backup/conftest.py index eb38399eb79..d391df44475 100644 --- a/tests/components/backup/conftest.py +++ b/tests/components/backup/conftest.py @@ -61,24 +61,49 @@ def path_glob_fixture(hass: HomeAssistant) -> Generator[MagicMock]: CONFIG_DIR = { - "testing_config": [ + "tests/testing_config": [ Path("test.txt"), Path(".DS_Store"), Path(".storage"), + Path("another_subdir"), Path("backups"), Path("tmp_backups"), + Path("tts"), Path("home-assistant_v2.db"), ], - "backups": [ + "/backups": [ Path("backups/backup.tar"), Path("backups/not_backup"), ], - "tmp_backups": [ + "/another_subdir": [ + Path("another_subdir/.DS_Store"), + Path("another_subdir/backups"), + Path("another_subdir/tts"), + ], + "another_subdir/backups": [ + Path("another_subdir/backups/backup.tar"), + Path("another_subdir/backups/not_backup"), + ], + "another_subdir/tts": [ + Path("another_subdir/tts/voice.mp3"), + ], + "/tmp_backups": [ # noqa: S108 Path("tmp_backups/forgotten_backup.tar"), Path("tmp_backups/not_backup"), ], + "/tts": [ + Path("tts/voice.mp3"), + ], +} +CONFIG_DIR_DIRS = { + Path(".storage"), + Path("another_subdir"), + Path("another_subdir/backups"), + Path("another_subdir/tts"), + Path("backups"), + Path("tmp_backups"), + Path("tts"), } -CONFIG_DIR_DIRS = {Path(".storage"), Path("backups"), Path("tmp_backups")} @pytest.fixture(name="create_backup") @@ -105,7 +130,10 @@ def mock_backup_generation_fixture( """Mock backup generator.""" with ( - patch("pathlib.Path.iterdir", lambda x: CONFIG_DIR.get(x.name, [])), + patch( + "pathlib.Path.iterdir", + lambda x: CONFIG_DIR.get(f"{x.parent.name}/{x.name}", []), + ), patch("pathlib.Path.stat", return_value=MagicMock(st_size=123)), patch("pathlib.Path.is_file", lambda x: x not in CONFIG_DIR_DIRS), patch("pathlib.Path.is_dir", lambda x: x in CONFIG_DIR_DIRS), diff --git a/tests/components/backup/snapshots/test_backup.ambr b/tests/components/backup/snapshots/test_backup.ambr index 28ee9b834c1..7cbbb9ddbce 100644 --- a/tests/components/backup/snapshots/test_backup.ambr +++ b/tests/components/backup/snapshots/test_backup.ambr @@ -114,9 +114,9 @@ 'with_automatic_settings': None, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -148,9 +148,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -182,9 +182,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -216,9 +216,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': 
None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -250,9 +250,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', diff --git a/tests/components/backup/snapshots/test_diagnostics.ambr b/tests/components/backup/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..cf412970204 --- /dev/null +++ b/tests/components/backup/snapshots/test_diagnostics.ambr @@ -0,0 +1,39 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'backup_agents': list([ + dict({ + 'agent_id': 'backup.local', + 'name': 'local', + }), + ]), + 'backup_config': dict({ + 'agents': dict({ + }), + 'automatic_backups_configured': False, + 'create_backup': dict({ + 'agent_ids': list([ + ]), + 'include_addons': None, + 'include_all_addons': False, + 'include_database': True, + 'include_folders': None, + 'name': None, + 'password': None, + }), + 'last_attempted_automatic_backup': None, + 'last_completed_automatic_backup': None, + 'retention': dict({ + 'copies': None, + 'days': None, + }), + 'schedule': dict({ + 'days': list([ + ]), + 'recurrence': 'never', + 'state': 'never', + 'time': None, + }), + }), + }) +# --- diff --git a/tests/components/backup/snapshots/test_sensors.ambr b/tests/components/backup/snapshots/test_sensors.ambr new file mode 100644 index 00000000000..924038ef81f --- /dev/null +++ b/tests/components/backup/snapshots/test_sensors.ambr @@ -0,0 +1,160 @@ +# serializer version: 1 +# name: test_sensors[sensor.backup_backup_manager_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'idle', + 'create_backup', + 'blocked', + 'receive_backup', + 'restore_backup', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_backup_manager_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Backup Manager State', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_manager_state', + 'unique_id': 'backup_manager_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_backup_manager_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Backup Backup Manager State', + 'options': list([ + 'idle', + 'create_backup', + 'blocked', + 'receive_backup', + 'restore_backup', + ]), + }), + 'context': , + 'entity_id': 'sensor.backup_backup_manager_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_sensors[sensor.backup_last_successful_automatic_backup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_last_successful_automatic_backup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, 
+ 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last successful automatic backup', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_successful_automatic_backup', + 'unique_id': 'last_successful_automatic_backup', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_last_successful_automatic_backup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Backup Last successful automatic backup', + }), + 'context': , + 'entity_id': 'sensor.backup_last_successful_automatic_backup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.backup_next_scheduled_automatic_backup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.backup_next_scheduled_automatic_backup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Next scheduled automatic backup', + 'platform': 'backup', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_scheduled_automatic_backup', + 'unique_id': 'next_scheduled_automatic_backup', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.backup_next_scheduled_automatic_backup-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Backup Next scheduled automatic backup', + }), + 'context': , + 'entity_id': 'sensor.backup_next_scheduled_automatic_backup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 17e3ca8b176..0bef632f0b4 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -229,6 +229,17 @@ 'type': 'result', }) # --- +# name: test_can_decrypt_on_download_get_backup_returns_none + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Backup abc123 not found in agent test.remote', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- # name: test_can_decrypt_on_download_with_agent_error[BackupAgentError] dict({ 'error': dict({ @@ -3940,9 +3951,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -3970,9 +3981,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4021,9 +4032,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': 
False, 'state': 'idle', @@ -4051,9 +4062,9 @@ }), 'backups': list([ ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4102,9 +4113,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4164,9 +4175,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4210,9 +4221,9 @@ 'with_automatic_settings': None, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4267,9 +4278,9 @@ 'with_automatic_settings': None, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4322,9 +4333,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4384,9 +4395,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4447,9 +4458,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4511,9 +4522,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4573,9 +4584,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4635,9 +4646,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4698,9 +4709,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 
'idle', @@ -4762,9 +4773,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -4930,6 +4941,18 @@ 'type': 'result', }) # --- +# name: test_details_get_backup_returns_none + dict({ + 'id': 1, + 'result': dict({ + 'agent_errors': dict({ + }), + 'backup': None, + }), + 'success': True, + 'type': 'result', + }) +# --- # name: test_details_with_errors[BackupAgentUnreachableError] dict({ 'id': 1, @@ -5327,9 +5350,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5378,9 +5401,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5433,9 +5456,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5511,9 +5534,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5563,9 +5586,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5615,9 +5638,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5667,9 +5690,9 @@ 'with_automatic_settings': True, }), ]), + 'last_action_event': None, 'last_attempted_automatic_backup': None, 'last_completed_automatic_backup': None, - 'last_non_idle_event': None, 'next_automatic_backup': None, 'next_automatic_backup_additional': False, 'state': 'idle', @@ -5728,6 +5751,17 @@ # name: test_restore_remote_agent[remote_agents1-backups1].1 1 # --- +# name: test_restore_remote_agent_get_backup_returns_none + dict({ + 'error': dict({ + 'code': 'home_assistant_error', + 'message': 'Backup abc123 not found in agent test.remote', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- # name: test_restore_wrong_password dict({ 'error': dict({ diff --git a/tests/components/backup/test_diagnostics.py b/tests/components/backup/test_diagnostics.py new file mode 100644 index 00000000000..a66b4a9a2ea --- /dev/null +++ b/tests/components/backup/test_diagnostics.py @@ -0,0 +1,26 @@ +"""Tests the diagnostics for Home Assistant Backup integration.""" + +from syrupy import SnapshotAssertion + +from homeassistant.components.backup.const import DOMAIN +from homeassistant.core import HomeAssistant + +from 
.common import setup_backup_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + await setup_backup_integration(hass, with_hassio=False) + await hass.async_block_till_done(wait_background_tasks=True) + + entry = hass.config_entries.async_entries(DOMAIN)[0] + diag_data = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert diag_data == snapshot diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index a03217beac2..92bf454095e 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -234,6 +234,26 @@ async def test_downloading_backup_not_found( assert resp.status == 404 +async def test_downloading_backup_not_found_get_backup_returns_none( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test downloading a backup file that does not exist.""" + mock_agents = await setup_backup_integration(hass, remote_agents=["test.test"]) + mock_agents["test.test"].async_get_backup.return_value = None + mock_agents["test.test"].async_get_backup.side_effect = None + + client = await hass_client() + + resp = await client.get("/api/backup/download/abc123?agent_id=test.test") + assert resp.status == 404 + assert ( + "Detected that integration 'test' returns None from BackupAgent.async_get_backup." + in caplog.text + ) + + async def test_downloading_as_non_admin( hass: HomeAssistant, hass_client: ClientSessionGenerator, diff --git a/tests/components/backup/test_init.py b/tests/components/backup/test_init.py index 8a0cc2b97c0..10bd2d8b97a 100644 --- a/tests/components/backup/test_init.py +++ b/tests/components/backup/test_init.py @@ -6,11 +6,13 @@ from unittest.mock import patch import pytest from homeassistant.components.backup.const import DATA_MANAGER, DOMAIN +from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceNotFound from .common import setup_backup_integration +from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -141,3 +143,17 @@ async def test_create_automatic_service( ) generate_backup.assert_called_once_with(**expected_kwargs) + + +async def test_setup_entry( + hass: HomeAssistant, +) -> None: + """Test setup backup config entry.""" + await setup_backup_integration(hass, with_hassio=False) + entry = MockConfigEntry(domain=DOMAIN, source=SOURCE_SYSTEM) + entry.add_to_hass(hass) + + with patch("homeassistant.components.backup.PLATFORMS", return_value=[]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 41f98d6fa53..04072dae864 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -68,10 +68,17 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator _EXPECTED_FILES = [ "test.txt", ".storage", + "another_subdir", + "another_subdir/backups", + "another_subdir/backups/backup.tar", + "another_subdir/backups/not_backup", + "another_subdir/tts", + "another_subdir/tts/voice.mp3", "backups", 
"backups/not_backup", "tmp_backups", "tmp_backups/not_backup", + "tts", ] _EXPECTED_FILES_WITH_DATABASE = { True: [*_EXPECTED_FILES, "home-assistant_v2.db"], @@ -538,7 +545,7 @@ async def test_initiate_backup( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -771,7 +778,7 @@ async def test_initiate_backup_with_agent_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -863,7 +870,7 @@ async def test_initiate_backup_with_agent_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": { + "last_action_event": { "manager_state": "create_backup", "reason": "upload_failed", "stage": None, @@ -1153,7 +1160,7 @@ async def test_initiate_backup_non_agent_upload_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -1250,7 +1257,7 @@ async def test_initiate_backup_with_task_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -1346,7 +1353,7 @@ async def test_initiate_backup_file_error_upload_to_agents( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -1470,7 +1477,7 @@ async def test_initiate_backup_file_error_create_backup( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -1967,7 +1974,7 @@ async def test_receive_backup_agent_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -2050,7 +2057,7 @@ async def test_receive_backup_agent_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": { + "last_action_event": { "manager_state": "receive_backup", "reason": None, "stage": None, @@ -2103,7 +2110,7 @@ async def test_receive_backup_non_agent_upload_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -2215,7 +2222,7 @@ async def test_receive_backup_file_write_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + 
"last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -2311,7 +2318,7 @@ async def test_receive_backup_read_tar_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -2476,7 +2483,7 @@ async def test_receive_backup_file_read_error( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -3287,7 +3294,7 @@ async def test_initiate_backup_per_agent_encryption( "agent_errors": {}, "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -3390,7 +3397,7 @@ async def test_initiate_backup_per_agent_encryption( @pytest.mark.parametrize( - ("restore_result", "last_non_idle_event"), + ("restore_result", "last_action_event"), [ ( {"error": None, "error_type": None, "success": True}, @@ -3416,7 +3423,7 @@ async def test_restore_progress_after_restart( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, restore_result: dict[str, Any], - last_non_idle_event: dict[str, Any], + last_action_event: dict[str, Any], ) -> None: """Test restore backup progress after restart.""" @@ -3434,7 +3441,7 @@ async def test_restore_progress_after_restart( "backups": [], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": last_non_idle_event, + "last_action_event": last_action_event, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -3460,7 +3467,7 @@ async def test_restore_progress_after_restart_fail_to_remove( "backups": [], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -3485,20 +3492,20 @@ async def test_manager_blocked_until_home_assistant_started( manager = hass.data[DATA_MANAGER] assert manager.state == BackupManagerState.BLOCKED - assert manager.last_non_idle_event is None + assert manager.last_action_event is None # Fired when Home Assistant changes to starting state hass.bus.async_fire(EVENT_HOMEASSISTANT_START) await hass.async_block_till_done() await hass.async_block_till_done() assert manager.state == BackupManagerState.BLOCKED - assert manager.last_non_idle_event is None + assert manager.last_action_event is None # Fired when Home Assistant changes to running state hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() assert manager.state == BackupManagerState.IDLE - assert manager.last_non_idle_event is None + assert manager.last_action_event is None async def test_manager_not_blocked_after_restore( @@ -3523,7 +3530,7 @@ async def test_manager_not_blocked_after_restore( "backups": [], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": { + "last_action_event": { "manager_state": "restore_backup", "reason": None, "stage": None, diff --git a/tests/components/backup/test_sensors.py 
b/tests/components/backup/test_sensors.py new file mode 100644 index 00000000000..bee61887ea5 --- /dev/null +++ b/tests/components/backup/test_sensors.py @@ -0,0 +1,119 @@ +"""Tests for the sensors of the Backup integration.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.backup import store +from homeassistant.components.backup.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import setup_backup_integration + +from tests.common import async_fire_time_changed, snapshot_platform +from tests.typing import WebSocketGenerator + + +@pytest.mark.usefixtures("mock_backup_generation") +async def test_sensors( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test setup of backup sensors.""" + with patch("homeassistant.components.backup.PLATFORMS", [Platform.SENSOR]): + await setup_backup_integration(hass, with_hassio=False) + await hass.async_block_till_done(wait_background_tasks=True) + + entry = hass.config_entries.async_entries(DOMAIN)[0] + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + # start backup and check sensor states again + client = await hass_ws_client(hass) + await hass.async_block_till_done() + await client.send_json_auto_id( + {"type": "backup/generate", "agent_ids": ["backup.local"]} + ) + + assert await client.receive_json() + state = hass.states.get("sensor.backup_backup_manager_state") + assert state.state == "create_backup" + + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("sensor.backup_backup_manager_state") + assert state.state == "idle" + + +async def test_sensor_updates( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, + hass_storage: dict[str, Any], + create_backup: AsyncMock, +) -> None: + """Test update of backup sensors.""" + # Ensure created backup is already protected, + # to avoid manager creating a new EncryptedBackupStreamer + # instead of using the already mocked stream writer. 
+ created_backup: MagicMock = create_backup.return_value[1].result().backup + created_backup.protected = True + + await hass.config.async_set_time_zone("Europe/Amsterdam") + freezer.move_to("2024-11-12T12:00:00+01:00") + storage_data = { + "backups": [], + "config": { + "agents": {}, + "automatic_backups_configured": True, + "create_backup": { + "agent_ids": ["test.remote"], + "include_addons": [], + "include_all_addons": False, + "include_database": True, + "include_folders": [], + "name": "test-name", + "password": "test-password", + }, + "retention": {"copies": None, "days": None}, + "last_attempted_automatic_backup": "2024-11-11T04:45:00+01:00", + "last_completed_automatic_backup": "2024-11-11T04:45:00+01:00", + "schedule": { + "days": [], + "recurrence": "daily", + "state": "never", + "time": "06:00", + }, + }, + } + hass_storage[DOMAIN] = { + "data": storage_data, + "key": DOMAIN, + "version": store.STORAGE_VERSION, + "minor_version": store.STORAGE_VERSION_MINOR, + } + + with patch("homeassistant.components.backup.PLATFORMS", [Platform.SENSOR]): + await setup_backup_integration( + hass, with_hassio=False, remote_agents=["test.remote"] + ) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("sensor.backup_last_successful_automatic_backup") + assert state.state == "2024-11-11T03:45:00+00:00" + state = hass.states.get("sensor.backup_next_scheduled_automatic_backup") + assert state.state == "2024-11-13T05:00:00+00:00" + + freezer.move_to("2024-11-13T12:00:00+01:00") + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("sensor.backup_last_successful_automatic_backup") + assert state.state == "2024-11-13T11:00:00+00:00" + state = hass.states.get("sensor.backup_next_scheduled_automatic_backup") + assert state.state == "2024-11-14T05:00:00+00:00" diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 404ba52de4b..d89e68f4ed8 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -234,6 +234,31 @@ async def test_details_with_errors( assert await client.receive_json() == snapshot +async def test_details_get_backup_returns_none( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test getting backup info when the agent returns None from get_backup.""" + mock_agents = await setup_backup_integration(hass, remote_agents=["test.remote"]) + mock_agents["test.remote"].async_get_backup.return_value = None + mock_agents["test.remote"].async_get_backup.side_effect = None + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch("pathlib.Path.exists", return_value=True): + await client.send_json_auto_id( + {"type": "backup/details", "backup_id": "abc123"} + ) + assert await client.receive_json() == snapshot + assert ( + "Detected that integration 'test' returns None from BackupAgent.async_get_backup." 
+ in caplog.text + ) + + @pytest.mark.parametrize( ("remote_agents", "backups"), [ @@ -724,6 +749,36 @@ async def test_restore_remote_agent( assert len(restart_calls) == snapshot +async def test_restore_remote_agent_get_backup_returns_none( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test calling the restore command when the agent returns None from get_backup.""" + mock_agents = await setup_backup_integration(hass, remote_agents=["test.remote"]) + mock_agents["test.remote"].async_get_backup.return_value = None + mock_agents["test.remote"].async_get_backup.side_effect = None + restart_calls = async_mock_service(hass, "homeassistant", "restart") + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + await client.send_json_auto_id( + { + "type": "backup/restore", + "backup_id": "abc123", + "agent_id": "test.remote", + } + ) + assert await client.receive_json() == snapshot + assert len(restart_calls) == 0 + assert ( + "Detected that integration 'test' returns None from BackupAgent.async_get_backup." + in caplog.text + ) + + async def test_restore_wrong_password( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -3543,3 +3598,32 @@ async def test_can_decrypt_on_download_with_agent_error( } ) assert await client.receive_json() == snapshot + + +@pytest.mark.usefixtures("mock_backups") +async def test_can_decrypt_on_download_get_backup_returns_none( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test can decrypt on download when the agent returns None from get_backup.""" + + mock_agents = await setup_backup_integration(hass, remote_agents=["test.remote"]) + mock_agents["test.remote"].async_get_backup.return_value = None + mock_agents["test.remote"].async_get_backup.side_effect = None + + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + "type": "backup/can_decrypt_on_download", + "backup_id": TEST_BACKUP_ABC123.backup_id, + "agent_id": "test.remote", + "password": "hunter2", + } + ) + assert await client.receive_json() == snapshot + assert ( + "Detected that integration 'test' returns None from BackupAgent.async_get_backup." 
+ in caplog.text + ) diff --git a/tests/components/balboa/conftest.py b/tests/components/balboa/conftest.py index 90f8fdc3d6e..18639b0c9be 100644 --- a/tests/components/balboa/conftest.py +++ b/tests/components/balboa/conftest.py @@ -68,4 +68,6 @@ def client_fixture() -> Generator[MagicMock]: client.pumps = [] client.temperature_range.state = LowHighRange.LOW + client.fault = None + yield client diff --git a/tests/components/balboa/snapshots/test_event.ambr b/tests/components/balboa/snapshots/test_event.ambr new file mode 100644 index 00000000000..fc8f591a9fc --- /dev/null +++ b/tests/components/balboa/snapshots/test_event.ambr @@ -0,0 +1,90 @@ +# serializer version: 1 +# name: test_events[event.fakespa_fault-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'clock_failed', + 'flow_failed', + 'gfci_test_failed', + 'heater_dry', + 'heater_may_be_dry', + 'heater_too_hot', + 'hot_fault', + 'low_flow', + 'memory_failure', + 'priming_mode', + 'pump_stuck', + 'sensor_a_fault', + 'sensor_b_fault', + 'sensor_out_of_sync', + 'service_sensor_sync', + 'settings_reset', + 'standby_mode', + 'water_too_hot', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.fakespa_fault', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fault', + 'platform': 'balboa', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fault', + 'unique_id': 'FakeSpa-fault-c0ffee', + 'unit_of_measurement': None, + }) +# --- +# name: test_events[event.fakespa_fault-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 'clock_failed', + 'flow_failed', + 'gfci_test_failed', + 'heater_dry', + 'heater_may_be_dry', + 'heater_too_hot', + 'hot_fault', + 'low_flow', + 'memory_failure', + 'priming_mode', + 'pump_stuck', + 'sensor_a_fault', + 'sensor_b_fault', + 'sensor_out_of_sync', + 'service_sensor_sync', + 'settings_reset', + 'standby_mode', + 'water_too_hot', + ]), + 'friendly_name': 'FakeSpa Fault', + }), + 'context': , + 'entity_id': 'event.fakespa_fault', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/balboa/test_climate.py b/tests/components/balboa/test_climate.py index 850184a7d71..9c23833518e 100644 --- a/tests/components/balboa/test_climate.py +++ b/tests/components/balboa/test_climate.py @@ -26,10 +26,11 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.const import ATTR_TEMPERATURE, Platform, UnitOfTemperature +from homeassistant.const import ATTR_TEMPERATURE, Platform from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from . 
import client_update, init_integration @@ -97,11 +98,10 @@ async def test_spa_temperature_unit( hass: HomeAssistant, client: MagicMock, integration: MockConfigEntry ) -> None: """Test temperature unit conversions.""" - with patch.object( - hass.config.units, "temperature_unit", UnitOfTemperature.FAHRENHEIT - ): - state = await _patch_spa_settemp(hass, client, 0, 15.4) - assert state.attributes.get(ATTR_TEMPERATURE) == 15.0 + hass.config.units = US_CUSTOMARY_SYSTEM + + state = await _patch_spa_settemp(hass, client, 0, 15.4) + assert state.attributes.get(ATTR_TEMPERATURE) == 15.0 async def test_spa_hvac_modes( diff --git a/tests/components/balboa/test_event.py b/tests/components/balboa/test_event.py new file mode 100644 index 00000000000..04f25f6cfa0 --- /dev/null +++ b/tests/components/balboa/test_event.py @@ -0,0 +1,82 @@ +"""Tests of the events of the balboa integration.""" + +from __future__ import annotations + +from datetime import datetime +from unittest.mock import MagicMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.event import ATTR_EVENT_TYPE +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import init_integration + +from tests.common import snapshot_platform + +ENTITY_EVENT = "event.fakespa_fault" +FAULT_DATE = "fault_date" + + +async def test_events( + hass: HomeAssistant, + client: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test spa events.""" + with patch("homeassistant.components.balboa.PLATFORMS", [Platform.EVENT]): + entry = await init_integration(hass) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_event(hass: HomeAssistant, client: MagicMock) -> None: + """Test spa fault event.""" + await init_integration(hass) + + # check the state is unknown + state = hass.states.get(ENTITY_EVENT) + assert state.state == STATE_UNKNOWN + + # set a fault + client.fault = MagicMock( + fault_datetime=datetime(2025, 2, 15, 13, 0), message_code=16 + ) + client.emit("") + await hass.async_block_till_done() + + # check new state is what we expect + state = hass.states.get(ENTITY_EVENT) + assert state.attributes[ATTR_EVENT_TYPE] == "low_flow" + assert state.attributes[FAULT_DATE] == "2025-02-15T13:00:00" + assert state.attributes["code"] == 16 + + # set fault to None + client.fault = None + client.emit("") + await hass.async_block_till_done() + + # validate state remains unchanged + state = hass.states.get(ENTITY_EVENT) + assert state.attributes[ATTR_EVENT_TYPE] == "low_flow" + assert state.attributes[FAULT_DATE] == "2025-02-15T13:00:00" + assert state.attributes["code"] == 16 + + # set fault to an unknown one + client.fault = MagicMock( + fault_datetime=datetime(2025, 2, 15, 14, 0), message_code=-1 + ) + # validate a ValueError is raises + with pytest.raises(ValueError): + client.emit("") + await hass.async_block_till_done() + + # validate state remains unchanged + state = hass.states.get(ENTITY_EVENT) + assert state.attributes[ATTR_EVENT_TYPE] == "low_flow" + assert state.attributes[FAULT_DATE] == "2025-02-15T13:00:00" + assert state.attributes["code"] == 16 diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py index 717c9f61850..63597ed0532 100644 --- a/tests/components/bluesound/conftest.py +++ b/tests/components/bluesound/conftest.py @@ -102,8 +102,8 @@ class PlayerMockData: ) 
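+ # Presumably pyblu's Preset id is an int rather than a str, so the fixture below
+ # now passes 1 and 2 directly; the select-source test later in this diff asserts
+ # load_preset is called with the integer 1.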
player.presets = AsyncMock( return_value=[ - Preset("preset1", "1", "url1", "image1", None), - Preset("preset2", "2", "url2", "image2", None), + Preset("preset1", 1, "url1", "image1", None), + Preset("preset2", 2, "url2", "image2", None), ] ) diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py index ed537d0bc57..dcff33399f5 100644 --- a/tests/components/bluesound/test_media_player.py +++ b/tests/components/bluesound/test_media_player.py @@ -17,12 +17,14 @@ from homeassistant.components.bluesound.media_player import ( SERVICE_SET_TIMER, ) from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, ATTR_MEDIA_VOLUME_LEVEL, DOMAIN as MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PAUSE, SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_SELECT_SOURCE, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, @@ -119,6 +121,32 @@ async def test_volume_down( player_mocks.player_data.player.volume.assert_called_once_with(level=9) +async def test_select_input_source( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player select input source.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: "media_player.player_name1111", ATTR_INPUT_SOURCE: "input1"}, + ) + + player_mocks.player_data.player.play_url.assert_called_once_with("url1") + + +async def test_select_preset_source( + hass: HomeAssistant, setup_config_entry: None, player_mocks: PlayerMocks +) -> None: + """Test the media player select preset source.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: "media_player.player_name1111", ATTR_INPUT_SOURCE: "preset1"}, + ) + + player_mocks.player_data.player.load_preset.assert_called_once_with(1) + + async def test_attributes_set( hass: HomeAssistant, setup_config_entry: None, diff --git a/tests/components/bluetooth/test_config_flow.py b/tests/components/bluetooth/test_config_flow.py index f0136396c22..45d177de132 100644 --- a/tests/components/bluetooth/test_config_flow.py +++ b/tests/components/bluetooth/test_config_flow.py @@ -608,3 +608,40 @@ async def test_async_step_integration_discovery_remote_adapter( await hass.async_block_till_done() cancel_scanner() await hass.async_block_till_done() + + +@pytest.mark.usefixtures("enable_bluetooth") +async def test_async_step_integration_discovery_remote_adapter_mac_fix( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + area_registry: ar.AreaRegistry, +) -> None: + """Test remote adapter corrects mac address via integration discovery.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + bluetooth_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_SOURCE: "AA:BB:CC:DD:EE:FF", + CONF_SOURCE_DOMAIN: "test", + CONF_SOURCE_MODEL: "test", + CONF_SOURCE_CONFIG_ENTRY_ID: entry.entry_id, + CONF_SOURCE_DEVICE_ID: None, + }, + ) + bluetooth_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, + data={ + CONF_SOURCE: "AA:AA:AA:AA:AA:AA", + CONF_SOURCE_DOMAIN: "test", + CONF_SOURCE_MODEL: "test", + CONF_SOURCE_CONFIG_ENTRY_ID: entry.entry_id, + CONF_SOURCE_DEVICE_ID: None, + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + assert bluetooth_entry.unique_id == "AA:AA:AA:AA:AA:AA" + assert 
bluetooth_entry.data[CONF_SOURCE] == "AA:AA:AA:AA:AA:AA" diff --git a/tests/components/bluetooth/test_init.py b/tests/components/bluetooth/test_init.py index 2c8c9e70e7f..de299c58b93 100644 --- a/tests/components/bluetooth/test_init.py +++ b/tests/components/bluetooth/test_init.py @@ -3300,3 +3300,52 @@ async def test_cleanup_orphened_remote_scanner_config_entry( assert not hass.config_entries.async_entry_for_domain_unique_id( "bluetooth", scanner.source ) + + +@pytest.mark.usefixtures("enable_bluetooth") +async def test_fix_incorrect_mac_remote_scanner_config_entry( + hass: HomeAssistant, +) -> None: + """Test the remote scanner config entries can replace a incorrect mac.""" + source_entry = MockConfigEntry(domain="test") + source_entry.add_to_hass(hass) + connector = ( + HaBluetoothConnector(MockBleakClient, "mock_bleak_client", lambda: False), + ) + scanner = FakeRemoteScanner("AA:BB:CC:DD:EE:FF", "esp32", connector, True) + assert scanner.source == "AA:BB:CC:DD:EE:FF" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_SOURCE: scanner.source, + CONF_SOURCE_DOMAIN: "test", + CONF_SOURCE_MODEL: "test", + CONF_SOURCE_CONFIG_ENTRY_ID: source_entry.entry_id, + }, + unique_id=scanner.source, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert hass.config_entries.async_entry_for_domain_unique_id( + "bluetooth", scanner.source + ) + await hass.config_entries.async_unload(entry.entry_id) + + new_scanner = FakeRemoteScanner("AA:BB:CC:DD:EE:AA", "esp32", connector, True) + assert new_scanner.source == "AA:BB:CC:DD:EE:AA" + hass.config_entries.async_update_entry( + entry, + data={**entry.data, CONF_SOURCE: new_scanner.source}, + unique_id=new_scanner.source, + ) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert hass.config_entries.async_entry_for_domain_unique_id( + "bluetooth", new_scanner.source + ) + # Incorrect connection should be removed + assert not hass.config_entries.async_entry_for_domain_unique_id( + "bluetooth", scanner.source + ) diff --git a/tests/components/bluetooth/test_manager.py b/tests/components/bluetooth/test_manager.py index be23a536f49..48d1a38375d 100644 --- a/tests/components/bluetooth/test_manager.py +++ b/tests/components/bluetooth/test_manager.py @@ -1019,8 +1019,6 @@ async def test_goes_unavailable_dismisses_discovery_and_makes_discoverable( def clear_all_devices(self) -> None: """Clear all devices.""" - self._discovered_device_advertisement_datas.clear() - self._discovered_device_timestamps.clear() self._previous_service_info.clear() connector = ( @@ -1446,8 +1444,6 @@ async def test_bluetooth_rediscover( def clear_all_devices(self) -> None: """Clear all devices.""" - self._discovered_device_advertisement_datas.clear() - self._discovered_device_timestamps.clear() self._previous_service_info.clear() connector = ( @@ -1625,8 +1621,6 @@ async def test_bluetooth_rediscover_no_match( def clear_all_devices(self) -> None: """Clear all devices.""" - self._discovered_device_advertisement_datas.clear() - self._discovered_device_timestamps.clear() self._previous_service_info.clear() connector = ( diff --git a/tests/components/bosch_alarm/__init__.py b/tests/components/bosch_alarm/__init__.py new file mode 100644 index 00000000000..2b2d94cf1e5 --- /dev/null +++ b/tests/components/bosch_alarm/__init__.py @@ -0,0 +1,22 @@ +"""Tests for the Bosch Alarm component.""" + +from unittest.mock import AsyncMock + +from homeassistant.core import 
HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +async def call_observable(hass: HomeAssistant, observable: AsyncMock) -> None: + """Call the observable with the given event.""" + for callback in observable.attach.call_args_list: + callback[0][0]() + await hass.async_block_till_done() diff --git a/tests/components/bosch_alarm/conftest.py b/tests/components/bosch_alarm/conftest.py new file mode 100644 index 00000000000..45ec0072a37 --- /dev/null +++ b/tests/components/bosch_alarm/conftest.py @@ -0,0 +1,131 @@ +"""Define fixtures for Bosch Alarm tests.""" + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, patch + +from bosch_alarm_mode2.panel import Area +from bosch_alarm_mode2.utils import Observable +import pytest + +from homeassistant.components.bosch_alarm.const import ( + CONF_INSTALLER_CODE, + CONF_USER_CODE, + DOMAIN, +) +from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_PASSWORD, CONF_PORT + +from tests.common import MockConfigEntry + + +@pytest.fixture( + params=[ + "solution_3000", + "amax_3000", + "b5512", + ] +) +def model(request: pytest.FixtureRequest) -> Generator[str]: + """Return every device.""" + return request.param + + +@pytest.fixture +def extra_config_entry_data( + model: str, model_name: str, config_flow_data: dict[str, Any] +) -> dict[str, Any]: + """Return extra config entry data.""" + return {CONF_MODEL: model_name} | config_flow_data + + +@pytest.fixture +def config_flow_data(model: str) -> dict[str, Any]: + """Return extra config entry data.""" + if model == "solution_3000": + return {CONF_USER_CODE: "1234"} + if model == "amax_3000": + return {CONF_INSTALLER_CODE: "1234", CONF_PASSWORD: "1234567890"} + if model == "b5512": + return {CONF_PASSWORD: "1234567890"} + pytest.fail("Invalid model") + + +@pytest.fixture +def model_name(model: str) -> str | None: + """Return extra config entry data.""" + return { + "solution_3000": "Solution 3000", + "amax_3000": "AMAX 3000", + "b5512": "B5512 (US1B)", + }.get(model) + + +@pytest.fixture +def serial_number(model: str) -> str | None: + """Return extra config entry data.""" + if model == "solution_3000": + return "1234567890" + return None + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.bosch_alarm.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def area() -> Generator[Area]: + """Define a mocked area.""" + mock = AsyncMock(spec=Area) + mock.name = "Area1" + mock.status_observer = AsyncMock(spec=Observable) + mock.is_triggered.return_value = False + mock.is_disarmed.return_value = True + mock.is_arming.return_value = False + mock.is_pending.return_value = False + mock.is_part_armed.return_value = False + mock.is_all_armed.return_value = False + return mock + + +@pytest.fixture +def mock_panel( + area: AsyncMock, model_name: str, serial_number: str | None +) -> Generator[AsyncMock]: + """Define a fixture to set up Bosch Alarm.""" + with ( + patch( + "homeassistant.components.bosch_alarm.Panel", autospec=True + ) as mock_panel, + patch("homeassistant.components.bosch_alarm.config_flow.Panel", new=mock_panel), + ): + 
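+ # The same mock class is patched into both the integration module and its
+ # config_flow module (new=mock_panel), so the config flow tests and the runtime
+ # setup exercise one shared Panel double instead of two independent mocks.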
client = mock_panel.return_value + client.areas = {1: area} + client.model = model_name + client.firmware_version = "1.0.0" + client.serial_number = serial_number + client.connection_status_observer = AsyncMock(spec=Observable) + yield client + + +@pytest.fixture +def mock_config_entry( + extra_config_entry_data: dict[str, Any], serial_number: str | None +) -> MockConfigEntry: + """Mock config entry for bosch alarm.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=serial_number, + entry_id="01JQ917ACKQ33HHM7YCFXYZX51", + data={ + CONF_HOST: "0.0.0.0", + CONF_PORT: 7700, + CONF_MODEL: "bosch_alarm_test_data.model", + } + | extra_config_entry_data, + ) diff --git a/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr b/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr new file mode 100644 index 00000000000..76568cef56c --- /dev/null +++ b/tests/components/bosch_alarm/snapshots/test_alarm_control_panel.ambr @@ -0,0 +1,154 @@ +# serializer version: 1 +# name: test_alarm_control_panel[amax_3000][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01JQ917ACKQ33HHM7YCFXYZX51_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[amax_3000][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- +# name: test_alarm_control_panel[b5512][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01JQ917ACKQ33HHM7YCFXYZX51_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[b5512][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- +# name: 
test_alarm_control_panel[solution_3000][alarm_control_panel.area1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.area1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bosch_alarm', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1234567890_area_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[solution_3000][alarm_control_panel.area1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': False, + 'code_format': None, + 'friendly_name': 'Area1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.area1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'disarmed', + }) +# --- diff --git a/tests/components/bosch_alarm/test_alarm_control_panel.py b/tests/components/bosch_alarm/test_alarm_control_panel.py new file mode 100644 index 00000000000..31d2f928ec5 --- /dev/null +++ b/tests/components/bosch_alarm/test_alarm_control_panel.py @@ -0,0 +1,145 @@ +"""Tests for Bosch Alarm component.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, + AlarmControlPanelState, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_ALARM_ARM_AWAY, + SERVICE_ALARM_ARM_HOME, + SERVICE_ALARM_DISARM, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import call_observable, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +async def platforms() -> AsyncGenerator[None]: + """Return the platforms to be loaded for this test.""" + with patch( + "homeassistant.components.bosch_alarm.PLATFORMS", [Platform.ALARM_CONTROL_PANEL] + ): + yield + + +async def test_update_alarm_device( + hass: HomeAssistant, + mock_panel: AsyncMock, + area: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that alarm panel state changes after arming the panel.""" + await setup_integration(hass, mock_config_entry) + entity_id = "alarm_control_panel.area1" + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + + area.is_arming.return_value = True + area.is_disarmed.return_value = False + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_AWAY, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING + + area.is_arming.return_value = False + area.is_all_armed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_AWAY + + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + area.is_all_armed.return_value = False + area.is_disarmed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_ARM_HOME, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + area.is_disarmed.return_value = False + area.is_arming.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMING + + area.is_arming.return_value = False + area.is_part_armed.return_value = True + + await call_observable(hass, area.status_observer) + + assert hass.states.get(entity_id).state == AlarmControlPanelState.ARMED_HOME + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + SERVICE_ALARM_DISARM, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + area.is_part_armed.return_value = False + area.is_disarmed.return_value = True + + await call_observable(hass, area.status_observer) + assert hass.states.get(entity_id).state == AlarmControlPanelState.DISARMED + + +async def test_alarm_control_panel( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the alarm_control_panel state.""" + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_alarm_control_panel_availability( + hass: HomeAssistant, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the alarm_control_panel availability.""" + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("alarm_control_panel.area1").state + == AlarmControlPanelState.DISARMED + ) + + mock_panel.connection_status.return_value = False + + await call_observable(hass, mock_panel.connection_status_observer) + + assert hass.states.get("alarm_control_panel.area1").state == 
STATE_UNAVAILABLE diff --git a/tests/components/bosch_alarm/test_config_flow.py b/tests/components/bosch_alarm/test_config_flow.py new file mode 100644 index 00000000000..066b3008821 --- /dev/null +++ b/tests/components/bosch_alarm/test_config_flow.py @@ -0,0 +1,212 @@ +"""Tests for the bosch_alarm config flow.""" + +import asyncio +from typing import Any +from unittest.mock import AsyncMock + +import pytest + +from homeassistant import config_entries +from homeassistant.components.bosch_alarm.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_user( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + model_name: str, + serial_number: str, + config_flow_data: dict[str, Any], +) -> None: + """Test the config flow for bosch_alarm.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + config_flow_data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"Bosch {model_name}" + assert ( + result["data"] + == { + CONF_HOST: "1.1.1.1", + CONF_PORT: 7700, + CONF_MODEL: model_name, + } + | config_flow_data + ) + assert result["result"].unique_id == serial_number + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "message"), + [ + (asyncio.TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], + exception: Exception, + message: str, +) -> None: + """Test we handle exceptions correctly.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + mock_panel.connect.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": message} + + mock_panel.connect.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + config_flow_data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("exception", "message"), + [ + (PermissionError, "invalid_auth"), + (asyncio.TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_exceptions_user( + hass: HomeAssistant, + 
mock_setup_entry: AsyncMock, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], + exception: Exception, + message: str, +) -> None: + """Test we handle exceptions correctly.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "1.1.1.1", CONF_PORT: 7700}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + mock_panel.connect.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {"base": message} + + mock_panel.connect.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize("model", ["solution_3000", "amax_3000"]) +async def test_entry_already_configured_host( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], +) -> None: + """Test if configuring an entity twice results in an error.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "0.0.0.0"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize("model", ["b5512"]) +async def test_entry_already_configured_serial( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_panel: AsyncMock, + config_flow_data: dict[str, Any], +) -> None: + """Test if configuring an entity twice results in an error.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: "0.0.0.0"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + assert result["errors"] == {} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], config_flow_data + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/bosch_alarm/test_init.py b/tests/components/bosch_alarm/test_init.py new file mode 100644 index 00000000000..0497a91eadf --- /dev/null +++ b/tests/components/bosch_alarm/test_init.py @@ -0,0 +1,33 @@ +"""Tests for bosch alarm integration init.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +def disable_platform_only(): + """Disable platforms to speed up tests.""" + with patch("homeassistant.components.bosch_alarm.PLATFORMS", []): + yield + + +@pytest.mark.parametrize("model", ["solution_3000"]) +@pytest.mark.parametrize("exception", [PermissionError(), TimeoutError()]) +async def test_incorrect_auth( + hass: HomeAssistant, + mock_panel: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, +) -> None: + """Test errors with incorrect auth.""" + mock_panel.connect.side_effect = exception + await setup_integration(hass, mock_config_entry) + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/bring/snapshots/test_diagnostics.ambr b/tests/components/bring/snapshots/test_diagnostics.ambr index 951c3d3f808..3f4c8f5f339 100644 --- a/tests/components/bring/snapshots/test_diagnostics.ambr +++ b/tests/components/bring/snapshots/test_diagnostics.ambr @@ -128,27 +128,29 @@ }), 'lst': dict({ 'listUuid': 'b4776778-7f6c-496e-951b-92a35d3db0dd', - 'name': 'Baumarkt', + 'name': '**REDACTED**', 'theme': 'ch.publisheria.bring.theme.home', }), 'users': dict({ 'users': list([ dict({ 'country': 'DE', - 'email': 'test-email', + 'email': '**REDACTED**', 'language': 'de', - 'name': 'Bring', + 'name': '**REDACTED**', 'photoPath': '', + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '9a21fdfc-63a4-441a-afc1-ef3030605a9d', 'pushEnabled': True, }), dict({ 'country': 'US', - 'email': 'EMAIL', + 'email': '**REDACTED**', 'language': 'en', - 'name': 'NAME', + 'name': '**REDACTED**', 'photoPath': '', + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '73af455f-c158-4004-a5e0-79f4f8a6d4bd', 'pushEnabled': True, @@ -159,6 +161,7 @@ 'language': 'en', 'name': None, 'photoPath': None, + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '7d5e9d08-877a-4c36-8740-a9bf74ec690a', 'pushEnabled': True, @@ -292,27 +295,29 @@ }), 'lst': dict({ 'listUuid': 'e542eef6-dba7-4c31-a52c-29e6ab9d83a5', - 'name': 'Einkauf', + 'name': '**REDACTED**', 'theme': 'ch.publisheria.bring.theme.home', }), 'users': dict({ 'users': list([ dict({ 'country': 'DE', - 'email': 'test-email', + 'email': '**REDACTED**', 'language': 'de', - 'name': 'Bring', + 'name': '**REDACTED**', 'photoPath': '', + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '9a21fdfc-63a4-441a-afc1-ef3030605a9d', 'pushEnabled': True, }), dict({ 'country': 'US', - 'email': 'EMAIL', + 'email': '**REDACTED**', 'language': 'en', - 'name': 'NAME', + 'name': '**REDACTED**', 'photoPath': '', + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '73af455f-c158-4004-a5e0-79f4f8a6d4bd', 'pushEnabled': True, @@ -323,6 +328,7 @@ 'language': 'en', 'name': None, 'photoPath': None, + 'plusExpiry': None, 'plusTryOut': False, 'publicUuid': '7d5e9d08-877a-4c36-8740-a9bf74ec690a', 'pushEnabled': True, diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 2dcf007c6d4..e02230892bf 100644 --- a/tests/components/cast/test_config_flow.py +++ b/tests/components/cast/test_config_flow.py @@ -87,7 +87,7 @@ async def test_user_setup_options(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"known_hosts": "192.168.0.1, , 192.168.0.2 "} + result["flow_id"], {"known_hosts": ["192.168.0.1", "", " ", "192.168.0.2 "]} ) users = await hass.auth.async_get_users() @@ 
-152,13 +152,13 @@ def get_suggested(schema, key): @pytest.mark.parametrize( - "parameter_data", + ("parameter", "initial", "suggested", "user_input", "updated"), [ ( "known_hosts", ["192.168.0.10", "192.168.0.11"], - "192.168.0.10,192.168.0.11", - "192.168.0.1, , 192.168.0.2 ", + ["192.168.0.10", "192.168.0.11"], + ["192.168.0.1", " ", " 192.168.0.2 "], ["192.168.0.1", "192.168.0.2"], ), ( @@ -177,11 +177,17 @@ def get_suggested(schema, key): ), ], ) -async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: +async def test_option_flow( + hass: HomeAssistant, + parameter: str, + initial: list[str], + suggested: str | list[str], + user_input: str | list[str], + updated: list[str], +) -> None: """Test config flow options.""" basic_parameters = ["known_hosts"] advanced_parameters = ["ignore_cec", "uuid"] - parameter, initial, suggested, user_input, updated = parameter_data data = { "ignore_cec": [], @@ -213,7 +219,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: for other_param in basic_parameters: if other_param == parameter: continue - assert get_suggested(data_schema, other_param) == "" + assert get_suggested(data_schema, other_param) == [] if parameter in basic_parameters: assert get_suggested(data_schema, parameter) == suggested @@ -261,7 +267,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: result = await hass.config_entries.options.async_init(config_entry.entry_id) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={"known_hosts": ""}, + user_input={}, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == {} @@ -277,7 +283,7 @@ async def test_known_hosts(hass: HomeAssistant, castbrowser_mock) -> None: "cast", context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"known_hosts": "192.168.0.1, 192.168.0.2"} + result["flow_id"], {"known_hosts": ["192.168.0.1", "192.168.0.2"]} ) assert result["type"] is FlowResultType.CREATE_ENTRY await hass.async_block_till_done(wait_background_tasks=True) @@ -290,7 +296,7 @@ async def test_known_hosts(hass: HomeAssistant, castbrowser_mock) -> None: result = await hass.config_entries.options.async_init(config_entry.entry_id) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={"known_hosts": "192.168.0.11, 192.168.0.12"}, + user_input={"known_hosts": ["192.168.0.11", "192.168.0.12"]}, ) await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/cast/test_media_player.py b/tests/components/cast/test_media_player.py index b2ce60e9393..668ed985154 100644 --- a/tests/components/cast/test_media_player.py +++ b/tests/components/cast/test_media_player.py @@ -1909,6 +1909,7 @@ async def test_group_media_control( ) +@pytest.mark.usefixtures("mock_tts_cache_dir") async def test_failed_cast_on_idle( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -1939,6 +1940,7 @@ async def test_failed_cast_on_idle( assert "Failed to cast media http://example.com:8123/tts.mp3." in caplog.text +@pytest.mark.usefixtures("mock_tts_cache_dir") async def test_failed_cast_other_url( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -1963,6 +1965,7 @@ async def test_failed_cast_other_url( assert "Failed to cast media http://example.com:8123/tts.mp3." 
in caplog.text +@pytest.mark.usefixtures("mock_tts_cache_dir") async def test_failed_cast_internal_url( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -1992,6 +1995,7 @@ async def test_failed_cast_internal_url( ) +@pytest.mark.usefixtures("mock_tts_cache_dir") async def test_failed_cast_external_url( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/components/cloud/test_backup.py b/tests/components/cloud/test_backup.py index 5220d3eccd5..8399e69ab09 100644 --- a/tests/components/cloud/test_backup.py +++ b/tests/components/cloud/test_backup.py @@ -5,9 +5,9 @@ from io import StringIO from typing import Any from unittest.mock import ANY, Mock, PropertyMock, patch -from aiohttp import ClientError +from aiohttp import ClientError, ClientResponseError from hass_nabucasa import CloudError -from hass_nabucasa.api import CloudApiNonRetryableError +from hass_nabucasa.api import CloudApiError, CloudApiNonRetryableError from hass_nabucasa.files import FilesError, StorageType import pytest @@ -208,7 +208,7 @@ async def test_agents_list_backups_fail_cloud( "backups": [], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", @@ -547,6 +547,120 @@ async def test_agents_upload_not_protected( assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload_not_subscribed( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_storage: dict[str, Any], + cloud: Mock, +) -> None: + """Test upload backup when cloud user is not subscribed.""" + cloud.subscription_expired = True + client = await hass_client() + backup_data = "test" + backup_id = "test-backup" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + extra_metadata={}, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=len(backup_data), + ) + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO(backup_data)}, + ) + await hass.async_block_till_done() + + assert resp.status == 201 + assert cloud.files.upload.call_count == 0 + store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] + assert len(store_backups) == 1 + stored_backup = store_backups[0] + assert stored_backup["backup_id"] == backup_id + assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] + + +@pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") +async def test_agents_upload_not_subscribed_midway( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_storage: dict[str, Any], + cloud: Mock, +) -> None: + """Test upload backup when cloud subscription expires during the call.""" + client = await hass_client() + backup_data = "test" + backup_id = "test-backup" + test_backup = 
AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + extra_metadata={}, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=True, + size=len(backup_data), + ) + + async def mock_upload(*args: Any, **kwargs: Any) -> None: + """Mock file upload.""" + cloud.subscription_expired = True + raise CloudApiError( + "Boom!", orig_exc=ClientResponseError(Mock(), Mock(), status=403) + ) + + cloud.files.upload.side_effect = mock_upload + + with ( + patch( + "homeassistant.components.backup.manager.BackupManager.async_get_backup", + ) as fetch_backup, + patch( + "homeassistant.components.backup.manager.read_backup", + return_value=test_backup, + ), + patch("pathlib.Path.open") as mocked_open, + ): + mocked_open.return_value.read = Mock(side_effect=[backup_data.encode(), b""]) + fetch_backup.return_value = test_backup + resp = await client.post( + "/api/backup/upload?agent_id=cloud.cloud", + data={"file": StringIO(backup_data)}, + ) + await hass.async_block_till_done() + + assert resp.status == 201 + assert cloud.files.upload.call_count == 1 + store_backups = hass_storage[BACKUP_DOMAIN]["data"]["backups"] + assert len(store_backups) == 1 + stored_backup = store_backups[0] + assert stored_backup["backup_id"] == backup_id + assert stored_backup["failed_agent_ids"] == ["cloud.cloud"] + + @pytest.mark.usefixtures("cloud_logged_in", "mock_list_files") async def test_agents_upload_wrong_size( hass: HomeAssistant, diff --git a/tests/components/comelit/__init__.py b/tests/components/comelit/__init__.py index 916a684de4b..6475f500f01 100644 --- a/tests/components/comelit/__init__.py +++ b/tests/components/comelit/__init__.py @@ -1 +1,13 @@ """Tests for the Comelit SimpleHome integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/comelit/conftest.py b/tests/components/comelit/conftest.py new file mode 100644 index 00000000000..d2d450ccb8d --- /dev/null +++ b/tests/components/comelit/conftest.py @@ -0,0 +1,104 @@ +"""Configure tests for Comelit SimpleHome.""" + +import pytest + +from homeassistant.components.comelit.const import ( + BRIDGE, + DOMAIN as COMELIT_DOMAIN, + VEDO, +) +from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE + +from .const import ( + BRIDGE_DEVICE_QUERY, + BRIDGE_HOST, + BRIDGE_PIN, + BRIDGE_PORT, + VEDO_DEVICE_QUERY, + VEDO_HOST, + VEDO_PIN, + VEDO_PORT, +) + +from tests.common import AsyncMock, Generator, MockConfigEntry, patch + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.comelit.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_serial_bridge() -> Generator[AsyncMock]: + """Mock a Comelit serial bridge.""" + with ( + patch( + "homeassistant.components.comelit.coordinator.ComeliteSerialBridgeApi", + autospec=True, + ) as mock_comelit_serial_bridge, + patch( + "homeassistant.components.comelit.config_flow.ComeliteSerialBridgeApi", + 
new=mock_comelit_serial_bridge, + ), + ): + bridge = mock_comelit_serial_bridge.return_value + bridge.get_all_devices.return_value = BRIDGE_DEVICE_QUERY + bridge.host = BRIDGE_HOST + bridge.port = BRIDGE_PORT + bridge.pin = BRIDGE_PIN + yield bridge + + +@pytest.fixture +def mock_serial_bridge_config_entry() -> Generator[MockConfigEntry]: + """Mock a Comelit config entry for Comelit bridge.""" + return MockConfigEntry( + domain=COMELIT_DOMAIN, + data={ + CONF_HOST: BRIDGE_HOST, + CONF_PORT: BRIDGE_PORT, + CONF_PIN: BRIDGE_PIN, + CONF_TYPE: BRIDGE, + }, + ) + + +@pytest.fixture +def mock_vedo() -> Generator[AsyncMock]: + """Mock a Comelit vedo.""" + with ( + patch( + "homeassistant.components.comelit.coordinator.ComelitVedoApi", + autospec=True, + ) as mock_comelit_vedo, + patch( + "homeassistant.components.comelit.config_flow.ComelitVedoApi", + new=mock_comelit_vedo, + ), + ): + vedo = mock_comelit_vedo.return_value + vedo.get_all_areas_and_zones.return_value = VEDO_DEVICE_QUERY + vedo.host = VEDO_HOST + vedo.port = VEDO_PORT + vedo.pin = VEDO_PIN + vedo.type = VEDO + yield vedo + + +@pytest.fixture +def mock_vedo_config_entry() -> Generator[MockConfigEntry]: + """Mock a Comelit config entry for Comelit vedo.""" + return MockConfigEntry( + domain=COMELIT_DOMAIN, + data={ + CONF_HOST: VEDO_HOST, + CONF_PORT: VEDO_PORT, + CONF_PIN: VEDO_PIN, + CONF_TYPE: VEDO, + }, + ) diff --git a/tests/components/comelit/const.py b/tests/components/comelit/const.py index 92fdfebfa1d..f353ec97628 100644 --- a/tests/components/comelit/const.py +++ b/tests/components/comelit/const.py @@ -1,7 +1,11 @@ """Common stuff for Comelit SimpleHome tests.""" -from aiocomelit import ComelitVedoAreaObject, ComelitVedoZoneObject -from aiocomelit.api import ComelitSerialBridgeObject +from aiocomelit.api import ( + AlarmDataObject, + ComelitSerialBridgeObject, + ComelitVedoAreaObject, + ComelitVedoZoneObject, +) from aiocomelit.const import ( CLIMATE, COVER, @@ -9,37 +13,20 @@ from aiocomelit.const import ( LIGHT, OTHER, SCENARIO, - VEDO, WATT, AlarmAreaState, AlarmZoneState, ) -from homeassistant.components.comelit.const import DOMAIN -from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE +BRIDGE_HOST = "fake_bridge_host" +BRIDGE_PORT = 80 +BRIDGE_PIN = 1234 -MOCK_CONFIG = { - DOMAIN: { - CONF_DEVICES: [ - { - CONF_HOST: "fake_host", - CONF_PORT: 80, - CONF_PIN: 1234, - }, - { - CONF_HOST: "fake_vedo_host", - CONF_PORT: 8080, - CONF_PIN: 1234, - CONF_TYPE: VEDO, - }, - ] - } -} +VEDO_HOST = "fake_vedo_host" +VEDO_PORT = 8080 +VEDO_PIN = 5678 -MOCK_USER_BRIDGE_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][0] -MOCK_USER_VEDO_DATA = MOCK_CONFIG[DOMAIN][CONF_DEVICES][1] - -FAKE_PIN = 5678 +FAKE_PIN = 0000 BRIDGE_DEVICE_QUERY = { CLIMATE: {}, @@ -76,8 +63,8 @@ BRIDGE_DEVICE_QUERY = { SCENARIO: {}, } -VEDO_DEVICE_QUERY = { - "aree": { +VEDO_DEVICE_QUERY = AlarmDataObject( + alarm_areas={ 0: ComelitVedoAreaObject( index=0, name="Area0", @@ -94,7 +81,7 @@ VEDO_DEVICE_QUERY = { human_status=AlarmAreaState.UNKNOWN, ) }, - "zone": { + alarm_zones={ 0: ComelitVedoZoneObject( index=0, name="Zone0", @@ -103,4 +90,4 @@ VEDO_DEVICE_QUERY = { human_status=AlarmZoneState.REST, ) }, -} +) diff --git a/tests/components/comelit/snapshots/test_diagnostics.ambr b/tests/components/comelit/snapshots/test_diagnostics.ambr index 877f48a4611..c4544f38f52 100644 --- a/tests/components/comelit/snapshots/test_diagnostics.ambr +++ b/tests/components/comelit/snapshots/test_diagnostics.ambr @@ -57,9 +57,10 @@ }), 'entry': 
dict({ 'data': dict({ - 'host': 'fake_host', + 'host': 'fake_bridge_host', 'pin': '**REDACTED**', 'port': 80, + 'type': 'Serial bridge', }), 'disabled_by': None, 'discovery_keys': dict({ @@ -85,7 +86,7 @@ 'device_info': dict({ 'devices': list([ dict({ - 'aree': list([ + 'alarm_areas': list([ dict({ '0': dict({ 'alarm': False, @@ -105,7 +106,7 @@ ]), }), dict({ - 'zone': list([ + 'alarm_zones': list([ dict({ '0': dict({ 'human_status': 'rest', diff --git a/tests/components/comelit/test_config_flow.py b/tests/components/comelit/test_config_flow.py index eeaea0e41e9..dd1d1fb3836 100644 --- a/tests/components/comelit/test_config_flow.py +++ b/tests/components/comelit/test_config_flow.py @@ -1,59 +1,93 @@ """Tests for Comelit SimpleHome config flow.""" -from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock from aiocomelit import CannotAuthenticate, CannotConnect +from aiocomelit.const import BRIDGE, VEDO import pytest from homeassistant.components.comelit.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT, CONF_TYPE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import FAKE_PIN, MOCK_USER_BRIDGE_DATA, MOCK_USER_VEDO_DATA +from .const import ( + BRIDGE_HOST, + BRIDGE_PIN, + BRIDGE_PORT, + FAKE_PIN, + VEDO_HOST, + VEDO_PIN, + VEDO_PORT, +) from tests.common import MockConfigEntry -@pytest.mark.parametrize( - ("class_api", "user_input"), - [ - ("ComeliteSerialBridgeApi", MOCK_USER_BRIDGE_DATA), - ("ComelitVedoApi", MOCK_USER_VEDO_DATA), - ], -) -async def test_full_flow( - hass: HomeAssistant, class_api: str, user_input: dict[str, Any] +async def test_flow_serial_bridge( + hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, ) -> None: """Test starting a flow by user.""" - with ( - patch( - f"aiocomelit.api.{class_api}.login", - ), - patch( - f"aiocomelit.api.{class_api}.logout", - ), - patch("homeassistant.components.comelit.async_setup_entry") as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=user_input - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_HOST] == user_input[CONF_HOST] - assert result["data"][CONF_PORT] == user_input[CONF_PORT] - assert result["data"][CONF_PIN] == user_input[CONF_PIN] - assert not result["result"].unique_id - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - assert mock_setup_entry.called + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: BRIDGE_HOST, + CONF_PORT: BRIDGE_PORT, + CONF_PIN: BRIDGE_PIN, + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_HOST: BRIDGE_HOST, + CONF_PORT: BRIDGE_PORT, + CONF_PIN: BRIDGE_PIN, + CONF_TYPE: BRIDGE, + } + assert not result["result"].unique_id + await hass.async_block_till_done() + + +async def test_flow_vedo( + hass: HomeAssistant, + mock_vedo: AsyncMock, + 
mock_vedo_config_entry: MockConfigEntry, +) -> None: + """Test starting a flow by user.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: VEDO_HOST, + CONF_PORT: VEDO_PORT, + CONF_PIN: VEDO_PIN, + CONF_TYPE: VEDO, + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_HOST: VEDO_HOST, + CONF_PORT: VEDO_PORT, + CONF_PIN: VEDO_PIN, + CONF_TYPE: VEDO, + } + assert not result["result"].unique_id + await hass.async_block_till_done() @pytest.mark.parametrize( @@ -64,7 +98,13 @@ async def test_full_flow( (ConnectionResetError, "unknown"), ], ) -async def test_exception_connection(hass: HomeAssistant, side_effect, error) -> None: +async def test_exception_connection( + hass: HomeAssistant, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, + side_effect, + error, +) -> None: """Test starting a flow by user with a connection error.""" result = await hass.config_entries.flow.async_init( @@ -73,59 +113,65 @@ async def test_exception_connection(hass: HomeAssistant, side_effect, error) -> assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - with ( - patch( - "aiocomelit.api.ComeliteSerialBridgeApi.login", - side_effect=side_effect, - ), - patch( - "aiocomelit.api.ComeliteSerialBridgeApi.logout", - ), - patch( - "homeassistant.components.comelit.async_setup_entry", - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input=MOCK_USER_BRIDGE_DATA - ) + mock_vedo.login.side_effect = side_effect - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] is not None - assert result["errors"]["base"] == error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: VEDO_HOST, + CONF_PORT: VEDO_PORT, + CONF_PIN: VEDO_PIN, + CONF_TYPE: VEDO, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": error} + + mock_vedo.login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: VEDO_HOST, + CONF_PORT: VEDO_PORT, + CONF_PIN: VEDO_PIN, + CONF_TYPE: VEDO, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == VEDO_HOST + assert result["data"] == { + CONF_HOST: VEDO_HOST, + CONF_PORT: VEDO_PORT, + CONF_PIN: VEDO_PIN, + CONF_TYPE: VEDO, + } -async def test_reauth_successful(hass: HomeAssistant) -> None: +async def test_reauth_successful( + hass: HomeAssistant, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, +) -> None: """Test starting a reauthentication flow.""" - mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + mock_vedo_config_entry.add_to_hass(hass) + result = await mock_vedo_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - with ( - patch( - "aiocomelit.api.ComeliteSerialBridgeApi.login", - ), - patch( - "aiocomelit.api.ComeliteSerialBridgeApi.logout", - ), - patch("homeassistant.components.comelit.async_setup_entry"), - 
patch("requests.get") as mock_request_get, - ): - mock_request_get.return_value.status_code = 200 + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PIN: FAKE_PIN, + }, + ) - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PIN: FAKE_PIN, - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" @pytest.mark.parametrize( @@ -136,30 +182,40 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: (ConnectionResetError, "unknown"), ], ) -async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> None: +async def test_reauth_not_successful( + hass: HomeAssistant, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, + side_effect: Exception, + error: str, +) -> None: """Test starting a reauthentication flow but no connection found.""" - - mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) - mock_config.add_to_hass(hass) - result = await mock_config.start_reauth_flow(hass) + mock_vedo_config_entry.add_to_hass(hass) + result = await mock_vedo_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - with ( - patch("aiocomelit.api.ComeliteSerialBridgeApi.login", side_effect=side_effect), - patch( - "aiocomelit.api.ComeliteSerialBridgeApi.logout", - ), - patch("homeassistant.components.comelit.async_setup_entry"), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PIN: FAKE_PIN, - }, - ) + mock_vedo.login.side_effect = side_effect + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PIN: FAKE_PIN, + }, + ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] is not None - assert result["errors"]["base"] == error + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {"base": error} + + mock_vedo.login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PIN: VEDO_PIN, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_vedo_config_entry.data[CONF_PIN] == VEDO_PIN diff --git a/tests/components/comelit/test_coordinator.py b/tests/components/comelit/test_coordinator.py new file mode 100644 index 00000000000..a8ef82a7e89 --- /dev/null +++ b/tests/components/comelit/test_coordinator.py @@ -0,0 +1,49 @@ +"""Tests for Comelit SimpleHome coordinator.""" + +from unittest.mock import AsyncMock + +from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.comelit.const import SCAN_INTERVAL +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.parametrize( + "side_effect", + [ + CannotConnect, + CannotRetrieveData, + CannotAuthenticate, + ], +) +async def test_coordinator_data_update_fails( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, + side_effect: Exception, +) -> None: + """Test coordinator data update exceptions.""" + + entity_id = "light.light0" + + await setup_integration(hass, mock_serial_bridge_config_entry) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF + + mock_serial_bridge.login.side_effect = side_effect + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/comelit/test_diagnostics.py b/tests/components/comelit/test_diagnostics.py index 39d75af1152..cabcd0f4cac 100644 --- a/tests/components/comelit/test_diagnostics.py +++ b/tests/components/comelit/test_diagnostics.py @@ -2,21 +2,14 @@ from __future__ import annotations -from unittest.mock import patch +from unittest.mock import AsyncMock from syrupy import SnapshotAssertion from syrupy.filters import props -from homeassistant.components.comelit.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from .const import ( - BRIDGE_DEVICE_QUERY, - MOCK_USER_BRIDGE_DATA, - MOCK_USER_VEDO_DATA, - VEDO_DEVICE_QUERY, -) +from . import setup_integration from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -25,25 +18,17 @@ from tests.typing import ClientSessionGenerator async def test_entry_diagnostics_bridge( hass: HomeAssistant, + mock_serial_bridge: AsyncMock, + mock_serial_bridge_config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, ) -> None: """Test Bridge config entry diagnostics.""" - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) - entry.add_to_hass(hass) + await setup_integration(hass, mock_serial_bridge_config_entry) - with ( - patch("aiocomelit.api.ComeliteSerialBridgeApi.login"), - patch( - "aiocomelit.api.ComeliteSerialBridgeApi.get_all_devices", - return_value=BRIDGE_DEVICE_QUERY, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.state == ConfigEntryState.LOADED - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_serial_bridge_config_entry + ) == snapshot( exclude=props( "entry_id", "created_at", @@ -54,25 +39,17 @@ async def test_entry_diagnostics_bridge( async def test_entry_diagnostics_vedo( hass: HomeAssistant, + mock_vedo: AsyncMock, + mock_vedo_config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, ) -> None: """Test Vedo System config entry diagnostics.""" - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_VEDO_DATA) - entry.add_to_hass(hass) + await setup_integration(hass, mock_vedo_config_entry) - with ( - patch("aiocomelit.api.ComelitVedoApi.login"), - patch( - "aiocomelit.api.ComelitVedoApi.get_all_areas_and_zones", - return_value=VEDO_DEVICE_QUERY, - ), - ): - await 
hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.state == ConfigEntryState.LOADED - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_vedo_config_entry + ) == snapshot( exclude=props( "entry_id", "created_at", diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 739b79e22bd..ce10a36c42c 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -1193,7 +1193,7 @@ async def test_subentry_reconfigure_flow(hass: HomeAssistant, client) -> None: async def async_step_reconfigure(self, user_input=None): if user_input is not None: return self.async_update_and_abort( - self._get_reconfigure_entry(), + self._get_entry(), self._get_reconfigure_subentry(), title="Test Entry", data={"test": "blah"}, diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 6d6d0d4641f..e0db306cae9 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -929,7 +929,7 @@ async def check_translations( ignored_domains = set(ignore_translations_for_mock_domains) # Set all ignored translation keys to "unused" - translation_errors = {k: "unused" for k in ignore_missing_translations} + translation_errors = dict.fromkeys(ignore_missing_translations, "unused") translation_coros = set() diff --git a/tests/components/conversation/__init__.py b/tests/components/conversation/__init__.py index 314188dbd82..eeab8b6b9af 100644 --- a/tests/components/conversation/__init__.py +++ b/tests/components/conversation/__init__.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import AsyncGenerator from dataclasses import dataclass, field from typing import Literal from unittest.mock import patch @@ -49,7 +50,7 @@ class MockAgent(conversation.AbstractConversationAgent): @pytest.fixture -async def mock_chat_log(hass: HomeAssistant) -> MockChatLog: +async def mock_chat_log(hass: HomeAssistant) -> AsyncGenerator[MockChatLog]: """Return mock chat logs.""" # pylint: disable-next=contextmanager-generator-missing-cleanup with ( diff --git a/tests/components/conversation/snapshots/test_chat_log.ambr b/tests/components/conversation/snapshots/test_chat_log.ambr index 1ddbf68bb84..ff8ebf724cd 100644 --- a/tests/components/conversation/snapshots/test_chat_log.ambr +++ b/tests/components/conversation/snapshots/test_chat_log.ambr @@ -151,6 +151,7 @@ # --- # name: test_template_error dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -171,6 +172,7 @@ # --- # name: test_unknown_llm_api dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index c2b16ea2912..02e4ef1befe 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -1,6 +1,7 @@ # serializer version: 1 # name: test_custom_sentences dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -26,6 +27,7 @@ # --- # name: test_custom_sentences.1 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -51,6 +53,7 @@ # --- # name: test_custom_sentences_config 
dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -76,6 +79,7 @@ # --- # name: test_intent_alias_added_removed dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -106,6 +110,7 @@ # --- # name: test_intent_alias_added_removed.1 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -136,6 +141,7 @@ # --- # name: test_intent_alias_added_removed.2 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -156,6 +162,7 @@ # --- # name: test_intent_conversion_not_expose_new dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -176,6 +183,7 @@ # --- # name: test_intent_conversion_not_expose_new.1 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -206,6 +214,7 @@ # --- # name: test_intent_entity_added_removed dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -236,6 +245,7 @@ # --- # name: test_intent_entity_added_removed.1 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -266,6 +276,7 @@ # --- # name: test_intent_entity_added_removed.2 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -296,6 +307,7 @@ # --- # name: test_intent_entity_added_removed.3 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -316,6 +328,7 @@ # --- # name: test_intent_entity_exposed dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -346,6 +359,7 @@ # --- # name: test_intent_entity_fail_if_unexposed dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -366,6 +380,7 @@ # --- # name: test_intent_entity_remove_custom_name dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -386,6 +401,7 @@ # --- # name: test_intent_entity_remove_custom_name.1 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -416,6 +432,7 @@ # --- # name: test_intent_entity_remove_custom_name.2 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -436,6 +453,7 @@ # --- # name: test_intent_entity_renamed dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -466,6 +484,7 @@ # --- # name: test_intent_entity_renamed.1 dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index c6ac6c2df9c..abce735dd8a 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -32,6 +32,7 @@ 'it', 'ka', 'ko', + 'kw', 'lb', 'lt', 'lv', @@ -202,6 +203,7 @@ # --- # name: test_http_api_handle_failure dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -222,6 +224,7 @@ # --- # name: test_http_api_no_match dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -242,6 +245,7 @@ # --- # name: test_http_api_unexpected_failure dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ 
-262,6 +266,7 @@ # --- # name: test_http_processing_intent[None] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -292,6 +297,7 @@ # --- # name: test_http_processing_intent[conversation.home_assistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -322,6 +328,7 @@ # --- # name: test_http_processing_intent[homeassistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -352,6 +359,7 @@ # --- # name: test_ws_api[payload0] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -372,6 +380,7 @@ # --- # name: test_ws_api[payload1] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -392,6 +401,7 @@ # --- # name: test_ws_api[payload2] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -412,6 +422,7 @@ # --- # name: test_ws_api[payload3] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -432,6 +443,7 @@ # --- # name: test_ws_api[payload4] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -452,6 +464,7 @@ # --- # name: test_ws_api[payload5] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ diff --git a/tests/components/conversation/snapshots/test_init.ambr b/tests/components/conversation/snapshots/test_init.ambr index 911c7043a6d..3d843d4e32a 100644 --- a/tests/components/conversation/snapshots/test_init.ambr +++ b/tests/components/conversation/snapshots/test_init.ambr @@ -1,6 +1,7 @@ # serializer version: 1 # name: test_custom_agent dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -44,6 +45,7 @@ # --- # name: test_turn_on_intent[None-turn kitchen on-None] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -74,6 +76,7 @@ # --- # name: test_turn_on_intent[None-turn kitchen on-conversation.home_assistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -104,6 +107,7 @@ # --- # name: test_turn_on_intent[None-turn kitchen on-homeassistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -134,6 +138,7 @@ # --- # name: test_turn_on_intent[None-turn on kitchen-None] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -164,6 +169,7 @@ # --- # name: test_turn_on_intent[None-turn on kitchen-conversation.home_assistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -194,6 +200,7 @@ # --- # name: test_turn_on_intent[None-turn on kitchen-homeassistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -224,6 +231,7 @@ # --- # name: test_turn_on_intent[my_new_conversation-turn kitchen on-None] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -254,6 +262,7 @@ # --- # name: test_turn_on_intent[my_new_conversation-turn kitchen on-conversation.home_assistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -284,6 +293,7 @@ # --- # name: test_turn_on_intent[my_new_conversation-turn kitchen on-homeassistant] dict({ + 'continue_conversation': False, 
'conversation_id': , 'response': dict({ 'card': dict({ @@ -314,6 +324,7 @@ # --- # name: test_turn_on_intent[my_new_conversation-turn on kitchen-None] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -344,6 +355,7 @@ # --- # name: test_turn_on_intent[my_new_conversation-turn on kitchen-conversation.home_assistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ @@ -374,6 +386,7 @@ # --- # name: test_turn_on_intent[my_new_conversation-turn on kitchen-homeassistant] dict({ + 'continue_conversation': False, 'conversation_id': , 'response': dict({ 'card': dict({ diff --git a/tests/components/conversation/test_chat_log.py b/tests/components/conversation/test_chat_log.py index c0687ebecfb..d7b3531c658 100644 --- a/tests/components/conversation/test_chat_log.py +++ b/tests/components/conversation/test_chat_log.py @@ -14,6 +14,7 @@ from homeassistant.components.conversation import ( ConversationInput, ConverseError, ToolResultContent, + UserContent, async_get_chat_log, ) from homeassistant.components.conversation.chat_log import DATA_CHAT_LOGS @@ -590,7 +591,7 @@ async def test_add_delta_content_stream_errors( async_get_chat_log(hass, session, mock_conversation_input) as chat_log, ): # Stream content without LLM API set - with pytest.raises(ValueError): # noqa: PT012 + with pytest.raises(ValueError): async for _tool_result_content in chat_log.async_add_delta_content_stream( "mock-agent-id", stream( @@ -612,7 +613,7 @@ async def test_add_delta_content_stream_errors( # Non assistant role for role in "system", "user": - with pytest.raises(ValueError): # noqa: PT012 + with pytest.raises(ValueError): async for ( _tool_result_content ) in chat_log.async_add_delta_content_stream( @@ -643,3 +644,30 @@ async def test_chat_log_reuse( assert len(chat_log.content) == 2 assert chat_log.content[1].role == "user" assert chat_log.content[1].content == mock_conversation_input.text + + +async def test_chat_log_continue_conversation( + hass: HomeAssistant, + mock_conversation_input: ConversationInput, +) -> None: + """Test continue conversation.""" + with ( + chat_session.async_get_chat_session(hass) as session, + async_get_chat_log(hass, session) as chat_log, + ): + assert chat_log.continue_conversation is False + chat_log.async_add_user_content(UserContent(mock_conversation_input.text)) + assert chat_log.continue_conversation is False + chat_log.async_add_assistant_content_without_tools( + AssistantContent( + agent_id="mock-agent-id", + content="Hey? 
", + ) + ) + chat_log.async_add_assistant_content_without_tools( + AssistantContent( + agent_id="mock-agent-id", + content="Ποιο είναι το αγαπημένο σου χρώμα στα ελληνικά;", + ) + ) + assert chat_log.continue_conversation is True diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py index 6d69ec3c739..77fa97ad845 100644 --- a/tests/components/conversation/test_http.py +++ b/tests/components/conversation/test_http.py @@ -536,3 +536,60 @@ async def test_ws_hass_agent_debug_sentence_trigger( # Trigger should not have been executed assert len(calls) == 0 + + +async def test_ws_hass_language_scores( + hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator +) -> None: + """Test getting language support scores.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + {"type": "conversation/agent/homeassistant/language_scores"} + ) + + msg = await client.receive_json() + assert msg["success"] + + # Sanity check + result = msg["result"] + assert result["languages"]["en-US"] == { + "cloud": 3, + "focused_local": 2, + "full_local": 3, + } + + +async def test_ws_hass_language_scores_with_filter( + hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator +) -> None: + """Test getting language support scores with language/country filter.""" + client = await hass_ws_client(hass) + + # Language filter + await client.send_json_auto_id( + {"type": "conversation/agent/homeassistant/language_scores", "language": "de"} + ) + + msg = await client.receive_json() + assert msg["success"] + + # German should be preferred + result = msg["result"] + assert result["preferred_language"] == "de-DE" + + # Language/country filter + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/language_scores", + "language": "en", + "country": "GB", + } + ) + + msg = await client.receive_json() + assert msg["success"] + + # GB English should be preferred + result = msg["result"] + assert result["preferred_language"] == "en-GB" diff --git a/tests/components/conversation/test_trigger.py b/tests/components/conversation/test_trigger.py index 3aa8ae2939f..a01f4cd8112 100644 --- a/tests/components/conversation/test_trigger.py +++ b/tests/components/conversation/test_trigger.py @@ -104,6 +104,7 @@ async def test_response(hass: HomeAssistant) -> None: "trigger": { "platform": "conversation", "command": ["Open the pod bay door Hal"], + "variables": {"name": "Dr. 
David Bowman"}, }, "action": { "set_conversation_response": response, diff --git a/tests/components/conversation/test_util.py b/tests/components/conversation/test_util.py deleted file mode 100644 index 72a334232c1..00000000000 --- a/tests/components/conversation/test_util.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Test the conversation utils.""" - -from homeassistant.components.conversation.util import create_matcher - - -def test_create_matcher() -> None: - """Test the create matcher method.""" - # Basic sentence - pattern = create_matcher("Hello world") - assert pattern.match("Hello world") is not None - - # Match a part - pattern = create_matcher("Hello {name}") - match = pattern.match("hello world") - assert match is not None - assert match.groupdict()["name"] == "world" - no_match = pattern.match("Hello world, how are you?") - assert no_match is None - - # Optional and matching part - pattern = create_matcher("Turn on [the] {name}") - match = pattern.match("turn on the kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn off kitchen lights") - assert match is None - - # Two different optional parts, 1 matching part - pattern = create_matcher("Turn on [the] [a] {name}") - match = pattern.match("turn on the kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on kitchen lights") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn on a kitchen light") - assert match is not None - assert match.groupdict()["name"] == "kitchen light" - - # Strip plural - pattern = create_matcher("Turn {name}[s] on") - match = pattern.match("turn kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen light" - - # Optional 2 words - pattern = create_matcher("Turn [the great] {name} on") - match = pattern.match("turn the great kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" - match = pattern.match("turn kitchen lights on") - assert match is not None - assert match.groupdict()["name"] == "kitchen lights" diff --git a/tests/components/cover/test_reproduce_state.py b/tests/components/cover/test_reproduce_state.py index 4aad27011fa..57fc5aed5e9 100644 --- a/tests/components/cover/test_reproduce_state.py +++ b/tests/components/cover/test_reproduce_state.py @@ -7,9 +7,11 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, + CoverEntityFeature, CoverState, ) from homeassistant.const import ( + ATTR_SUPPORTED_FEATURES, SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -27,35 +29,213 @@ async def test_reproducing_states( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test reproducing Cover states.""" - hass.states.async_set("cover.entity_close", CoverState.CLOSED, {}) + hass.states.async_set( + "cover.entity_close", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN, + }, + ) + hass.states.async_set( + "cover.closed_only_supports_close_open", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN, + }, + ) + hass.states.async_set( + "cover.open_only_supports_close_open", + CoverState.OPEN, + { + 
ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN, + }, + ) + hass.states.async_set( + "cover.open_missing_all_features", + CoverState.OPEN, + ) + hass.states.async_set( + "cover.closed_missing_all_features_has_position", + CoverState.CLOSED, + { + ATTR_CURRENT_POSITION: 0, + }, + ) + hass.states.async_set( + "cover.open_missing_all_features_has_tilt_position", + CoverState.OPEN, + { + ATTR_CURRENT_TILT_POSITION: 50, + }, + ) + hass.states.async_set( + "cover.closed_only_supports_tilt_close_open", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT, + }, + ) + hass.states.async_set( + "cover.open_only_supports_tilt_close_open", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT, + }, + ) + hass.states.async_set( + "cover.closed_only_supports_position", + CoverState.CLOSED, + { + ATTR_CURRENT_POSITION: 0, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION, + }, + ) + hass.states.async_set( + "cover.open_only_supports_position", + CoverState.OPEN, + {ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION}, + ) hass.states.async_set( "cover.entity_close_attr", CoverState.CLOSED, - {ATTR_CURRENT_POSITION: 0, ATTR_CURRENT_TILT_POSITION: 0}, + { + ATTR_CURRENT_POSITION: 0, + ATTR_CURRENT_TILT_POSITION: 0, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, ) hass.states.async_set( - "cover.entity_close_tilt", CoverState.CLOSED, {ATTR_CURRENT_TILT_POSITION: 50} + "cover.entity_close_tilt", + CoverState.CLOSED, + { + ATTR_CURRENT_TILT_POSITION: 50, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_TILT_POSITION, + }, ) - hass.states.async_set("cover.entity_open", CoverState.OPEN, {}) hass.states.async_set( - "cover.entity_slightly_open", CoverState.OPEN, {ATTR_CURRENT_POSITION: 50} + "cover.entity_open", + CoverState.OPEN, + {ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE | CoverEntityFeature.OPEN}, + ) + hass.states.async_set( + "cover.entity_slightly_open", + CoverState.OPEN, + { + ATTR_CURRENT_POSITION: 50, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, ) hass.states.async_set( "cover.entity_open_attr", CoverState.OPEN, - {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 0}, + { + ATTR_CURRENT_POSITION: 100, + ATTR_CURRENT_TILT_POSITION: 0, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, ) hass.states.async_set( "cover.entity_open_tilt", CoverState.OPEN, - {ATTR_CURRENT_POSITION: 50, ATTR_CURRENT_TILT_POSITION: 50}, + { + ATTR_CURRENT_POSITION: 50, + ATTR_CURRENT_TILT_POSITION: 50, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, ) hass.states.async_set( "cover.entity_entirely_open", CoverState.OPEN, - {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, + { + ATTR_CURRENT_POSITION: 100, + ATTR_CURRENT_TILT_POSITION: 100, + 
ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN, + }, + ) + hass.states.async_set( + "cover.tilt_only_open", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT, + }, + ) + hass.states.async_set( + "cover.tilt_only_closed", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT, + }, + ) + hass.states.async_set( + "cover.tilt_only_tilt_position_100", + CoverState.OPEN, + { + ATTR_CURRENT_TILT_POSITION: 100, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_TILT_POSITION, + }, + ) + hass.states.async_set( + "cover.tilt_only_tilt_position_0", + CoverState.CLOSED, + { + ATTR_CURRENT_TILT_POSITION: 0, + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.SET_TILT_POSITION, + }, + ) + hass.states.async_set( + "cover.tilt_open_only_supports_tilt_position", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_TILT_POSITION, + }, + ) + hass.states.async_set( + "cover.tilt_partial_open_only_supports_tilt_position", + CoverState.OPEN, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_TILT_POSITION, + ATTR_CURRENT_TILT_POSITION: 50, + }, + ) + hass.states.async_set( + "cover.tilt_closed_only_supports_tilt_position", + CoverState.CLOSED, + { + ATTR_SUPPORTED_FEATURES: CoverEntityFeature.SET_TILT_POSITION, + }, ) - close_calls = async_mock_service(hass, "cover", SERVICE_CLOSE_COVER) open_calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) close_tilt_calls = async_mock_service(hass, "cover", SERVICE_CLOSE_COVER_TILT) @@ -70,6 +250,31 @@ async def test_reproducing_states( hass, [ State("cover.entity_close", CoverState.CLOSED), + State("cover.closed_only_supports_close_open", CoverState.CLOSED), + State("cover.closed_only_supports_tilt_close_open", CoverState.CLOSED), + State("cover.open_only_supports_close_open", CoverState.OPEN), + State("cover.open_only_supports_tilt_close_open", CoverState.OPEN), + State("cover.open_missing_all_features", CoverState.OPEN), + State( + "cover.closed_missing_all_features_has_position", + CoverState.CLOSED, + { + ATTR_CURRENT_POSITION: 0, + }, + ), + State( + "cover.open_missing_all_features_has_tilt_position", + CoverState.OPEN, + { + ATTR_CURRENT_TILT_POSITION: 50, + }, + ), + State( + "cover.closed_only_supports_position", + CoverState.CLOSED, + {ATTR_CURRENT_POSITION: 0}, + ), + State("cover.open_only_supports_position", CoverState.OPEN), State( "cover.entity_close_attr", CoverState.CLOSED, @@ -101,6 +306,39 @@ async def test_reproducing_states( CoverState.OPEN, {ATTR_CURRENT_POSITION: 100, ATTR_CURRENT_TILT_POSITION: 100}, ), + State( + "cover.tilt_only_open", + CoverState.OPEN, + {}, + ), + State( + "cover.tilt_only_tilt_position_100", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 100}, + ), + State( + "cover.tilt_only_closed", + CoverState.CLOSED, + {}, + ), + State( + "cover.tilt_only_tilt_position_0", + CoverState.CLOSED, + {ATTR_CURRENT_TILT_POSITION: 0}, + ), + State( + "cover.tilt_partial_open_only_supports_tilt_position", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 50}, + ), + State( + "cover.tilt_open_only_supports_tilt_position", + CoverState.OPEN, + ), + State( + 
"cover.tilt_closed_only_supports_tilt_position", + CoverState.CLOSED, + ), ], ) @@ -127,6 +365,35 @@ async def test_reproducing_states( hass, [ State("cover.entity_close", CoverState.OPEN), + State( + "cover.closed_only_supports_close_open", + CoverState.OPEN, + {ATTR_CURRENT_POSITION: 100}, + ), + State( + "cover.open_only_supports_close_open", + CoverState.CLOSED, + {ATTR_CURRENT_POSITION: 50}, + ), + State( + "cover.open_only_supports_tilt_close_open", + CoverState.CLOSED, + {ATTR_CURRENT_TILT_POSITION: 50}, + ), + State("cover.closed_only_supports_tilt_close_open", CoverState.OPEN), + State("cover.open_missing_all_features", CoverState.CLOSED), + State( + "cover.closed_missing_all_features_has_position", + CoverState.OPEN, + {ATTR_CURRENT_POSITION: 70}, + ), + State( + "cover.open_missing_all_features_has_tilt_position", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 20}, + ), + State("cover.closed_only_supports_position", CoverState.OPEN), + State("cover.open_only_supports_position", CoverState.CLOSED), State( "cover.entity_close_attr", CoverState.OPEN, @@ -152,6 +419,39 @@ async def test_reproducing_states( ), # Should not raise State("cover.non_existing", "on"), + State( + "cover.tilt_only_open", + CoverState.CLOSED, + {}, + ), + State( + "cover.tilt_only_tilt_position_100", + CoverState.CLOSED, + {ATTR_CURRENT_TILT_POSITION: 0}, + ), + State( + "cover.tilt_only_closed", + CoverState.OPEN, + {}, + ), + State( + "cover.tilt_only_tilt_position_0", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 100}, + ), + State( + "cover.tilt_partial_open_only_supports_tilt_position", + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 70}, + ), + State( + "cover.tilt_open_only_supports_tilt_position", + CoverState.CLOSED, + ), + State( + "cover.tilt_closed_only_supports_tilt_position", + CoverState.OPEN, + ), ], ) @@ -159,8 +459,10 @@ async def test_reproducing_states( {"entity_id": "cover.entity_open"}, {"entity_id": "cover.entity_open_attr"}, {"entity_id": "cover.entity_entirely_open"}, + {"entity_id": "cover.open_only_supports_close_open"}, + {"entity_id": "cover.open_missing_all_features"}, ] - assert len(close_calls) == 3 + assert len(close_calls) == len(valid_close_calls) for call in close_calls: assert call.domain == "cover" assert call.data in valid_close_calls @@ -170,8 +472,9 @@ async def test_reproducing_states( {"entity_id": "cover.entity_close"}, {"entity_id": "cover.entity_slightly_open"}, {"entity_id": "cover.entity_open_tilt"}, + {"entity_id": "cover.closed_only_supports_close_open"}, ] - assert len(open_calls) == 3 + assert len(open_calls) == len(valid_open_calls) for call in open_calls: assert call.domain == "cover" assert call.data in valid_open_calls @@ -180,27 +483,77 @@ async def test_reproducing_states( valid_close_tilt_calls = [ {"entity_id": "cover.entity_open_tilt"}, {"entity_id": "cover.entity_entirely_open"}, + {"entity_id": "cover.tilt_only_open"}, + {"entity_id": "cover.entity_open_attr"}, + {"entity_id": "cover.tilt_only_tilt_position_100"}, + {"entity_id": "cover.open_only_supports_tilt_close_open"}, ] - assert len(close_tilt_calls) == 2 + assert len(close_tilt_calls) == len(valid_close_tilt_calls) for call in close_tilt_calls: assert call.domain == "cover" assert call.data in valid_close_tilt_calls valid_close_tilt_calls.remove(call.data) - assert len(open_tilt_calls) == 1 - assert open_tilt_calls[0].domain == "cover" - assert open_tilt_calls[0].data == {"entity_id": "cover.entity_close_tilt"} + valid_open_tilt_calls = [ + {"entity_id": 
"cover.entity_close_tilt"}, + {"entity_id": "cover.tilt_only_closed"}, + {"entity_id": "cover.tilt_only_tilt_position_0"}, + {"entity_id": "cover.closed_only_supports_tilt_close_open"}, + ] + assert len(open_tilt_calls) == len(valid_open_tilt_calls) + for call in open_tilt_calls: + assert call.domain == "cover" + assert call.data in valid_open_tilt_calls + valid_open_tilt_calls.remove(call.data) - assert len(position_calls) == 1 - assert position_calls[0].domain == "cover" - assert position_calls[0].data == { - "entity_id": "cover.entity_close_attr", - ATTR_POSITION: 50, - } + valid_position_calls = [ + { + "entity_id": "cover.entity_close_attr", + ATTR_POSITION: 50, + }, + { + "entity_id": "cover.closed_missing_all_features_has_position", + ATTR_POSITION: 70, + }, + { + "entity_id": "cover.closed_only_supports_position", + ATTR_POSITION: 100, + }, + { + "entity_id": "cover.open_only_supports_position", + ATTR_POSITION: 0, + }, + ] + assert len(position_calls) == len(valid_position_calls) + for call in position_calls: + assert call.domain == "cover" + assert call.data in valid_position_calls + valid_position_calls.remove(call.data) - assert len(position_tilt_calls) == 1 - assert position_tilt_calls[0].domain == "cover" - assert position_tilt_calls[0].data == { - "entity_id": "cover.entity_close_attr", - ATTR_TILT_POSITION: 50, - } + valid_position_tilt_calls = [ + { + "entity_id": "cover.entity_close_attr", + ATTR_TILT_POSITION: 50, + }, + { + "entity_id": "cover.open_missing_all_features_has_tilt_position", + ATTR_TILT_POSITION: 20, + }, + { + "entity_id": "cover.tilt_open_only_supports_tilt_position", + ATTR_TILT_POSITION: 0, + }, + { + "entity_id": "cover.tilt_closed_only_supports_tilt_position", + ATTR_TILT_POSITION: 100, + }, + { + "entity_id": "cover.tilt_partial_open_only_supports_tilt_position", + ATTR_TILT_POSITION: 70, + }, + ] + assert len(position_tilt_calls) == len(valid_position_tilt_calls) + for call in position_tilt_calls: + assert call.domain == "cover" + assert call.data in valid_position_tilt_calls + valid_position_tilt_calls.remove(call.data) diff --git a/tests/components/demo/test_valve.py b/tests/components/demo/test_valve.py new file mode 100644 index 00000000000..1057065ce70 --- /dev/null +++ b/tests/components/demo/test_valve.py @@ -0,0 +1,83 @@ +"""The tests for the Demo valve platform.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.demo import DOMAIN, valve as demo_valve +from homeassistant.components.valve import ( + DOMAIN as VALVE_DOMAIN, + SERVICE_CLOSE_VALVE, + SERVICE_OPEN_VALVE, + ValveState, +) +from homeassistant.const import ATTR_ENTITY_ID, EVENT_STATE_CHANGED, Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import async_capture_events + +FRONT_GARDEN = "valve.front_garden" +ORCHARD = "valve.orchard" + + +@pytest.fixture +async def valve_only() -> None: + """Enable only the valve platform.""" + with patch( + "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", + [Platform.VALVE], + ): + yield + + +@pytest.fixture(autouse=True) +async def setup_comp(hass: HomeAssistant, valve_only: None): + """Set up demo component.""" + assert await async_setup_component( + hass, VALVE_DOMAIN, {VALVE_DOMAIN: {"platform": DOMAIN}} + ) + await hass.async_block_till_done() + + +@patch.object(demo_valve, "OPEN_CLOSE_DELAY", 0) +async def test_closing(hass: HomeAssistant) -> None: + """Test the closing of a valve.""" + state = 
hass.states.get(FRONT_GARDEN) + assert state.state == ValveState.OPEN + await hass.async_block_till_done() + + state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) + await hass.services.async_call( + VALVE_DOMAIN, + SERVICE_CLOSE_VALVE, + {ATTR_ENTITY_ID: FRONT_GARDEN}, + blocking=False, + ) + await hass.async_block_till_done() + + assert state_changes[0].data["entity_id"] == FRONT_GARDEN + assert state_changes[0].data["new_state"].state == ValveState.CLOSING + + assert state_changes[1].data["entity_id"] == FRONT_GARDEN + assert state_changes[1].data["new_state"].state == ValveState.CLOSED + + +@patch.object(demo_valve, "OPEN_CLOSE_DELAY", 0) +async def test_opening(hass: HomeAssistant) -> None: + """Test the opening of a valve.""" + state = hass.states.get(ORCHARD) + assert state.state == ValveState.CLOSED + await hass.async_block_till_done() + + state_changes = async_capture_events(hass, EVENT_STATE_CHANGED) + await hass.services.async_call( + VALVE_DOMAIN, SERVICE_OPEN_VALVE, {ATTR_ENTITY_ID: ORCHARD}, blocking=False + ) + await hass.async_block_till_done() + + assert state_changes[0].data["entity_id"] == ORCHARD + assert state_changes[0].data["new_state"].state == ValveState.OPENING + + assert state_changes[1].data["entity_id"] == ORCHARD + assert state_changes[1].data["new_state"].state == ValveState.OPEN diff --git a/tests/components/dormakaba_dkey/test_config_flow.py b/tests/components/dormakaba_dkey/test_config_flow.py index 8d8140d609a..b3657810006 100644 --- a/tests/components/dormakaba_dkey/test_config_flow.py +++ b/tests/components/dormakaba_dkey/test_config_flow.py @@ -9,6 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.dormakaba_dkey.const import DOMAIN +from homeassistant.config_entries import SOURCE_IGNORE from homeassistant.const import CONF_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType @@ -143,6 +144,43 @@ async def test_async_step_user_takes_precedence_over_discovery( assert not hass.config_entries.flow.async_progress(DOMAIN) +async def test_user_setup_removes_ignored_entry(hass: HomeAssistant) -> None: + """Test the user initiated form can replace an ignored device.""" + ignored_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=DKEY_DISCOVERY_INFO.address, + source=SOURCE_IGNORE, + ) + ignored_entry.add_to_hass(hass) + assert hass.config_entries.async_entries(DOMAIN) == [ignored_entry] + + with patch( + "homeassistant.components.dormakaba_dkey.config_flow.async_discovered_service_info", + return_value=[NOT_DKEY_DISCOVERY_INFO, DKEY_DISCOVERY_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: DKEY_DISCOVERY_INFO.address, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "associate" + assert result["errors"] is None + + await _test_common_success(hass, result) + + # Check the ignored entry is removed + assert ignored_entry not in hass.config_entries.async_entries(DOMAIN) + + async def test_bluetooth_step_success(hass: HomeAssistant) -> None: """Test bluetooth step success path.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/duke_energy/conftest.py 
b/tests/components/duke_energy/conftest.py index f74ef43bf07..f82a2353557 100644 --- a/tests/components/duke_energy/conftest.py +++ b/tests/components/duke_energy/conftest.py @@ -61,8 +61,8 @@ def mock_api() -> Generator[AsyncMock]: ): api = mock_api.return_value api.authenticate.return_value = { - "email": "TEST@EXAMPLE.COM", - "cdp_internal_user_id": "test-username", + "loginEmailAddress": "TEST@EXAMPLE.COM", + "internalUserID": "test-username", } api.get_meters.return_value = {} yield api diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py index c4234cb38ae..a63672cc85d 100644 --- a/tests/components/elevenlabs/test_tts.py +++ b/tests/components/elevenlabs/test_tts.py @@ -350,7 +350,7 @@ async def test_tts_service_speak_error( assert len(calls) == 1 assert ( await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) tts_entity._client.generate.assert_called_once_with( diff --git a/tests/components/energy/test_websocket_api.py b/tests/components/energy/test_websocket_api.py index 959ec7d1687..e4b0e568a70 100644 --- a/tests/components/energy/test_websocket_api.py +++ b/tests/components/energy/test_websocket_api.py @@ -149,7 +149,13 @@ async def test_save_preferences( "stat_energy_to": "my_battery_charging", }, ], - "device_consumption": [{"stat_consumption": "some_device_usage"}], + "device_consumption": [ + { + "stat_consumption": "some_device_usage", + "name": "My Device", + "included_in_stat": "sensor.some_other_device", + } + ], } await client.send_json({"id": 6, "type": "energy/save_prefs", **new_prefs}) diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index c1e2c9270e2..101caaf1aea 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -361,7 +361,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -374,7 +374,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -1456,7 +1456,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1471,7 +1471,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1486,7 +1486,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -1495,22 +1495,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1525,7 +1525,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1540,7 +1540,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -1549,15 +1549,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , @@ -2519,7 +2519,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -2532,7 +2532,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -5374,7 +5374,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5389,7 +5389,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5404,7 +5404,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5413,22 +5413,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: 
test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5443,7 +5443,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5458,7 +5458,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5467,22 +5467,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5497,7 +5497,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5512,7 +5512,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5521,22 +5521,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 
'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5551,7 +5551,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5566,7 +5566,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5575,22 +5575,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5605,7 +5605,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5620,7 +5620,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5629,22 +5629,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5659,7 +5659,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5674,7 +5674,7 @@ }), 
'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5683,22 +5683,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5713,7 +5713,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5728,7 +5728,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5737,22 +5737,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -5767,7 +5767,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -5782,7 +5782,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -5791,15 +5791,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_acb_batt][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 
'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -7026,7 +7026,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -7039,7 +7039,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -9881,7 +9881,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -9896,7 +9896,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -9911,7 +9911,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -9920,22 +9920,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -9950,7 +9950,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -9965,7 +9965,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -9974,22 +9974,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 
'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10004,7 +10004,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10019,7 +10019,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10028,22 +10028,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10058,7 +10058,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10073,7 +10073,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10082,22 +10082,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct-entry] 
EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10112,7 +10112,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10127,7 +10127,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10136,22 +10136,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10166,7 +10166,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10181,7 +10181,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10190,22 +10190,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10220,7 +10220,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10235,7 +10235,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10244,22 +10244,22 @@ 'unit_of_measurement': None, }) 
# --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -10274,7 +10274,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -10289,7 +10289,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -10298,15 +10298,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_eu_batt][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -11630,7 +11630,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11643,7 +11643,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -11688,7 +11688,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l1', + 'original_name': 'Balanced net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11701,7 +11701,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l1', 'state_class': , 'unit_of_measurement': , }), @@ -11746,7 +11746,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l2', + 'original_name': 'Balanced net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11759,7 +11759,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 
balanced net power consumption l2', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l2', 'state_class': , 'unit_of_measurement': , }), @@ -11804,7 +11804,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l3', + 'original_name': 'Balanced net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -11817,7 +11817,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l3', 'state_class': , 'unit_of_measurement': , }), @@ -17547,7 +17547,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17562,7 +17562,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17577,7 +17577,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17586,22 +17586,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17616,7 +17616,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17631,7 +17631,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17640,22 +17640,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 
1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17670,7 +17670,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17685,7 +17685,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17694,22 +17694,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17724,7 +17724,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17739,7 +17739,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17748,22 +17748,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17778,7 +17778,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17793,7 +17793,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17802,22 +17802,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17832,7 +17832,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17847,7 +17847,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17856,22 +17856,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17886,7 +17886,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17901,7 +17901,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 
'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17910,22 +17910,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17940,7 +17940,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -17955,7 +17955,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -17964,22 +17964,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.14', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -17994,7 +17994,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18009,7 +18009,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT', + 'original_name': 'Power factor storage CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18018,22 +18018,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT', + 'friendly_name': 'Envoy 1234 Power factor 
storage CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18048,7 +18048,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18063,7 +18063,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT l1', + 'original_name': 'Power factor storage CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18072,22 +18072,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l1', + 'friendly_name': 'Envoy 1234 Power factor storage CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.32', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18102,7 +18102,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18117,7 +18117,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT l2', + 'original_name': 'Power factor storage CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18126,22 +18126,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l2', + 'friendly_name': 'Envoy 1234 Power factor storage CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-entry] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -18156,7 +18156,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -18171,7 +18171,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor storage CT l3', + 'original_name': 'Power factor storage CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -18180,15 +18180,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-state] +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_power_factor_storage_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor storage CT l3', + 'friendly_name': 'Envoy 1234 Power factor storage CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_storage_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -19586,7 +19586,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19599,7 +19599,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -19644,7 +19644,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l1', + 'original_name': 'Balanced net power consumption l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19657,7 +19657,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l1', 'state_class': , 'unit_of_measurement': , }), @@ -19702,7 +19702,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l2', + 'original_name': 'Balanced net power consumption l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19715,7 +19715,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l2', 'state_class': , 'unit_of_measurement': , }), @@ -19760,7 +19760,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption l3', + 'original_name': 'Balanced net power consumption l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -19773,7 +19773,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'friendly_name': 'Envoy 1234 Balanced net power consumption l3', 'state_class': , 'unit_of_measurement': , }), @@ -24065,7 +24065,7 @@ 'state': '0.3', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 
'aliases': set({ }), @@ -24080,7 +24080,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24095,7 +24095,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT', + 'original_name': 'Power factor net consumption CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24104,22 +24104,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.21', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24134,7 +24134,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24149,7 +24149,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l1', + 'original_name': 'Power factor net consumption CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24158,22 +24158,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.22', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24188,7 +24188,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24203,7 +24203,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l2', + 
'original_name': 'Power factor net consumption CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24212,22 +24212,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.23', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24242,7 +24242,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24257,7 +24257,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor net consumption CT l3', + 'original_name': 'Power factor net consumption CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24266,22 +24266,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_net_consumption_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'friendly_name': 'Envoy 1234 Power factor net consumption CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_net_consumption_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.24', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24296,7 +24296,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24311,7 +24311,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24320,22 +24320,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 
'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.11', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24350,7 +24350,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24365,7 +24365,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l1', + 'original_name': 'Power factor production CT l1', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24374,22 +24374,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'friendly_name': 'Envoy 1234 Power factor production CT l1', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.12', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-entry] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l2-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24404,7 +24404,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24419,7 +24419,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l2', + 'original_name': 'Power factor production CT l2', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24428,22 +24428,22 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'friendly_name': 'Envoy 1234 Power factor production CT l2', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0.13', }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-entry] +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l3-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -24458,7 +24458,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -24473,7 +24473,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Powerfactor production CT l3', + 'original_name': 'Power factor production CT l3', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -24482,15 +24482,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-state] +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_power_factor_production_ct_l3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'friendly_name': 'Envoy 1234 Power factor production CT l3', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct_l3', 'last_changed': , 'last_reported': , 'last_updated': , @@ -25326,7 +25326,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'balanced net power consumption', + 'original_name': 'Balanced net power consumption', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -25339,7 +25339,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', - 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'friendly_name': 'Envoy 1234 Balanced net power consumption', 'state_class': , 'unit_of_measurement': , }), @@ -25799,7 +25799,7 @@ 'state': 'normal', }) # --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-entry] +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_power_factor_production_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -25814,7 +25814,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -25829,7 +25829,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'powerfactor production CT', + 'original_name': 'Power factor production CT', 'platform': 'enphase_envoy', 'previous_unique_id': None, 'supported_features': 0, @@ -25838,15 +25838,15 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-state] +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_power_factor_production_ct-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power_factor', - 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'friendly_name': 'Envoy 1234 Power factor production CT', 'state_class': , }), 'context': , - 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'entity_id': 'sensor.envoy_1234_power_factor_production_ct', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/esphome/bluetooth/test_client.py b/tests/components/esphome/bluetooth/test_client.py index 77d315f096d..554f1725f4b 100644 
--- a/tests/components/esphome/bluetooth/test_client.py
+++ b/tests/components/esphome/bluetooth/test_client.py
@@ -6,7 +6,9 @@ from aioesphomeapi import APIClient, APIVersion, BluetoothProxyFeature, DeviceIn
 from bleak.exc import BleakError
 from bleak_esphome.backend.client import ESPHomeClient, ESPHomeClientData
 from bleak_esphome.backend.device import ESPHomeBluetoothDevice
-from bleak_esphome.backend.scanner import ESPHomeScanner
+from bleak_esphome.backend.scanner import (  # pylint: disable=no-name-in-module
+    ESPHomeScanner,
+)
 import pytest
 
 from homeassistant.components.bluetooth import HaBluetoothConnector
diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py
index dc6195bfe1f..2786ed8324c 100644
--- a/tests/components/esphome/conftest.py
+++ b/tests/components/esphome/conftest.py
@@ -30,6 +30,7 @@ from zeroconf import Zeroconf
 from homeassistant.components.esphome import dashboard
 from homeassistant.components.esphome.const import (
     CONF_ALLOW_SERVICE_CALLS,
+    CONF_BLUETOOTH_MAC_ADDRESS,
     CONF_DEVICE_NAME,
     CONF_NOISE_PSK,
     DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS,
@@ -578,12 +579,29 @@ async def mock_bluetooth_entry(
     async def _mock_bluetooth_entry(
         bluetooth_proxy_feature_flags: BluetoothProxyFeature,
     ) -> MockESPHomeDevice:
+        entry = MockConfigEntry(
+            domain=DOMAIN,
+            data={
+                CONF_HOST: "test.local",
+                CONF_PORT: 6053,
+                CONF_PASSWORD: "",
+                CONF_BLUETOOTH_MAC_ADDRESS: "AA:BB:CC:DD:EE:FC",
+            },
+            options={
+                CONF_ALLOW_SERVICE_CALLS: DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS
+            },
+        )
+        entry.add_to_hass(hass)
         return await _mock_generic_device_entry(
             hass,
             mock_client,
-            {"bluetooth_proxy_feature_flags": bluetooth_proxy_feature_flags},
+            {
+                "bluetooth_mac_address": "AA:BB:CC:DD:EE:FC",
+                "bluetooth_proxy_feature_flags": bluetooth_proxy_feature_flags,
+            },
             ([], []),
             [],
+            entry=entry,
         )
 
     return _mock_bluetooth_entry
diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py
index 30535236970..ce5de0a1a67 100644
--- a/tests/components/esphome/test_assist_satellite.py
+++ b/tests/components/esphome/test_assist_satellite.py
@@ -25,7 +25,12 @@ from aioesphomeapi import (
 )
 import pytest
 
-from homeassistant.components import assist_satellite, tts
+from homeassistant.components import (
+    assist_pipeline,
+    assist_satellite,
+    conversation,
+    tts,
+)
 from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType
 from homeassistant.components.assist_satellite import (
     AssistSatelliteConfiguration,
@@ -41,7 +46,6 @@ from homeassistant.components.esphome.assist_satellite import (
     EsphomeAssistSatellite,
     VoiceAssistantUDPServer,
 )
-from homeassistant.components.media_source import PlayMedia
 from homeassistant.components.select import (
     DOMAIN as SELECT_DOMAIN,
     SERVICE_SELECT_OPTION,
@@ -57,6 +61,8 @@ from homeassistant.helpers.entity_component import EntityComponent
 
 from .conftest import MockESPHomeDevice
 
+from tests.components.tts.common import MockResultStream
+
 
 def get_satellite_entity(
     hass: HomeAssistant, mac_address: str
@@ -285,12 +291,21 @@ async def test_pipeline_api_audio(
     event_callback(
         PipelineEvent(
             type=PipelineEventType.INTENT_END,
-            data={"intent_output": {"conversation_id": conversation_id}},
+            data={
+                "intent_output": conversation.ConversationResult(
+                    response=intent_helper.IntentResponse("en"),
+                    conversation_id=conversation_id,
+                    continue_conversation=True,
+                ).as_dict()
+            },
         )
     )
 
     assert mock_client.send_voice_assistant_event.call_args_list[-1].args
== ( VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END, - {"conversation_id": conversation_id}, + { + "conversation_id": conversation_id, + "continue_conversation": "1", + }, ) # TTS @@ -484,7 +499,12 @@ async def test_pipeline_udp_audio( event_callback( PipelineEvent( type=PipelineEventType.INTENT_END, - data={"intent_output": {"conversation_id": conversation_id}}, + data={ + "intent_output": conversation.ConversationResult( + response=intent_helper.IntentResponse("en"), + conversation_id=conversation_id, + ).as_dict() + }, ) ) @@ -690,7 +710,12 @@ async def test_pipeline_media_player( event_callback( PipelineEvent( type=PipelineEventType.INTENT_END, - data={"intent_output": {"conversation_id": conversation_id}}, + data={ + "intent_output": conversation.ConversationResult( + response=intent_helper.IntentResponse("en"), + conversation_id=conversation_id, + ).as_dict() + }, ) ) @@ -1140,7 +1165,7 @@ async def test_announce_supported_features( Awaitable[MockESPHomeDevice], ], ) -> None: - """Test that the announce supported feature is set by flags.""" + """Test that the announce supported feature is not set by default.""" mock_device: MockESPHomeDevice = await mock_esphome_device( mock_client=mock_client, entity_info=[], @@ -1187,25 +1212,32 @@ async def test_announce_message( done = asyncio.Event() async def send_voice_assistant_announcement_await_response( - media_id: str, timeout: float, text: str + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, ): assert satellite.state == AssistSatelliteState.RESPONDING - assert media_id == "https://www.home-assistant.io/resolved.mp3" + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" assert text == "test-text" + assert not start_conversation + assert not preannounce_media_id done.set() with ( patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + "homeassistant.components.tts.generate_media_source_id", return_value="media-source://bla", ), patch( - "homeassistant.components.media_source.async_resolve_media", - return_value=PlayMedia( - url="https://www.home-assistant.io/resolved.mp3", - mime_type="audio/mp3", - ), + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), ), patch.object( mock_client, @@ -1217,7 +1249,11 @@ async def test_announce_message( await hass.services.async_call( assist_satellite.DOMAIN, "announce", - {"entity_id": satellite.entity_id, "message": "test-text"}, + { + "entity_id": satellite.entity_id, + "message": "test-text", + "preannounce": False, + }, blocking=True, ) await done.wait() @@ -1275,10 +1311,16 @@ async def test_announce_media_id( done = asyncio.Event() async def send_voice_assistant_announcement_await_response( - media_id: str, timeout: float, text: str + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, ): assert satellite.state == AssistSatelliteState.RESPONDING assert media_id == "https://www.home-assistant.io/proxied.flac" + assert not start_conversation + assert not preannounce_media_id done.set() @@ -1300,6 +1342,7 @@ async def test_announce_media_id( { "entity_id": satellite.entity_id, "media_id": "https://www.home-assistant.io/resolved.mp3", + "preannounce": False, }, blocking=True, ) @@ -1307,9 +1350,9 @@ async def test_announce_media_id( assert satellite.state == 
AssistSatelliteState.IDLE mock_async_create_proxy_url.assert_called_once_with( - hass, - dev.id, - "https://www.home-assistant.io/resolved.mp3", + hass=hass, + device_id=dev.id, + media_url="https://www.home-assistant.io/resolved.mp3", media_format="flac", rate=48000, channels=2, @@ -1317,6 +1360,422 @@ async def test_announce_media_id( ) +async def test_announce_message_with_preannounce( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test announcement with message and preannounce media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str | None = None, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert not start_conversation + assert preannounce_media_id == "test-preannounce" + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "announce", + { + "entity_id": satellite.entity_id, + "message": "test-text", + "preannounce_media_id": "test-preannounce", + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + +async def test_start_conversation_supported_features( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that the start conversation supported feature is not set by default.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + assert not ( + satellite.supported_features & AssistSatelliteEntityFeature.START_CONVERSATION + ) + + +async def test_start_conversation_message( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test start conversation with 
message.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert start_conversation + assert not preannounce_media_id + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + { + "entity_id": satellite.entity_id, + "start_message": "test-text", + "preannounce": False, + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + +async def test_start_conversation_media_id( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + device_registry: dr.DeviceRegistry, +) -> None: + """Test start conversation with media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + sample_bytes=2, + ), + ], + ) + ], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = 
assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "https://www.home-assistant.io/proxied.flac" + assert start_conversation + assert not preannounce_media_id + + done.set() + + with ( + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.esphome.assist_satellite.async_create_proxy_url", + return_value="https://www.home-assistant.io/proxied.flac", + ) as mock_async_create_proxy_url, + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + { + "entity_id": satellite.entity_id, + "start_media_id": "https://www.home-assistant.io/resolved.mp3", + "preannounce": False, + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + mock_async_create_proxy_url.assert_called_once_with( + hass=hass, + device_id=dev.id, + media_url="https://www.home-assistant.io/resolved.mp3", + media_format="flac", + rate=48000, + channels=2, + width=2, + ) + + +async def test_start_conversation_message_with_preannounce( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test start conversation with message and preannounce media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + | VoiceAssistantFeature.START_CONVERSATION + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test engine", + conversation_language="en", + language="en", + name="test pipeline", + stt_engine="test stt", + stt_language="en", + tts_engine="test tts", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, + timeout: float, + text: str, + start_conversation: bool, + preannounce_media_id: str, + ): + assert satellite.state == AssistSatelliteState.RESPONDING + assert media_id == "http://10.10.10.10:8123/api/tts_proxy/test-token" + assert text == "test-text" + assert start_conversation + assert preannounce_media_id == "test-preannounce" + + done.set() + + with ( + patch( + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + 
return_value="tts.cloud_tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipeline", + return_value=pipeline, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "start_conversation", + { + "entity_id": satellite.entity_id, + "start_message": "test-text", + "preannounce_media_id": "test-preannounce", + }, + blocking=True, + ) + await done.wait() + assert satellite.state == AssistSatelliteState.IDLE + + async def test_satellite_unloaded_on_disconnect( hass: HomeAssistant, mock_client: APIClient, diff --git a/tests/components/esphome/test_bluetooth.py b/tests/components/esphome/test_bluetooth.py index 19bc5a2e7c7..dd7a8f59fe5 100644 --- a/tests/components/esphome/test_bluetooth.py +++ b/tests/components/esphome/test_bluetooth.py @@ -13,7 +13,7 @@ async def test_bluetooth_connect_with_raw_adv( hass: HomeAssistant, mock_bluetooth_entry_with_raw_adv: MockESPHomeDevice ) -> None: """Test bluetooth connect with raw advertisements.""" - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner is not None assert scanner.connectable is True assert scanner.scanning is True @@ -21,11 +21,11 @@ async def test_bluetooth_connect_with_raw_adv( await mock_bluetooth_entry_with_raw_adv.mock_disconnect(True) await hass.async_block_till_done() - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner is None await mock_bluetooth_entry_with_raw_adv.mock_connect() await hass.async_block_till_done() - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner.scanning is True @@ -33,7 +33,7 @@ async def test_bluetooth_connect_with_legacy_adv( hass: HomeAssistant, mock_bluetooth_entry_with_legacy_adv: MockESPHomeDevice ) -> None: """Test bluetooth connect with legacy advertisements.""" - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner is not None assert scanner.connectable is True assert scanner.scanning is True @@ -41,11 +41,11 @@ async def test_bluetooth_connect_with_legacy_adv( await mock_bluetooth_entry_with_legacy_adv.mock_disconnect(True) await hass.async_block_till_done() - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner is None await mock_bluetooth_entry_with_legacy_adv.mock_connect() await hass.async_block_till_done() - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner.scanning is True @@ -55,10 +55,10 @@ async def test_bluetooth_device_linked_via_device( device_registry: dr.DeviceRegistry, ) -> None: """Test the Bluetooth device is linked to the ESPHome device.""" - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner.connectable is 
True entry = hass.config_entries.async_entry_for_domain_unique_id( - "bluetooth", "11:22:33:44:55:AA" + "bluetooth", "AA:BB:CC:DD:EE:FC" ) assert entry is not None esp_device = device_registry.async_get_device( @@ -71,7 +71,7 @@ async def test_bluetooth_device_linked_via_device( ) assert esp_device is not None device = device_registry.async_get_device( - connections={(dr.CONNECTION_BLUETOOTH, "11:22:33:44:55:AA")} + connections={(dr.CONNECTION_BLUETOOTH, "AA:BB:CC:DD:EE:FC")} ) assert device is not None assert device.via_device_id == esp_device.id @@ -81,7 +81,7 @@ async def test_bluetooth_cleanup_on_remove_entry( hass: HomeAssistant, mock_bluetooth_entry_with_raw_adv: MockESPHomeDevice ) -> None: """Test bluetooth is cleaned up on entry removal.""" - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner.connectable is True await hass.config_entries.async_unload( mock_bluetooth_entry_with_raw_adv.entry.entry_id diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index afca6f76b43..d48a1f40482 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -1047,6 +1047,36 @@ async def test_reauth_confirm_invalid_with_unique_id( assert entry.data[CONF_NOISE_PSK] == VALID_NOISE_PSK +@pytest.mark.usefixtures("mock_zeroconf") +async def test_reauth_encryption_key_removed( + hass: HomeAssistant, mock_client, mock_setup_entry: None +) -> None: + """Test reauth when the encryption key was removed.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "127.0.0.1", + CONF_PORT: 6053, + CONF_PASSWORD: "", + CONF_NOISE_PSK: VALID_NOISE_PSK, + }, + unique_id="test", + ) + entry.add_to_hass(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_encryption_removed_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert entry.data[CONF_NOISE_PSK] == "" + + async def test_discovery_dhcp_updates_host( hass: HomeAssistant, mock_client: APIClient, mock_setup_entry: None ) -> None: diff --git a/tests/components/esphome/test_diagnostics.py b/tests/components/esphome/test_diagnostics.py index 2b2629324d2..2d64170bc97 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -37,7 +37,7 @@ async def test_diagnostics_with_bluetooth( mock_bluetooth_entry_with_raw_adv: MockESPHomeDevice, ) -> None: """Test diagnostics for config entry with Bluetooth.""" - scanner = bluetooth.async_scanner_by_source(hass, "11:22:33:44:55:AA") + scanner = bluetooth.async_scanner_by_source(hass, "AA:BB:CC:DD:EE:FC") assert scanner is not None assert scanner.connectable is True entry = mock_bluetooth_entry_with_raw_adv.entry @@ -55,9 +55,9 @@ async def test_diagnostics_with_bluetooth( "discovered_devices_and_advertisement_data": [], "last_detection": ANY, "monotonic_time": ANY, - "name": "test (11:22:33:44:55:AA)", + "name": "test (AA:BB:CC:DD:EE:FC)", "scanning": True, - "source": "11:22:33:44:55:AA", + "source": "AA:BB:CC:DD:EE:FC", "start_time": ANY, "time_since_last_device_detection": {}, "type": "ESPHomeScanner", @@ -66,6 +66,7 @@ async def test_diagnostics_with_bluetooth( "config": { "created_at": ANY, "data": { + 
"bluetooth_mac_address": "**REDACTED**", "device_name": "test", "host": "test.local", "password": "", @@ -89,6 +90,7 @@ async def test_diagnostics_with_bluetooth( "storage_data": { "api_version": {"major": 99, "minor": 99}, "device_info": { + "bluetooth_mac_address": "**REDACTED**", "bluetooth_proxy_feature_flags": 63, "compilation_time": "", "esphome_version": "1.0.0", diff --git a/tests/components/esphome/test_entity.py b/tests/components/esphome/test_entity.py index 296d61b664d..977ec50ab30 100644 --- a/tests/components/esphome/test_entity.py +++ b/tests/components/esphome/test_entity.py @@ -260,6 +260,76 @@ async def test_entities_removed_after_reload( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 1 +async def test_entities_for_entire_platform_removed( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_client: APIClient, + hass_storage: dict[str, Any], + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test removing all entities for a specific platform when static info changes.""" + entity_info = [ + BinarySensorInfo( + object_id="mybinary_sensor_to_be_removed", + key=1, + name="my binary_sensor to be removed", + unique_id="mybinary_sensor_to_be_removed", + ), + ] + states = [ + BinarySensorState(key=1, state=True, missing_state=False), + ] + user_service = [] + mock_device = await mock_esphome_device( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + entry = mock_device.entry + entry_id = entry.entry_id + storage_key = f"esphome.{entry_id}" + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + assert state is not None + assert state.state == STATE_ON + + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 1 + + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + assert state is not None + reg_entry = entity_registry.async_get( + "binary_sensor.test_mybinary_sensor_to_be_removed" + ) + assert reg_entry is not None + assert state.attributes[ATTR_RESTORED] is True + + entity_info = [] + states = [] + mock_device = await mock_esphome_device( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + entry=entry, + ) + assert mock_device.entry.entry_id == entry_id + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + assert state is None + reg_entry = entity_registry.async_get( + "binary_sensor.test_mybinary_sensor_to_be_removed" + ) + assert reg_entry is None + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 0 + + async def test_entity_info_object_ids( hass: HomeAssistant, mock_client: APIClient, diff --git a/tests/components/esphome/test_event.py b/tests/components/esphome/test_event.py index c17dc4d98a9..d4688e8ab4e 100644 --- a/tests/components/esphome/test_event.py +++ b/tests/components/esphome/test_event.py @@ -4,6 +4,7 @@ from aioesphomeapi import APIClient, Event, EventInfo import pytest from homeassistant.components.event import EventDeviceClass +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -11,9 +12,9 @@ from homeassistant.core import HomeAssistant async def test_generic_event_entity( hass: 
HomeAssistant, mock_client: APIClient, - mock_generic_device_entry, + mock_esphome_device, ) -> None: - """Test a generic event entity.""" + """Test a generic event entity and its availability behavior.""" entity_info = [ EventInfo( object_id="myevent", @@ -26,13 +27,31 @@ async def test_generic_event_entity( ] states = [Event(key=1, event_type="type1")] user_service = [] - await mock_generic_device_entry( + device = await mock_esphome_device( mock_client=mock_client, entity_info=entity_info, user_service=user_service, states=states, ) + await hass.async_block_till_done() + + # Test initial state state = hass.states.get("event.test_myevent") assert state is not None assert state.state == "2024-04-24T00:00:00.000+00:00" assert state.attributes["event_type"] == "type1" + + # Test device becomes unavailable + await device.mock_disconnect(True) + await hass.async_block_till_done() + state = hass.states.get("event.test_myevent") + assert state.state == STATE_UNAVAILABLE + + # Test device becomes available again + await device.mock_connect() + await hass.async_block_till_done() + + # Event entity should be available immediately without waiting for data + state = hass.states.get("event.test_myevent") + assert state.state == "2024-04-24T00:00:00.000+00:00" + assert state.attributes["event_type"] == "type1" diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 79653d3bb66..905a3f6bdc7 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -25,6 +25,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.esphome.const import ( CONF_ALLOW_SERVICE_CALLS, + CONF_BLUETOOTH_MAC_ADDRESS, CONF_DEVICE_NAME, CONF_SUBSCRIBE_LOGS, DOMAIN, @@ -476,6 +477,39 @@ async def test_unique_id_updated_to_mac(hass: HomeAssistant, mock_client) -> Non assert entry.unique_id == "11:22:33:44:55:aa" +@pytest.mark.usefixtures("mock_zeroconf") +async def test_add_missing_bluetooth_mac_address( + hass: HomeAssistant, mock_client +) -> None: + """Test bluetooth mac is added if its missing.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "test.local", CONF_PORT: 6053, CONF_PASSWORD: ""}, + unique_id="mock-config-name", + ) + entry.add_to_hass(hass) + subscribe_done = hass.loop.create_future() + + def async_subscribe_states(*args, **kwargs) -> None: + subscribe_done.set_result(None) + + mock_client.subscribe_states = async_subscribe_states + mock_client.device_info = AsyncMock( + return_value=DeviceInfo( + mac_address="1122334455aa", + bluetooth_mac_address="AA:BB:CC:DD:EE:FF", + ) + ) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + async with asyncio.timeout(1): + await subscribe_done + + assert entry.unique_id == "11:22:33:44:55:aa" + assert entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) == "AA:BB:CC:DD:EE:FF" + + @pytest.mark.usefixtures("mock_zeroconf") async def test_unique_id_not_updated_if_name_same_and_already_mac( hass: HomeAssistant, mock_client: APIClient @@ -1338,3 +1372,32 @@ async def test_entry_missing_unique_id( await mock_esphome_device(mock_client=mock_client, mock_storage=True) await hass.async_block_till_done() assert entry.unique_id == "11:22:33:44:55:aa" + + +async def test_entry_missing_bluetooth_mac_address( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test 
the bluetooth_mac_address is added if available.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=None, + data={ + CONF_HOST: "test.local", + CONF_PORT: 6053, + CONF_PASSWORD: "", + }, + options={CONF_ALLOW_SERVICE_CALLS: True}, + ) + entry.add_to_hass(hass) + await mock_esphome_device( + mock_client=mock_client, + mock_storage=True, + device_info={"bluetooth_mac_address": "AA:BB:CC:DD:EE:FC"}, + ) + await hass.async_block_till_done() + assert entry.data[CONF_BLUETOOTH_MAC_ADDRESS] == "AA:BB:CC:DD:EE:FC" diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 5060471f5d2..76c0a9b1a70 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -86,26 +86,28 @@ def stub_reconnect(): ) async def test_update_entity( hass: HomeAssistant, - stub_reconnect, - mock_config_entry, - mock_device_info, mock_dashboard: dict[str, Any], - devices_payload, - expected_state, - expected_attributes, + devices_payload: list[dict[str, Any]], + expected_state: str, + expected_attributes: dict[str, Any], + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], ) -> None: """Test ESPHome update entity.""" mock_dashboard["configured"] = devices_payload await async_get_dashboard(hass).async_refresh() - with patch( - "homeassistant.components.esphome.update.DomainData.get_entry_data", - return_value=Mock(available=True, device_info=mock_device_info, info={}), - ): - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) - state = hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert state is not None assert state.state == expected_state for key, expected_value in expected_attributes.items(): @@ -130,7 +132,7 @@ async def test_update_entity( await hass.services.async_call( "update", "install", - {"entity_id": "update.none_firmware"}, + {"entity_id": "update.test_firmware"}, blocking=True, ) @@ -155,7 +157,7 @@ async def test_update_entity( await hass.services.async_call( "update", "install", - {"entity_id": "update.none_firmware"}, + {"entity_id": "update.test_firmware"}, blocking=True, ) @@ -177,7 +179,7 @@ async def test_update_entity( await hass.services.async_call( "update", "install", - {"entity_id": "update.none_firmware"}, + {"entity_id": "update.test_firmware"}, blocking=True, ) @@ -274,28 +276,30 @@ async def test_update_device_state_for_availability( async def test_update_entity_dashboard_not_available_startup( hass: HomeAssistant, - stub_reconnect, - mock_config_entry, - mock_device_info, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity when dashboard is not available at startup.""" with ( - patch( - "homeassistant.components.esphome.update.DomainData.get_entry_data", - return_value=Mock(available=True, device_info=mock_device_info, info={}), - ), patch( "esphome_dashboard_api.ESPHomeDashboardAPI.get_devices", side_effect=TimeoutError, ), ): await async_get_dashboard(hass).async_refresh() - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await 
mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) # We have a dashboard but it is not available - state = hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert state is None mock_dashboard["configured"] = [ @@ -308,7 +312,7 @@ async def test_update_entity_dashboard_not_available_startup( await async_get_dashboard(hass).async_refresh() await hass.async_block_till_done() - state = hass.states.get("update.none_firmware") + state = hass.states.get("update.test_firmware") assert state.state == STATE_ON expected_attributes = { "latest_version": "2023.2.0-dev", diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index 583c44a41e6..55b7e35132c 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -157,12 +157,31 @@ def mock_thermostat() -> Mock: return climate +@pytest.fixture +def mock_thermostat_parent() -> Mock: + """Fixture for a thermostat.""" + climate = Mock() + climate.fibaro_id = 5 + climate.parent_fibaro_id = 0 + climate.name = "Test climate" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.device" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = [] + return climate + + @pytest.fixture def mock_thermostat_with_operating_mode() -> Mock: """Fixture for a thermostat.""" climate = Mock() - climate.fibaro_id = 4 - climate.parent_fibaro_id = 0 + climate.fibaro_id = 6 + climate.endpoint_id = 1 + climate.parent_fibaro_id = 5 climate.name = "Test climate" climate.room_id = 1 climate.dead = False @@ -171,20 +190,47 @@ def mock_thermostat_with_operating_mode() -> Mock: climate.type = "com.fibaro.thermostatDanfoss" climate.base_type = "com.fibaro.device" climate.properties = {"manufacturer": ""} - climate.actions = {"setOperationMode": 1} + climate.actions = {"setOperatingMode": 1, "setTargetLevel": 1} climate.supported_features = {} climate.has_supported_operating_modes = True climate.supported_operating_modes = [0, 1, 15] climate.has_operating_mode = True climate.operating_mode = 15 + climate.has_supported_thermostat_modes = False climate.has_thermostat_mode = False + climate.has_unit = True + climate.unit = "C" + climate.has_heating_thermostat_setpoint = False + climate.has_heating_thermostat_setpoint_future = False + climate.target_level = 23 value_mock = Mock() value_mock.has_value = True - value_mock.int_value.return_value = 20 + value_mock.float_value.return_value = 20 climate.value = value_mock return climate +@pytest.fixture +def mock_fan_device() -> Mock: + """Fixture for a fan endpoint of a thermostat device.""" + climate = Mock() + climate.fibaro_id = 7 + climate.endpoint_id = 1 + climate.parent_fibaro_id = 5 + climate.name = "Test fan" + climate.room_id = 1 + climate.dead = False + climate.visible = True + climate.enabled = True + climate.type = "com.fibaro.fan" + climate.base_type = "com.fibaro.device" + climate.properties = {"manufacturer": ""} + climate.actions = {"setFanMode": 1} + climate.supported_modes = [0, 1, 2] + climate.mode = 1 + return climate + + @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return the default mocked config entry.""" @@ -209,19 +255,22 @@ def mock_fibaro_client() -> Generator[Mock]: info_mock.hc_name = TEST_NAME info_mock.current_version = TEST_VERSION info_mock.platform = TEST_MODEL + info_mock.manufacturer_name = "Fibaro" 
+    info_mock.model_name = "Home Center 2"
+    info_mock.mac_address = "00:22:4d:b7:13:24"
 
     with patch(
         "homeassistant.components.fibaro.FibaroClient", autospec=True
     ) as fibaro_client_mock:
         client = fibaro_client_mock.return_value
-        client.set_authentication.return_value = None
-        client.connect.return_value = True
+        client.connect_with_credentials.return_value = info_mock
         client.read_info.return_value = info_mock
         client.read_rooms.return_value = []
         client.read_scenes.return_value = []
         client.read_devices.return_value = []
         client.register_update_handler.return_value = None
         client.unregister_update_handler.return_value = None
+        client.frontend_url.return_value = TEST_URL.removesuffix("/api/")
 
         yield client
diff --git a/tests/components/fibaro/test_climate.py b/tests/components/fibaro/test_climate.py
index 31022e19a08..339d9d23077 100644
--- a/tests/components/fibaro/test_climate.py
+++ b/tests/components/fibaro/test_climate.py
@@ -130,5 +130,153 @@ async def test_hvac_mode_with_operation_mode_support(
         # Act
         await init_integration(hass, mock_config_entry)
         # Assert
-        state = hass.states.get("climate.room_1_test_climate_4")
+        state = hass.states.get("climate.room_1_test_climate_6")
         assert state.state == HVACMode.AUTO
+
+
+async def test_set_hvac_mode_with_operation_mode_support(
+    hass: HomeAssistant,
+    mock_fibaro_client: Mock,
+    mock_config_entry: MockConfigEntry,
+    mock_thermostat_with_operating_mode: Mock,
+    mock_room: Mock,
+) -> None:
+    """Test that set_hvac_mode() works."""
+
+    # Arrange
+    mock_fibaro_client.read_rooms.return_value = [mock_room]
+    mock_fibaro_client.read_devices.return_value = [mock_thermostat_with_operating_mode]
+
+    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
+        # Act
+        await init_integration(hass, mock_config_entry)
+        await hass.services.async_call(
+            "climate",
+            "set_hvac_mode",
+            {"entity_id": "climate.room_1_test_climate_6", "hvac_mode": HVACMode.HEAT},
+            blocking=True,
+        )
+
+    # Assert
+    mock_thermostat_with_operating_mode.execute_action.assert_called_once()
+
+
+async def test_fan_mode(
+    hass: HomeAssistant,
+    mock_fibaro_client: Mock,
+    mock_config_entry: MockConfigEntry,
+    mock_thermostat_parent: Mock,
+    mock_thermostat_with_operating_mode: Mock,
+    mock_fan_device: Mock,
+    mock_room: Mock,
+) -> None:
+    """Test that the fan mode is reported correctly."""
+
+    # Arrange
+    mock_fibaro_client.read_rooms.return_value = [mock_room]
+    mock_fibaro_client.read_devices.return_value = [
+        mock_thermostat_parent,
+        mock_thermostat_with_operating_mode,
+        mock_fan_device,
+    ]
+
+    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
+        # Act
+        await init_integration(hass, mock_config_entry)
+        # Assert
+        state = hass.states.get("climate.room_1_test_climate_6")
+        assert state.attributes["fan_mode"] == "low"
+        assert state.attributes["fan_modes"] == ["off", "low", "auto_high"]
+
+
+async def test_set_fan_mode(
+    hass: HomeAssistant,
+    mock_fibaro_client: Mock,
+    mock_config_entry: MockConfigEntry,
+    mock_thermostat_parent: Mock,
+    mock_thermostat_with_operating_mode: Mock,
+    mock_fan_device: Mock,
+    mock_room: Mock,
+) -> None:
+    """Test that set_fan_mode() works."""
+
+    # Arrange
+    mock_fibaro_client.read_rooms.return_value = [mock_room]
+    mock_fibaro_client.read_devices.return_value = [
+        mock_thermostat_parent,
+        mock_thermostat_with_operating_mode,
+        mock_fan_device,
+    ]
+
+    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
+        # Act
+        await init_integration(hass, mock_config_entry)
+        await hass.services.async_call(
+            "climate",
+            "set_fan_mode",
+            {"entity_id": "climate.room_1_test_climate_6", "fan_mode": "off"},
+            blocking=True,
+        )
+
+    # Assert
+    mock_fan_device.execute_action.assert_called_once()
+
+
+async def test_target_temperature(
+    hass: HomeAssistant,
+    mock_fibaro_client: Mock,
+    mock_config_entry: MockConfigEntry,
+    mock_thermostat_parent: Mock,
+    mock_thermostat_with_operating_mode: Mock,
+    mock_fan_device: Mock,
+    mock_room: Mock,
+) -> None:
+    """Test that the target temperature is reported correctly."""
+
+    # Arrange
+    mock_fibaro_client.read_rooms.return_value = [mock_room]
+    mock_fibaro_client.read_devices.return_value = [
+        mock_thermostat_parent,
+        mock_thermostat_with_operating_mode,
+        mock_fan_device,
+    ]
+
+    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
+        # Act
+        await init_integration(hass, mock_config_entry)
+        # Assert
+        state = hass.states.get("climate.room_1_test_climate_6")
+        assert state.attributes["temperature"] == 23
+
+
+async def test_set_target_temperature(
+    hass: HomeAssistant,
+    mock_fibaro_client: Mock,
+    mock_config_entry: MockConfigEntry,
+    mock_thermostat_parent: Mock,
+    mock_thermostat_with_operating_mode: Mock,
+    mock_fan_device: Mock,
+    mock_room: Mock,
+) -> None:
+    """Test that set_temperature() works."""
+
+    # Arrange
+    mock_fibaro_client.read_rooms.return_value = [mock_room]
+    mock_fibaro_client.read_devices.return_value = [
+        mock_thermostat_parent,
+        mock_thermostat_with_operating_mode,
+        mock_fan_device,
+    ]
+
+    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
+        # Act
+        await init_integration(hass, mock_config_entry)
+        await hass.services.async_call(
+            "climate",
+            "set_temperature",
+            {"entity_id": "climate.room_1_test_climate_6", "temperature": 25.5},
+            blocking=True,
+        )
+
+    # Assert
+    mock_thermostat_with_operating_mode.execute_action.assert_called_once()
diff --git a/tests/components/fibaro/test_config_flow.py b/tests/components/fibaro/test_config_flow.py
index 508bb81973d..aee7c2eb903 100644
--- a/tests/components/fibaro/test_config_flow.py
+++ b/tests/components/fibaro/test_config_flow.py
@@ -2,8 +2,8 @@
 
 from unittest.mock import Mock
 
+from pyfibaro.fibaro_client import FibaroAuthenticationFailed, FibaroConnectFailed
 import pytest
-from requests.exceptions import HTTPError
 
 from homeassistant import config_entries
 from homeassistant.components.fibaro import DOMAIN
@@ -23,8 +23,10 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry", "mock_fibaro_client")
 async def _recovery_after_failure_works(
     hass: HomeAssistant, mock_fibaro_client: Mock, result: FlowResult
 ) -> None:
-    mock_fibaro_client.connect.side_effect = None
-    mock_fibaro_client.connect.return_value = True
+    mock_fibaro_client.connect_with_credentials.side_effect = None
+    mock_fibaro_client.connect_with_credentials.return_value = (
+        mock_fibaro_client.read_info()
+    )
 
     result = await hass.config_entries.flow.async_configure(
         result["flow_id"],
@@ -48,8 +50,10 @@ async def _recovery_after_reauth_failure_works(
     hass: HomeAssistant, mock_fibaro_client: Mock, result: FlowResult
 ) -> None:
-    mock_fibaro_client.connect.side_effect = None
-    mock_fibaro_client.connect.return_value = True
+    mock_fibaro_client.connect_with_credentials.side_effect = None
+    mock_fibaro_client.connect_with_credentials.return_value = (
+        mock_fibaro_client.read_info()
+    )
 
     result = await hass.config_entries.flow.async_configure(
         result["flow_id"],
@@ -101,7 +105,9 @@ async def
test_config_flow_user_initiated_auth_failure( assert result["step_id"] == "user" assert result["errors"] == {} - mock_fibaro_client.connect.side_effect = HTTPError(response=Mock(status_code=403)) + mock_fibaro_client.connect_with_credentials.side_effect = ( + FibaroAuthenticationFailed() + ) result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -119,7 +125,7 @@ async def test_config_flow_user_initiated_auth_failure( await _recovery_after_failure_works(hass, mock_fibaro_client, result) -async def test_config_flow_user_initiated_unknown_failure_1( +async def test_config_flow_user_initiated_connect_failure( hass: HomeAssistant, mock_fibaro_client: Mock ) -> None: """Unknown failure in flow manually initialized by the user.""" @@ -131,37 +137,7 @@ async def test_config_flow_user_initiated_unknown_failure_1( assert result["step_id"] == "user" assert result["errors"] == {} - mock_fibaro_client.connect.side_effect = HTTPError(response=Mock(status_code=500)) - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_URL: TEST_URL, - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - }, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "cannot_connect"} - - await _recovery_after_failure_works(hass, mock_fibaro_client, result) - - -async def test_config_flow_user_initiated_unknown_failure_2( - hass: HomeAssistant, mock_fibaro_client: Mock -) -> None: - """Unknown failure in flow manually initialized by the user.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {} - - mock_fibaro_client.connect.side_effect = Exception() + mock_fibaro_client.connect_with_credentials.side_effect = FibaroConnectFailed() result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -208,7 +184,7 @@ async def test_reauth_connect_failure( assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} - mock_fibaro_client.connect.side_effect = Exception() + mock_fibaro_client.connect_with_credentials.side_effect = FibaroConnectFailed() result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -233,7 +209,9 @@ async def test_reauth_auth_failure( assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} - mock_fibaro_client.connect.side_effect = HTTPError(response=Mock(status_code=403)) + mock_fibaro_client.connect_with_credentials.side_effect = ( + FibaroAuthenticationFailed() + ) result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/fibaro/test_light.py b/tests/components/fibaro/test_light.py index d0a24e009b7..88576e86dc6 100644 --- a/tests/components/fibaro/test_light.py +++ b/tests/components/fibaro/test_light.py @@ -2,7 +2,8 @@ from unittest.mock import Mock, patch -from homeassistant.const import Platform +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -55,3 +56,28 @@ async def test_light_brightness( state = hass.states.get("light.room_1_test_light_3") assert state.attributes["brightness"] == 51 assert state.state == "on" + + +async def test_light_turn_off( + hass: HomeAssistant, 
+    mock_fibaro_client: Mock,
+    mock_config_entry: MockConfigEntry,
+    mock_light: Mock,
+    mock_room: Mock,
+) -> None:
+    """Test that the light can be turned off."""
+    # Arrange
+    mock_fibaro_client.read_rooms.return_value = [mock_room]
+    mock_fibaro_client.read_devices.return_value = [mock_light]
+
+    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.LIGHT]):
+        await init_integration(hass, mock_config_entry)
+        # Act
+        await hass.services.async_call(
+            LIGHT_DOMAIN,
+            SERVICE_TURN_OFF,
+            {ATTR_ENTITY_ID: "light.room_1_test_light_3"},
+            blocking=True,
+        )
+        # Assert
+        assert mock_light.execute_action.call_count == 1
diff --git a/tests/components/forked_daapd/test_browse_media.py b/tests/components/forked_daapd/test_browse_media.py
index cbd278128ae..88b29c2bbba 100644
--- a/tests/components/forked_daapd/test_browse_media.py
+++ b/tests/components/forked_daapd/test_browse_media.py
@@ -34,7 +34,7 @@ async def test_async_browse_media(
     await hass.async_block_till_done()
 
     with patch(
-        "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI",
+        "homeassistant.components.forked_daapd.ForkedDaapdAPI",
         autospec=True,
     ) as mock_api:
         mock_api.return_value.get_request.return_value = {"websocket_port": 2}
@@ -214,7 +214,7 @@ async def test_async_browse_media_not_found(
     await hass.async_block_till_done()
 
     with patch(
-        "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI",
+        "homeassistant.components.forked_daapd.ForkedDaapdAPI",
         autospec=True,
     ) as mock_api:
         mock_api.return_value.get_request.return_value = {"websocket_port": 2}
@@ -375,7 +375,7 @@ async def test_async_browse_image(
     """Test browse media images."""
 
     with patch(
-        "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI",
+        "homeassistant.components.forked_daapd.ForkedDaapdAPI",
         autospec=True,
     ) as mock_api:
         mock_api.return_value.get_request.return_value = {"websocket_port": 2}
@@ -430,7 +430,7 @@ async def test_async_browse_image_missing(
     """Test browse media images with no image available."""
 
     with patch(
-        "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI",
+        "homeassistant.components.forked_daapd.ForkedDaapdAPI",
         autospec=True,
     ) as mock_api:
         mock_api.return_value.get_request.return_value = {"websocket_port": 2}
diff --git a/tests/components/forked_daapd/test_config_flow.py b/tests/components/forked_daapd/test_config_flow.py
index 8bf5de31da2..ba1f0e6c227 100644
--- a/tests/components/forked_daapd/test_config_flow.py
+++ b/tests/components/forked_daapd/test_config_flow.py
@@ -1,7 +1,7 @@
 """The config flow tests for the forked_daapd media player platform."""
 
 from ipaddress import ip_address
-from unittest.mock import AsyncMock, MagicMock, patch
+from unittest.mock import AsyncMock, patch
 
 import pytest
 
@@ -12,12 +12,10 @@ from homeassistant.components.forked_daapd.const import (
     CONF_TTS_VOLUME,
     DOMAIN,
 )
-from homeassistant.components.forked_daapd.media_player import async_setup_entry
-from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
+from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF, ConfigEntryState
 from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
-from homeassistant.exceptions import PlatformNotReady
 from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
 
 from tests.common import MockConfigEntry
@@ -75,7 +73,7 @@ async def test_config_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -
new=AsyncMock(), ) as mock_test_connection, patch( - "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI.get_request", + "homeassistant.components.forked_daapd.ForkedDaapdAPI.get_request", autospec=True, ) as mock_get_request, ): @@ -232,7 +230,7 @@ async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) """Test config flow options.""" with patch( - "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI.get_request", + "homeassistant.components.forked_daapd.ForkedDaapdAPI.get_request", autospec=True, ) as mock_get_request: mock_get_request.return_value = SAMPLE_CONFIG @@ -256,17 +254,18 @@ async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) async def test_async_setup_entry_not_ready( - hass: HomeAssistant, config_entry: MockConfigEntry + hass: HomeAssistant, config_entry: MockConfigEntry, caplog: pytest.LogCaptureFixture ) -> None: """Test that a PlatformNotReady exception is thrown during platform setup.""" with patch( - "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI", + "homeassistant.components.forked_daapd.ForkedDaapdAPI", autospec=True, ) as mock_api: mock_api.return_value.get_request.return_value = None config_entry.add_to_hass(hass) - with pytest.raises(PlatformNotReady): - await async_setup_entry(hass, config_entry, MagicMock()) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() mock_api.return_value.get_request.assert_called_once() + assert "Platform forked_daapd not ready yet" in caplog.text + assert config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/forked_daapd/test_media_player.py b/tests/components/forked_daapd/test_media_player.py index 6d7d267eb63..8f0105d48d7 100644 --- a/tests/components/forked_daapd/test_media_player.py +++ b/tests/components/forked_daapd/test_media_player.py @@ -313,7 +313,7 @@ async def mock_api_object_fixture( return get_request_return_values[update_type] with patch( - "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI", + "homeassistant.components.forked_daapd.ForkedDaapdAPI", autospec=True, ) as mock_api: mock_api.return_value.get_request.side_effect = get_request_side_effect @@ -808,7 +808,7 @@ async def test_invalid_websocket_port( ) -> None: """Test invalid websocket port on async_init.""" with patch( - "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI", + "homeassistant.components.forked_daapd.ForkedDaapdAPI", autospec=True, ) as mock_api: mock_api.return_value.get_request.return_value = SAMPLE_CONFIG_NO_WEBSOCKET diff --git a/tests/components/fritz/snapshots/test_sensor.ambr b/tests/components/fritz/snapshots/test_sensor.ambr index 5ff0e448b15..ffdd3d23f50 100644 --- a/tests/components/fritz/snapshots/test_sensor.ambr +++ b/tests/components/fritz/snapshots/test_sensor.ambr @@ -357,7 +357,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.mock_title_link_download_noise_margin', 'has_entity_name': True, 'hidden_by': None, @@ -405,7 +405,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.mock_title_link_download_power_attenuation', 'has_entity_name': True, 'hidden_by': None, @@ -453,7 +453,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.mock_title_link_download_throughput', 'has_entity_name': True, 
'hidden_by': None, @@ -502,7 +502,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.mock_title_link_upload_noise_margin', 'has_entity_name': True, 'hidden_by': None, @@ -550,7 +550,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.mock_title_link_upload_power_attenuation', 'has_entity_name': True, 'hidden_by': None, @@ -598,7 +598,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.mock_title_link_upload_throughput', 'has_entity_name': True, 'hidden_by': None, @@ -647,7 +647,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': , + 'entity_category': None, 'entity_id': 'sensor.mock_title_max_connection_download_throughput', 'has_entity_name': True, 'hidden_by': None, @@ -696,7 +696,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': , + 'entity_category': None, 'entity_id': 'sensor.mock_title_max_connection_upload_throughput', 'has_entity_name': True, 'hidden_by': None, diff --git a/tests/components/fritzbox/test_binary_sensor.py b/tests/components/fritzbox/test_binary_sensor.py index 594ed14a7d1..d5b0b5d196b 100644 --- a/tests/components/fritzbox/test_binary_sensor.py +++ b/tests/components/fritzbox/test_binary_sensor.py @@ -11,7 +11,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, ) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN -from homeassistant.components.sensor import ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorStateClass, +) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, @@ -71,7 +75,7 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state.state == "23" assert state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Battery" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE - assert ATTR_STATE_CLASS not in state.attributes + assert state.attributes[ATTR_STATE_CLASS] is SensorStateClass.MEASUREMENT async def test_is_off(hass: HomeAssistant, fritz: Mock) -> None: diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index f170836fa9b..699a2b8c53e 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -16,6 +16,7 @@ from homeassistant.components.climate import ( ATTR_PRESET_MODE, ATTR_PRESET_MODES, DOMAIN as CLIMATE_DOMAIN, + PRESET_BOOST, PRESET_COMFORT, PRESET_ECO, SERVICE_SET_HVAC_MODE, @@ -36,7 +37,11 @@ from homeassistant.components.fritzbox.const import ( ATTR_STATE_WINDOW_OPEN, DOMAIN as FB_DOMAIN, ) -from homeassistant.components.sensor import ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorStateClass, +) from homeassistant.const import ( ATTR_BATTERY_LEVEL, ATTR_ENTITY_ID, @@ -80,7 +85,11 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state.attributes[ATTR_MAX_TEMP] == 28 assert state.attributes[ATTR_MIN_TEMP] == 8 assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_COMFORT] + assert state.attributes[ATTR_PRESET_MODES] == [ + PRESET_ECO, + PRESET_COMFORT, + PRESET_BOOST, + ] 
assert state.attributes[ATTR_STATE_BATTERY_LOW] is True assert state.attributes[ATTR_STATE_HOLIDAY_MODE] is False assert state.attributes[ATTR_STATE_SUMMER_MODE] is False @@ -94,7 +103,7 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state.state == "23" assert state.attributes[ATTR_FRIENDLY_NAME] == f"{CONF_FAKE_NAME} Battery" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE - assert ATTR_STATE_CLASS not in state.attributes + assert state.attributes[ATTR_STATE_CLASS] is SensorStateClass.MEASUREMENT state = hass.states.get(f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}_comfort_temperature") assert state @@ -434,11 +443,31 @@ async def test_set_preset_mode_eco( assert device.set_target_temperature.call_args_list == expected_call_args +async def test_set_preset_mode_boost( + hass: HomeAssistant, + fritz: Mock, +) -> None: + """Test setting preset mode.""" + device = FritzDeviceClimateMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_BOOST}, + True, + ) + assert device.set_target_temperature.call_count == 1 + assert device.set_target_temperature.call_args_list == [call(30, True)] + + async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: """Test preset mode.""" device = FritzDeviceClimateMock() - device.comfort_temperature = 98 - device.eco_temperature = 99 + device.comfort_temperature = 23 + device.eco_temperature = 20 assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) @@ -447,8 +476,8 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.attributes[ATTR_PRESET_MODE] is None - device.target_temperature = 98 - + # test comfort preset + device.target_temperature = 23 next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) @@ -458,8 +487,8 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMFORT - device.target_temperature = 99 - + # test eco preset + device.target_temperature = 20 next_update = dt_util.utcnow() + timedelta(seconds=200) async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) @@ -469,6 +498,17 @@ async def test_preset_mode_update(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.attributes[ATTR_PRESET_MODE] == PRESET_ECO + # test boost preset + device.target_temperature = 127 # special temp from the api + next_update = dt_util.utcnow() + timedelta(seconds=200) + async_fire_time_changed(hass, next_update) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get(ENTITY_ID) + + assert fritz().update_devices.call_count == 4 + assert state + assert state.attributes[ATTR_PRESET_MODE] == PRESET_BOOST + async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: """Test adding new discovered devices during runtime.""" @@ -509,7 +549,11 @@ async def test_holidy_summer_mode( assert state.attributes[ATTR_STATE_SUMMER_MODE] is False assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.HEAT, HVACMode.OFF] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, 
PRESET_COMFORT] + assert state.attributes[ATTR_PRESET_MODES] == [ + PRESET_ECO, + PRESET_COMFORT, + PRESET_BOOST, + ] # test holiday mode device.holiday_active = True @@ -528,7 +572,7 @@ async def test_holidy_summer_mode( with pytest.raises( HomeAssistantError, - match="Can't change hvac mode while holiday or summer mode is active on the device", + match="Can't change HVAC mode while holiday or summer mode is active on the device", ): await hass.services.async_call( "climate", @@ -564,7 +608,7 @@ async def test_holidy_summer_mode( with pytest.raises( HomeAssistantError, - match="Can't change hvac mode while holiday or summer mode is active on the device", + match="Can't change HVAC mode while holiday or summer mode is active on the device", ): await hass.services.async_call( "climate", @@ -596,4 +640,8 @@ async def test_holidy_summer_mode( assert state.attributes[ATTR_STATE_SUMMER_MODE] is False assert state.attributes[ATTR_HVAC_MODES] == [HVACMode.HEAT, HVACMode.OFF] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_PRESET_MODES] == [PRESET_ECO, PRESET_COMFORT] + assert state.attributes[ATTR_PRESET_MODES] == [ + PRESET_ECO, + PRESET_COMFORT, + PRESET_BOOST, + ] diff --git a/tests/components/fritzbox/test_sensor.py b/tests/components/fritzbox/test_sensor.py index 67b2c3e8ab6..cb136eee993 100644 --- a/tests/components/fritzbox/test_sensor.py +++ b/tests/components/fritzbox/test_sensor.py @@ -71,7 +71,7 @@ async def test_setup( "23", f"{CONF_FAKE_NAME} Battery", PERCENTAGE, - None, + SensorStateClass.MEASUREMENT, EntityCategory.DIAGNOSTIC, ], ) diff --git a/tests/components/fujitsu_fglair/test_sensor.py b/tests/components/fujitsu_fglair/test_sensor.py index e3f6109a2e8..b8200f114ad 100644 --- a/tests/components/fujitsu_fglair/test_sensor.py +++ b/tests/components/fujitsu_fglair/test_sensor.py @@ -31,3 +31,20 @@ async def test_entities( assert await integration_setup() await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_no_outside_temperature( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_ayla_api: AsyncMock, + integration_setup: Callable[[], Awaitable[bool]], +) -> None: + """Test that the outside sensor doesn't get added if the reading is None.""" + mock_ayla_api.async_get_devices.return_value[0].outdoor_temperature = None + + assert await integration_setup() + + assert ( + len(entity_registry.entities) + == len(mock_ayla_api.async_get_devices.return_value) - 1 + ) diff --git a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json index 21e1fcfb0ab..91157c57c3a 100644 --- a/tests/components/fyta/fixtures/plant_status1.json +++ b/tests/components/fyta/fixtures/plant_status1.json @@ -6,10 +6,18 @@ "low_battery": false, "last_updated": "2023-01-10 10:10:00", "light": 2, + "light_min_good": "20", + "light_max_good": "450", + "light_min_acceptable": "18", + "light_max_acceptable": "675", "light_status": 3, "nickname": "Gummibaum", "nutrients_status": 3, "moisture": 61, + "moisture_min_good": "35", + "moisture_max_good": "70", + "moisture_min_acceptable": "25", + "moisture_max_acceptable": "80", "moisture_status": 3, "sensor_available": true, "sensor_id": "FD:1D:B7:E3:D0:E2", @@ -17,14 +25,24 @@ "sw_version": "1.0", "status": 1, "online": true, + "origin_path": "http://www.plant_picture.com/user_picture", "ph": null, "plant_id": 0, "plant_origin_path": "http://www.plant_picture.com/picture", "plant_thumb_path": 
"http://www.plant_picture.com/picture_thumb", "is_productive_plant": false, "salinity": 1, + "salinity_min_good": "0.6", + "salinity_max_good": "1", + "salinity_min_acceptable": "0.4", + "salinity_max_acceptable": "1.2", "salinity_status": 4, "scientific_name": "Ficus elastica", "temperature": 25.2, - "temperature_status": 3 + "temperature_min_good": "17", + "temperature_max_good": "36", + "temperature_min_acceptable": "10", + "temperature_max_acceptable": "42", + "temperature_status": 3, + "thumb_path": "http://www.plant_picture.com/user_picture_thumb" } diff --git a/tests/components/fyta/fixtures/plant_status1_update.json b/tests/components/fyta/fixtures/plant_status1_update.json index 98a4c6a9d91..5363c5bd290 100644 --- a/tests/components/fyta/fixtures/plant_status1_update.json +++ b/tests/components/fyta/fixtures/plant_status1_update.json @@ -6,10 +6,18 @@ "low_battery": false, "last_updated": "2023-01-10 10:10:00", "light": 2, + "light_min_good": "20", + "light_max_good": "450", + "light_min_acceptable": "18", + "light_max_acceptable": "675", "light_status": 3, "nickname": "Gummibaum", "nutrients_status": 3, "moisture": 61, + "moisture_min_good": "35", + "moisture_max_good": "70", + "moisture_min_acceptable": "25", + "moisture_max_acceptable": "80", "moisture_status": 3, "sensor_available": true, "sensor_id": "FD:1D:B7:E3:D0:E2", @@ -17,14 +25,24 @@ "sw_version": "1.0", "status": 1, "online": true, + "origin_path": "http://www.plant_picture.com/user_picture", "ph": null, "plant_id": 0, "plant_origin_path": "http://www.plant_picture.com/picture1", "plant_thumb_path": "http://www.plant_picture.com/picture_thumb", "is_productive_plant": false, "salinity": 1, + "salinity_min_good": "0.6", + "salinity_max_good": "1", + "salinity_min_acceptable": "0.4", + "salinity_max_acceptable": "1.2", "salinity_status": 4, "scientific_name": "Ficus elastica", "temperature": 25.2, - "temperature_status": 3 + "temperature_min_good": "17", + "temperature_max_good": "36", + "temperature_min_acceptable": "10", + "temperature_max_acceptable": "42", + "temperature_status": 3, + "thumb_path": "http://www.plant_picture.com/user_picture_thumb" } diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json index bf90ab1e50d..5a181bee576 100644 --- a/tests/components/fyta/fixtures/plant_status2.json +++ b/tests/components/fyta/fixtures/plant_status2.json @@ -6,10 +6,18 @@ "low_battery": true, "last_updated": "2023-01-02 10:10:00", "light": 2, + "light_min_good": "20", + "light_max_good": "450", + "light_min_acceptable": "18", + "light_max_acceptable": "675", "light_status": 3, "nickname": "Kakaobaum", "nutrients_status": 3, "moisture": 61, + "moisture_min_good": "35", + "moisture_max_good": "70", + "moisture_min_acceptable": "25", + "moisture_max_acceptable": "80", "moisture_status": 3, "sensor_available": true, "sensor_id": "FD:1D:B7:E3:D0:E3", @@ -17,14 +25,24 @@ "sw_version": "1.0", "status": 1, "online": true, + "origin_path": "http://www.plant_picture.com/user_picture", "ph": 7, "plant_id": 0, "plant_origin_path": "", "plant_thumb_path": "", "is_productive_plant": false, "salinity": 1, + "salinity_min_good": "0.6", + "salinity_max_good": "1", + "salinity_min_acceptable": "0.4", + "salinity_max_acceptable": "1.2", "salinity_status": 4, "scientific_name": "Theobroma cacao", "temperature": 25.2, - "temperature_status": 3 + "temperature_min_good": "17", + "temperature_max_good": "36", + "temperature_min_acceptable": "10", + "temperature_max_acceptable": "42", + 
"temperature_status": 3, + "thumb_path": "http://www.plant_picture.com/user_picture_thumb" } diff --git a/tests/components/fyta/fixtures/plant_status3.json b/tests/components/fyta/fixtures/plant_status3.json index 4bb4e0b81a7..ad34e01065e 100644 --- a/tests/components/fyta/fixtures/plant_status3.json +++ b/tests/components/fyta/fixtures/plant_status3.json @@ -6,10 +6,18 @@ "low_battery": true, "last_updated": "2023-01-02 10:10:00", "light": 2, + "light_min_good": "20", + "light_max_good": "450", + "light_min_acceptable": "18", + "light_max_acceptable": "675", "light_status": 3, "nickname": "Tomatenpflanze", "nutrients_status": 0, "moisture": 61, + "moisture_min_good": "35", + "moisture_max_good": "70", + "moisture_min_acceptable": "25", + "moisture_max_acceptable": "80", "moisture_status": 3, "sensor_available": true, "sensor_id": "FD:1D:B7:E3:D0:E3", @@ -17,14 +25,24 @@ "sw_version": "1.0", "status": 1, "online": true, + "origin_path": "http://www.plant_picture.com/user_picture", "ph": 7, "plant_id": 0, "plant_origin_path": "http://www.plant_picture.com/picture", "plant_thumb_path": "http://www.plant_picture.com/picture_thumb", "is_productive_plant": true, "salinity": 1, + "salinity_min_good": "0.6", + "salinity_max_good": "1", + "salinity_min_acceptable": "0.4", + "salinity_max_acceptable": "1.2", "salinity_status": 4, "scientific_name": "Solanum lycopersicum", "temperature": 25.2, - "temperature_status": 3 + "temperature_min_good": "17", + "temperature_max_good": "36", + "temperature_min_acceptable": "10", + "temperature_max_acceptable": "42", + "temperature_status": 3, + "thumb_path": "http://www.plant_picture.com/user_picture_thumb" } diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index 24206fbb875..7bc6a6f7b5a 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -32,9 +32,17 @@ 'fertilise_next': None, 'last_updated': '2023-01-10T10:10:00', 'light': 2.0, + 'light_max_acceptable': 675.0, + 'light_max_good': 450.0, + 'light_min_acceptable': 18.0, + 'light_min_good': 20.0, 'light_status': 3, 'low_battery': False, 'moisture': 61.0, + 'moisture_max_acceptable': 80.0, + 'moisture_max_good': 70.0, + 'moisture_min_acceptable': 25.0, + 'moisture_min_good': 35.0, 'moisture_status': 3, 'name': 'Gummibaum', 'notification_light': False, @@ -50,6 +58,10 @@ 'productive_plant': False, 'repotted': True, 'salinity': 1.0, + 'salinity_max_acceptable': 1.2, + 'salinity_max_good': 1.0, + 'salinity_min_acceptable': 0.4, + 'salinity_min_good': 0.6, 'salinity_status': 4, 'scientific_name': 'Ficus elastica', 'sensor_available': True, @@ -59,7 +71,13 @@ 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, + 'temperature_max_acceptable': 42.0, + 'temperature_max_good': 36.0, + 'temperature_min_acceptable': 10.0, + 'temperature_min_good': 17.0, 'temperature_status': 3, + 'user_picture_path': 'http://www.plant_picture.com/user_picture', + 'user_thumb_path': 'http://www.plant_picture.com/user_picture_thumb', }), '1': dict({ 'battery_level': 80.0, @@ -67,9 +85,17 @@ 'fertilise_next': None, 'last_updated': '2023-01-02T10:10:00', 'light': 2.0, + 'light_max_acceptable': 675.0, + 'light_max_good': 450.0, + 'light_min_acceptable': 18.0, + 'light_min_good': 20.0, 'light_status': 3, 'low_battery': True, 'moisture': 61.0, + 'moisture_max_acceptable': 80.0, + 'moisture_max_good': 70.0, + 'moisture_min_acceptable': 25.0, + 'moisture_min_good': 35.0, 'moisture_status': 3, 
'name': 'Kakaobaum', 'notification_light': False, @@ -85,6 +111,10 @@ 'productive_plant': False, 'repotted': True, 'salinity': 1.0, + 'salinity_max_acceptable': 1.2, + 'salinity_max_good': 1.0, + 'salinity_min_acceptable': 0.4, + 'salinity_min_good': 0.6, 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', 'sensor_available': True, @@ -94,7 +124,13 @@ 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, + 'temperature_max_acceptable': 42.0, + 'temperature_max_good': 36.0, + 'temperature_min_acceptable': 10.0, + 'temperature_min_good': 17.0, 'temperature_status': 3, + 'user_picture_path': 'http://www.plant_picture.com/user_picture', + 'user_thumb_path': 'http://www.plant_picture.com/user_picture_thumb', }), }), }) diff --git a/tests/components/gios/test_config_flow.py b/tests/components/gios/test_config_flow.py index d81758b0de0..3764c52a810 100644 --- a/tests/components/gios/test_config_flow.py +++ b/tests/components/gios/test_config_flow.py @@ -6,7 +6,8 @@ from unittest.mock import patch from gios import ApiError from homeassistant.components.gios import config_flow -from homeassistant.components.gios.const import CONF_STATION_ID +from homeassistant.components.gios.const import CONF_STATION_ID, DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -17,36 +18,35 @@ from tests.common import load_fixture CONFIG = { CONF_NAME: "Foo", - CONF_STATION_ID: 123, + CONF_STATION_ID: "123", } async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" - flow = config_flow.GiosFlowHandler() - flow.hass = hass - - result = await flow.async_step_user(user_input=None) + with patch( + "homeassistant.components.gios.coordinator.Gios._get_stations", + return_value=STATIONS, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" -async def test_invalid_station_id(hass: HomeAssistant) -> None: - """Test that errors are shown when measuring station ID is invalid.""" +async def test_form_with_api_error(hass: HomeAssistant) -> None: + """Test the form is aborted because of API error.""" with patch( "homeassistant.components.gios.coordinator.Gios._get_stations", - return_value=STATIONS, + side_effect=ApiError("error"), ): - flow = config_flow.GiosFlowHandler() - flow.hass = hass - flow.context = {} - - result = await flow.async_step_user( - user_input={CONF_NAME: "Foo", CONF_STATION_ID: 0} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} ) - assert result["errors"] == {CONF_STATION_ID: "wrong_station_id"} + assert result["type"] is FlowResultType.ABORT async def test_invalid_sensor_data(hass: HomeAssistant) -> None: @@ -76,17 +76,25 @@ async def test_invalid_sensor_data(hass: HomeAssistant) -> None: async def test_cannot_connect(hass: HomeAssistant) -> None: """Test that errors are shown when cannot connect to GIOS server.""" - with patch( - "homeassistant.components.gios.coordinator.Gios._async_get", - side_effect=ApiError("error"), + with ( + patch( + "homeassistant.components.gios.coordinator.Gios._get_stations", + return_value=STATIONS, + ), + patch( + "homeassistant.components.gios.coordinator.Gios._async_get", + side_effect=ApiError("error"), + ), ): - flow = config_flow.GiosFlowHandler() - flow.hass = hass - flow.context = {} + 
result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG + ) + await hass.async_block_till_done() - result = await flow.async_step_user(user_input=CONFIG) - - assert result["errors"] == {"base": "cannot_connect"} + assert result["errors"] == {"base": "cannot_connect"} async def test_create_entry(hass: HomeAssistant) -> None: diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 3d10e753714..720c0176850 100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -1451,6 +1451,14 @@ async def test_working_location_ignored( assert state.attributes.get("message") == expected_event_message +@pytest.mark.parametrize( + ("event_type", "expected_event_message"), + [ + ("workingLocation", "Test All Day Event"), + ("birthday", None), + ("default", None), + ], +) @pytest.mark.parametrize("calendar_is_primary", [True]) async def test_working_location_entity( hass: HomeAssistant, @@ -1458,12 +1466,14 @@ async def test_working_location_entity( entity_registry: er.EntityRegistry, mock_events_list_items: Callable[[list[dict[str, Any]]], None], component_setup: ComponentSetup, + event_type: str, + expected_event_message: str | None, ) -> None: """Test that working location events are registered under a disabled by default entity.""" event = { **TEST_EVENT, **upcoming(), - "eventType": "workingLocation", + "eventType": event_type, } mock_events_list_items([event]) assert await component_setup() @@ -1484,7 +1494,7 @@ async def test_working_location_entity( state = hass.states.get("calendar.working_location") assert state assert state.name == "Working location" - assert state.attributes.get("message") == "Test All Day Event" + assert state.attributes.get("message") == expected_event_message @pytest.mark.parametrize("calendar_is_primary", [False]) @@ -1506,3 +1516,49 @@ async def test_no_working_location_entity( entity_entry = entity_registry.async_get("calendar.working_location") assert not entity_entry + + +@pytest.mark.parametrize( + ("event_type", "expected_event_message"), + [ + ("workingLocation", None), + ("birthday", "Test All Day Event"), + ("default", None), + ], +) +@pytest.mark.parametrize("calendar_is_primary", [True]) +async def test_birthday_entity( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, + mock_events_list_items: Callable[[list[dict[str, Any]]], None], + component_setup: ComponentSetup, + event_type: str, + expected_event_message: str | None, +) -> None: + """Test that birthday events appear only on the birthdays calendar.""" + event = { + **TEST_EVENT, + **upcoming(), + "eventType": event_type, + } + mock_events_list_items([event]) + assert await component_setup() + + entity_entry = entity_registry.async_get("calendar.birthdays") + assert entity_entry + assert entity_entry.disabled_by is None # Enabled by default + + entity_registry.async_update_entity( + entity_id="calendar.birthdays", disabled_by=None + ) + async_fire_time_changed( + hass, + dt_util.utcnow() + datetime.timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + state = hass.states.get("calendar.birthdays") + assert state + assert state.name == "Birthdays" + assert state.attributes.get("message") == expected_event_message diff --git a/tests/components/google_assistant/snapshots/test_diagnostics.ambr 
b/tests/components/google_assistant/snapshots/test_diagnostics.ambr index 1ecedbd1173..cc5ccbb1de1 100644 --- a/tests/components/google_assistant/snapshots/test_diagnostics.ambr +++ b/tests/components/google_assistant/snapshots/test_diagnostics.ambr @@ -98,6 +98,7 @@ 'humidifier', 'input_boolean', 'input_select', + 'lawn_mower', 'light', 'lock', 'media_player', diff --git a/tests/components/google_assistant/test_google_assistant.py b/tests/components/google_assistant/test_google_assistant.py index 2b0bfd82908..035a8d151c4 100644 --- a/tests/components/google_assistant/test_google_assistant.py +++ b/tests/components/google_assistant/test_google_assistant.py @@ -16,13 +16,9 @@ from homeassistant.components import ( light, media_player, ) -from homeassistant.const import ( - CLOUD_NEVER_EXPOSED_ENTITIES, - EntityCategory, - Platform, - UnitOfTemperature, -) +from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES, EntityCategory, Platform from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from . import DEMO_DEVICES @@ -275,7 +271,7 @@ async def test_query_climate_request_f( ) -> None: """Test a query request.""" # Mock demo devices as fahrenheit to see if we convert to celsius - hass_fixture.config.units.temperature_unit = UnitOfTemperature.FAHRENHEIT + hass_fixture.config.units = US_CUSTOMARY_SYSTEM for entity_id in ("climate.hvac", "climate.heatpump", "climate.ecobee"): state = hass_fixture.states.get(entity_id) attr = dict(state.attributes) @@ -332,7 +328,6 @@ async def test_query_climate_request_f( "thermostatHumidityAmbient": 54.2, "currentFanSpeedSetting": "on_high", } - hass_fixture.config.units.temperature_unit = UnitOfTemperature.CELSIUS async def test_query_humidifier_request( diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index dafe85d97b2..cf9c8047049 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -21,6 +21,7 @@ from homeassistant.components import ( input_boolean, input_button, input_select, + lawn_mower, light, lock, media_player, @@ -44,6 +45,7 @@ from homeassistant.components.fan import FanEntityFeature from homeassistant.components.google_assistant import const, error, helpers, trait from homeassistant.components.google_assistant.error import SmartHomeError from homeassistant.components.humidifier import HumidifierEntityFeature +from homeassistant.components.lawn_mower import LawnMowerEntityFeature from homeassistant.components.light import LightEntityFeature from homeassistant.components.lock import LockEntityFeature from homeassistant.components.media_player import ( @@ -79,6 +81,11 @@ from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, St from homeassistant.core_config import async_process_ha_core_config from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) from . 
import BASIC_CONFIG, MockConfig @@ -584,6 +591,64 @@ async def test_startstop_vacuum(hass: HomeAssistant) -> None: assert unpause_calls[0].data == {ATTR_ENTITY_ID: "vacuum.bla"} +async def test_dock_lawn_mower(hass: HomeAssistant) -> None: + """Test dock trait support for lawn mower domain.""" + assert helpers.get_google_type(lawn_mower.DOMAIN, None) is not None + assert trait.DockTrait.supported(lawn_mower.DOMAIN, 0, None, None) + + trt = trait.DockTrait( + hass, State("lawn_mower.bla", lawn_mower.LawnMowerActivity.MOWING), BASIC_CONFIG + ) + + assert trt.sync_attributes() == {} + + assert trt.query_attributes() == {"isDocked": False} + + calls = async_mock_service(hass, lawn_mower.DOMAIN, lawn_mower.SERVICE_DOCK) + await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {}) + assert len(calls) == 1 + assert calls[0].data == {ATTR_ENTITY_ID: "lawn_mower.bla"} + + +async def test_startstop_lawn_mower(hass: HomeAssistant) -> None: + """Test startStop trait support for lawn mower domain.""" + assert helpers.get_google_type(lawn_mower.DOMAIN, None) is not None + assert trait.StartStopTrait.supported(lawn_mower.DOMAIN, 0, None, None) + + trt = trait.StartStopTrait( + hass, + State( + "lawn_mower.bla", + lawn_mower.LawnMowerActivity.PAUSED, + {ATTR_SUPPORTED_FEATURES: LawnMowerEntityFeature.PAUSE}, + ), + BASIC_CONFIG, + ) + + assert trt.sync_attributes() == {"pausable": True} + + assert trt.query_attributes() == {"isRunning": False, "isPaused": True} + + start_calls = async_mock_service( + hass, lawn_mower.DOMAIN, lawn_mower.SERVICE_START_MOWING + ) + await trt.execute(trait.COMMAND_START_STOP, BASIC_DATA, {"start": True}, {}) + assert len(start_calls) == 1 + assert start_calls[0].data == {ATTR_ENTITY_ID: "lawn_mower.bla"} + + pause_calls = async_mock_service(hass, lawn_mower.DOMAIN, lawn_mower.SERVICE_PAUSE) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": True}, {}) + assert len(pause_calls) == 1 + assert pause_calls[0].data == {ATTR_ENTITY_ID: "lawn_mower.bla"} + + unpause_calls = async_mock_service( + hass, lawn_mower.DOMAIN, lawn_mower.SERVICE_START_MOWING + ) + await trt.execute(trait.COMMAND_PAUSE_UNPAUSE, BASIC_DATA, {"pause": False}, {}) + assert len(unpause_calls) == 1 + assert unpause_calls[0].data == {ATTR_ENTITY_ID: "lawn_mower.bla"} + + @pytest.mark.parametrize( ( "domain", @@ -1072,7 +1137,7 @@ async def test_temperature_setting_climate_onoff(hass: HomeAssistant) -> None: assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None, None) - hass.config.units.temperature_unit = UnitOfTemperature.FAHRENHEIT + hass.config.units = US_CUSTOMARY_SYSTEM trt = trait.TemperatureSettingTrait( hass, @@ -1123,8 +1188,6 @@ async def test_temperature_setting_climate_no_modes(hass: HomeAssistant) -> None assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None, None) - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - trt = trait.TemperatureSettingTrait( hass, State( @@ -1153,7 +1216,7 @@ async def test_temperature_setting_climate_range(hass: HomeAssistant) -> None: assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None, None) - hass.config.units.temperature_unit = UnitOfTemperature.FAHRENHEIT + hass.config.units = US_CUSTOMARY_SYSTEM trt = trait.TemperatureSettingTrait( hass, @@ -1261,7 +1324,6 @@ async def 
test_temperature_setting_climate_range(hass: HomeAssistant) -> None: ATTR_ENTITY_ID: "climate.bla", climate.ATTR_TEMPERATURE: 75, } - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS async def test_temperature_setting_climate_setpoint(hass: HomeAssistant) -> None: @@ -1269,8 +1331,6 @@ async def test_temperature_setting_climate_setpoint(hass: HomeAssistant) -> None assert helpers.get_google_type(climate.DOMAIN, None) is not None assert trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None, None) - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - trt = trait.TemperatureSettingTrait( hass, State( @@ -1356,8 +1416,6 @@ async def test_temperature_setting_climate_setpoint_auto(hass: HomeAssistant) -> Setpoint in auto mode. """ - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - trt = trait.TemperatureSettingTrait( hass, State( @@ -1407,8 +1465,6 @@ async def test_temperature_setting_climate_setpoint_auto(hass: HomeAssistant) -> async def test_temperature_control(hass: HomeAssistant) -> None: """Test TemperatureControl trait support for sensor domain.""" - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - trt = trait.TemperatureControlTrait( hass, State("sensor.temp", 18), @@ -1431,13 +1487,13 @@ async def test_temperature_control(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("unit_in", "unit_out", "temp_in", "temp_out", "current_in", "current_out"), [ - (UnitOfTemperature.CELSIUS, "C", "120", 120, "130", 130), - (UnitOfTemperature.FAHRENHEIT, "F", "248", 120, "266", 130), + (METRIC_SYSTEM, "C", "120", 120, "130", 130), + (US_CUSTOMARY_SYSTEM, "F", "248", 120, "266", 130), ], ) async def test_temperature_control_water_heater( hass: HomeAssistant, - unit_in: UnitOfTemperature, + unit_in: UnitSystem, unit_out: str, temp_in: str, temp_out: float, @@ -1445,17 +1501,17 @@ async def test_temperature_control_water_heater( current_out: float, ) -> None: """Test TemperatureControl trait support for water heater domain.""" - hass.config.units.temperature_unit = unit_in + hass.config.units = unit_in min_temp = TemperatureConverter.convert( water_heater.DEFAULT_MIN_TEMP, UnitOfTemperature.CELSIUS, - unit_in, + unit_in.temperature_unit, ) max_temp = TemperatureConverter.convert( water_heater.DEFAULT_MAX_TEMP, UnitOfTemperature.CELSIUS, - unit_in, + unit_in.temperature_unit, ) trt = trait.TemperatureControlTrait( @@ -1489,30 +1545,30 @@ async def test_temperature_control_water_heater( @pytest.mark.parametrize( ("unit", "temp_init", "temp_in", "temp_out", "current_init"), [ - (UnitOfTemperature.CELSIUS, "180", 220, 220, "180"), - (UnitOfTemperature.FAHRENHEIT, "356", 220, 428, "356"), + (METRIC_SYSTEM, "180", 220, 220, "180"), + (US_CUSTOMARY_SYSTEM, "356", 220, 428, "356"), ], ) async def test_temperature_control_water_heater_set_temperature( hass: HomeAssistant, - unit: UnitOfTemperature, + unit: UnitSystem, temp_init: str, temp_in: float, temp_out: float, current_init: str, ) -> None: """Test TemperatureControl trait support for water heater domain - SetTemperature.""" - hass.config.units.temperature_unit = unit + hass.config.units = unit min_temp = TemperatureConverter.convert( 40, UnitOfTemperature.CELSIUS, - unit, + unit.temperature_unit, ) max_temp = TemperatureConverter.convert( 230, UnitOfTemperature.CELSIUS, - unit, + unit.temperature_unit, ) trt = trait.TemperatureControlTrait( @@ -3633,17 +3689,17 @@ async def test_temperature_control_sensor(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("unit_in", 
"unit_out", "state", "ambient"), [ - (UnitOfTemperature.FAHRENHEIT, "F", "70", 21.1), - (UnitOfTemperature.CELSIUS, "C", "21.1", 21.1), - (UnitOfTemperature.FAHRENHEIT, "F", "unavailable", None), - (UnitOfTemperature.FAHRENHEIT, "F", "unknown", None), + (US_CUSTOMARY_SYSTEM, "F", "70", 21.1), + (METRIC_SYSTEM, "C", "21.1", 21.1), + (US_CUSTOMARY_SYSTEM, "F", "unavailable", None), + (US_CUSTOMARY_SYSTEM, "F", "unknown", None), ], ) async def test_temperature_control_sensor_data( - hass: HomeAssistant, unit_in, unit_out, state, ambient + hass: HomeAssistant, unit_in: UnitSystem, unit_out, state, ambient ) -> None: """Test TemperatureControl trait support for temperature sensor.""" - hass.config.units.temperature_unit = unit_in + hass.config.units = unit_in trt = trait.TemperatureControlTrait( hass, @@ -3668,7 +3724,6 @@ async def test_temperature_control_sensor_data( } else: assert trt.query_attributes() == {} - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS async def test_humidity_setting_sensor(hass: HomeAssistant) -> None: diff --git a/tests/components/google_drive/test_backup.py b/tests/components/google_drive/test_backup.py index 2da397def5b..9cf86a280bd 100644 --- a/tests/components/google_drive/test_backup.py +++ b/tests/components/google_drive/test_backup.py @@ -247,9 +247,9 @@ async def test_agents_download_file_not_found( resp = await client.get( f"/api/backup/download/{TEST_AGENT_BACKUP.backup_id}?agent_id={TEST_AGENT_ID}" ) - assert resp.status == 500 + assert resp.status == 404 content = await resp.content.read() - assert "Backup not found" in content.decode() + assert content == b"" async def test_agents_download_metadata_not_found( diff --git a/tests/components/google_generative_ai_conversation/__init__.py b/tests/components/google_generative_ai_conversation/__init__.py index 6e2d37b035b..fbf9ee545db 100644 --- a/tests/components/google_generative_ai_conversation/__init__.py +++ b/tests/components/google_generative_ai_conversation/__init__.py @@ -3,12 +3,12 @@ from unittest.mock import Mock from google.genai.errors import ClientError -import requests +import httpx CLIENT_ERROR_500 = ClientError( 500, Mock( - __class__=requests.Response, + __class__=httpx.Response, json=Mock( return_value={ "message": "Internal Server Error", @@ -20,7 +20,7 @@ CLIENT_ERROR_500 = ClientError( CLIENT_ERROR_API_KEY_INVALID = ClientError( 400, Mock( - __class__=requests.Response, + __class__=httpx.Response, json=Mock( return_value={ "message": "'reason': API_KEY_INVALID", diff --git a/tests/components/google_generative_ai_conversation/conftest.py b/tests/components/google_generative_ai_conversation/conftest.py index 2bc81b10ce4..6ec147da2ab 100644 --- a/tests/components/google_generative_ai_conversation/conftest.py +++ b/tests/components/google_generative_ai_conversation/conftest.py @@ -4,6 +4,9 @@ from unittest.mock import Mock, patch import pytest +from homeassistant.components.google_generative_ai_conversation.conversation import ( + CONF_USE_GOOGLE_SEARCH_TOOL, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant @@ -41,6 +44,23 @@ async def mock_config_entry_with_assist( return mock_config_entry +@pytest.fixture +async def mock_config_entry_with_google_search( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Mock a config entry with assist.""" + with patch("google.genai.models.AsyncModels.get"): + hass.config_entries.async_update_entry( + 
mock_config_entry, + options={ + CONF_LLM_HASS_API: llm.LLM_API_ASSIST, + CONF_USE_GOOGLE_SEARCH_TOOL: True, + }, + ) + await hass.async_block_till_done() + return mock_config_entry + + @pytest.fixture async def mock_init_component( hass: HomeAssistant, mock_config_entry: ConfigEntry diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index c840f7da324..ec98bdd6529 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -6,7 +6,7 @@ tuple( ), dict({ - 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties={'param1': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description='Test parameters', enum=None, format=None, items=Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties=None, required=None), properties=None, required=None), 'param2': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=None, description=None, enum=None, format=None, items=None, properties=None, required=None), 'param3': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties={'json': Schema(min_items=None, example=None, property_ordering=None, pattern=None, minimum=None, 
default=None, any_of=None, max_length=None, title=None, min_length=None, min_properties=None, max_items=None, maximum=None, nullable=None, max_properties=None, type=, description=None, enum=None, format=None, items=None, properties=None, required=None)}, required=[])}, required=[]))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), + 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, 
max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=)}, property_ordering=None, required=[], type=)}, property_ordering=None, required=[], type=))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), 'history': list([ ]), 'model': 'models/gemini-2.0-flash', @@ -25,7 +25,9 @@ tuple( ), dict({ - 'message': Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None)], role=None), + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), }), ), ]) @@ -56,7 +58,42 @@ tuple( ), dict({ - 'message': Content(parts=[Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None)], role=None), + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), + }), + ), + ]) +# --- +# name: test_use_google_search + list([ + tuple( + '', + tuple( + ), + dict({ + 'config': GenerateContentConfig(http_options=None, system_instruction="Current time is 05:00:00. Today's date is 2024-05-24.\nYou are a voice assistant for Home Assistant.\nAnswer questions about the world truthfully.\nAnswer in plain text. 
Keep it simple and to the point.\nOnly if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.", temperature=1.0, top_p=0.95, top_k=64.0, candidate_count=None, max_output_tokens=150, stop_sequences=None, response_logprobs=None, logprobs=None, presence_penalty=None, frequency_penalty=None, seed=None, response_mime_type=None, response_schema=None, routing_config=None, safety_settings=[SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=), SafetySetting(method=None, category=, threshold=)], tools=[Tool(function_declarations=[FunctionDeclaration(response=None, description='Test function', name='test_tool', parameters=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'param1': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description='Test parameters', enum=None, format=None, items=Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=), 'param2': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=None), 'param3': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties={'json': Schema(example=None, pattern=None, default=None, max_length=None, title=None, min_length=None, min_properties=None, max_properties=None, any_of=None, description=None, enum=None, format=None, items=None, max_items=None, maximum=None, min_items=None, minimum=None, nullable=None, properties=None, property_ordering=None, required=None, type=)}, property_ordering=None, required=[], type=)}, property_ordering=None, required=[], type=))], retrieval=None, google_search=None, google_search_retrieval=None, code_execution=None), Tool(function_declarations=None, retrieval=None, google_search=GoogleSearch(), google_search_retrieval=None, code_execution=None)], tool_config=None, labels=None, cached_content=None, response_modalities=None, media_resolution=None, speech_config=None, audio_timestamp=None, automatic_function_calling=AutomaticFunctionCallingConfig(disable=True, maximum_remote_calls=None, ignore_call_history=None), thinking_config=None), + 'history': list([ + ]), + 'model': 'models/gemini-2.0-flash', + }), + ), + tuple( + '().send_message', + tuple( + ), + dict({ + 'message': 'Please call the test 
function', + }), + ), + tuple( + '().send_message', + tuple( + ), + dict({ + 'message': list([ + Part(video_metadata=None, thought=None, code_execution_result=None, executable_code=None, file_data=None, function_call=None, function_response=FunctionResponse(id=None, name='test_tool', response={'result': 'Test response'}), inline_data=None, text=None), + ]), }), ), ]) diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py index 30c9d6c46e6..f7635c0b45e 100644 --- a/tests/components/google_generative_ai_conversation/test_config_flow.py +++ b/tests/components/google_generative_ai_conversation/test_config_flow.py @@ -21,12 +21,14 @@ from homeassistant.components.google_generative_ai_conversation.const import ( CONF_TEMPERATURE, CONF_TOP_K, CONF_TOP_P, + CONF_USE_GOOGLE_SEARCH_TOOL, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_HARM_BLOCK_THRESHOLD, RECOMMENDED_MAX_TOKENS, RECOMMENDED_TOP_K, RECOMMENDED_TOP_P, + RECOMMENDED_USE_GOOGLE_SEARCH_TOOL, ) from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant @@ -143,6 +145,7 @@ async def test_form(hass: HomeAssistant) -> None: CONF_HATE_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD, CONF_SEXUAL_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD, CONF_DANGEROUS_BLOCK_THRESHOLD: RECOMMENDED_HARM_BLOCK_THRESHOLD, + CONF_USE_GOOGLE_SEARCH_TOOL: RECOMMENDED_USE_GOOGLE_SEARCH_TOOL, }, ), ( diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 64f71c18bf2..9c4ecc4f9a4 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -10,7 +10,7 @@ from syrupy.assertion import SnapshotAssertion import voluptuous as vol from homeassistant.components import conversation -from homeassistant.components.conversation import trace +from homeassistant.components.conversation import UserContent, async_get_chat_log, trace from homeassistant.components.google_generative_ai_conversation.conversation import ( _escape_decode, _format_schema, @@ -18,7 +18,7 @@ from homeassistant.components.google_generative_ai_conversation.conversation imp from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import intent, llm +from homeassistant.helpers import chat_session, intent, llm from . import CLIENT_ERROR_500 @@ -104,28 +104,24 @@ async def test_function_call( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" 
- mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "result": "Test response", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "result": "Test response", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( @@ -156,8 +152,10 @@ async def test_function_call( trace_events = last_trace.get("events", []) assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, - trace.ConversationTraceEventType.AGENT_DETAIL, + trace.ConversationTraceEventType.AGENT_DETAIL, # prompt and tools + trace.ConversationTraceEventType.AGENT_DETAIL, # stats for response trace.ConversationTraceEventType.TOOL_CALL, + trace.ConversationTraceEventType.AGENT_DETAIL, # stats for response ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] @@ -166,6 +164,79 @@ async def test_function_call( p["tool_name"] for p in detail_event["data"]["messages"][2]["tool_calls"] ] == ["test_tool"] + detail_event = trace_events[2] + assert set(detail_event["data"]["stats"].keys()) == { + "input_tokens", + "cached_input_tokens", + "output_tokens", + } + + +@patch( + "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" +) +@pytest.mark.usefixtures("mock_init_component") +@pytest.mark.usefixtures("mock_ulid_tools") +async def test_use_google_search( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_google_search: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test function calling.""" + agent_id = "conversation.google_generative_ai_conversation" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + { + vol.Optional("param1", description="Test parameters"): [ + vol.All(str, vol.Lower) + ], + vol.Optional("param2"): vol.Any(float, int), + vol.Optional("param3"): dict, + } + ) + + mock_get_tools.return_value = [mock_tool] + + with patch("google.genai.chats.AsyncChats.create") as mock_create: + mock_chat = AsyncMock() + mock_create.return_value.send_message = mock_chat + chat_response = Mock(prompt_feedback=None) + mock_chat.return_value = chat_response + mock_part = Mock() + mock_part.text = "" + mock_part.function_call = FunctionCall( + name="test_tool", + args={ + "param1": ["test_value", "param1\\'s value"], + "param2": 2.7, + }, + ) + + def tool_call( + hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext + ) -> dict[str, Any]: + mock_part.function_call = None + mock_part.text = "Hi there!" 
+ return {"result": "Test response"} + + mock_tool.async_call.side_effect = tool_call + chat_response.candidates = [Mock(content=Mock(parts=[mock_part]))] + await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + device_id="test_device", + ) + + assert [tuple(mock_call) for mock_call in mock_create.mock_calls] == snapshot + @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" @@ -217,28 +288,24 @@ async def test_function_call_without_parameters( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" - mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "result": "Test response", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "result": "Test response", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( @@ -315,29 +382,25 @@ async def test_function_exception( assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert result.response.as_dict()["speech"]["plain"]["speech"] == "Hi there!" 
- mock_tool_call = mock_create.mock_calls[2][2]["message"] - assert mock_tool_call.model_dump() == { - "parts": [ - { - "code_execution_result": None, - "executable_code": None, - "file_data": None, - "function_call": None, - "function_response": { - "id": None, - "name": "test_tool", - "response": { - "error": "HomeAssistantError", - "error_text": "Test tool exception", - }, - }, - "inline_data": None, - "text": None, - "thought": None, - "video_metadata": None, + mock_tool_response_parts = mock_create.mock_calls[2][2]["message"] + assert len(mock_tool_response_parts) == 1 + assert mock_tool_response_parts[0].model_dump() == { + "code_execution_result": None, + "executable_code": None, + "file_data": None, + "function_call": None, + "function_response": { + "id": None, + "name": "test_tool", + "response": { + "error": "HomeAssistantError", + "error_text": "Test tool exception", }, - ], - "role": None, + }, + "inline_data": None, + "text": None, + "thought": None, + "video_metadata": None, } mock_tool.async_call.assert_awaited_once_with( hass, @@ -618,3 +681,82 @@ async def test_escape_decode() -> None: async def test_format_schema(openapi, genai_schema) -> None: """Test _format_schema.""" assert _format_schema(openapi) == genai_schema + + +@pytest.mark.usefixtures("mock_init_component") +async def test_empty_content_in_chat_history( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Tests that in case of an empty entry in the chat history the google API will receive an injected space sign instead.""" + with ( + patch("google.genai.chats.AsyncChats.create") as mock_create, + chat_session.async_get_chat_session(hass) as session, + async_get_chat_log(hass, session) as chat_log, + ): + mock_chat = AsyncMock() + mock_create.return_value.send_message = mock_chat + + # Chat preparation with two inputs, one being an empty string + first_input = "First request" + second_input = "" + chat_log.async_add_user_content(UserContent(first_input)) + chat_log.async_add_user_content(UserContent(second_input)) + + await conversation.async_converse( + hass, + "Second request", + session.conversation_id, + Context(), + agent_id="conversation.google_generative_ai_conversation", + ) + + _, kwargs = mock_create.call_args + actual_history = kwargs.get("history") + + assert actual_history[0].parts[0].text == first_input + assert actual_history[1].parts[0].text == " " + + +@pytest.mark.usefixtures("mock_init_component") +async def test_history_always_user_first_turn( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test that the user is always first in the chat history.""" + with ( + chat_session.async_get_chat_session(hass) as session, + async_get_chat_log(hass, session) as chat_log, + ): + chat_log.async_add_assistant_content_without_tools( + conversation.AssistantContent( + agent_id="conversation.google_generative_ai_conversation", + content="Garage door left open, do you want to close it?", + ) + ) + + with patch("google.genai.chats.AsyncChats.create") as mock_create: + mock_chat = AsyncMock() + mock_create.return_value.send_message = mock_chat + chat_response = Mock(prompt_feedback=None) + mock_chat.return_value = chat_response + chat_response.candidates = [Mock(content=Mock(parts=[]))] + + await conversation.async_converse( + hass, + "hello", + chat_log.conversation_id, + Context(), + agent_id="conversation.google_generative_ai_conversation", + ) + + _, kwargs = mock_create.call_args + actual_history = kwargs.get("history") + + 
assert actual_history[0].parts[0].text == " " + assert actual_history[0].role == "user" + assert ( + actual_history[1].parts[0].text + == "Garage door left open, do you want to close it?" + ) + assert actual_history[1].role == "model" diff --git a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index 5b691da4bdc..54ad47405a1 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -475,6 +475,6 @@ async def test_service_say_error( await retrieve_media( hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] ) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) assert len(mock_gtts.mock_calls) == 2 diff --git a/tests/components/gree/snapshots/test_switch.ambr b/tests/components/gree/snapshots/test_switch.ambr index 836641cb2ab..c3fa3ae24c7 100644 --- a/tests/components/gree/snapshots/test_switch.ambr +++ b/tests/components/gree/snapshots/test_switch.ambr @@ -16,10 +16,10 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'switch', - 'friendly_name': 'fake-device-1 Quiet', + 'friendly_name': 'fake-device-1 Quiet mode', }), 'context': , - 'entity_id': 'switch.fake_device_1_quiet', + 'entity_id': 'switch.fake_device_1_quiet_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -40,10 +40,10 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'switch', - 'friendly_name': 'fake-device-1 XFan', + 'friendly_name': 'fake-device-1 Xtra fan', }), 'context': , - 'entity_id': 'switch.fake_device_1_xfan', + 'entity_id': 'switch.fake_device_1_xtra_fan', 'last_changed': , 'last_reported': , 'last_updated': , @@ -109,7 +109,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.fake_device_1_quiet', + 'entity_id': 'switch.fake_device_1_quiet_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -121,7 +121,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Quiet', + 'original_name': 'Quiet mode', 'platform': 'gree', 'previous_unique_id': None, 'supported_features': 0, @@ -173,7 +173,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.fake_device_1_xfan', + 'entity_id': 'switch.fake_device_1_xtra_fan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -185,7 +185,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'XFan', + 'original_name': 'Xtra fan', 'platform': 'gree', 'previous_unique_id': None, 'supported_features': 0, diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index d7c011a4c25..e6bfc43252f 100644 --- a/tests/components/gree/test_climate.py +++ b/tests/components/gree/test_climate.py @@ -67,6 +67,11 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) from .common import async_setup_gree, build_device_mock @@ -411,19 +416,19 @@ async def test_send_power_off_device_timeout( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 26), (UnitOfTemperature.FAHRENHEIT, 73)], + [(METRIC_SYSTEM, 26), (US_CUSTOMARY_SYSTEM, 73)], ) async def test_send_target_temperature( - hass: HomeAssistant, discovery, device, units, temperature + hass: HomeAssistant, discovery, device, units: 
UnitSystem, temperature ) -> None: """Test for sending target temperature command to the device.""" - hass.config.units.temperature_unit = units + hass.config.units = units device().power = True device().mode = HVAC_MODES_REVERSE.get(HVACMode.AUTO) fake_device = device() - if units == UnitOfTemperature.FAHRENHEIT: + if units.temperature_unit == UnitOfTemperature.FAHRENHEIT: fake_device.temperature_units = 1 await async_setup_gree(hass) @@ -435,7 +440,7 @@ async def test_send_target_temperature( ENTITY_ID, "off", { - ATTR_UNIT_OF_MEASUREMENT: units, + ATTR_UNIT_OF_MEASUREMENT: units.temperature_unit, }, ) @@ -451,10 +456,6 @@ async def test_send_target_temperature( assert state.attributes.get(ATTR_TEMPERATURE) == temperature assert state.state == HVAC_MODES.get(fake_device.mode) - # Reset config temperature_unit back to CELSIUS, required for - # additional tests outside this component. - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - @pytest.mark.parametrize( ("temperature", "hvac_mode"), @@ -493,17 +494,17 @@ async def test_send_target_temperature_with_hvac_mode( @pytest.mark.parametrize( ("units", "temperature"), [ - (UnitOfTemperature.CELSIUS, 25), - (UnitOfTemperature.FAHRENHEIT, 73), - (UnitOfTemperature.FAHRENHEIT, 74), + (METRIC_SYSTEM, 25), + (US_CUSTOMARY_SYSTEM, 73), + (US_CUSTOMARY_SYSTEM, 74), ], ) async def test_send_target_temperature_device_timeout( - hass: HomeAssistant, discovery, device, units, temperature + hass: HomeAssistant, discovery, device, units: UnitSystem, temperature ) -> None: """Test for sending target temperature command to the device with a device timeout.""" - hass.config.units.temperature_unit = units - if units == UnitOfTemperature.FAHRENHEIT: + hass.config.units = units + if units.temperature_unit == UnitOfTemperature.FAHRENHEIT: device().temperature_units = 1 device().push_state_update.side_effect = DeviceTimeoutError @@ -520,24 +521,21 @@ async def test_send_target_temperature_device_timeout( assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature - # Reset config temperature_unit back to CELSIUS, required for additional tests outside this component. - hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - @pytest.mark.parametrize( ("units", "temperature"), [ - (UnitOfTemperature.CELSIUS, 25), - (UnitOfTemperature.FAHRENHEIT, 73), - (UnitOfTemperature.FAHRENHEIT, 74), + (METRIC_SYSTEM, 25), + (US_CUSTOMARY_SYSTEM, 73), + (US_CUSTOMARY_SYSTEM, 74), ], ) async def test_update_target_temperature( - hass: HomeAssistant, discovery, device, units, temperature + hass: HomeAssistant, discovery, device, units: UnitSystem, temperature ) -> None: """Test for updating target temperature from the device.""" - hass.config.units.temperature_unit = units - if units == UnitOfTemperature.FAHRENHEIT: + hass.config.units = units + if units.temperature_unit == UnitOfTemperature.FAHRENHEIT: device().temperature_units = 1 device().target_temperature = temperature @@ -554,9 +552,6 @@ async def test_update_target_temperature( assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature - # Reset config temperature_unit back to CELSIUS, required for additional tests outside this component. 
- hass.config.units.temperature_unit = UnitOfTemperature.CELSIUS - @pytest.mark.parametrize( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] diff --git a/tests/components/gree/test_switch.py b/tests/components/gree/test_switch.py index e9491796bdf..331b6dfa4a6 100644 --- a/tests/components/gree/test_switch.py +++ b/tests/components/gree/test_switch.py @@ -22,11 +22,11 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -ENTITY_ID_LIGHT_PANEL = f"{SWITCH_DOMAIN}.fake_device_1_panel_light" +ENTITY_ID_PANEL_LIGHT = f"{SWITCH_DOMAIN}.fake_device_1_panel_light" ENTITY_ID_HEALTH_MODE = f"{SWITCH_DOMAIN}.fake_device_1_health_mode" -ENTITY_ID_QUIET = f"{SWITCH_DOMAIN}.fake_device_1_quiet" +ENTITY_ID_QUIET_MODE = f"{SWITCH_DOMAIN}.fake_device_1_quiet_mode" ENTITY_ID_FRESH_AIR = f"{SWITCH_DOMAIN}.fake_device_1_fresh_air" -ENTITY_ID_XFAN = f"{SWITCH_DOMAIN}.fake_device_1_xfan" +ENTITY_ID_XTRA_FAN = f"{SWITCH_DOMAIN}.fake_device_1_xtra_fan" async def async_setup_gree(hass: HomeAssistant) -> MockConfigEntry: @@ -54,11 +54,11 @@ async def test_registry_settings( @pytest.mark.parametrize( "entity", [ - ENTITY_ID_LIGHT_PANEL, + ENTITY_ID_PANEL_LIGHT, ENTITY_ID_HEALTH_MODE, - ENTITY_ID_QUIET, + ENTITY_ID_QUIET_MODE, ENTITY_ID_FRESH_AIR, - ENTITY_ID_XFAN, + ENTITY_ID_XTRA_FAN, ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -81,11 +81,11 @@ async def test_send_switch_on(hass: HomeAssistant, entity: str) -> None: @pytest.mark.parametrize( "entity", [ - ENTITY_ID_LIGHT_PANEL, + ENTITY_ID_PANEL_LIGHT, ENTITY_ID_HEALTH_MODE, - ENTITY_ID_QUIET, + ENTITY_ID_QUIET_MODE, ENTITY_ID_FRESH_AIR, - ENTITY_ID_XFAN, + ENTITY_ID_XTRA_FAN, ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -112,11 +112,11 @@ async def test_send_switch_on_device_timeout( @pytest.mark.parametrize( "entity", [ - ENTITY_ID_LIGHT_PANEL, + ENTITY_ID_PANEL_LIGHT, ENTITY_ID_HEALTH_MODE, - ENTITY_ID_QUIET, + ENTITY_ID_QUIET_MODE, ENTITY_ID_FRESH_AIR, - ENTITY_ID_XFAN, + ENTITY_ID_XTRA_FAN, ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -139,11 +139,11 @@ async def test_send_switch_off(hass: HomeAssistant, entity: str) -> None: @pytest.mark.parametrize( "entity", [ - ENTITY_ID_LIGHT_PANEL, + ENTITY_ID_PANEL_LIGHT, ENTITY_ID_HEALTH_MODE, - ENTITY_ID_QUIET, + ENTITY_ID_QUIET_MODE, ENTITY_ID_FRESH_AIR, - ENTITY_ID_XFAN, + ENTITY_ID_XTRA_FAN, ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") diff --git a/tests/components/habitica/conftest.py b/tests/components/habitica/conftest.py index 45c33a9ebb6..4ef14699e0b 100644 --- a/tests/components/habitica/conftest.py +++ b/tests/components/habitica/conftest.py @@ -1,7 +1,8 @@ """Tests for the habitica component.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch +from uuid import UUID from habiticalib import ( BadRequestError, @@ -151,6 +152,9 @@ async def mock_habiticalib() -> Generator[AsyncMock]: client.create_tag.return_value = HabiticaTagResponse.from_json( load_fixture("create_tag.json", DOMAIN) ) + client.create_task.return_value = HabiticaTaskResponse.from_json( + load_fixture("task.json", DOMAIN) + ) client.habitipy.return_value = { "tasks": { "user": { @@ -173,3 +177,13 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.habitica.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + 
+@pytest.fixture +def mock_uuid4() -> Generator[MagicMock]: + """Mock uuid4.""" + with patch( + "homeassistant.components.habitica.services.uuid4", autospec=True + ) as mock_uuid4: + mock_uuid4.return_value = UUID("12345678-1234-5678-1234-567812345678") + yield mock_uuid4 diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json index 378652138bc..085508b4432 100644 --- a/tests/components/habitica/fixtures/tasks.json +++ b/tests/components/habitica/fixtures/tasks.json @@ -425,7 +425,18 @@ "date": "2024-09-27T22:17:00.000Z", "completed": false, "collapseChecklist": false, - "checklist": [], + "checklist": [ + { + "completed": false, + "id": "fccc26f2-1e2b-4bf8-9dd0-a405be261036", + "text": "Checklist-item1" + }, + { + "completed": true, + "id": "5a897af4-ea94-456a-a2bd-f336bcd79509", + "text": "Checklist-item2" + } + ], "type": "todo", "text": "Buch zu Ende lesen", "notes": "Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.", @@ -594,7 +605,18 @@ "startDate": "2024-09-20T23:00:00.000Z", "daysOfMonth": [], "weeksOfMonth": [3], - "checklist": [], + "checklist": [ + { + "completed": false, + "id": "a2a6702d-58e1-46c2-a3ce-422d525cc0b6", + "text": "Checklist-item1" + }, + { + "completed": true, + "id": "9f64e1cd-b0ab-4577-8344-c7a5e1827997", + "text": "Checklist-item2" + } + ], "reminders": [], "createdAt": "2024-10-10T15:57:14.304Z", "updatedAt": "2024-11-27T23:47:29.986Z", diff --git a/tests/components/habitica/snapshots/test_services.ambr b/tests/components/habitica/snapshots/test_services.ambr index 79c9e3eab66..430cd379c0d 100644 --- a/tests/components/habitica/snapshots/test_services.ambr +++ b/tests/components/habitica/snapshots/test_services.ambr @@ -736,6 +736,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'fccc26f2-1e2b-4bf8-9dd0-a405be261036', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '5a897af4-ea94-456a-a2bd-f336bcd79509', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -1106,6 +1116,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'a2a6702d-58e1-46c2-a3ce-422d525cc0b6', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '9f64e1cd-b0ab-4577-8344-c7a5e1827997', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -1834,6 +1854,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'fccc26f2-1e2b-4bf8-9dd0-a405be261036', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '5a897af4-ea94-456a-a2bd-f336bcd79509', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -2978,6 +3008,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'fccc26f2-1e2b-4bf8-9dd0-a405be261036', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '5a897af4-ea94-456a-a2bd-f336bcd79509', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -3348,6 +3388,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'a2a6702d-58e1-46c2-a3ce-422d525cc0b6', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '9f64e1cd-b0ab-4577-8344-c7a5e1827997', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -4481,6 +4531,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 
'a2a6702d-58e1-46c2-a3ce-422d525cc0b6', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '9f64e1cd-b0ab-4577-8344-c7a5e1827997', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -5062,6 +5122,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'a2a6702d-58e1-46c2-a3ce-422d525cc0b6', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '9f64e1cd-b0ab-4577-8344-c7a5e1827997', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -5615,6 +5685,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'fccc26f2-1e2b-4bf8-9dd0-a405be261036', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '5a897af4-ea94-456a-a2bd-f336bcd79509', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, @@ -6137,6 +6217,16 @@ 'winner': None, }), 'checklist': list([ + dict({ + 'completed': False, + 'id': 'fccc26f2-1e2b-4bf8-9dd0-a405be261036', + 'text': 'Checklist-item1', + }), + dict({ + 'completed': True, + 'id': '5a897af4-ea94-456a-a2bd-f336bcd79509', + 'text': 'Checklist-item2', + }), ]), 'collapseChecklist': False, 'completed': False, diff --git a/tests/components/habitica/test_services.py b/tests/components/habitica/test_services.py index a4442016784..774593fa0f6 100644 --- a/tests/components/habitica/test_services.py +++ b/tests/components/habitica/test_services.py @@ -1,35 +1,69 @@ """Test Habitica actions.""" from collections.abc import Generator +from datetime import UTC, datetime from typing import Any from unittest.mock import AsyncMock, patch from uuid import UUID from aiohttp import ClientError -from habiticalib import Direction, HabiticaTaskResponse, Skill, Task +from freezegun.api import freeze_time +from habiticalib import ( + Checklist, + Direction, + Frequency, + HabiticaTaskResponse, + Reminders, + Repeat, + Skill, + Task, + TaskPriority, + TaskType, +) import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.habitica.const import ( + ATTR_ADD_CHECKLIST_ITEM, ATTR_ALIAS, + ATTR_CLEAR_DATE, + ATTR_CLEAR_REMINDER, ATTR_CONFIG_ENTRY, ATTR_COST, + ATTR_COUNTER_DOWN, + ATTR_COUNTER_UP, ATTR_DIRECTION, + ATTR_FREQUENCY, + ATTR_INTERVAL, ATTR_ITEM, ATTR_KEYWORD, ATTR_NOTES, ATTR_PRIORITY, + ATTR_REMINDER, + ATTR_REMOVE_CHECKLIST_ITEM, + ATTR_REMOVE_REMINDER, ATTR_REMOVE_TAG, + ATTR_REPEAT, + ATTR_REPEAT_MONTHLY, + ATTR_SCORE_CHECKLIST_ITEM, ATTR_SKILL, + ATTR_START_DATE, + ATTR_STREAK, ATTR_TAG, ATTR_TARGET, ATTR_TASK, ATTR_TYPE, + ATTR_UNSCORE_CHECKLIST_ITEM, + ATTR_UP_DOWN, DOMAIN, SERVICE_ABORT_QUEST, SERVICE_ACCEPT_QUEST, SERVICE_CANCEL_QUEST, SERVICE_CAST_SKILL, + SERVICE_CREATE_DAILY, + SERVICE_CREATE_HABIT, + SERVICE_CREATE_REWARD, + SERVICE_CREATE_TODO, SERVICE_GET_TASKS, SERVICE_LEAVE_QUEST, SERVICE_REJECT_QUEST, @@ -37,10 +71,14 @@ from homeassistant.components.habitica.const import ( SERVICE_SCORE_REWARD, SERVICE_START_QUEST, SERVICE_TRANSFORMATION, + SERVICE_UPDATE_DAILY, + SERVICE_UPDATE_HABIT, SERVICE_UPDATE_REWARD, + SERVICE_UPDATE_TODO, ) from homeassistant.components.todo import ATTR_RENAME from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_DATE, ATTR_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError @@ -917,6 +955,15 @@ async def test_get_tasks( ), ], ) +@pytest.mark.parametrize( + ("service", 
"task_id"), + [ + (SERVICE_UPDATE_REWARD, "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"), + (SERVICE_UPDATE_HABIT, "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a"), + (SERVICE_UPDATE_TODO, "88de7cd9-af2b-49ce-9afd-bf941d87336b"), + (SERVICE_UPDATE_DAILY, "6e53f1f5-a315-4edd-984d-8d762e4a08ef"), + ], +) @pytest.mark.usefixtures("habitica") async def test_update_task_exceptions( hass: HomeAssistant, @@ -925,15 +972,16 @@ async def test_update_task_exceptions( exception: Exception, expected_exception: Exception, exception_msg: str, + service: str, + task_id: str, ) -> None: """Test Habitica task action exceptions.""" - task_id = "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b" habitica.update_task.side_effect = exception with pytest.raises(expected_exception, match=exception_msg): await hass.services.async_call( DOMAIN, - SERVICE_UPDATE_REWARD, + service, service_data={ ATTR_CONFIG_ENTRY: config_entry.entry_id, ATTR_TASK: task_id, @@ -943,6 +991,61 @@ async def test_update_task_exceptions( ) +@pytest.mark.parametrize( + ("exception", "expected_exception", "exception_msg"), + [ + ( + ERROR_TOO_MANY_REQUESTS, + HomeAssistantError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + ERROR_BAD_REQUEST, + HomeAssistantError, + REQUEST_EXCEPTION_MSG, + ), + ( + ClientError, + HomeAssistantError, + "Unable to connect to Habitica: ", + ), + ], +) +@pytest.mark.parametrize( + "service", + [ + SERVICE_CREATE_DAILY, + SERVICE_CREATE_HABIT, + SERVICE_CREATE_REWARD, + SERVICE_CREATE_TODO, + ], +) +@pytest.mark.usefixtures("habitica") +async def test_create_task_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + exception: Exception, + expected_exception: Exception, + exception_msg: str, + service: str, +) -> None: + """Test Habitica task create action exceptions.""" + + habitica.create_task.side_effect = exception + with pytest.raises(expected_exception, match=exception_msg): + await hass.services.async_call( + DOMAIN, + service, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_NAME: "TITLE", + }, + return_response=True, + blocking=True, + ) + + @pytest.mark.usefixtures("habitica") async def test_task_not_found( hass: HomeAssistant, @@ -1024,6 +1127,940 @@ async def test_update_reward( habitica.update_task.assert_awaited_with(UUID(task_id), call_args) +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_NAME: "TITLE", + ATTR_COST: 100, + }, + Task(type=TaskType.REWARD, text="TITLE", value=100), + ), + ( + { + ATTR_NAME: "TITLE", + }, + Task(type=TaskType.REWARD, text="TITLE"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_NOTES: "NOTES", + }, + Task(type=TaskType.REWARD, text="TITLE", notes="NOTES"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ALIAS: "ALIAS", + }, + Task(type=TaskType.REWARD, text="TITLE", alias="ALIAS"), + ), + ], +) +async def test_create_reward( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica create_reward action.""" + + await hass.services.async_call( + DOMAIN, + SERVICE_CREATE_REWARD, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + habitica.create_task.assert_awaited_with(call_args) + + +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_RENAME: "RENAME", + }, + Task(text="RENAME"), + ), + ( + { + ATTR_NOTES: "NOTES", + }, + Task(notes="NOTES"), + ), + ( + { + ATTR_UP_DOWN: [""], + }, + Task(up=False, down=False), + ), + ( 
+ { + ATTR_UP_DOWN: ["up"], + }, + Task(up=True, down=False), + ), + ( + { + ATTR_UP_DOWN: ["down"], + }, + Task(up=False, down=True), + ), + ( + { + ATTR_PRIORITY: "trivial", + }, + Task(priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_FREQUENCY: "daily", + }, + Task(frequency=Frequency.DAILY), + ), + ( + { + ATTR_COUNTER_UP: 1, + ATTR_COUNTER_DOWN: 2, + }, + Task(counterUp=1, counterDown=2), + ), + ( + { + ATTR_ALIAS: "ALIAS", + }, + Task(alias="ALIAS"), + ), + ], +) +async def test_update_habit( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica habit action.""" + task_id = "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a" + + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_HABIT, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: task_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + habitica.update_task.assert_awaited_with(UUID(task_id), call_args) + + +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_NAME: "TITLE", + }, + Task(type=TaskType.HABIT, text="TITLE"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_NOTES: "NOTES", + }, + Task(type=TaskType.HABIT, text="TITLE", notes="NOTES"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_UP_DOWN: [""], + }, + Task(type=TaskType.HABIT, text="TITLE", up=False, down=False), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_UP_DOWN: ["up"], + }, + Task(type=TaskType.HABIT, text="TITLE", up=True, down=False), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_UP_DOWN: ["down"], + }, + Task(type=TaskType.HABIT, text="TITLE", up=False, down=True), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_PRIORITY: "trivial", + }, + Task(type=TaskType.HABIT, text="TITLE", priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "daily", + }, + Task(type=TaskType.HABIT, text="TITLE", frequency=Frequency.DAILY), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ALIAS: "ALIAS", + }, + Task(type=TaskType.HABIT, text="TITLE", alias="ALIAS"), + ), + ], +) +async def test_create_habit( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica create_habit action.""" + + await hass.services.async_call( + DOMAIN, + SERVICE_CREATE_HABIT, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + habitica.create_task.assert_awaited_with(call_args) + + +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_RENAME: "RENAME", + }, + Task(text="RENAME"), + ), + ( + { + ATTR_NOTES: "NOTES", + }, + Task(notes="NOTES"), + ), + ( + { + ATTR_ADD_CHECKLIST_ITEM: "Checklist-item", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("fccc26f2-1e2b-4bf8-9dd0-a405be261036"), + text="Checklist-item1", + completed=False, + ), + Checklist( + id=UUID("5a897af4-ea94-456a-a2bd-f336bcd79509"), + text="Checklist-item2", + completed=True, + ), + Checklist( + id=UUID("12345678-1234-5678-1234-567812345678"), + text="Checklist-item", + completed=False, + ), + ] + } + ), + ), + ( + { + ATTR_REMOVE_CHECKLIST_ITEM: "Checklist-item1", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("5a897af4-ea94-456a-a2bd-f336bcd79509"), + text="Checklist-item2", + completed=True, + ), + ] + } + ), + ), + ( + { + ATTR_SCORE_CHECKLIST_ITEM: "Checklist-item1", + }, + Task( + { + "checklist": [ + Checklist( + 
id=UUID("fccc26f2-1e2b-4bf8-9dd0-a405be261036"), + text="Checklist-item1", + completed=True, + ), + Checklist( + id=UUID("5a897af4-ea94-456a-a2bd-f336bcd79509"), + text="Checklist-item2", + completed=True, + ), + ] + } + ), + ), + ( + { + ATTR_UNSCORE_CHECKLIST_ITEM: "Checklist-item2", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("fccc26f2-1e2b-4bf8-9dd0-a405be261036"), + text="Checklist-item1", + completed=False, + ), + Checklist( + id=UUID("5a897af4-ea94-456a-a2bd-f336bcd79509"), + text="Checklist-item2", + completed=False, + ), + ] + } + ), + ), + ( + { + ATTR_PRIORITY: "trivial", + }, + Task(priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_DATE: "2025-03-05", + }, + Task(date=datetime(2025, 3, 5)), + ), + ( + { + ATTR_CLEAR_DATE: True, + }, + Task(date=None), + ), + ( + { + ATTR_REMINDER: ["2025-02-25T00:00"], + }, + Task( + { + "reminders": [ + Reminders( + id=UUID("12345678-1234-5678-1234-567812345678"), + time=datetime(2025, 2, 25, 0, 0), + startDate=None, + ) + ] + } + ), + ), + ( + { + ATTR_REMOVE_REMINDER: ["2025-02-25T00:00"], + }, + Task({"reminders": []}), + ), + ( + { + ATTR_CLEAR_REMINDER: True, + }, + Task({"reminders": []}), + ), + ( + { + ATTR_ALIAS: "ALIAS", + }, + Task(alias="ALIAS"), + ), + ], +) +@pytest.mark.usefixtures("mock_uuid4") +async def test_update_todo( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica update todo action.""" + task_id = "88de7cd9-af2b-49ce-9afd-bf941d87336b" + + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_TODO, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: task_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + habitica.update_task.assert_awaited_with(UUID(task_id), call_args) + + +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_NAME: "TITLE", + }, + Task(type=TaskType.TODO, text="TITLE"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_NOTES: "NOTES", + }, + Task(type=TaskType.TODO, text="TITLE", notes="NOTES"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ADD_CHECKLIST_ITEM: "Checklist-item", + }, + Task( + type=TaskType.TODO, + text="TITLE", + checklist=[ + Checklist( + id=UUID("12345678-1234-5678-1234-567812345678"), + text="Checklist-item", + completed=False, + ), + ], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_PRIORITY: "trivial", + }, + Task(type=TaskType.TODO, text="TITLE", priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_DATE: "2025-03-05", + }, + Task(type=TaskType.TODO, text="TITLE", date=datetime(2025, 3, 5)), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_REMINDER: ["2025-02-25T00:00"], + }, + Task( + type=TaskType.TODO, + text="TITLE", + reminders=[ + Reminders( + id=UUID("12345678-1234-5678-1234-567812345678"), + time=datetime(2025, 2, 25, 0, 0), + startDate=None, + ) + ], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ALIAS: "ALIAS", + }, + Task(type=TaskType.TODO, text="TITLE", alias="ALIAS"), + ), + ], +) +@pytest.mark.usefixtures("mock_uuid4") +async def test_create_todo( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica create todo action.""" + + await hass.services.async_call( + DOMAIN, + SERVICE_CREATE_TODO, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + 
habitica.create_task.assert_awaited_with(call_args) + + +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_RENAME: "RENAME", + }, + Task(text="RENAME"), + ), + ( + { + ATTR_NOTES: "NOTES", + }, + Task(notes="NOTES"), + ), + ( + { + ATTR_ADD_CHECKLIST_ITEM: "Checklist-item", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("a2a6702d-58e1-46c2-a3ce-422d525cc0b6"), + text="Checklist-item1", + completed=False, + ), + Checklist( + id=UUID("9f64e1cd-b0ab-4577-8344-c7a5e1827997"), + text="Checklist-item2", + completed=True, + ), + Checklist( + id=UUID("12345678-1234-5678-1234-567812345678"), + text="Checklist-item", + completed=False, + ), + ] + } + ), + ), + ( + { + ATTR_REMOVE_CHECKLIST_ITEM: "Checklist-item1", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("9f64e1cd-b0ab-4577-8344-c7a5e1827997"), + text="Checklist-item2", + completed=True, + ), + ] + } + ), + ), + ( + { + ATTR_SCORE_CHECKLIST_ITEM: "Checklist-item1", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("a2a6702d-58e1-46c2-a3ce-422d525cc0b6"), + text="Checklist-item1", + completed=True, + ), + Checklist( + id=UUID("9f64e1cd-b0ab-4577-8344-c7a5e1827997"), + text="Checklist-item2", + completed=True, + ), + ] + } + ), + ), + ( + { + ATTR_UNSCORE_CHECKLIST_ITEM: "Checklist-item2", + }, + Task( + { + "checklist": [ + Checklist( + id=UUID("a2a6702d-58e1-46c2-a3ce-422d525cc0b6"), + text="Checklist-item1", + completed=False, + ), + Checklist( + id=UUID("9f64e1cd-b0ab-4577-8344-c7a5e1827997"), + text="Checklist-item2", + completed=False, + ), + ] + } + ), + ), + ( + { + ATTR_PRIORITY: "trivial", + }, + Task(priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_START_DATE: "2025-03-05", + }, + Task(startDate=datetime(2025, 3, 5)), + ), + ( + { + ATTR_FREQUENCY: "weekly", + }, + Task(frequency=Frequency.WEEKLY), + ), + ( + { + ATTR_INTERVAL: 5, + }, + Task(everyX=5), + ), + ( + { + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT: ["m", "t", "w", "th"], + }, + Task( + frequency=Frequency.WEEKLY, + repeat=Repeat(m=True, t=True, w=True, th=True), + ), + ), + ( + { + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_month", + }, + Task(frequency=Frequency.MONTHLY, daysOfMonth=[20], weeksOfMonth=[]), + ), + ( + { + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_week", + }, + Task( + frequency=Frequency.MONTHLY, + daysOfMonth=[], + weeksOfMonth=[2], + repeat=Repeat( + m=False, t=False, w=False, th=False, f=True, s=False, su=False + ), + ), + ), + ( + { + ATTR_REMINDER: ["10:00"], + }, + Task( + { + "reminders": [ + Reminders( + id=UUID("12345678-1234-5678-1234-567812345678"), + time=datetime(2025, 2, 25, 10, 0, tzinfo=UTC), + startDate=None, + ) + ] + } + ), + ), + ( + { + ATTR_REMOVE_REMINDER: ["10:00"], + }, + Task({"reminders": []}), + ), + ( + { + ATTR_CLEAR_REMINDER: True, + }, + Task({"reminders": []}), + ), + ( + { + ATTR_STREAK: 10, + }, + Task(streak=10), + ), + ( + { + ATTR_ALIAS: "ALIAS", + }, + Task(alias="ALIAS"), + ), + ], +) +@pytest.mark.usefixtures("mock_uuid4") +@freeze_time("2025-02-25T22:00:00.000Z") +async def test_update_daily( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica update daily action.""" + task_id = "6e53f1f5-a315-4edd-984d-8d762e4a08ef" + + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_DAILY, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: task_id, + **service_data, + }, + return_response=True, + 
blocking=True, + ) + habitica.update_task.assert_awaited_with(UUID(task_id), call_args) + + +@pytest.mark.parametrize( + ("service_data", "call_args"), + [ + ( + { + ATTR_NAME: "TITLE", + }, + Task(type=TaskType.DAILY, text="TITLE"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_NOTES: "NOTES", + }, + Task(type=TaskType.DAILY, text="TITLE", notes="NOTES"), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ADD_CHECKLIST_ITEM: "Checklist-item", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + checklist=[ + Checklist( + id=UUID("12345678-1234-5678-1234-567812345678"), + text="Checklist-item", + completed=False, + ), + ], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_PRIORITY: "trivial", + }, + Task(type=TaskType.DAILY, text="TITLE", priority=TaskPriority.TRIVIAL), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_START_DATE: "2025-03-05", + }, + Task(type=TaskType.DAILY, text="TITLE", startDate=datetime(2025, 3, 5)), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "weekly", + }, + Task(type=TaskType.DAILY, text="TITLE", frequency=Frequency.WEEKLY), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_INTERVAL: 5, + }, + Task(type=TaskType.DAILY, text="TITLE", everyX=5), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT: ["m", "t", "w", "th"], + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.WEEKLY, + repeat=Repeat(m=True, t=True, w=True, th=True), + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_month", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.MONTHLY, + daysOfMonth=[25], + weeksOfMonth=[], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_FREQUENCY: "monthly", + ATTR_REPEAT_MONTHLY: "day_of_week", + }, + Task( + type=TaskType.DAILY, + text="TITLE", + frequency=Frequency.MONTHLY, + daysOfMonth=[], + weeksOfMonth=[3], + repeat=Repeat( + m=False, t=True, w=False, th=False, f=False, s=False, su=False + ), + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_REMINDER: ["10:00"], + }, + Task( + type=TaskType.DAILY, + text="TITLE", + reminders=[ + Reminders( + id=UUID("12345678-1234-5678-1234-567812345678"), + time=datetime(2025, 2, 25, 10, 0, tzinfo=UTC), + startDate=None, + ) + ], + ), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_REMOVE_REMINDER: ["10:00"], + }, + Task(type=TaskType.DAILY, text="TITLE", reminders=[]), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_CLEAR_REMINDER: True, + }, + Task(type=TaskType.DAILY, text="TITLE", reminders=[]), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_STREAK: 10, + }, + Task(type=TaskType.DAILY, text="TITLE", streak=10), + ), + ( + { + ATTR_NAME: "TITLE", + ATTR_ALIAS: "ALIAS", + }, + Task(type=TaskType.DAILY, text="TITLE", alias="ALIAS"), + ), + ], +) +@pytest.mark.usefixtures("mock_uuid4") +@freeze_time("2025-02-25T22:00:00.000Z") +async def test_create_daily( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], + call_args: Task, +) -> None: + """Test Habitica create daily action.""" + + await hass.services.async_call( + DOMAIN, + SERVICE_CREATE_DAILY, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + habitica.create_task.assert_awaited_with(call_args) + + +@pytest.mark.parametrize( + "service_data", + [ + { + ATTR_FREQUENCY: "daily", + ATTR_REPEAT: ["m", "t", "w", "th"], + }, + { + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT_MONTHLY: "day_of_month", + }, + { + ATTR_FREQUENCY: "weekly", + ATTR_REPEAT_MONTHLY: "day_of_week", + 
}, + ], +) +@pytest.mark.usefixtures("mock_uuid4") +@freeze_time("2025-02-25T22:00:00.000Z") +async def test_update_daily_service_validation_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + habitica: AsyncMock, + service_data: dict[str, Any], +) -> None: + """Test Habitica update daily action.""" + task_id = "6e53f1f5-a315-4edd-984d-8d762e4a08ef" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_DAILY, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: task_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + async def test_tags( hass: HomeAssistant, config_entry: MockConfigEntry, diff --git a/tests/components/harmony/test_subscriber.py b/tests/components/harmony/test_subscriber.py index f1d1866a044..22957fc3f69 100644 --- a/tests/components/harmony/test_subscriber.py +++ b/tests/components/harmony/test_subscriber.py @@ -38,7 +38,7 @@ async def test_empty_callbacks(hass: HomeAssistant) -> None: """Ensure we handle a missing callback in a subscription.""" subscriber = HarmonySubscriberMixin(hass) - callbacks = {k: None for k in _ALL_CALLBACK_NAMES} + callbacks = dict.fromkeys(_ALL_CALLBACK_NAMES) subscriber.async_subscribe(HarmonyCallback(**callbacks)) _call_all_callbacks(subscriber) await hass.async_block_till_done() diff --git a/tests/components/hassio/test_backup.py b/tests/components/hassio/test_backup.py index 6e4fe4dd428..af951fe8aa1 100644 --- a/tests/components/hassio/test_backup.py +++ b/tests/components/hassio/test_backup.py @@ -3,16 +3,18 @@ from collections.abc import ( AsyncGenerator, AsyncIterator, + Buffer, Callable, Coroutine, Generator, + Iterable, ) from dataclasses import replace from datetime import datetime from io import StringIO import os from pathlib import PurePath -from typing import Any +from typing import Any, cast from unittest.mock import ANY, AsyncMock, Mock, patch from uuid import UUID @@ -38,6 +40,7 @@ from homeassistant.components.backup import ( AgentBackup, BackupAgent, BackupAgentPlatformProtocol, + BackupNotFound, Folder, store as backup_store, ) @@ -326,43 +329,70 @@ async def setup_backup_integration( await hass.async_block_till_done() -class BackupAgentTest(BackupAgent): - """Test backup agent.""" +async def aiter_from_iter(iterable: Iterable) -> AsyncIterator: + """Convert an iterable to an async iterator.""" + for i in iterable: + yield i - def __init__(self, name: str, domain: str = "test") -> None: - """Initialize the backup agent.""" - self.domain = domain - self.name = name - self.unique_id = name - async def async_download_backup( - self, backup_id: str, **kwargs: Any - ) -> AsyncIterator[bytes]: - """Download a backup file.""" - return AsyncMock(spec_set=["__aiter__"]) +def mock_backup_agent( + name: str, domain: str = "test", backups: list[AgentBackup] | None = None +) -> Mock: + """Create a mock backup agent.""" - async def async_upload_backup( - self, + async def delete_backup(backup_id: str, **kwargs: Any) -> None: + """Mock delete.""" + await get_backup(backup_id) + + async def download_backup(backup_id: str, **kwargs: Any) -> AsyncIterator[bytes]: + """Mock download.""" + return aiter_from_iter((backups_data.get(backup_id, b"backup data"),)) + + async def get_backup(backup_id: str, **kwargs: Any) -> AgentBackup: + """Get a backup.""" + backup = next((b for b in _backups if b.backup_id == backup_id), None) + if backup is None: + raise BackupNotFound + return backup + + async def upload_backup( *, open_stream: 
Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]], backup: AgentBackup, **kwargs: Any, ) -> None: """Upload a backup.""" - await open_stream() + _backups.append(backup) + backup_stream = await open_stream() + backup_data = bytearray() + async for chunk in backup_stream: + backup_data += chunk + backups_data[backup.backup_id] = backup_data - async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]: - """List backups.""" - return [] - - async def async_get_backup( - self, backup_id: str, **kwargs: Any - ) -> AgentBackup | None: - """Return a backup.""" - return None - - async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None: - """Delete a backup file.""" + _backups = backups or [] + backups_data: dict[str, Buffer] = {} + mock_agent = Mock(spec=BackupAgent) + mock_agent.domain = domain + mock_agent.name = name + mock_agent.unique_id = name + type(mock_agent).agent_id = BackupAgent.agent_id + mock_agent.async_delete_backup = AsyncMock( + side_effect=delete_backup, spec_set=[BackupAgent.async_delete_backup] + ) + mock_agent.async_download_backup = AsyncMock( + side_effect=download_backup, spec_set=[BackupAgent.async_download_backup] + ) + mock_agent.async_get_backup = AsyncMock( + side_effect=get_backup, spec_set=[BackupAgent.async_get_backup] + ) + mock_agent.async_list_backups = AsyncMock( + return_value=backups, spec_set=[BackupAgent.async_list_backups] + ) + mock_agent.async_upload_backup = AsyncMock( + side_effect=upload_backup, + spec_set=[BackupAgent.async_upload_backup], + ) + return mock_agent async def _setup_backup_platform( @@ -372,7 +402,7 @@ async def _setup_backup_platform( platform: BackupAgentPlatformProtocol, ) -> None: """Set up a mock domain.""" - mock_platform(hass, f"{domain}.backup", platform) + mock_platform(hass, f"{domain}.backup", cast(Mock, platform)) assert await async_setup_component(hass, domain, {}) await hass.async_block_till_done() @@ -383,7 +413,7 @@ async def _setup_backup_platform( [ ( MountsInfo(default_backup_mount=None, mounts=[]), - [BackupAgentTest("local", DOMAIN)], + [mock_backup_agent("local", DOMAIN)], ), ( MountsInfo( @@ -394,14 +424,14 @@ async def _setup_backup_platform( name="test", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path="test", + user_path=PurePath("test"), usage=supervisor_mounts.MountUsage.BACKUP, server="test", type=supervisor_mounts.MountType.CIFS, ) ], ), - [BackupAgentTest("local", DOMAIN), BackupAgentTest("test", DOMAIN)], + [mock_backup_agent("local", DOMAIN), mock_backup_agent("test", DOMAIN)], ), ( MountsInfo( @@ -412,14 +442,14 @@ async def _setup_backup_platform( name="test", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path="test", + user_path=PurePath("test"), usage=supervisor_mounts.MountUsage.MEDIA, server="test", type=supervisor_mounts.MountType.CIFS, ) ], ), - [BackupAgentTest("local", DOMAIN)], + [mock_backup_agent("local", DOMAIN)], ), ], ) @@ -576,40 +606,13 @@ async def test_agent_upload( ) -> None: """Test agent upload backup.""" client = await hass_client() - backup_id = "test-backup" supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS - test_backup = AgentBackup( - addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], - backup_id=backup_id, - database_included=True, - date="1970-01-01T00:00:00.000Z", - extra_metadata={}, - folders=[Folder.MEDIA, Folder.SHARE], - homeassistant_included=True, - homeassistant_version="2024.12.0", - name="Test", - protected=False, - size=0, - ) 
supervisor_client.backups.reload.assert_not_called() - with ( - patch("pathlib.Path.mkdir"), - patch("pathlib.Path.open"), - patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backup", - ) as fetch_backup, - patch( - "homeassistant.components.backup.manager.read_backup", - return_value=test_backup, - ), - patch("shutil.copy"), - ): - fetch_backup.return_value = test_backup - resp = await client.post( - "/api/backup/upload?agent_id=hassio.local", - data={"file": StringIO("test")}, - ) + resp = await client.post( + "/api/backup/upload?agent_id=hassio.local", + data={"file": StringIO("test")}, + ) assert resp.status == 201 supervisor_client.backups.reload.assert_not_called() @@ -852,7 +855,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( "with_automatic_settings": False, }, filename=PurePath("Test_2025-01-30_05.42_12345678.tar"), - folders={"ssl"}, + folders={supervisor_backups.Folder("ssl")}, homeassistant_exclude_database=False, homeassistant=True, location=[LOCATION_LOCAL_STORAGE], @@ -875,7 +878,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_all_addons": True}, - replace(DEFAULT_BACKUP_OPTIONS, addons="ALL"), + replace(DEFAULT_BACKUP_OPTIONS, addons=supervisor_backups.AddonSet("ALL")), ), ( {"include_database": False}, @@ -883,7 +886,14 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( ), ( {"include_folders": ["media", "share"]}, - replace(DEFAULT_BACKUP_OPTIONS, folders={"media", "share", "ssl"}), + replace( + DEFAULT_BACKUP_OPTIONS, + folders={ + supervisor_backups.Folder("media"), + supervisor_backups.Folder("share"), + supervisor_backups.Folder("ssl"), + }, + ), ), ( { @@ -893,7 +903,7 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions( }, replace( DEFAULT_BACKUP_OPTIONS, - folders={"media"}, + folders={supervisor_backups.Folder("media")}, homeassistant=False, homeassistant_exclude_database=True, ), @@ -1249,11 +1259,11 @@ async def test_reader_writer_create_per_agent_encryption( hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, supervisor_client: AsyncMock, - commands: dict[str, Any], + commands: list[dict[str, Any]], password: str | None, agent_ids: list[str], password_sent_to_supervisor: str | None, - create_locations: list[str | None], + create_locations: list[str], create_protected: bool, upload_locations: list[str | None], ) -> None: @@ -1268,7 +1278,7 @@ async def test_reader_writer_create_per_agent_encryption( name=f"share{i}", read_only=False, state=supervisor_mounts.MountState.ACTIVE, - user_path=f"share{i}", + user_path=PurePath(f"share{i}"), usage=supervisor_mounts.MountUsage.BACKUP, server=f"share{i}", type=supervisor_mounts.MountType.CIFS, @@ -1551,7 +1561,7 @@ async def test_reader_writer_create_download_remove_error( method_mock = getattr(supervisor_client.backups, method) method_mock.side_effect = exception - remote_agent = BackupAgentTest("remote") + remote_agent = mock_backup_agent("remote") await _setup_backup_platform( hass, domain="test", @@ -1636,7 +1646,7 @@ async def test_reader_writer_create_info_error( supervisor_client.backups.backup_info.side_effect = exception supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE - remote_agent = BackupAgentTest("remote") + remote_agent = mock_backup_agent("remote") await _setup_backup_platform( hass, domain="test", @@ -1713,7 +1723,7 @@ async def test_reader_writer_create_remote_backup( supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5 
supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE - remote_agent = BackupAgentTest("remote") + remote_agent = mock_backup_agent("remote") await _setup_backup_platform( hass, domain="test", @@ -1861,24 +1871,10 @@ async def test_agent_receive_remote_backup( ) -> None: """Test receiving a backup which will be uploaded to a remote agent.""" client = await hass_client() - backup_id = "test-backup" supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5 supervisor_client.backups.upload_backup.return_value = "test_slug" - test_backup = AgentBackup( - addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], - backup_id=backup_id, - database_included=True, - date="1970-01-01T00:00:00.000Z", - extra_metadata={}, - folders=[Folder.MEDIA, Folder.SHARE], - homeassistant_included=True, - homeassistant_version="2024.12.0", - name="Test", - protected=False, - size=0.0, - ) - remote_agent = BackupAgentTest("remote") + remote_agent = mock_backup_agent("remote") await _setup_backup_platform( hass, domain="test", @@ -1889,23 +1885,10 @@ async def test_agent_receive_remote_backup( ) supervisor_client.backups.reload.assert_not_called() - with ( - patch("pathlib.Path.mkdir"), - patch("pathlib.Path.open"), - patch( - "homeassistant.components.backup.manager.BackupManager.async_get_backup", - ) as fetch_backup, - patch( - "homeassistant.components.backup.manager.read_backup", - return_value=test_backup, - ), - patch("shutil.copy"), - ): - fetch_backup.return_value = test_backup - resp = await client.post( - "/api/backup/upload?agent_id=test.remote", - data={"file": StringIO("test")}, - ) + resp = await client.post( + "/api/backup/upload?agent_id=test.remote", + data={"file": StringIO("test")}, + ) assert resp.status == 201 @@ -1996,6 +1979,103 @@ async def test_reader_writer_restore( assert response["result"] is None +@pytest.mark.usefixtures("hassio_client", "setup_backup_integration") +async def test_reader_writer_restore_remote_backup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + supervisor_client: AsyncMock, +) -> None: + """Test restoring a backup from a remote agent.""" + client = await hass_ws_client(hass) + supervisor_client.backups.partial_restore.return_value.job_id = UUID(TEST_JOB_ID) + supervisor_client.backups.list.return_value = [TEST_BACKUP_5] + supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5 + supervisor_client.jobs.get_job.return_value = TEST_JOB_NOT_DONE + + backup_id = "abc123" + test_backup = AgentBackup( + addons=[AddonInfo(name="Test", slug="test", version="1.0.0")], + backup_id=backup_id, + database_included=True, + date="1970-01-01T00:00:00.000Z", + extra_metadata={}, + folders=[Folder.MEDIA, Folder.SHARE], + homeassistant_included=True, + homeassistant_version="2024.12.0", + name="Test", + protected=False, + size=0, + ) + remote_agent = mock_backup_agent("remote", backups=[test_backup]) + await _setup_backup_platform( + hass, + domain="test", + platform=Mock( + async_get_backup_agents=AsyncMock(return_value=[remote_agent]), + spec_set=BackupAgentPlatformProtocol, + ), + ) + + await client.send_json_auto_id({"type": "backup/subscribe_events"}) + response = await client.receive_json() + assert response["event"] == { + "manager_state": "idle", + } + response = await client.receive_json() + assert response["success"] + + await client.send_json_auto_id( + {"type": "backup/restore", "agent_id": "test.remote", "backup_id": backup_id} + ) + response = await client.receive_json() + assert response["event"] == { + 
"manager_state": "restore_backup", + "reason": None, + "stage": None, + "state": "in_progress", + } + + remote_agent.async_download_backup.assert_called_once_with(backup_id) + assert len(remote_agent.async_get_backup.mock_calls) == 2 + for call in remote_agent.async_get_backup.mock_calls: + assert call.args[0] == backup_id + supervisor_client.backups.partial_restore.assert_called_once_with( + backup_id, + supervisor_backups.PartialRestoreOptions( + addons=None, + background=True, + folders=None, + homeassistant=True, + location=LOCATION_CLOUD_BACKUP, + password=None, + ), + ) + + await client.send_json_auto_id( + { + "type": "supervisor/event", + "data": {"event": "job", "data": {"done": True, "uuid": TEST_JOB_ID}}, + } + ) + response = await client.receive_json() + assert response["success"] + + response = await client.receive_json() + assert response["event"] == { + "manager_state": "restore_backup", + "reason": None, + "stage": None, + "state": "completed", + } + + response = await client.receive_json() + assert response["event"] == {"manager_state": "idle"} + + response = await client.receive_json() + assert response["success"] + assert response["result"] is None + + @pytest.mark.usefixtures("hassio_client", "setup_backup_integration") async def test_reader_writer_restore_report_progress( hass: HomeAssistant, @@ -2322,7 +2402,7 @@ async def test_reader_writer_restore_wrong_parameters( @pytest.mark.parametrize( - ("get_job_result", "last_non_idle_event"), + ("get_job_result", "last_action_event"), [ ( TEST_JOB_DONE, @@ -2350,7 +2430,7 @@ async def test_restore_progress_after_restart( hass_ws_client: WebSocketGenerator, supervisor_client: AsyncMock, get_job_result: supervisor_jobs.Job, - last_non_idle_event: dict[str, Any], + last_action_event: dict[str, Any], ) -> None: """Test restore backup progress after restart.""" @@ -2366,7 +2446,7 @@ async def test_restore_progress_after_restart( response = await client.receive_json() assert response["success"] - assert response["result"]["last_non_idle_event"] == last_non_idle_event + assert response["result"]["last_action_event"] == last_action_event assert response["result"]["state"] == "idle" @@ -2444,7 +2524,7 @@ async def test_restore_progress_after_restart_report_progress( response = await client.receive_json() assert response["success"] - assert response["result"]["last_non_idle_event"] == { + assert response["result"]["last_action_event"] == { "manager_state": "restore_backup", "reason": None, "stage": "addons", @@ -2473,7 +2553,7 @@ async def test_restore_progress_after_restart_unknown_job( response = await client.receive_json() assert response["success"] - assert response["result"]["last_non_idle_event"] is None + assert response["result"]["last_action_event"] is None assert response["result"]["state"] == "idle" @@ -2554,7 +2634,7 @@ async def test_config_load_config_info( freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, hass_storage: dict[str, Any], - storage_data: dict[str, Any] | None, + storage_data: dict[str, Any], ) -> None: """Test loading stored backup config and reading it via config/info.""" client = await hass_ws_client(hass) diff --git a/tests/components/heos/__init__.py b/tests/components/heos/__init__.py index 016cc7b3580..34eba8a9c76 100644 --- a/tests/components/heos/__init__.py +++ b/tests/components/heos/__init__.py @@ -2,7 +2,14 @@ from unittest.mock import AsyncMock -from pyheos import ConnectionState, Heos, HeosGroup, HeosOptions, HeosPlayer +from pyheos import ( + ConnectionState, + Heos, + HeosGroup, 
+ HeosOptions, + HeosPlayer, + MediaMusicSource, +) class MockHeos(Heos): @@ -13,6 +20,7 @@ class MockHeos(Heos): super().__init__(options) # Overwrite the methods with async mocks, changing type self.add_to_queue: AsyncMock = AsyncMock() + self.browse_media: AsyncMock = AsyncMock() self.connect: AsyncMock = AsyncMock() self.disconnect: AsyncMock = AsyncMock() self.get_favorites: AsyncMock = AsyncMock() @@ -20,6 +28,7 @@ class MockHeos(Heos): self.get_input_sources: AsyncMock = AsyncMock() self.get_playlists: AsyncMock = AsyncMock() self.get_players: AsyncMock = AsyncMock() + self.get_music_sources: AsyncMock = AsyncMock() self.group_volume_down: AsyncMock = AsyncMock() self.group_volume_up: AsyncMock = AsyncMock() self.get_system_info: AsyncMock = AsyncMock() @@ -28,6 +37,7 @@ class MockHeos(Heos): self.play_preset_station: AsyncMock = AsyncMock() self.play_url: AsyncMock = AsyncMock() self.player_clear_queue: AsyncMock = AsyncMock() + self.player_get_queue: AsyncMock = AsyncMock() self.player_get_quick_selects: AsyncMock = AsyncMock() self.player_play_next: AsyncMock = AsyncMock() self.player_play_previous: AsyncMock = AsyncMock() @@ -64,3 +74,17 @@ class MockHeos(Heos): def mock_set_connection_state(self, connection_state: ConnectionState) -> None: """Set the connection state on the mock instance.""" self._connection._state = connection_state + + def mock_set_current_host(self, host: str) -> None: + """Set the current host on the mock instance.""" + self._connection._host = host + + def mock_set_music_sources( + self, music_sources: dict[int, MediaMusicSource] + ) -> None: + """Set the music sources on the mock instance.""" + for music_source in music_sources.values(): + music_source.heos = self + self._music_sources = music_sources + self._music_sources_loaded = bool(music_sources) + self.get_music_sources.return_value = music_sources diff --git a/tests/components/heos/conftest.py b/tests/components/heos/conftest.py index 7bed05a0289..835e4436398 100644 --- a/tests/components/heos/conftest.py +++ b/tests/components/heos/conftest.py @@ -6,6 +6,7 @@ from collections.abc import Callable, Iterator from unittest.mock import Mock, patch from pyheos import ( + BrowseResult, HeosGroup, HeosHost, HeosNowPlayingMedia, @@ -14,10 +15,12 @@ from pyheos import ( HeosSystem, LineOutLevelType, MediaItem, + MediaMusicSource, MediaType, NetworkType, PlayerUpdateResult, PlayState, + QueueItem, RepeatType, const, ) @@ -294,10 +297,10 @@ def quick_selects_fixture() -> dict[int, str]: } -@pytest.fixture(name="playlists") -def playlists_fixture() -> list[MediaItem]: - """Create favorites fixture.""" - playlist = MediaItem( +@pytest.fixture(name="playlist") +def playlist_fixture() -> MediaItem: + """Create playlist fixture.""" + return MediaItem( source_id=const.MUSIC_SOURCE_PLAYLISTS, name="Awesome Music", type=MediaType.PLAYLIST, @@ -306,6 +309,44 @@ def playlists_fixture() -> list[MediaItem]: image_url="", heos=None, ) + + +@pytest.fixture(name="music_sources") +def music_sources_fixture() -> dict[int, MediaMusicSource]: + """Create music sources fixture.""" + return { + const.MUSIC_SOURCE_PANDORA: MediaMusicSource( + source_id=const.MUSIC_SOURCE_PANDORA, + name="Pandora", + type=MediaType.MUSIC_SERVICE, + available=True, + service_username="user", + image_url="", + heos=None, + ), + const.MUSIC_SOURCE_TUNEIN: MediaMusicSource( + source_id=const.MUSIC_SOURCE_TUNEIN, + name="TuneIn", + type=MediaType.MUSIC_SERVICE, + available=False, + service_username=None, + image_url="", + heos=None, + ), + } + + 
+@pytest.fixture(name="pandora_browse_result") +def pandora_browse_response_fixture(favorites: dict[int, MediaItem]) -> BrowseResult: + """Create a mock response for browsing Pandora.""" + return BrowseResult( + 1, 1, const.MUSIC_SOURCE_PANDORA, items=[favorites[1]], options=[] + ) + + +@pytest.fixture(name="playlists") +def playlists_fixture(playlist: MediaItem) -> list[MediaItem]: + """Create playlists fixture.""" return [playlist] @@ -319,3 +360,28 @@ def change_data_fixture() -> PlayerUpdateResult: def change_data_mapped_ids_fixture() -> PlayerUpdateResult: """Create player change data for testing.""" return PlayerUpdateResult(updated_player_ids={1: 101}) + + +@pytest.fixture(name="queue") +def queue_fixture() -> list[QueueItem]: + """Create a queue fixture.""" + return [ + QueueItem( + queue_id=1, + song="Espresso", + album="Espresso", + artist="Sabrina Carpenter", + image_url="http://resources.wimpmusic.com/images/e4f2d75f/a69e/4b8a/b800/e18546b1ad4c/640x640.jpg", + media_id="356276483", + album_id="356276481", + ), + QueueItem( + queue_id=2, + song="A Bar Song (Tipsy)", + album="A Bar Song (Tipsy)", + artist="Shaboozey", + image_url="http://resources.wimpmusic.com/images/d05b8da3/4fae/45ff/ac1b/7ab7caab3523/640x640.jpg", + media_id="354365598", + album_id="354365596", + ), + ] diff --git a/tests/components/heos/snapshots/test_media_player.ambr b/tests/components/heos/snapshots/test_media_player.ambr index 88d27f2073a..d366a7f6317 100644 --- a/tests/components/heos/snapshots/test_media_player.ambr +++ b/tests/components/heos/snapshots/test_media_player.ambr @@ -1,4 +1,190 @@ # serializer version: 1 +# name: test_browse_media_heos_media + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'track', + 'media_content_id': 'heos://media/1/station?name=Today%27s+Hits+Radio&image_url=&playable=True&browsable=False&media_id=123456789', + 'media_content_type': '', + 'thumbnail': '', + 'title': "Today's Hits Radio", + }), + ]), + 'children_media_class': 'track', + 'media_class': 'directory', + 'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': '', + 'title': 'Pandora', + }) +# --- +# name: test_browse_media_heos_media_error_returns_empty + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + ]), + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': '', + 'title': 'Pandora', + }) +# --- +# name: test_browse_media_media_source + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': 'music', + 'media_content_id': 'media-source://media_source/local/test.mp3', + 'media_content_type': 'audio/mpeg', + 'thumbnail': None, + 'title': 'test.mp3', + }), + ]), + 'children_media_class': 'music', + 'media_class': 'directory', + 'media_content_id': 'media-source://media_source/local/.', + 'media_content_type': '', + 'not_shown': 1, + 'thumbnail': None, + 'title': 'media', + }) +# --- +# name: test_browse_media_root + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': False, + 
'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user', + 'media_content_type': '', + 'thumbnail': '', + 'title': 'Pandora', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/3/music_service?name=TuneIn&image_url=&available=False', + 'media_content_type': '', + 'thumbnail': '', + 'title': 'TuneIn', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': 'music', + 'media_class': 'directory', + 'media_content_id': 'media-source://media_source/local/.', + 'media_content_type': '', + 'thumbnail': None, + 'title': 'media', + }), + ]), + 'children_media_class': 'directory', + 'media_class': 'directory', + 'media_content_id': 'heos://media', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Music Sources', + }) +# --- +# name: test_browse_media_root_no_media_source + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user', + 'media_content_type': '', + 'thumbnail': '', + 'title': 'Pandora', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': 'directory', + 'media_content_id': 'heos://media/3/music_service?name=TuneIn&image_url=&available=False', + 'media_content_type': '', + 'thumbnail': '', + 'title': 'TuneIn', + }), + ]), + 'children_media_class': 'directory', + 'media_class': 'directory', + 'media_content_id': 'heos://media', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Music Sources', + }) +# --- +# name: test_browse_media_root_source_error_continues + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + ]), + 'children_media_class': 'directory', + 'media_class': 'directory', + 'media_content_id': 'heos://media', + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Music Sources', + }) +# --- +# name: test_get_queue + dict({ + 'media_player.test_player': dict({ + 'queue': list([ + dict({ + 'album': 'Espresso', + 'album_id': '356276481', + 'artist': 'Sabrina Carpenter', + 'image_url': 'http://resources.wimpmusic.com/images/e4f2d75f/a69e/4b8a/b800/e18546b1ad4c/640x640.jpg', + 'media_id': '356276483', + 'queue_id': 1, + 'song': 'Espresso', + }), + dict({ + 'album': 'A Bar Song (Tipsy)', + 'album_id': '354365596', + 'artist': 'Shaboozey', + 'image_url': 'http://resources.wimpmusic.com/images/d05b8da3/4fae/45ff/ac1b/7ab7caab3523/640x640.jpg', + 'media_id': '354365598', + 'queue_id': 2, + 'song': 'A Bar Song (Tipsy)', + }), + ]), + }), + }) +# --- # name: test_state_attributes StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/heos/test_init.py b/tests/components/heos/test_init.py index b155abaf0e9..7bc232ad5a6 100644 --- a/tests/components/heos/test_init.py +++ b/tests/components/heos/test_init.py @@ -297,6 +297,25 @@ async def test_reconnected_new_entities_created( assert entity_registry.async_get_entity_id(MEDIA_PLAYER_DOMAIN, DOMAIN, "3") +async def test_reconnected_failover_updates_host( + hass: HomeAssistant, config_entry: MockConfigEntry, controller: MockHeos +) -> None: + """Test the config entry host is updated after 
failover.""" + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + assert config_entry.data[CONF_HOST] == "127.0.0.1" + + # Simulate reconnection + controller.mock_set_current_host("127.0.0.2") + await controller.dispatcher.wait_send( + SignalType.HEOS_EVENT, SignalHeosEvent.CONNECTED + ) + await hass.async_block_till_done() + + # Assert config entry host updated + assert config_entry.data[CONF_HOST] == "127.0.0.2" + + async def test_players_changed_new_entities_created( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index debfe31f427..474d606b5b1 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -7,23 +7,28 @@ from typing import Any from freezegun.api import FrozenDateTimeFactory from pyheos import ( AddCriteriaType, + BrowseResult, CommandFailedError, HeosError, MediaItem, + MediaMusicSource, MediaType as HeosMediaType, PlayerUpdateResult, PlayState, + QueueItem, RepeatType, SignalHeosEvent, SignalType, const, ) +from pyheos.util import mediauri import pytest from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.heos.const import ( DOMAIN, + SERVICE_GET_QUEUE, SERVICE_GROUP_VOLUME_DOWN, SERVICE_GROUP_VOLUME_SET, SERVICE_GROUP_VOLUME_UP, @@ -51,6 +56,7 @@ from homeassistant.components.media_player import ( MediaType, RepeatMode, ) +from homeassistant.components.media_source import DOMAIN as MS_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_MEDIA_NEXT_TRACK, @@ -73,6 +79,8 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from . 
import MockHeos from tests.common import MockConfigEntry, async_fire_time_changed +from tests.conftest import async_setup_component +from tests.typing import WebSocketGenerator async def test_state_attributes( @@ -1239,6 +1247,267 @@ async def test_play_media_invalid_type( ) +async def test_play_media_media_uri( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + playlist: MediaItem, +) -> None: + """Test the play media service with HEOS media uri.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + media_content_id = mediauri.to_media_uri(playlist) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: media_content_id, + ATTR_MEDIA_CONTENT_TYPE: "", + }, + blocking=True, + ) + controller.play_media.assert_called_once() + + +async def test_play_media_media_uri_invalid( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, +) -> None: + """Test the play media service with an invalid HEOS media uri raises.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + media_id = "heos://media/1/music_service?name=Pandora&available=False&image_url=" + + with pytest.raises( + HomeAssistantError, + match=re.escape(f"Unable to play media: Invalid media id '{media_id}'"), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: media_id, + ATTR_MEDIA_CONTENT_TYPE: "", + }, + blocking=True, + ) + controller.play_media.assert_not_called() + + +async def test_play_media_music_source_url( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, +) -> None: + """Test the play media service with a music source url.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await async_setup_component(hass, MS_DOMAIN, {MS_DOMAIN: {}}) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/test.mp3", + ATTR_MEDIA_CONTENT_TYPE: "", + }, + blocking=True, + ) + controller.play_url.assert_called_once() + + +async def test_browse_media_root( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + music_sources: dict[int, MediaMusicSource], + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing the root.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await async_setup_component(hass, MS_DOMAIN, {MS_DOMAIN: {}}) + + controller.mock_set_music_sources(music_sources) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + + +async def test_browse_media_root_no_media_source( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + music_sources: dict[int, MediaMusicSource], + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing the root.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + 
controller.mock_set_music_sources(music_sources) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + + +async def test_browse_media_root_source_error_continues( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing the root with an error getting sources continues.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.get_music_sources.side_effect = HeosError("error") + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + assert "Unable to load music sources" in caplog.text + + +async def test_browse_media_heos_media( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + hass_ws_client: WebSocketGenerator, + pandora_browse_result: BrowseResult, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing a heos media item.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.browse_media.return_value = pandora_browse_result + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user", + "media_content_type": "", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + + +async def test_browse_media_heos_media_error_returns_empty( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing a heos media item results in an error, returns empty children.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.browse_media.side_effect = HeosError("error") + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "heos://media/1/music_service?name=Pandora&image_url=&available=True&service_username=user", + "media_content_type": "", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + assert "Unable to browse media" in caplog.text + + +async def test_browse_media_media_source( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test browsing a media source.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await async_setup_component(hass, MS_DOMAIN, {MS_DOMAIN: {}}) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": 
"media-source://media_source/local/.", + "media_content_type": "", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == snapshot + + +async def test_browse_media_invalid_content_id( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test browsing an invalid content id fails.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "invalid", + "media_content_type": "", + } + ) + response = await client.receive_json() + assert not response["success"] + + @pytest.mark.parametrize( ("members", "expected"), [ @@ -1429,3 +1698,27 @@ async def test_media_player_group_fails_wrong_integration( blocking=True, ) controller.set_group.assert_not_called() + + +async def test_get_queue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + controller: MockHeos, + queue: list[QueueItem], + snapshot: SnapshotAssertion, +) -> None: + """Test the get queue service.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + controller.player_get_queue.return_value = queue + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_QUEUE, + { + ATTR_ENTITY_ID: "media_player.test_player", + }, + blocking=True, + return_response=True, + ) + controller.player_get_queue.assert_called_once_with(1, None, None) + assert response == snapshot diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index 721e540b04d..e2dba1b9355 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -122,6 +122,7 @@ async def test_setup_multiple_states( }, ], ) +@pytest.mark.usefixtures("hass") def test_setup_invalid_config(config) -> None: """Test the history statistics sensor setup with invalid config.""" diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index c0caf2b2bdd..21cd236b1a8 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -473,20 +473,6 @@ def mock_client_with_exception( return mock -@pytest.fixture(name="appliance_ha_id") -def mock_appliance_ha_id( - appliances: list[HomeAppliance], request: pytest.FixtureRequest -) -> str: - """Fixture to get the ha_id of an appliance.""" - appliance_type = "Washer" - if hasattr(request, "param") and request.param: - appliance_type = request.param - for appliance in appliances: - if appliance.type == appliance_type: - return appliance.ha_id - raise ValueError(f"Appliance {appliance_type} not found") - - @pytest.fixture(name="appliances") def mock_appliances( appliances_data: str, request: pytest.FixtureRequest diff --git a/tests/components/home_connect/snapshots/test_init.ambr b/tests/components/home_connect/snapshots/test_services.ambr similarity index 96% rename from tests/components/home_connect/snapshots/test_init.ambr rename to tests/components/home_connect/snapshots/test_services.ambr index 709621aaefb..610e9fa1248 100644 --- a/tests/components/home_connect/snapshots/test_init.ambr +++ b/tests/components/home_connect/snapshots/test_services.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_set_program_and_options[service_call0-set_selected_program] +# name: 
test_set_program_and_options[service_call0-set_selected_program-Washer] _Call( tuple( 'SIEMENS-HCS03WCH1-7BC6383CF794', @@ -18,7 +18,7 @@ }), ) # --- -# name: test_set_program_and_options[service_call1-start_program] +# name: test_set_program_and_options[service_call1-start_program-Washer] _Call( tuple( 'SIEMENS-HCS03WCH1-7BC6383CF794', @@ -37,7 +37,7 @@ }), ) # --- -# name: test_set_program_and_options[service_call2-set_active_program_options] +# name: test_set_program_and_options[service_call2-set_active_program_options-Washer] _Call( tuple( 'SIEMENS-HCS03WCH1-7BC6383CF794', @@ -57,7 +57,7 @@ }), ) # --- -# name: test_set_program_and_options[service_call3-set_selected_program_options] +# name: test_set_program_and_options[service_call3-set_selected_program_options-Washer] _Call( tuple( 'SIEMENS-HCS03WCH1-7BC6383CF794', diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index a06e386b84f..ce879a38de5 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -1,9 +1,17 @@ """Tests for home_connect binary_sensor entities.""" from collections.abc import Awaitable, Callable +from http import HTTPStatus from unittest.mock import AsyncMock, MagicMock -from aiohomeconnect.model import ArrayOfEvents, Event, EventKey, EventMessage, EventType +from aiohomeconnect.model import ( + ArrayOfEvents, + Event, + EventKey, + EventMessage, + EventType, + HomeAppliance, +) from aiohomeconnect.model.error import HomeConnectApiError import pytest @@ -32,6 +40,7 @@ import homeassistant.helpers.issue_registry as ir from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator @pytest.fixture @@ -52,8 +61,9 @@ async def test_binary_sensors( assert config_entry.state == ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -67,7 +77,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -75,7 +85,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -83,7 +93,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -92,7 +102,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -100,13 +110,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert 
device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -123,7 +134,7 @@ async def test_connected_devices( get_status_original_mock = client.get_status def get_status_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -135,14 +146,14 @@ async def test_connected_devices( assert config_entry.state == ConfigEntryState.LOADED client.get_status = get_status_original_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -150,19 +161,21 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -async def test_binary_sensors_entity_availabilty( +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +async def test_binary_sensors_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if binary sensor entities availability are based on the appliance connection state.""" entity_ids = [ @@ -181,7 +194,7 @@ async def test_binary_sensors_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -195,7 +208,7 @@ async def test_binary_sensors_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -209,6 +222,8 @@ async def test_binary_sensors_entity_availabilty( assert state.state != STATE_UNAVAILABLE +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ("value", "expected"), [ @@ -219,7 +234,7 @@ async def test_binary_sensors_entity_availabilty( ], ) async def test_binary_sensors_door_states( - appliance_ha_id: str, + appliance: HomeAppliance, expected: str, value: str, hass: HomeAssistant, @@ -237,7 +252,7 @@ async def test_binary_sensors_door_states( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.STATUS, ArrayOfEvents( [ @@ -259,7 +274,7 @@ async def test_binary_sensors_door_states( @pytest.mark.parametrize( - ("entity_id", "event_key", "event_value_update", "expected", 
"appliance_ha_id"), + ("entity_id", "event_key", "event_value_update", "expected", "appliance"), [ ( "binary_sensor.washer_remote_control", @@ -304,13 +319,13 @@ async def test_binary_sensors_door_states( "FridgeFreezer", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_binary_sensors_functionality( entity_id: str, event_key: EventKey, event_value_update: str, - appliance_ha_id: str, + appliance: HomeAppliance, expected: str, hass: HomeAssistant, config_entry: MockConfigEntry, @@ -325,7 +340,7 @@ async def test_binary_sensors_functionality( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.STATUS, ArrayOfEvents( [ @@ -346,13 +361,14 @@ async def test_binary_sensors_functionality( assert hass.states.is_state(entity_id, expected) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_sensor_functionality( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if the connected binary sensor reports the right values.""" entity_id = "binary_sensor.washer_connectivity" @@ -365,7 +381,7 @@ async def test_connected_sensor_functionality( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -378,7 +394,7 @@ async def test_connected_sensor_functionality( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -390,7 +406,7 @@ async def test_connected_sensor_functionality( @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_create_issue( +async def test_create_door_binary_sensor_deprecation_issue( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -398,7 +414,7 @@ async def test_create_issue( client: MagicMock, issue_registry: ir.IssueRegistry, ) -> None: - """Test we create an issue when an automation or script is using a deprecated entity.""" + """Test that we create an issue when an automation or script is using a door binary sensor entity.""" entity_id = "binary_sensor.washer_door" issue_id = f"deprecated_binary_common_door_sensor_{entity_id}" @@ -452,3 +468,76 @@ async def test_create_issue( # Assert the issue is no longer present assert not issue_registry.async_get_issue(DOMAIN, issue_id) assert len(issue_registry.issues) == 0 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_door_binary_sensor_deprecation_issue_fix( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test that we create an issue when an automation or script is using a door binary sensor entity.""" + entity_id = "binary_sensor.washer_door" + issue_id = f"deprecated_binary_common_door_sensor_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + 
"sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue + + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/home_connect/test_button.py b/tests/components/home_connect/test_button.py index 5af7e40ca43..f894494792d 100644 --- a/tests/components/home_connect/test_button.py +++ b/tests/components/home_connect/test_button.py @@ -4,7 +4,12 @@ from collections.abc import Awaitable, Callable from typing import Any from unittest.mock import AsyncMock, MagicMock -from aiohomeconnect.model import ArrayOfCommands, CommandKey, EventMessage +from aiohomeconnect.model import ( + ArrayOfCommands, + CommandKey, + EventMessage, + HomeAppliance, +) from aiohomeconnect.model.command import Command from aiohomeconnect.model.error import HomeConnectApiError from aiohomeconnect.model.event import ArrayOfEvents, EventType @@ -40,8 +45,9 @@ async def test_buttons( assert config_entry.state == ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -55,7 +61,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -63,7 +69,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -71,7 +77,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -80,7 +86,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -88,13 +94,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for 
entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -112,14 +119,14 @@ async def test_connected_devices( get_available_programs_mock = client.get_available_programs async def get_available_commands_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) return await get_available_commands_original_mock.side_effect(ha_id) async def get_available_programs_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -137,14 +144,14 @@ async def test_connected_devices( client.get_available_commands = get_available_commands_original_mock client.get_available_programs = get_available_programs_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -152,19 +159,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -async def test_button_entity_availabilty( +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +async def test_button_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if button entities availability are based on the appliance connection state.""" entity_ids = [ @@ -183,7 +191,7 @@ async def test_button_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -197,7 +205,7 @@ async def test_button_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -211,6 +219,7 @@ async def test_button_entity_availabilty( assert state.state != STATE_UNAVAILABLE +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ("entity_id", "method_call", "expected_kwargs"), [ @@ -231,7 +240,7 @@ async def test_button_functionality( entity_id: str, method_call: str, expected_kwargs: dict[str, Any], - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if button entities availability are based on the appliance connection state.""" assert config_entry.state == ConfigEntryState.NOT_LOADED @@ -248,7 +257,7 @@ async def test_button_functionality( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - getattr(client, method_call).assert_called_with(appliance_ha_id, **expected_kwargs) + 
getattr(client, method_call).assert_called_with(appliance.ha_id, **expected_kwargs) async def test_command_button_exception( diff --git a/tests/components/home_connect/test_coordinator.py b/tests/components/home_connect/test_coordinator.py index 84bef94d658..050758a6568 100644 --- a/tests/components/home_connect/test_coordinator.py +++ b/tests/components/home_connect/test_coordinator.py @@ -31,8 +31,17 @@ from homeassistant.components.home_connect.const import ( BSH_POWER_OFF, DOMAIN, ) +from homeassistant.components.homeassistant import ( + DOMAIN as HA_DOMAIN, + SERVICE_UPDATE_ENTITY, +) from homeassistant.config_entries import ConfigEntries, ConfigEntryState -from homeassistant.const import EVENT_STATE_REPORTED, Platform +from homeassistant.const import ( + ATTR_ENTITY_ID, + EVENT_STATE_REPORTED, + STATE_UNAVAILABLE, + Platform, +) from homeassistant.core import ( Event as HassEvent, EventStateReportedData, @@ -98,30 +107,30 @@ async def test_coordinator_failure_refresh_and_stream( ) entity_id_1 = "binary_sensor.washer_remote_control" entity_id_2 = "binary_sensor.washer_remote_start" - await async_setup_component(hass, "homeassistant", {}) + await async_setup_component(hass, HA_DOMAIN, {}) await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED state = hass.states.get(entity_id_1) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE client.get_home_appliances.side_effect = HomeConnectError() # Force a coordinator refresh. await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id_1}, blocking=True + HA_DOMAIN, SERVICE_UPDATE_ENTITY, {ATTR_ENTITY_ID: entity_id_1}, blocking=True ) await hass.async_block_till_done() state = hass.states.get(entity_id_1) assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE # Test that the entity becomes available again after a successful update. @@ -137,16 +146,16 @@ async def test_coordinator_failure_refresh_and_stream( # Force a coordinator refresh. await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id_1}, blocking=True + HA_DOMAIN, SERVICE_UPDATE_ENTITY, {ATTR_ENTITY_ID: entity_id_1}, blocking=True ) await hass.async_block_till_done() state = hass.states.get(entity_id_1) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE # Test that the event stream makes the entity go available too. @@ -160,16 +169,16 @@ async def test_coordinator_failure_refresh_and_stream( # Force a coordinator refresh await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id_1}, blocking=True + HA_DOMAIN, SERVICE_UPDATE_ENTITY, {ATTR_ENTITY_ID: entity_id_1}, blocking=True ) await hass.async_block_till_done() state = hass.states.get(entity_id_1) assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state == "unavailable" + assert state.state == STATE_UNAVAILABLE # Now make the entity available again. 
client.get_home_appliances.side_effect = None @@ -199,10 +208,10 @@ async def test_coordinator_failure_refresh_and_stream( state = hass.states.get(entity_id_1) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE state = hass.states.get(entity_id_2) assert state - assert state.state != "unavailable" + assert state.state != STATE_UNAVAILABLE @pytest.mark.parametrize( @@ -235,9 +244,9 @@ async def test_coordinator_update_failing( getattr(client, mock_method).assert_called() -@pytest.mark.parametrize("appliance_ha_id", ["Dishwasher"], indirect=True) +@pytest.mark.parametrize("appliance", ["Dishwasher"], indirect=True) @pytest.mark.parametrize( - ("event_type", "event_key", "event_value", "entity_id"), + ("event_type", "event_key", "event_value", ATTR_ENTITY_ID), [ ( EventType.STATUS, @@ -269,7 +278,7 @@ async def test_event_listener( integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, entity_registry: er.EntityRegistry, ) -> None: """Test that the event listener works.""" @@ -280,7 +289,7 @@ async def test_event_listener( state = hass.states.get(entity_id) assert state event_message = EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -327,13 +336,14 @@ async def test_event_listener( listener.assert_called_once_with(new_entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def tests_receive_setting_and_status_for_first_time_at_events( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test that the event listener is capable of receiving settings and status for the first time.""" client.get_setting = AsyncMock(return_value=ArrayOfSettings([])) @@ -346,7 +356,7 @@ async def tests_receive_setting_and_status_for_first_time_at_events( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, ArrayOfEvents( [ @@ -362,7 +372,7 @@ async def tests_receive_setting_and_status_for_first_time_at_events( ), ), EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.STATUS, ArrayOfEvents( [ @@ -519,7 +529,7 @@ async def test_devices_updated_on_refresh( return_value=ArrayOfHomeAppliances(appliances[:2]), ) - await async_setup_component(hass, "homeassistant", {}) + await async_setup_component(hass, HA_DOMAIN, {}) assert config_entry.state == ConfigEntryState.NOT_LOADED await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED @@ -532,9 +542,9 @@ async def test_devices_updated_on_refresh( return_value=ArrayOfHomeAppliances(appliances[1:3]), ) await hass.services.async_call( - "homeassistant", - "update_entity", - {"entity_id": "switch.dishwasher_power"}, + HA_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: "switch.dishwasher_power"}, blocking=True, ) diff --git a/tests/components/home_connect/test_entity.py b/tests/components/home_connect/test_entity.py index 6ac9a2c1d90..e91a01a907a 100644 --- a/tests/components/home_connect/test_entity.py +++ b/tests/components/home_connect/test_entity.py @@ -11,6 +11,7 @@ from aiohomeconnect.model import ( EventKey, EventMessage, EventType, + HomeAppliance, Option, OptionKey, Program, @@ -67,7 +68,7 @@ def platforms() -> list[str]: ) @pytest.mark.parametrize( ( - "appliance_ha_id", + "appliance", "option_entity_id", 
"options_state_stage_1", "options_availability_stage_2", @@ -86,17 +87,17 @@ def platforms() -> list[str]: [False, True, True], ( OptionKey.DISHCARE_DISHWASHER_HYGIENE_PLUS, - "switch.dishwasher_hygiene_plus", + "switch.dishwasher_hygiene", ), (OptionKey.DISHCARE_DISHWASHER_EXTRA_DRY, "switch.dishwasher_extra_dry"), ) ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_program_options_retrieval( array_of_programs_program_arg: str, event_key: EventKey, - appliance_ha_id: str, + appliance: HomeAppliance, option_entity_id: dict[OptionKey, str], options_state_stage_1: list[tuple[str, bool | None]], options_availability_stage_2: list[bool], @@ -122,7 +123,7 @@ async def test_program_options_retrieval( ] async def get_all_programs_with_options_mock(ha_id: str) -> ArrayOfPrograms: - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return await original_get_all_programs_mock(ha_id) array_of_programs: ArrayOfPrograms = await original_get_all_programs_mock(ha_id) @@ -204,7 +205,7 @@ async def test_program_options_retrieval( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, data=ArrayOfEvents( [ @@ -235,6 +236,7 @@ async def test_program_options_retrieval( assert hass.states.is_state(entity_id, STATE_UNKNOWN) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ("array_of_programs_program_arg", "event_key"), [ @@ -251,7 +253,7 @@ async def test_program_options_retrieval( async def test_no_options_retrieval_on_unknown_program( array_of_programs_program_arg: str, event_key: EventKey, - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -285,7 +287,7 @@ async def test_no_options_retrieval_on_unknown_program( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, data=ArrayOfEvents( [ @@ -315,7 +317,7 @@ async def test_no_options_retrieval_on_unknown_program( ], ) @pytest.mark.parametrize( - ("appliance_ha_id", "option_key", "option_entity_id"), + ("appliance", "option_key", "option_entity_id"), [ ( "Dishwasher", @@ -323,11 +325,11 @@ async def test_no_options_retrieval_on_unknown_program( "switch.dishwasher_half_load", ) ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_program_options_retrieval_after_appliance_connection( event_key: EventKey, - appliance_ha_id: str, + appliance: HomeAppliance, option_key: OptionKey, option_entity_id: str, hass: HomeAssistant, @@ -344,7 +346,7 @@ async def test_program_options_retrieval_after_appliance_connection( [ appliance for appliance in array_of_home_appliances.homeappliances - if appliance.ha_id != appliance_ha_id + if appliance.ha_id != appliance.ha_id ] ) @@ -367,7 +369,7 @@ async def test_program_options_retrieval_after_appliance_connection( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents( [ @@ -405,7 +407,7 @@ async def test_program_options_retrieval_after_appliance_connection( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, data=ArrayOfEvents( [ @@ -450,7 +452,6 @@ async def test_program_options_retrieval_after_appliance_connection( async def test_option_entity_functionality_exception( set_active_program_option_side_effect: HomeConnectError | None, set_selected_program_option_side_effect: HomeConnectError | None, - appliance_ha_id: str, 
hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 291caeafd58..21bb0291e1a 100644 --- a/tests/components/home_connect/test_init.py +++ b/tests/components/home_connect/test_init.py @@ -1,12 +1,11 @@ """Test the integration init functionality.""" from collections.abc import Awaitable, Callable -from http import HTTPStatus from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from aiohomeconnect.const import OAUTH2_TOKEN -from aiohomeconnect.model import OptionKey, ProgramKey, SettingKey, StatusKey +from aiohomeconnect.model import HomeAppliance, SettingKey, StatusKey from aiohomeconnect.model.error import ( HomeConnectError, TooManyRequestsError, @@ -14,7 +13,6 @@ from aiohomeconnect.model.error import ( ) import aiohttp import pytest -from syrupy.assertion import SnapshotAssertion from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.home_connect.const import DOMAIN @@ -25,9 +23,8 @@ from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr, entity_registry as er -import homeassistant.helpers.issue_registry as ir from script.hassfest.translations import RE_TRANSLATION_KEY from .conftest import ( @@ -40,157 +37,6 @@ from .conftest import ( from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker -from tests.typing import ClientSessionGenerator - -DEPRECATED_SERVICE_KV_CALL_PARAMS = [ - { - "domain": DOMAIN, - "service": "set_option_active", - "service_data": { - "device_id": "DEVICE_ID", - "key": OptionKey.BSH_COMMON_FINISH_IN_RELATIVE.value, - "value": 43200, - "unit": "seconds", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "set_option_selected", - "service_data": { - "device_id": "DEVICE_ID", - "key": OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE.value, - "value": "LaundryCare.Washer.EnumType.Temperature.GC40", - }, - "blocking": True, - }, -] - -SERVICE_KV_CALL_PARAMS = [ - *DEPRECATED_SERVICE_KV_CALL_PARAMS, - { - "domain": DOMAIN, - "service": "change_setting", - "service_data": { - "device_id": "DEVICE_ID", - "key": SettingKey.BSH_COMMON_CHILD_LOCK.value, - "value": True, - }, - "blocking": True, - }, -] - -SERVICE_COMMAND_CALL_PARAMS = [ - { - "domain": DOMAIN, - "service": "pause_program", - "service_data": { - "device_id": "DEVICE_ID", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "resume_program", - "service_data": { - "device_id": "DEVICE_ID", - }, - "blocking": True, - }, -] - - -SERVICE_PROGRAM_CALL_PARAMS = [ - { - "domain": DOMAIN, - "service": "select_program", - "service_data": { - "device_id": "DEVICE_ID", - "program": ProgramKey.LAUNDRY_CARE_WASHER_COTTON.value, - "key": OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE.value, - "value": "LaundryCare.Washer.EnumType.Temperature.GC40", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "start_program", - "service_data": { - "device_id": "DEVICE_ID", - "program": ProgramKey.LAUNDRY_CARE_WASHER_COTTON.value, - "key": 
OptionKey.BSH_COMMON_FINISH_IN_RELATIVE.value, - "value": 43200, - "unit": "seconds", - }, - "blocking": True, - }, -] - -SERVICE_APPLIANCE_METHOD_MAPPING = { - "set_option_active": "set_active_program_option", - "set_option_selected": "set_selected_program_option", - "change_setting": "set_setting", - "pause_program": "put_command", - "resume_program": "put_command", - "select_program": "set_selected_program", - "start_program": "start_program", -} - -SERVICE_VALIDATION_ERROR_MAPPING = { - "set_option_active": r"Error.*setting.*options.*active.*program.*", - "set_option_selected": r"Error.*setting.*options.*selected.*program.*", - "change_setting": r"Error.*assigning.*value.*setting.*", - "pause_program": r"Error.*executing.*command.*", - "resume_program": r"Error.*executing.*command.*", - "select_program": r"Error.*selecting.*program.*", - "start_program": r"Error.*starting.*program.*", -} - - -SERVICES_SET_PROGRAM_AND_OPTIONS = [ - { - "domain": DOMAIN, - "service": "set_program_and_options", - "service_data": { - "device_id": "DEVICE_ID", - "affects_to": "selected_program", - "program": "dishcare_dishwasher_program_eco_50", - "b_s_h_common_option_start_in_relative": 1800, - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "set_program_and_options", - "service_data": { - "device_id": "DEVICE_ID", - "affects_to": "active_program", - "program": "consumer_products_coffee_maker_program_beverage_coffee", - "consumer_products_coffee_maker_option_bean_amount": "consumer_products_coffee_maker_enum_type_bean_amount_normal", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "set_program_and_options", - "service_data": { - "device_id": "DEVICE_ID", - "affects_to": "active_program", - "consumer_products_coffee_maker_option_coffee_milk_ratio": "consumer_products_coffee_maker_enum_type_coffee_milk_ratio_50_percent", - }, - "blocking": True, - }, - { - "domain": DOMAIN, - "service": "set_program_and_options", - "service_data": { - "device_id": "DEVICE_ID", - "affects_to": "selected_program", - "consumer_products_coffee_maker_option_fill_quantity": 35, - }, - "blocking": True, - }, -] async def test_entry_setup( @@ -401,197 +247,7 @@ async def test_client_rate_limit_error( asyncio_sleep_mock.assert_called_once_with(retry_after) -@pytest.mark.parametrize( - "service_call", - SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, -) -async def test_key_value_services( - service_call: dict[str, Any], - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client: MagicMock, - appliance_ha_id: str, -) -> None: - """Create and test services.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_name = service_call["service"] - service_call["service_data"]["device_id"] = device_entry.id - await hass.services.async_call(**service_call) - await hass.async_block_till_done() - assert ( - getattr(client, SERVICE_APPLIANCE_METHOD_MAPPING[service_name]).call_count == 1 - ) - - -@pytest.mark.parametrize( - ("service_call", "issue_id"), - [ - *zip( - DEPRECATED_SERVICE_KV_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, - ["deprecated_set_program_and_option_actions"] - * ( - 
len(DEPRECATED_SERVICE_KV_CALL_PARAMS) - + len(SERVICE_PROGRAM_CALL_PARAMS) - ), - strict=True, - ), - *zip( - SERVICE_COMMAND_CALL_PARAMS, - ["deprecated_command_actions"] * len(SERVICE_COMMAND_CALL_PARAMS), - strict=True, - ), - ], -) -async def test_programs_and_options_actions_deprecation( - service_call: dict[str, Any], - issue_id: str, - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client: MagicMock, - appliance_ha_id: str, - issue_registry: ir.IssueRegistry, - hass_client: ClientSessionGenerator, -) -> None: - """Test deprecated service keys.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - await hass.services.async_call(**service_call) - await hass.async_block_till_done() - - assert len(issue_registry.issues) == 1 - issue = issue_registry.async_get_issue(DOMAIN, issue_id) - assert issue - - _client = await hass_client() - resp = await _client.post( - "/api/repairs/issues/fix", - json={"handler": DOMAIN, "issue_id": issue.issue_id}, - ) - assert resp.status == HTTPStatus.OK - flow_id = (await resp.json())["flow_id"] - resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") - - assert not issue_registry.async_get_issue(DOMAIN, issue_id) - assert len(issue_registry.issues) == 0 - - await hass.services.async_call(**service_call) - await hass.async_block_till_done() - - assert len(issue_registry.issues) == 1 - assert issue_registry.async_get_issue(DOMAIN, issue_id) - - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue(DOMAIN, issue_id) - assert len(issue_registry.issues) == 0 - - -@pytest.mark.parametrize( - ("service_call", "called_method"), - zip( - SERVICES_SET_PROGRAM_AND_OPTIONS, - [ - "set_selected_program", - "start_program", - "set_active_program_options", - "set_selected_program_options", - ], - strict=True, - ), -) -async def test_set_program_and_options( - service_call: dict[str, Any], - called_method: str, - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client: MagicMock, - appliance_ha_id: str, - snapshot: SnapshotAssertion, -) -> None: - """Test recognized options.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - await hass.services.async_call(**service_call) - await hass.async_block_till_done() - method_mock: MagicMock = getattr(client, called_method) - assert method_mock.call_count == 1 - assert method_mock.call_args == snapshot - - -@pytest.mark.parametrize( - ("service_call", "error_regex"), - zip( - SERVICES_SET_PROGRAM_AND_OPTIONS, - [ - r"Error.*selecting.*program.*", - r"Error.*starting.*program.*", - 
r"Error.*setting.*options.*active.*program.*", - r"Error.*setting.*options.*selected.*program.*", - ], - strict=True, - ), -) -async def test_set_program_and_options_exceptions( - service_call: dict[str, Any], - error_regex: str, - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client_with_exception: MagicMock, - appliance_ha_id: str, -) -> None: - """Test recognized options.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client_with_exception) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - with pytest.raises(HomeAssistantError, match=error_regex): - await hass.services.async_call(**service_call) - - +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_required_program_or_at_least_an_option( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -599,7 +255,7 @@ async def test_required_program_or_at_least_an_option( integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: "Test that the set_program_and_options does raise an exception if no program nor options are set." @@ -609,7 +265,7 @@ async def test_required_program_or_at_least_an_option( device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, + identifiers={(DOMAIN, appliance.ha_id)}, ) with pytest.raises( @@ -626,119 +282,13 @@ async def test_required_program_or_at_least_an_option( ) -@pytest.mark.parametrize( - "service_call", - SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, -) -async def test_services_exception_device_id( - service_call: dict[str, Any], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client_with_exception: MagicMock, - appliance_ha_id: str, - device_registry: dr.DeviceRegistry, -) -> None: - """Raise a HomeAssistantError when there is an API error.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client_with_exception) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - - with pytest.raises(HomeAssistantError): - await hass.services.async_call(**service_call) - - -async def test_services_appliance_not_found( - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client: MagicMock, - device_registry: dr.DeviceRegistry, -) -> None: - """Raise a ServiceValidationError when device id does not match.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client) - assert config_entry.state == ConfigEntryState.LOADED - - service_call = SERVICE_KV_CALL_PARAMS[0] - - service_call["service_data"]["device_id"] = "DOES_NOT_EXISTS" - - with pytest.raises(ServiceValidationError, match=r"Device entry.*not 
found"): - await hass.services.async_call(**service_call) - - unrelated_config_entry = MockConfigEntry( - domain="TEST", - ) - unrelated_config_entry.add_to_hass(hass) - device_entry = device_registry.async_get_or_create( - config_entry_id=unrelated_config_entry.entry_id, - identifiers={("RANDOM", "ABCD")}, - ) - service_call["service_data"]["device_id"] = device_entry.id - - with pytest.raises(ServiceValidationError, match=r"Config entry.*not found"): - await hass.services.async_call(**service_call) - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={("RANDOM", "ABCD")}, - ) - service_call["service_data"]["device_id"] = device_entry.id - - with pytest.raises(ServiceValidationError, match=r"Appliance.*not found"): - await hass.services.async_call(**service_call) - - -@pytest.mark.parametrize( - "service_call", - SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, -) -async def test_services_exception( - service_call: dict[str, Any], - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[MagicMock], Awaitable[bool]], - setup_credentials: None, - client_with_exception: MagicMock, - appliance_ha_id: str, - device_registry: dr.DeviceRegistry, -) -> None: - """Raise a ValueError when device id does not match.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - assert await integration_setup(client_with_exception) - assert config_entry.state == ConfigEntryState.LOADED - - device_entry = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, - ) - - service_call["service_data"]["device_id"] = device_entry.id - - service_name = service_call["service"] - with pytest.raises( - HomeAssistantError, - match=SERVICE_VALIDATION_ERROR_MAPPING[service_name], - ): - await hass.services.async_call(**service_call) - - +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_entity_migration( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, config_entry_v1_1: MockConfigEntry, - appliance_ha_id: str, + appliance: HomeAppliance, platforms: list[Platform], ) -> None: """Test entity migration.""" @@ -747,7 +297,7 @@ async def test_entity_migration( device_entry = device_registry.async_get_or_create( config_entry_id=config_entry_v1_1.entry_id, - identifiers={(DOMAIN, appliance_ha_id)}, + identifiers={(DOMAIN, appliance.ha_id)}, ) test_entities = [ @@ -787,7 +337,7 @@ async def test_entity_migration( entity_registry.async_get_or_create( domain, DOMAIN, - f"{appliance_ha_id}-{old_unique_id_suffix}", + f"{appliance.ha_id}-{old_unique_id_suffix}", device_id=device_entry.id, config_entry=config_entry_v1_1, ) @@ -798,7 +348,7 @@ async def test_entity_migration( for domain, _, expected_unique_id_suffix in test_entities: assert entity_registry.async_get_entity_id( - domain, DOMAIN, f"{appliance_ha_id}-{expected_unique_id_suffix}" + domain, DOMAIN, f"{appliance.ha_id}-{expected_unique_id_suffix}" ) assert config_entry_v1_1.minor_version == 2 diff --git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py index 6021c99bb5e..50a1a1e374a 100644 --- a/tests/components/home_connect/test_light.py +++ b/tests/components/home_connect/test_light.py @@ -12,6 +12,7 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, SettingKey, ) from aiohomeconnect.model.error import HomeConnectApiError, 
HomeConnectError @@ -21,9 +22,15 @@ from homeassistant.components.home_connect.const import ( BSH_AMBIENT_LIGHT_COLOR_CUSTOM_COLOR, DOMAIN, ) -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_HS_COLOR, + ATTR_RGB_COLOR, + DOMAIN as LIGHT_DOMAIN, +) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( + ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, @@ -58,9 +65,9 @@ async def test_light( assert config_entry.state == ConfigEntryState.LOADED -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -74,7 +81,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -82,7 +89,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -90,7 +97,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -99,7 +106,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -107,14 +114,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -132,14 +139,14 @@ async def test_connected_devices( get_available_programs_mock = client.get_available_programs async def get_settings_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) return await get_settings_original_mock.side_effect(ha_id) async def get_available_programs_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -155,14 +162,14 @@ async def test_connected_devices( client.get_settings = get_settings_original_mock client.get_available_programs = 
get_available_programs_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -170,20 +177,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) -async def test_light_availabilty( +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) +async def test_light_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if light entities availability are based on the appliance connection state.""" entity_ids = [ @@ -201,7 +208,7 @@ async def test_light_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -215,7 +222,7 @@ async def test_light_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -236,7 +243,7 @@ async def test_light_availabilty( "service", "exprected_attributes", "state", - "appliance_ha_id", + "appliance", ), [ ( @@ -256,7 +263,7 @@ async def test_light_availabilty( SettingKey.COOKING_COMMON_LIGHTING_BRIGHTNESS: 80, }, SERVICE_TURN_ON, - {"brightness": 199}, + {ATTR_BRIGHTNESS: 199}, STATE_ON, "Hood", ), @@ -277,7 +284,7 @@ async def test_light_availabilty( SettingKey.BSH_COMMON_AMBIENT_LIGHT_BRIGHTNESS: 80, }, SERVICE_TURN_ON, - {"brightness": 199}, + {ATTR_BRIGHTNESS: 199}, STATE_ON, "Hood", ), @@ -310,7 +317,7 @@ async def test_light_availabilty( }, SERVICE_TURN_ON, { - "rgb_color": (255, 255, 0), + ATTR_RGB_COLOR: (255, 255, 0), }, STATE_ON, "Hood", @@ -324,8 +331,8 @@ async def test_light_availabilty( }, SERVICE_TURN_ON, { - "hs_color": (255.484, 15.196), - "brightness": 199, + ATTR_HS_COLOR: (255.484, 15.196), + ATTR_BRIGHTNESS: 199, }, STATE_ON, "Hood", @@ -341,7 +348,7 @@ async def test_light_availabilty( "FridgeFreezer", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_light_functionality( entity_id: str, @@ -349,7 +356,7 @@ async def test_light_functionality( service: str, exprected_attributes: dict[str, Any], state: str, - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -362,7 +369,7 @@ async def test_light_functionality( assert config_entry.state == ConfigEntryState.LOADED service_data = exprected_attributes.copy() - service_data["entity_id"] = entity_id + service_data[ATTR_ENTITY_ID] = entity_id await hass.services.async_call( LIGHT_DOMAIN, service, @@ -371,7 +378,7 @@ async def test_light_functionality( await hass.async_block_till_done() client.set_setting.assert_has_calls( [ - call(appliance_ha_id, setting_key=setting_key, 
value=value) + call(appliance.ha_id, setting_key=setting_key, value=value) for setting_key, value in set_settings_args.items() ] ) @@ -386,7 +393,7 @@ async def test_light_functionality( ( "entity_id", "events", - "appliance_ha_id", + "appliance", ), [ ( @@ -397,12 +404,12 @@ async def test_light_functionality( "Hood", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_light_color_different_than_custom( entity_id: str, events: dict[EventKey, Any], - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -417,21 +424,21 @@ async def test_light_color_different_than_custom( LIGHT_DOMAIN, SERVICE_TURN_ON, { - "rgb_color": (255, 255, 0), - "entity_id": entity_id, + ATTR_RGB_COLOR: (255, 255, 0), + ATTR_ENTITY_ID: entity_id, }, ) await hass.async_block_till_done() entity_state = hass.states.get(entity_id) assert entity_state is not None assert entity_state.state == STATE_ON - assert entity_state.attributes["rgb_color"] is not None - assert entity_state.attributes["hs_color"] is not None + assert entity_state.attributes[ATTR_RGB_COLOR] is not None + assert entity_state.attributes[ATTR_HS_COLOR] is not None await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, ArrayOfEvents( [ @@ -454,8 +461,8 @@ async def test_light_color_different_than_custom( entity_state = hass.states.get(entity_id) assert entity_state is not None assert entity_state.state == STATE_ON - assert entity_state.attributes["rgb_color"] is None - assert entity_state.attributes["hs_color"] is None + assert entity_state.attributes[ATTR_RGB_COLOR] is None + assert entity_state.attributes[ATTR_HS_COLOR] is None @pytest.mark.parametrize( @@ -485,7 +492,7 @@ async def test_light_color_different_than_custom( SettingKey.COOKING_COMMON_LIGHTING_BRIGHTNESS: 70, }, SERVICE_TURN_ON, - {"brightness": 200}, + {ATTR_BRIGHTNESS: 200}, [HomeConnectError, HomeConnectError], r"Error.*turn.*on.*", ), @@ -517,7 +524,7 @@ async def test_light_color_different_than_custom( SettingKey.BSH_COMMON_AMBIENT_LIGHT_BRIGHTNESS: 70, }, SERVICE_TURN_ON, - {"brightness": 200}, + {ATTR_BRIGHTNESS: 200}, [HomeConnectError, None, HomeConnectError], r"Error.*set.*brightness.*", ), @@ -530,7 +537,7 @@ async def test_light_color_different_than_custom( SettingKey.BSH_COMMON_AMBIENT_LIGHT_CUSTOM_COLOR: "#ffff00", }, SERVICE_TURN_ON, - {"rgb_color": (255, 255, 0)}, + {ATTR_RGB_COLOR: (255, 255, 0)}, [HomeConnectError, None, HomeConnectError], r"Error.*select.*custom color.*", ), @@ -543,7 +550,7 @@ async def test_light_color_different_than_custom( SettingKey.BSH_COMMON_AMBIENT_LIGHT_CUSTOM_COLOR: "#ffff00", }, SERVICE_TURN_ON, - {"rgb_color": (255, 255, 0)}, + {ATTR_RGB_COLOR: (255, 255, 0)}, [HomeConnectError, None, None, HomeConnectError], r"Error.*set.*color.*", ), @@ -556,8 +563,8 @@ async def test_light_color_different_than_custom( }, SERVICE_TURN_ON, { - "hs_color": (255.484, 15.196), - "brightness": 199, + ATTR_HS_COLOR: (255.484, 15.196), + ATTR_BRIGHTNESS: 199, }, [HomeConnectError, None, None, HomeConnectError], r"Error.*set.*color.*", @@ -600,7 +607,7 @@ async def test_light_exception_handling( with pytest.raises(HomeConnectError): await client_with_exception.set_setting() - service_data["entity_id"] = entity_id + service_data[ATTR_ENTITY_ID] = entity_id with pytest.raises(HomeAssistantError, match=exception_match): await hass.services.async_call( LIGHT_DOMAIN, service, service_data, 
blocking=True diff --git a/tests/components/home_connect/test_number.py b/tests/components/home_connect/test_number.py index bb87cf9f3dc..1de384303ce 100644 --- a/tests/components/home_connect/test_number.py +++ b/tests/components/home_connect/test_number.py @@ -12,6 +12,7 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, OptionKey, ProgramDefinition, ProgramKey, @@ -69,8 +70,9 @@ async def test_number( assert config_entry.state is ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -95,7 +97,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -103,7 +105,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -111,7 +113,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -120,7 +122,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -128,14 +130,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) -@pytest.mark.parametrize("appliance_ha_id", ["FridgeFreezer"], indirect=True) +@pytest.mark.parametrize("appliance", ["FridgeFreezer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -152,7 +154,7 @@ async def test_connected_devices( get_settings_original_mock = client.get_settings def get_settings_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -164,14 +166,14 @@ async def test_connected_devices( assert config_entry.state == ConfigEntryState.LOADED client.get_settings = get_settings_original_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -179,20 +181,20 
@@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", ["FridgeFreezer"], indirect=True) -async def test_number_entity_availabilty( +@pytest.mark.parametrize("appliance", ["FridgeFreezer"], indirect=True) +async def test_number_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if number entities availability are based on the appliance connection state.""" entity_ids = [ @@ -215,7 +217,7 @@ async def test_number_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -229,7 +231,7 @@ async def test_number_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -243,7 +245,7 @@ async def test_number_entity_availabilty( assert state.state != STATE_UNAVAILABLE -@pytest.mark.parametrize("appliance_ha_id", ["FridgeFreezer"], indirect=True) +@pytest.mark.parametrize("appliance", ["FridgeFreezer"], indirect=True) @pytest.mark.parametrize( ( "entity_id", @@ -279,7 +281,7 @@ async def test_number_entity_availabilty( ], ) async def test_number_entity_functionality( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, type: str, @@ -336,12 +338,12 @@ async def test_number_entity_functionality( ) await hass.async_block_till_done() client.set_setting.assert_awaited_once_with( - appliance_ha_id, setting_key=setting_key, value=value + appliance.ha_id, setting_key=setting_key, value=value ) assert hass.states.is_state(entity_id, str(float(value))) -@pytest.mark.parametrize("appliance_ha_id", ["FridgeFreezer"], indirect=True) +@pytest.mark.parametrize("appliance", ["FridgeFreezer"], indirect=True) @pytest.mark.parametrize("retry_after", [0, None]) @pytest.mark.parametrize( ( @@ -368,7 +370,7 @@ async def test_number_entity_functionality( @patch("homeassistant.components.home_connect.entity.API_DEFAULT_RETRY_AFTER", new=0) async def test_fetch_constraints_after_rate_limit_error( retry_after: int | None, - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, type: str, @@ -385,7 +387,7 @@ async def test_fetch_constraints_after_rate_limit_error( """Test that, if a API rate limit error is raised, the constraints are fetched later.""" def get_settings_side_effect(ha_id: str): - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return ArrayOfSettings([]) return ArrayOfSettings( [ @@ -511,7 +513,7 @@ async def test_number_entity_error( ], ) @pytest.mark.parametrize( - ("appliance_ha_id", "entity_id", "option_key", "min", "max", "step_size", "unit"), + ("appliance", "entity_id", "option_key", "min", "max", "step_size", "unit"), [ ( "Oven", @@ -523,12 +525,12 @@ async def test_number_entity_error( "°C", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_options_functionality( entity_id: str, option_key: OptionKey, - appliance_ha_id: str, + appliance: 
HomeAppliance, min: int, max: int, step_size: int, @@ -615,7 +617,7 @@ async def test_options_functionality( await hass.async_block_till_done() assert called_mock.called - assert called_mock.call_args.args == (appliance_ha_id,) + assert called_mock.call_args.args == (appliance.ha_id,) assert called_mock.call_args.kwargs == { "option_key": option_key, "value": 80, diff --git a/tests/components/home_connect/test_select.py b/tests/components/home_connect/test_select.py index f20be33081c..f6009640f72 100644 --- a/tests/components/home_connect/test_select.py +++ b/tests/components/home_connect/test_select.py @@ -12,6 +12,7 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, OptionKey, ProgramDefinition, ProgramKey, @@ -72,8 +73,9 @@ async def test_select( assert config_entry.state is ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -98,7 +100,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -106,7 +108,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -114,7 +116,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -123,7 +125,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -131,13 +133,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -156,13 +159,13 @@ async def test_connected_devices( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -170,19 +173,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = 
device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries -async def test_select_entity_availabilty( +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +async def test_select_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if select entities availability are based on the appliance connection state.""" entity_ids = [ @@ -200,7 +204,7 @@ async def test_select_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -214,7 +218,7 @@ async def test_select_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -290,7 +294,7 @@ async def test_filter_programs( @pytest.mark.parametrize( ( - "appliance_ha_id", + "appliance", "entity_id", "expected_initial_state", "mock_method", @@ -318,10 +322,10 @@ async def test_filter_programs( EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM, ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_select_program_functionality( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, expected_initial_state: str, mock_method: str, @@ -347,14 +351,14 @@ async def test_select_program_functionality( ) await hass.async_block_till_done() getattr(client, mock_method).assert_awaited_once_with( - appliance_ha_id, program_key=program_key + appliance.ha_id, program_key=program_key ) assert hass.states.is_state(entity_id, program_to_set) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.NOTIFY, ArrayOfEvents( [ @@ -433,13 +437,13 @@ async def test_select_exception_handling( await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {"entity_id": entity_id, "option": program_to_set}, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: program_to_set}, blocking=True, ) assert getattr(client_with_exception, mock_attr).call_count == 2 -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) @pytest.mark.parametrize( ( "entity_id", @@ -473,7 +477,7 @@ async def test_select_exception_handling( ], ) async def test_select_functionality( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, expected_options: set[str], @@ -497,12 +501,12 @@ async def test_select_functionality( await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": value_to_set}, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: value_to_set}, ) await hass.async_block_till_done() client.set_setting.assert_called_once() - assert client.set_setting.call_args.args == (appliance_ha_id,) + assert client.set_setting.call_args.args == (appliance.ha_id,) assert client.set_setting.call_args.kwargs == { "setting_key": setting_key, "value": expected_value_call_arg, @@ -510,7 +514,7 @@ async def test_select_functionality( assert hass.states.is_state(entity_id, value_to_set) -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], 
indirect=True) @pytest.mark.parametrize( ( "entity_id", @@ -537,7 +541,7 @@ async def test_select_functionality( ], ) async def test_fetch_allowed_values( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, test_setting_key: SettingKey, allowed_values: list[str | None], @@ -554,7 +558,7 @@ async def test_fetch_allowed_values( async def get_setting_side_effect( ha_id: str, setting_key: SettingKey ) -> GetSetting: - if ha_id != appliance_ha_id or setting_key != test_setting_key: + if ha_id != appliance.ha_id or setting_key != test_setting_key: return await original_get_setting_side_effect(ha_id, setting_key) return GetSetting( key=test_setting_key, @@ -576,7 +580,7 @@ async def test_fetch_allowed_values( assert set(entity_state.attributes[ATTR_OPTIONS]) == expected_options -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) @pytest.mark.parametrize( ( "entity_id", @@ -594,7 +598,7 @@ async def test_fetch_allowed_values( ], ) async def test_fetch_allowed_values_after_rate_limit_error( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, allowed_values: list[str | None], @@ -608,7 +612,7 @@ async def test_fetch_allowed_values_after_rate_limit_error( """Test fetch allowed values.""" def get_settings_side_effect(ha_id: str): - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return ArrayOfSettings([]) return ArrayOfSettings( [ @@ -648,7 +652,7 @@ async def test_fetch_allowed_values_after_rate_limit_error( assert set(entity_state.attributes[ATTR_OPTIONS]) == expected_options -@pytest.mark.parametrize("appliance_ha_id", ["Hood"], indirect=True) +@pytest.mark.parametrize("appliance", ["Hood"], indirect=True) @pytest.mark.parametrize( ( "entity_id", @@ -669,7 +673,7 @@ async def test_fetch_allowed_values_after_rate_limit_error( ], ) async def test_default_values_after_fetch_allowed_values_error( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, exception: Exception, @@ -683,7 +687,7 @@ async def test_default_values_after_fetch_allowed_values_error( """Test fetch allowed values.""" def get_settings_side_effect(ha_id: str): - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return ArrayOfSettings([]) return ArrayOfSettings( [ @@ -758,12 +762,13 @@ async def test_select_entity_error( await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: entity_id, "option": value_to_set}, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: value_to_set}, blocking=True, ) assert getattr(client_with_exception, mock_attr).call_count == 2 +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) @pytest.mark.parametrize( ( "set_active_program_options_side_effect", @@ -840,7 +845,7 @@ async def test_options_functionality( option_key: OptionKey, allowed_values: list[str | None] | None, expected_options: set[str], - appliance_ha_id: str, + appliance: HomeAppliance, set_active_program_options_side_effect: ActiveProgramNotSetError | None, set_selected_program_options_side_effect: SelectedProgramNotSetError | None, called_mock_method: str, @@ -894,7 +899,7 @@ async def test_options_functionality( await hass.async_block_till_done() assert called_mock.called - assert called_mock.call_args.args == (appliance_ha_id,) + assert called_mock.call_args.args == (appliance.ha_id,) assert called_mock.call_args.kwargs == { "option_key": option_key, "value": "LaundryCare.Washer.EnumType.Temperature.UlWarm", 
diff --git a/tests/components/home_connect/test_sensor.py b/tests/components/home_connect/test_sensor.py index a7836223737..f30723af7fa 100644 --- a/tests/components/home_connect/test_sensor.py +++ b/tests/components/home_connect/test_sensor.py @@ -10,6 +10,7 @@ from aiohomeconnect.model import ( EventKey, EventMessage, EventType, + HomeAppliance, Status, StatusKey, ) @@ -99,8 +100,9 @@ async def test_sensors( assert config_entry.state == ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -114,7 +116,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -122,7 +124,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -130,7 +132,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -139,7 +141,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -147,13 +149,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -170,7 +173,7 @@ async def test_connected_devices( get_status_original_mock = client.get_status def get_status_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -182,14 +185,14 @@ async def test_connected_devices( assert config_entry.state == ConfigEntryState.LOADED client.get_status = get_status_original_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -197,20 +200,20 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = 
device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", [TEST_HC_APP], indirect=True) -async def test_sensor_entity_availabilty( +@pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) +async def test_sensor_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if sensor entities availability are based on the appliance connection state.""" entity_ids = [ @@ -229,7 +232,7 @@ async def test_sensor_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -243,7 +246,7 @@ async def test_sensor_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -257,7 +260,7 @@ async def test_sensor_entity_availabilty( assert state.state != STATE_UNAVAILABLE -# Appliance_ha_id program sequence with a delayed start. +# Appliance program sequence with a delayed start. PROGRAM_SEQUENCE_EVENTS = ( EVENT_PROG_DELAYED_START, EVENT_PROG_RUN, @@ -292,7 +295,7 @@ ENTITY_ID_STATES = { } -@pytest.mark.parametrize("appliance_ha_id", [TEST_HC_APP], indirect=True) +@pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) @pytest.mark.parametrize( ("states", "event_run"), list( @@ -305,7 +308,7 @@ ENTITY_ID_STATES = { ) async def test_program_sensors( client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, states: tuple, event_run: dict[EventType, dict[EventKey, str | int]], freezer: FrozenDateTimeFactory, @@ -335,7 +338,7 @@ async def test_program_sensors( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -359,7 +362,7 @@ async def test_program_sensors( assert hass.states.is_state(entity_id, state) -@pytest.mark.parametrize("appliance_ha_id", [TEST_HC_APP], indirect=True) +@pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) @pytest.mark.parametrize( ("initial_operation_state", "initial_state", "event_order", "entity_states"), [ @@ -382,7 +385,7 @@ async def test_program_sensor_edge_case( initial_state: str, event_order: tuple[EventType, EventType], entity_states: tuple[str, str], - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -413,7 +416,7 @@ async def test_program_sensor_edge_case( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -452,9 +455,9 @@ ENTITY_ID_EDGE_CASE_STATES = [ ] -@pytest.mark.parametrize("appliance_ha_id", [TEST_HC_APP], indirect=True) +@pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) async def test_remaining_prog_time_edge_cases( - appliance_ha_id: str, + appliance: HomeAppliance, freezer: FrozenDateTimeFactory, hass: HomeAssistant, config_entry: MockConfigEntry, @@ -478,7 +481,7 @@ async def test_remaining_prog_time_edge_cases( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ 
@@ -509,7 +512,7 @@ async def test_remaining_prog_time_edge_cases( "event_type", "event_value_update", "expected", - "appliance_ha_id", + "appliance", ), [ ( @@ -601,14 +604,14 @@ async def test_remaining_prog_time_edge_cases( "CoffeeMaker", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_sensors_states( entity_id: str, event_key: EventKey, event_type: EventType, event_value_update: str, - appliance_ha_id: str, + appliance: HomeAppliance, expected: str, hass: HomeAssistant, config_entry: MockConfigEntry, @@ -616,7 +619,7 @@ async def test_sensors_states( setup_credentials: None, client: MagicMock, ) -> None: - """Tests for Appliance_ha_id alarm sensors.""" + """Tests for appliance alarm sensors.""" assert config_entry.state == ConfigEntryState.NOT_LOADED assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED @@ -624,7 +627,7 @@ async def test_sensors_states( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, event_type, ArrayOfEvents( [ @@ -647,7 +650,7 @@ async def test_sensors_states( @pytest.mark.parametrize( ( - "appliance_ha_id", + "appliance", "entity_id", "status_key", "unit_get_status", @@ -672,10 +675,10 @@ async def test_sensors_states( 1, ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_sensor_unit_fetching( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, status_key: StatusKey, unit_get_status: str | None, @@ -690,7 +693,7 @@ async def test_sensor_unit_fetching( """Test that the sensor entities are capable of fetching units.""" async def get_status_mock(ha_id: str) -> ArrayOfStatus: - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return ArrayOfStatus([]) return ArrayOfStatus( [ @@ -729,7 +732,7 @@ async def test_sensor_unit_fetching( @pytest.mark.parametrize( ( - "appliance_ha_id", + "appliance", "entity_id", "status_key", ), @@ -740,10 +743,10 @@ async def test_sensor_unit_fetching( StatusKey.COOKING_OVEN_CURRENT_CAVITY_TEMPERATURE, ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_sensor_unit_fetching_error( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, status_key: StatusKey, hass: HomeAssistant, @@ -755,7 +758,7 @@ async def test_sensor_unit_fetching_error( """Test that the sensor entities are capable of fetching units.""" async def get_status_mock(ha_id: str) -> ArrayOfStatus: - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return ArrayOfStatus([]) return ArrayOfStatus( [ @@ -779,7 +782,7 @@ async def test_sensor_unit_fetching_error( @pytest.mark.parametrize( ( - "appliance_ha_id", + "appliance", "entity_id", "status_key", "unit", @@ -792,10 +795,10 @@ async def test_sensor_unit_fetching_error( "°C", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_sensor_unit_fetching_after_rate_limit_error( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, status_key: StatusKey, unit: str, @@ -808,7 +811,7 @@ async def test_sensor_unit_fetching_after_rate_limit_error( """Test that the sensor entities are capable of fetching units.""" async def get_status_mock(ha_id: str) -> ArrayOfStatus: - if ha_id != appliance_ha_id: + if ha_id != appliance.ha_id: return ArrayOfStatus([]) return ArrayOfStatus( [ diff --git a/tests/components/home_connect/test_services.py b/tests/components/home_connect/test_services.py new file mode 100644 index 00000000000..2915cbe4f69 --- /dev/null +++ 
b/tests/components/home_connect/test_services.py @@ -0,0 +1,474 @@ +"""Tests for the Home Connect actions.""" + +from collections.abc import Awaitable, Callable +from http import HTTPStatus +from typing import Any +from unittest.mock import MagicMock + +from aiohomeconnect.model import HomeAppliance, OptionKey, ProgramKey, SettingKey +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.home_connect.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import device_registry as dr +import homeassistant.helpers.issue_registry as ir + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + +DEPRECATED_SERVICE_KV_CALL_PARAMS = [ + { + "domain": DOMAIN, + "service": "set_option_active", + "service_data": { + "device_id": "DEVICE_ID", + "key": OptionKey.BSH_COMMON_FINISH_IN_RELATIVE.value, + "value": 43200, + "unit": "seconds", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "set_option_selected", + "service_data": { + "device_id": "DEVICE_ID", + "key": OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE.value, + "value": "LaundryCare.Washer.EnumType.Temperature.GC40", + }, + "blocking": True, + }, +] + +SERVICE_KV_CALL_PARAMS = [ + *DEPRECATED_SERVICE_KV_CALL_PARAMS, + { + "domain": DOMAIN, + "service": "change_setting", + "service_data": { + "device_id": "DEVICE_ID", + "key": SettingKey.BSH_COMMON_CHILD_LOCK.value, + "value": True, + }, + "blocking": True, + }, +] + +SERVICE_COMMAND_CALL_PARAMS = [ + { + "domain": DOMAIN, + "service": "pause_program", + "service_data": { + "device_id": "DEVICE_ID", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "resume_program", + "service_data": { + "device_id": "DEVICE_ID", + }, + "blocking": True, + }, +] + + +SERVICE_PROGRAM_CALL_PARAMS = [ + { + "domain": DOMAIN, + "service": "select_program", + "service_data": { + "device_id": "DEVICE_ID", + "program": ProgramKey.LAUNDRY_CARE_WASHER_COTTON.value, + "key": OptionKey.LAUNDRY_CARE_WASHER_TEMPERATURE.value, + "value": "LaundryCare.Washer.EnumType.Temperature.GC40", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "start_program", + "service_data": { + "device_id": "DEVICE_ID", + "program": ProgramKey.LAUNDRY_CARE_WASHER_COTTON.value, + "key": OptionKey.BSH_COMMON_FINISH_IN_RELATIVE.value, + "value": 43200, + "unit": "seconds", + }, + "blocking": True, + }, +] + +SERVICE_APPLIANCE_METHOD_MAPPING = { + "set_option_active": "set_active_program_option", + "set_option_selected": "set_selected_program_option", + "change_setting": "set_setting", + "pause_program": "put_command", + "resume_program": "put_command", + "select_program": "set_selected_program", + "start_program": "start_program", +} + +SERVICE_VALIDATION_ERROR_MAPPING = { + "set_option_active": r"Error.*setting.*options.*active.*program.*", + "set_option_selected": r"Error.*setting.*options.*selected.*program.*", + "change_setting": r"Error.*assigning.*value.*setting.*", + "pause_program": r"Error.*executing.*command.*", + "resume_program": r"Error.*executing.*command.*", + "select_program": r"Error.*selecting.*program.*", + "start_program": r"Error.*starting.*program.*", +} + + +SERVICES_SET_PROGRAM_AND_OPTIONS = [ + { + "domain": DOMAIN, + "service": "set_program_and_options", + "service_data": { + "device_id": "DEVICE_ID", + "affects_to": 
"selected_program", + "program": "dishcare_dishwasher_program_eco_50", + "b_s_h_common_option_start_in_relative": 1800, + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "set_program_and_options", + "service_data": { + "device_id": "DEVICE_ID", + "affects_to": "active_program", + "program": "consumer_products_coffee_maker_program_beverage_coffee", + "consumer_products_coffee_maker_option_bean_amount": "consumer_products_coffee_maker_enum_type_bean_amount_normal", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "set_program_and_options", + "service_data": { + "device_id": "DEVICE_ID", + "affects_to": "active_program", + "consumer_products_coffee_maker_option_coffee_milk_ratio": "consumer_products_coffee_maker_enum_type_coffee_milk_ratio_50_percent", + }, + "blocking": True, + }, + { + "domain": DOMAIN, + "service": "set_program_and_options", + "service_data": { + "device_id": "DEVICE_ID", + "affects_to": "selected_program", + "consumer_products_coffee_maker_option_fill_quantity": 35, + }, + "blocking": True, + }, +] + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + "service_call", + SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, +) +async def test_key_value_services( + service_call: dict[str, Any], + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + appliance: HomeAppliance, +) -> None: + """Create and test services.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_name = service_call["service"] + service_call["service_data"]["device_id"] = device_entry.id + await hass.services.async_call(**service_call) + await hass.async_block_till_done() + assert ( + getattr(client, SERVICE_APPLIANCE_METHOD_MAPPING[service_name]).call_count == 1 + ) + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + ("service_call", "issue_id"), + [ + *zip( + DEPRECATED_SERVICE_KV_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, + ["deprecated_set_program_and_option_actions"] + * ( + len(DEPRECATED_SERVICE_KV_CALL_PARAMS) + + len(SERVICE_PROGRAM_CALL_PARAMS) + ), + strict=True, + ), + *zip( + SERVICE_COMMAND_CALL_PARAMS, + ["deprecated_command_actions"] * len(SERVICE_COMMAND_CALL_PARAMS), + strict=True, + ), + ], +) +async def test_programs_and_options_actions_deprecation( + service_call: dict[str, Any], + issue_id: str, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + appliance: HomeAppliance, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test deprecated service keys.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + await 
hass.services.async_call(**service_call) + await hass.async_block_till_done() + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue + + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 + + await hass.services.async_call(**service_call) + await hass.async_block_till_done() + + assert len(issue_registry.issues) == 1 + assert issue_registry.async_get_issue(DOMAIN, issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + ("service_call", "called_method"), + zip( + SERVICES_SET_PROGRAM_AND_OPTIONS, + [ + "set_selected_program", + "start_program", + "set_active_program_options", + "set_selected_program_options", + ], + strict=True, + ), +) +async def test_set_program_and_options( + service_call: dict[str, Any], + called_method: str, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + appliance: HomeAppliance, + snapshot: SnapshotAssertion, +) -> None: + """Test recognized options.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + await hass.services.async_call(**service_call) + await hass.async_block_till_done() + method_mock: MagicMock = getattr(client, called_method) + assert method_mock.call_count == 1 + assert method_mock.call_args == snapshot + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + ("service_call", "error_regex"), + zip( + SERVICES_SET_PROGRAM_AND_OPTIONS, + [ + r"Error.*selecting.*program.*", + r"Error.*starting.*program.*", + r"Error.*setting.*options.*active.*program.*", + r"Error.*setting.*options.*selected.*program.*", + ], + strict=True, + ), +) +async def test_set_program_and_options_exceptions( + service_call: dict[str, Any], + error_regex: str, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client_with_exception: MagicMock, + appliance: HomeAppliance, +) -> None: + """Test recognized options.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client_with_exception) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + with pytest.raises(HomeAssistantError, match=error_regex): 
+ await hass.services.async_call(**service_call) + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + "service_call", + SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, +) +async def test_services_exception_device_id( + service_call: dict[str, Any], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client_with_exception: MagicMock, + appliance: HomeAppliance, + device_registry: dr.DeviceRegistry, +) -> None: + """Raise a HomeAssistantError when there is an API error.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client_with_exception) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + service_call["service_data"]["device_id"] = device_entry.id + + with pytest.raises(HomeAssistantError): + await hass.services.async_call(**service_call) + + +async def test_services_appliance_not_found( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + device_registry: dr.DeviceRegistry, +) -> None: + """Raise a ServiceValidationError when device id does not match.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + service_call = SERVICE_KV_CALL_PARAMS[0] + + service_call["service_data"]["device_id"] = "DOES_NOT_EXISTS" + + with pytest.raises(ServiceValidationError, match=r"Device entry.*not found"): + await hass.services.async_call(**service_call) + + unrelated_config_entry = MockConfigEntry( + domain="TEST", + ) + unrelated_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=unrelated_config_entry.entry_id, + identifiers={("RANDOM", "ABCD")}, + ) + service_call["service_data"]["device_id"] = device_entry.id + + with pytest.raises(ServiceValidationError, match=r"Config entry.*not found"): + await hass.services.async_call(**service_call) + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={("RANDOM", "ABCD")}, + ) + service_call["service_data"]["device_id"] = device_entry.id + + with pytest.raises(ServiceValidationError, match=r"Appliance.*not found"): + await hass.services.async_call(**service_call) + + +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) +@pytest.mark.parametrize( + "service_call", + SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, +) +async def test_services_exception( + service_call: dict[str, Any], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client_with_exception: MagicMock, + appliance: HomeAppliance, + device_registry: dr.DeviceRegistry, +) -> None: + """Raise a ValueError when device id does not match.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client_with_exception) + assert config_entry.state == ConfigEntryState.LOADED + + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, appliance.ha_id)}, + ) + + 
service_call["service_data"]["device_id"] = device_entry.id + + service_name = service_call["service"] + with pytest.raises( + HomeAssistantError, + match=SERVICE_VALIDATION_ERROR_MAPPING[service_name], + ): + await hass.services.async_call(**service_call) diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index 1b38809dc05..01f9cad5d2e 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -1,6 +1,7 @@ """Tests for home_connect sensor entities.""" from collections.abc import Awaitable, Callable +from http import HTTPStatus from typing import Any from unittest.mock import AsyncMock, MagicMock @@ -13,6 +14,7 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, OptionKey, ProgramDefinition, ProgramKey, @@ -58,6 +60,7 @@ from homeassistant.helpers import ( from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator @pytest.fixture @@ -79,8 +82,9 @@ async def test_switches( assert config_entry.state == ConfigEntryState.LOADED +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -105,7 +109,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -113,7 +117,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -121,7 +125,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -130,7 +134,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -138,13 +142,14 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) +@pytest.mark.parametrize("appliance", ["Washer"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -162,14 +167,14 @@ async def test_connected_devices( get_available_programs_mock = client.get_available_programs async def get_settings_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( 
"SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) return await get_settings_original_mock.side_effect(ha_id) async def get_available_programs_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -185,14 +190,14 @@ async def test_connected_devices( client.get_settings = get_settings_original_mock client.get_available_programs = get_available_programs_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -200,20 +205,21 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", ["Dishwasher"], indirect=True) -async def test_switch_entity_availabilty( +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Dishwasher"], indirect=True) +async def test_switch_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if switch entities availability are based on the appliance connection state.""" entity_ids = [ @@ -233,7 +239,7 @@ async def test_switch_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -247,7 +253,7 @@ async def test_switch_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -268,7 +274,7 @@ async def test_switch_entity_availabilty( "settings_key_arg", "setting_value_arg", "state", - "appliance_ha_id", + "appliance", ), [ ( @@ -288,7 +294,7 @@ async def test_switch_entity_availabilty( "Dishwasher", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_switch_functionality( entity_id: str, @@ -300,7 +306,7 @@ async def test_switch_functionality( config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client: MagicMock, ) -> None: """Test switch functionality.""" @@ -312,13 +318,14 @@ async def test_switch_functionality( await hass.services.async_call(SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}) await hass.async_block_till_done() client.set_setting.assert_awaited_once_with( - appliance_ha_id, setting_key=settings_key_arg, value=setting_value_arg + appliance.ha_id, setting_key=settings_key_arg, value=setting_value_arg ) assert hass.states.is_state(entity_id, state) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( - ("entity_id", "program_key", "initial_state", "appliance_ha_id"), + ("entity_id", "program_key", "initial_state", 
"appliance"), [ ( "switch.dryer_program_mix", @@ -333,7 +340,7 @@ async def test_switch_functionality( "Dryer", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_program_switch_functionality( entity_id: str, @@ -343,7 +350,7 @@ async def test_program_switch_functionality( config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client: MagicMock, ) -> None: """Test switch functionality.""" @@ -383,7 +390,7 @@ async def test_program_switch_functionality( await hass.async_block_till_done() assert hass.states.is_state(entity_id, STATE_ON) client.start_program.assert_awaited_once_with( - appliance_ha_id, program_key=program_key + appliance.ha_id, program_key=program_key ) await hass.services.async_call( @@ -391,9 +398,10 @@ async def test_program_switch_functionality( ) await hass.async_block_till_done() assert hass.states.is_state(entity_id, STATE_OFF) - client.stop_program.assert_awaited_once_with(appliance_ha_id) + client.stop_program.assert_awaited_once_with(appliance.ha_id) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ( "entity_id", @@ -496,7 +504,7 @@ async def test_switch_exception_handling( @pytest.mark.parametrize( - ("entity_id", "status", "service", "state", "appliance_ha_id"), + ("entity_id", "status", "service", "state", "appliance"), [ ( "switch.fridgefreezer_freezer_super_mode", @@ -513,7 +521,7 @@ async def test_switch_exception_handling( "FridgeFreezer", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_ent_desc_switch_functionality( entity_id: str, @@ -524,7 +532,7 @@ async def test_ent_desc_switch_functionality( config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client: MagicMock, ) -> None: """Test switch functionality - entity description setup.""" @@ -544,7 +552,7 @@ async def test_ent_desc_switch_functionality( "status", "service", "mock_attr", - "appliance_ha_id", + "appliance", "exception_match", ), [ @@ -565,7 +573,7 @@ async def test_ent_desc_switch_functionality( r"Error.*turn.*off.*", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_ent_desc_switch_exception_handling( entity_id: str, @@ -577,7 +585,7 @@ async def test_ent_desc_switch_exception_handling( integration_setup: Callable[[MagicMock], Awaitable[bool]], config_entry: MockConfigEntry, setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client_with_exception: MagicMock, ) -> None: """Test switch exception handling - entity description setup.""" @@ -613,7 +621,7 @@ async def test_ent_desc_switch_exception_handling( "service", "setting_value_arg", "power_state", - "appliance_ha_id", + "appliance", ), [ ( @@ -649,9 +657,9 @@ async def test_ent_desc_switch_exception_handling( "Dishwasher", ), ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) -async def test_power_swtich( +async def test_power_switch( entity_id: str, allowed_values: list[str | None] | None, service: str, @@ -661,7 +669,7 @@ async def test_power_swtich( config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, - appliance_ha_id: str, + appliance: HomeAppliance, client: MagicMock, ) -> None: """Test power switch functionality.""" @@ -686,7 +694,7 @@ async def test_power_swtich( await 
hass.services.async_call(SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}) await hass.async_block_till_done() client.set_setting.assert_awaited_once_with( - appliance_ha_id, + appliance.ha_id, setting_key=SettingKey.BSH_COMMON_POWER_STATE, value=setting_value_arg, ) @@ -798,18 +806,24 @@ async def test_power_switch_service_validation_errors( @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_create_issue( +@pytest.mark.parametrize( + "service", + [SERVICE_TURN_ON, SERVICE_TURN_OFF], +) +async def test_create_program_switch_deprecation_issue( hass: HomeAssistant, - appliance_ha_id: str, + appliance: HomeAppliance, + service: str, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, issue_registry: ir.IssueRegistry, ) -> None: - """Test we create an issue when an automation or script is using a deprecated entity.""" + """Test that we create an issue when an automation or script is using a program switch entity or the entity is used by the user.""" entity_id = "switch.washer_program_mix" - issue_id = f"deprecated_program_switch_{entity_id}" + automation_script_issue_id = f"deprecated_program_switch_{entity_id}" + action_handler_issue_id = f"deprecated_program_switch_{entity_id}" assert await async_setup_component( hass, @@ -848,17 +862,118 @@ async def test_create_issue( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert automations_with_entity(hass, entity_id)[0] == "automation.test" assert scripts_with_entity(hass, entity_id)[0] == "script.test" - assert len(issue_registry.issues) == 1 - assert issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() # Assert the issue is no longer present - assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) assert len(issue_registry.issues) == 0 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "service", + [SERVICE_TURN_ON, SERVICE_TURN_OFF], +) +async def test_program_switch_deprecation_issue_fix( + hass: HomeAssistant, + appliance: HomeAppliance, + service: str, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test we can fix the issues created when a program switch entity is in an automation or in a script or when it is used.""" + entity_id = "switch.washer_program_mix" + automation_script_issue_id = f"deprecated_program_switch_{entity_id}" + action_handler_issue_id = f"deprecated_program_switch_{entity_id}" + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await
async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + SWITCH_DOMAIN, + service, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + + for issue in issue_registry.issues.copy().values(): + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) assert len(issue_registry.issues) == 0 @@ -882,7 +997,7 @@ async def test_create_issue( ], ) @pytest.mark.parametrize( - ("entity_id", "option_key", "appliance_ha_id"), + ("entity_id", "option_key", "appliance"), [ ( "switch.dishwasher_half_load", @@ -890,12 +1005,12 @@ async def test_create_issue( "Dishwasher", ) ], - indirect=["appliance_ha_id"], + indirect=["appliance"], ) async def test_options_functionality( entity_id: str, option_key: OptionKey, - appliance_ha_id: str, + appliance: HomeAppliance, set_active_program_options_side_effect: ActiveProgramNotSetError | None, set_selected_program_options_side_effect: SelectedProgramNotSetError | None, called_mock_method: str, @@ -933,7 +1048,7 @@ async def test_options_functionality( await hass.async_block_till_done() assert called_mock.called - assert called_mock.call_args.args == (appliance_ha_id,) + assert called_mock.call_args.args == (appliance.ha_id,) assert called_mock.call_args.kwargs == { "option_key": option_key, "value": False, @@ -946,7 +1061,7 @@ async def test_options_functionality( await hass.async_block_till_done() assert called_mock.called - assert called_mock.call_args.args == (appliance_ha_id,) + assert called_mock.call_args.args == (appliance.ha_id,) assert called_mock.call_args.kwargs == { "option_key": option_key, "value": True, diff --git a/tests/components/home_connect/test_time.py b/tests/components/home_connect/test_time.py index affb5ecfedf..8c23a09053a 100644 --- a/tests/components/home_connect/test_time.py +++ b/tests/components/home_connect/test_time.py @@ -2,6 +2,7 @@ from collections.abc import Awaitable, Callable from datetime import time +from http import HTTPStatus from unittest.mock import AsyncMock, MagicMock from aiohomeconnect.model import ( @@ -10,20 +11,32 @@ from aiohomeconnect.model import ( EventMessage, EventType, GetSetting, + HomeAppliance, SettingKey, ) from aiohomeconnect.model.error import HomeConnectApiError, HomeConnectError import pytest +from homeassistant.components.automation import ( + DOMAIN as AUTOMATION_DOMAIN, + automations_with_entity, +) from homeassistant.components.home_connect.const import DOMAIN +from homeassistant.components.script import DOMAIN 
as SCRIPT_DOMAIN, scripts_with_entity from homeassistant.components.time import DOMAIN as TIME_DOMAIN, SERVICE_SET_VALUE from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_TIME, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator @pytest.fixture @@ -44,9 +57,10 @@ async def test_time( assert config_entry.state is ConfigEntryState.LOADED -@pytest.mark.parametrize("appliance_ha_id", ["Oven"], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) async def test_paired_depaired_devices_flow( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -60,7 +74,7 @@ async def test_paired_depaired_devices_flow( assert await integration_setup(client) assert config_entry.state == ConfigEntryState.LOADED - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert entity_entries @@ -68,7 +82,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DEPAIRED, data=ArrayOfEvents([]), ) @@ -76,7 +90,7 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert not device for entity_entry in entity_entries: assert not entity_registry.async_get(entity_entry.entity_id) @@ -85,7 +99,7 @@ async def test_paired_depaired_devices_flow( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.PAIRED, data=ArrayOfEvents([]), ) @@ -93,14 +107,15 @@ async def test_paired_depaired_devices_flow( ) await hass.async_block_till_done() - assert device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) for entity_entry in entity_entries: assert entity_registry.async_get(entity_entry.entity_id) -@pytest.mark.parametrize("appliance_ha_id", ["Oven"], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) async def test_connected_devices( - appliance_ha_id: str, + appliance: HomeAppliance, hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], @@ -117,7 +132,7 @@ async def test_connected_devices( get_settings_original_mock = client.get_settings async def get_settings_side_effect(ha_id: str): - if ha_id == appliance_ha_id: + if ha_id == appliance.ha_id: raise HomeConnectApiError( "SDK.Error.HomeAppliance.Connection.Initialization.Failed" ) @@ -129,14 +144,14 @@ async def test_connected_devices( assert 
config_entry.state == ConfigEntryState.LOADED client.get_settings = get_settings_original_mock - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, data=ArrayOfEvents([]), ) @@ -144,20 +159,21 @@ async def test_connected_devices( ) await hass.async_block_till_done() - device = device_registry.async_get_device(identifiers={(DOMAIN, appliance_ha_id)}) + device = device_registry.async_get_device(identifiers={(DOMAIN, appliance.ha_id)}) assert device new_entity_entries = entity_registry.entities.get_entries_for_device_id(device.id) assert len(new_entity_entries) > len(entity_entries) -@pytest.mark.parametrize("appliance_ha_id", ["Oven"], indirect=True) -async def test_time_entity_availabilty( +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +async def test_time_entity_availability( hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[MagicMock], Awaitable[bool]], setup_credentials: None, client: MagicMock, - appliance_ha_id: str, + appliance: HomeAppliance, ) -> None: """Test if time entities availability are based on the appliance connection state.""" entity_ids = [ @@ -175,7 +191,7 @@ async def test_time_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.DISCONNECTED, ArrayOfEvents([]), ) @@ -189,7 +205,7 @@ async def test_time_entity_availabilty( await client.add_events( [ EventMessage( - appliance_ha_id, + appliance.ha_id, EventType.CONNECTED, ArrayOfEvents([]), ) @@ -203,7 +219,8 @@ async def test_time_entity_availabilty( assert state.state != STATE_UNAVAILABLE -@pytest.mark.parametrize("appliance_ha_id", ["Oven"], indirect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) @pytest.mark.parametrize( ("entity_id", "setting_key"), [ @@ -214,7 +231,7 @@ async def test_time_entity_availabilty( ], ) async def test_time_entity_functionality( - appliance_ha_id: str, + appliance: HomeAppliance, entity_id: str, setting_key: SettingKey, hass: HomeAssistant, @@ -242,11 +259,12 @@ async def test_time_entity_functionality( ) await hass.async_block_till_done() client.set_setting.assert_awaited_once_with( - appliance_ha_id, setting_key=setting_key, value=value + appliance.ha_id, setting_key=setting_key, value=value ) assert hass.states.is_state(entity_id, str(time(second=value))) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ("entity_id", "setting_key", "mock_attr"), [ @@ -298,3 +316,170 @@ async def test_time_entity_error( blocking=True, ) assert getattr(client_with_exception, mock_attr).call_count == 2 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +async def test_create_alarm_clock_deprecation_issue( + hass: HomeAssistant, + appliance: HomeAppliance, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test that we create an issue when an automation or script is using a alarm clock time 
entity or the entity is used by the user.""" + entity_id = f"{TIME_DOMAIN}.oven_alarm_clock" + automation_script_issue_id = ( + f"deprecated_time_alarm_clock_in_automations_scripts_{entity_id}" + ) + action_handler_issue_id = f"deprecated_time_alarm_clock_{entity_id}" + + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + SCRIPT_DOMAIN, + { + SCRIPT_DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(minute=1), + }, + blocking=True, + ) + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("appliance", ["Oven"], indirect=True) +async def test_alarm_clock_deprecation_issue_fix( + hass: HomeAssistant, + appliance: HomeAppliance, + config_entry: MockConfigEntry, + integration_setup: Callable[[MagicMock], Awaitable[bool]], + setup_credentials: None, + client: MagicMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, +) -> None: + """Test we can fix the issues created when an alarm clock time entity is in an automation or in a script or when it is used.""" + entity_id = f"{TIME_DOMAIN}.oven_alarm_clock" + automation_script_issue_id = ( + f"deprecated_time_alarm_clock_in_automations_scripts_{entity_id}" + ) + action_handler_issue_id = f"deprecated_time_alarm_clock_{entity_id}" + + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + SCRIPT_DOMAIN, + { + SCRIPT_DOMAIN: { + "test": { + "sequence": [ + { + "action": "switch.turn_on", + "entity_id": entity_id, + }, + ], + } + } + }, + ) + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup(client) + assert config_entry.state == ConfigEntryState.LOADED + + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_TIME: time(minute=1), + }, + blocking=True, + ) + + assert len(issue_registry.issues) == 2 + assert issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert issue_registry.async_get_issue(DOMAIN,
action_handler_issue_id) + + for issue in issue_registry.issues.copy().values(): + _client = await hass_client() + resp = await _client.post( + "/api/repairs/issues/fix", + json={"handler": DOMAIN, "issue_id": issue.issue_id}, + ) + assert resp.status == HTTPStatus.OK + flow_id = (await resp.json())["flow_id"] + resp = await _client.post(f"/api/repairs/issues/fix/{flow_id}") + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, automation_script_issue_id) + assert not issue_registry.async_get_issue(DOMAIN, action_handler_issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/homeassistant_hardware/test_config_flow.py b/tests/components/homeassistant_hardware/test_config_flow.py index 32c5a381233..3081c44c681 100644 --- a/tests/components/homeassistant_hardware/test_config_flow.py +++ b/tests/components/homeassistant_hardware/test_config_flow.py @@ -381,6 +381,32 @@ async def test_config_flow_zigbee_skip_step_if_installed(hass: HomeAssistant) -> assert result["step_id"] == "confirm_zigbee" +async def test_config_flow_auto_confirm_if_running(hass: HomeAssistant) -> None: + """Test the config flow skips the confirmation step if the hardware is already in use.""" + with patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.guess_firmware_info", + return_value=FirmwareInfo( + device=TEST_DEVICE, + firmware_type=ApplicationType.EZSP, + firmware_version="7.4.4.0", + owners=[Mock(is_running=AsyncMock(return_value=True))], + source="guess", + ), + ): + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + # There are no steps, the config entry is automatically created + assert result["type"] is FlowResultType.CREATE_ENTRY + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + } + + async def test_config_flow_thread(hass: HomeAssistant) -> None: """Test the config flow.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/homeassistant_hardware/test_coordinator.py b/tests/components/homeassistant_hardware/test_coordinator.py new file mode 100644 index 00000000000..9c57aac6811 --- /dev/null +++ b/tests/components/homeassistant_hardware/test_coordinator.py @@ -0,0 +1,55 @@ +"""Test firmware update coordinator for Home Assistant Hardware.""" + +from unittest.mock import AsyncMock, Mock, call, patch + +from ha_silabs_firmware_client import FirmwareManifest, ManifestMissing +import pytest +from yarl import URL + +from homeassistant.components.homeassistant_hardware.coordinator import ( + FirmwareUpdateCoordinator, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.util import dt as dt_util + + +async def test_firmware_update_coordinator_fetching( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test the firmware update coordinator loads manifests.""" + session = async_get_clientsession(hass) + + manifest = FirmwareManifest( + url=URL("https://example.org/firmware"), + html_url=URL("https://example.org/release_notes"), + created_at=dt_util.utcnow(), + firmwares=(), + ) + + mock_client = Mock() + mock_client.async_update_data = AsyncMock(side_effect=[ManifestMissing(), manifest]) + + with patch( + "homeassistant.components.homeassistant_hardware.coordinator.FirmwareUpdateClient", + return_value=mock_client, + ): + coordinator =
FirmwareUpdateCoordinator( + hass, session, "https://example.org/firmware" + ) + + listener = Mock() + coordinator.async_add_listener(listener) + + # The first update will fail + await coordinator.async_refresh() + assert listener.mock_calls == [call()] + assert coordinator.data is None + assert "GitHub release assets haven't been uploaded yet" in caplog.text + + # The second will succeed + await coordinator.async_refresh() + assert listener.mock_calls == [call(), call()] + assert coordinator.data == manifest + + await coordinator.async_shutdown() diff --git a/tests/components/homeassistant_hardware/test_update.py b/tests/components/homeassistant_hardware/test_update.py new file mode 100644 index 00000000000..23d1e546791 --- /dev/null +++ b/tests/components/homeassistant_hardware/test_update.py @@ -0,0 +1,644 @@ +"""Test Home Assistant Hardware firmware update entity.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncGenerator +import dataclasses +import logging +from unittest.mock import AsyncMock, Mock, patch + +import aiohttp +from ha_silabs_firmware_client import FirmwareManifest, FirmwareMetadata +import pytest +from yarl import URL + +from homeassistant.components.homeassistant_hardware.coordinator import ( + FirmwareUpdateCoordinator, +) +from homeassistant.components.homeassistant_hardware.helpers import ( + async_notify_firmware_info, + async_register_firmware_info_provider, +) +from homeassistant.components.homeassistant_hardware.update import ( + BaseFirmwareUpdateEntity, + FirmwareUpdateEntityDescription, + FirmwareUpdateExtraStoredData, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, + OwningIntegration, +) +from homeassistant.components.update import UpdateDeviceClass +from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow +from homeassistant.const import EVENT_STATE_CHANGED, EntityCategory +from homeassistant.core import ( + Event, + EventStateChangedData, + HomeAssistant, + HomeAssistantError, + State, + callback, +) +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + +from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, + async_capture_events, + mock_config_flow, + mock_integration, + mock_platform, + mock_restore_cache_with_extra_data, +) + +TEST_DOMAIN = "test" +TEST_DEVICE = "/dev/serial/by-id/some-unique-serial-device-12345" +TEST_FIRMWARE_RELEASES_URL = "https://example.org/firmware" +TEST_UPDATE_ENTITY_ID = "update.mock_name_firmware" +TEST_MANIFEST = FirmwareManifest( + url=URL("https://example.org/firmware"), + html_url=URL("https://example.org/release_notes"), + created_at=dt_util.utcnow(), + firmwares=( + FirmwareMetadata( + filename="skyconnect_zigbee_ncp_test.gbl", + checksum="aaa", + size=123, + release_notes="Some release notes go here", + metadata={ + "baudrate": 115200, + "ezsp_version": "7.4.4.0", + "fw_type": "zigbee_ncp", + "fw_variant": None, + "metadata_version": 2, + "sdk_version": "4.4.4", + }, + url=URL("https://example.org/firmwares/skyconnect_zigbee_ncp_test.gbl"), + ), + ), +) + + +TEST_FIRMWARE_ENTITY_DESCRIPTIONS: dict[ + ApplicationType | None, 
FirmwareUpdateEntityDescription +] = { + ApplicationType.EZSP: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split(" ", 1)[0], + fw_type="skyconnect_zigbee_ncp", + version_key="ezsp_version", + expected_firmware_type=ApplicationType.EZSP, + firmware_name="EmberZNet", + ), + ApplicationType.SPINEL: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw.split("/", 1)[1].split("_", 1)[0], + fw_type="skyconnect_openthread_rcp", + version_key="ot_rcp_version", + expected_firmware_type=ApplicationType.SPINEL, + firmware_name="OpenThread RCP", + ), + None: FirmwareUpdateEntityDescription( + key="firmware", + display_precision=0, + device_class=UpdateDeviceClass.FIRMWARE, + entity_category=EntityCategory.CONFIG, + version_parser=lambda fw: fw, + fw_type=None, + version_key=None, + expected_firmware_type=None, + firmware_name=None, + ), +} + + +def _mock_async_create_update_entity( + hass: HomeAssistant, + config_entry: ConfigEntry, + session: aiohttp.ClientSession, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> MockFirmwareUpdateEntity: + """Create an update entity that handles firmware type changes.""" + firmware_type = config_entry.data["firmware"] + entity_description = TEST_FIRMWARE_ENTITY_DESCRIPTIONS[ + ApplicationType(firmware_type) if firmware_type is not None else None + ] + + entity = MockFirmwareUpdateEntity( + device=config_entry.data["device"], + config_entry=config_entry, + update_coordinator=FirmwareUpdateCoordinator( + hass, + session, + TEST_FIRMWARE_RELEASES_URL, + ), + entity_description=entity_description, + ) + + def firmware_type_changed( + old_type: ApplicationType | None, new_type: ApplicationType | None + ) -> None: + """Replace the current entity when the firmware type changes.""" + er.async_get(hass).async_remove(entity.entity_id) + async_add_entities( + [ + _mock_async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + ] + ) + + entity.async_on_remove( + entity.add_firmware_type_changed_callback(firmware_type_changed) + ) + + return entity + + +async def mock_async_setup_entry( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, ["update"]) + return True + + +async def mock_async_setup_update_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddConfigEntryEntitiesCallback, +) -> None: + """Set up the firmware update config entry.""" + session = async_get_clientsession(hass) + entity = _mock_async_create_update_entity( + hass, config_entry, session, async_add_entities + ) + + async_add_entities([entity]) + + +class MockFirmwareUpdateEntity(BaseFirmwareUpdateEntity): + """Mock SkyConnect firmware update entity.""" + + bootloader_reset_type = None + + def __init__( + self, + device: str, + config_entry: ConfigEntry, + update_coordinator: FirmwareUpdateCoordinator, + entity_description: FirmwareUpdateEntityDescription, + ) -> None: + """Initialize the mock SkyConnect firmware update entity.""" + super().__init__(device, config_entry, update_coordinator, entity_description) + self._attr_unique_id = self.entity_description.key + self._attr_device_info = DeviceInfo( + identifiers={(TEST_DOMAIN, "yellow")}, + name="Mock 
Name", + model="Mock Model", + manufacturer="Mock Manufacturer", + ) + + # Use the cached firmware info if it exists + if self._config_entry.data["firmware"] is not None: + self._current_firmware_info = FirmwareInfo( + device=device, + firmware_type=ApplicationType(self._config_entry.data["firmware"]), + firmware_version=self._config_entry.data["firmware_version"], + owners=[], + source=TEST_DOMAIN, + ) + + @callback + def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None: + """Handle updated firmware info being pushed by an integration.""" + super()._firmware_info_callback(firmware_info) + + self.hass.config_entries.async_update_entry( + self._config_entry, + data={ + **self._config_entry.data, + "firmware": firmware_info.firmware_type, + "firmware_version": firmware_info.firmware_version, + }, + ) + + +@pytest.fixture(name="update_config_entry") +async def mock_update_config_entry( + hass: HomeAssistant, +) -> AsyncGenerator[ConfigEntry]: + """Set up a mock Home Assistant Hardware firmware update entity.""" + await async_setup_component(hass, "homeassistant", {}) + await async_setup_component(hass, "homeassistant_hardware", {}) + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=mock_async_setup_entry, + ), + built_in=False, + ) + mock_platform(hass, "test.config_flow") + mock_platform( + hass, + "test.update", + MockPlatform(async_setup_entry=mock_async_setup_update_entities), + ) + + # Set up a mock integration using the hardware update entity + config_entry = MockConfigEntry( + domain=TEST_DOMAIN, + data={ + "device": TEST_DEVICE, + "firmware": "ezsp", + "firmware_version": "7.3.1.0 build 0", + }, + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.homeassistant_hardware.coordinator.FirmwareUpdateClient", + autospec=True, + ) as mock_update_client, + mock_config_flow(TEST_DOMAIN, ConfigFlow), + ): + mock_update_client.return_value.async_update_data.return_value = TEST_MANIFEST + yield config_entry + + +async def test_update_entity_installation( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test the Hardware firmware update entity installation.""" + + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + # Set up another integration communicating with the device + owning_config_entry = MockConfigEntry( + domain="another_integration", + data={ + "device": { + "path": TEST_DEVICE, + "flow_control": "hardware", + "baudrate": 115200, + }, + "radio_type": "ezsp", + }, + version=4, + ) + owning_config_entry.add_to_hass(hass) + owning_config_entry.mock_state(hass, ConfigEntryState.LOADED) + + # The integration provides firmware info + mock_hw_module = Mock() + mock_hw_module.get_firmware_info = lambda hass, config_entry: FirmwareInfo( + device=TEST_DEVICE, + firmware_type=ApplicationType.EZSP, + firmware_version="7.3.1.0 build 0", + owners=[OwningIntegration(config_entry_id=config_entry.entry_id)], + source="another_integration", + ) + + async_register_firmware_info_provider(hass, "another_integration", mock_hw_module) + + # Pretend the other integration loaded and notified hardware of the running firmware + await async_notify_firmware_info( + hass, + "another_integration", + mock_hw_module.get_firmware_info(hass, owning_config_entry), + ) + + state_before_update = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_before_update is not None + assert state_before_update.state == "unknown" + assert 
state_before_update.attributes["title"] == "EmberZNet" + assert state_before_update.attributes["installed_version"] == "7.3.1.0" + assert state_before_update.attributes["latest_version"] is None + + # When we check for an update, one will be shown + await hass.services.async_call( + "homeassistant", + "update_entity", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + state_after_update = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_after_update is not None + assert state_after_update.state == "on" + assert state_after_update.attributes["title"] == "EmberZNet" + assert state_after_update.attributes["installed_version"] == "7.3.1.0" + assert state_after_update.attributes["latest_version"] == "7.4.4.0" + assert state_after_update.attributes["release_summary"] == ( + "Some release notes go here" + ) + assert state_after_update.attributes["release_url"] == ( + "https://example.org/release_notes" + ) + + mock_firmware = Mock() + mock_flasher = AsyncMock() + + async def mock_flash_firmware(fw_image, progress_callback): + await asyncio.sleep(0) + progress_callback(0, 100) + await asyncio.sleep(0) + progress_callback(50, 100) + await asyncio.sleep(0) + progress_callback(100, 100) + + mock_flasher.flash_firmware = mock_flash_firmware + + # When we install it, the other integration is reloaded + with ( + patch( + "homeassistant.components.homeassistant_hardware.update.parse_firmware_image", + return_value=mock_firmware, + ), + patch( + "homeassistant.components.homeassistant_hardware.update.Flasher", + return_value=mock_flasher, + ), + patch( + "homeassistant.components.homeassistant_hardware.update.probe_silabs_firmware_info", + return_value=FirmwareInfo( + device=TEST_DEVICE, + firmware_type=ApplicationType.EZSP, + firmware_version="7.4.4.0 build 0", + owners=[], + source="probe", + ), + ), + patch.object( + owning_config_entry, "async_unload", wraps=owning_config_entry.async_unload + ) as owning_config_entry_unload, + ): + state_changes: list[Event[EventStateChangedData]] = async_capture_events( + hass, EVENT_STATE_CHANGED + ) + await hass.services.async_call( + "update", + "install", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + # Progress events are emitted during the installation + assert len(state_changes) == 7 + + # Indeterminate progress first + assert state_changes[0].data["new_state"].attributes["in_progress"] is True + assert state_changes[0].data["new_state"].attributes["update_percentage"] is None + + # Then the update starts + assert state_changes[1].data["new_state"].attributes["update_percentage"] == 0 + assert state_changes[2].data["new_state"].attributes["update_percentage"] == 50 + assert state_changes[3].data["new_state"].attributes["update_percentage"] == 100 + + # Once it is done, we probe the firmware + assert state_changes[4].data["new_state"].attributes["in_progress"] is True + assert state_changes[4].data["new_state"].attributes["update_percentage"] is None + + # Finally, the update finishes + assert state_changes[5].data["new_state"].attributes["update_percentage"] is None + assert state_changes[6].data["new_state"].attributes["update_percentage"] is None + assert state_changes[6].data["new_state"].attributes["in_progress"] is False + + # The owning integration was unloaded and is again running + assert len(owning_config_entry_unload.mock_calls) == 1 + + # After the firmware update, the entity has the new version and the correct state + state_after_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_after_install is not 
None + assert state_after_install.state == "off" + assert state_after_install.attributes["title"] == "EmberZNet" + assert state_after_install.attributes["installed_version"] == "7.4.4.0" + assert state_after_install.attributes["latest_version"] == "7.4.4.0" + + +async def test_update_entity_installation_failure( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test installation failing during flashing.""" + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", + "update_entity", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + state_before_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_before_install is not None + assert state_before_install.state == "on" + assert state_before_install.attributes["title"] == "EmberZNet" + assert state_before_install.attributes["installed_version"] == "7.3.1.0" + assert state_before_install.attributes["latest_version"] == "7.4.4.0" + + mock_flasher = AsyncMock() + mock_flasher.flash_firmware.side_effect = RuntimeError( + "Something broke during flashing!" + ) + + with ( + patch( + "homeassistant.components.homeassistant_hardware.update.parse_firmware_image", + return_value=Mock(), + ), + patch( + "homeassistant.components.homeassistant_hardware.update.Flasher", + return_value=mock_flasher, + ), + pytest.raises(HomeAssistantError, match="Failed to flash firmware"), + ): + await hass.services.async_call( + "update", + "install", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + # After the firmware update fails, we can still try again + state_after_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_after_install is not None + assert state_after_install.state == "on" + assert state_after_install.attributes["title"] == "EmberZNet" + assert state_after_install.attributes["installed_version"] == "7.3.1.0" + assert state_after_install.attributes["latest_version"] == "7.4.4.0" + + +async def test_update_entity_installation_probe_failure( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test installation failing during post-flashing probing.""" + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + "homeassistant", + "update_entity", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + state_before_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state_before_install is not None + assert state_before_install.state == "on" + assert state_before_install.attributes["title"] == "EmberZNet" + assert state_before_install.attributes["installed_version"] == "7.3.1.0" + assert state_before_install.attributes["latest_version"] == "7.4.4.0" + + with ( + patch( + "homeassistant.components.homeassistant_hardware.update.parse_firmware_image", + return_value=Mock(), + ), + patch( + "homeassistant.components.homeassistant_hardware.update.Flasher", + return_value=AsyncMock(), + ), + patch( + "homeassistant.components.homeassistant_hardware.update.probe_silabs_firmware_info", + return_value=None, + ), + pytest.raises( + HomeAssistantError, match="Failed to probe the firmware after flashing" + ), + ): + await hass.services.async_call( + "update", + "install", + {"entity_id": TEST_UPDATE_ENTITY_ID}, + blocking=True, + ) + + # After the firmware update fails, we can still try again + state_after_install = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert 
state_after_install is not None + assert state_after_install.state == "on" + assert state_after_install.attributes["title"] == "EmberZNet" + assert state_after_install.attributes["installed_version"] == "7.3.1.0" + assert state_after_install.attributes["latest_version"] == "7.4.4.0" + + +async def test_update_entity_state_restoration( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test the Hardware firmware update entity state restoration.""" + + mock_restore_cache_with_extra_data( + hass, + [ + ( + State(TEST_UPDATE_ENTITY_ID, "on"), + FirmwareUpdateExtraStoredData( + firmware_manifest=TEST_MANIFEST + ).as_dict(), + ) + ], + ) + + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + # The state is correctly restored + state = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state is not None + assert state.state == "on" + assert state.attributes["title"] == "EmberZNet" + assert state.attributes["installed_version"] == "7.3.1.0" + assert state.attributes["latest_version"] == "7.4.4.0" + assert state.attributes["release_summary"] == ("Some release notes go here") + assert state.attributes["release_url"] == ("https://example.org/release_notes") + + +async def test_update_entity_firmware_missing_from_manifest( + hass: HomeAssistant, update_config_entry: ConfigEntry +) -> None: + """Test the Hardware firmware update entity handles missing firmware.""" + + mock_restore_cache_with_extra_data( + hass, + [ + ( + State(TEST_UPDATE_ENTITY_ID, "on"), + # Ensure the manifest does not contain our expected firmware type + FirmwareUpdateExtraStoredData( + firmware_manifest=dataclasses.replace(TEST_MANIFEST, firmwares=()) + ).as_dict(), + ) + ], + ) + + assert await hass.config_entries.async_setup(update_config_entry.entry_id) + await hass.async_block_till_done() + + # The state is restored, accounting for the missing firmware + state = hass.states.get(TEST_UPDATE_ENTITY_ID) + assert state is not None + assert state.state == "unknown" + assert state.attributes["title"] == "EmberZNet" + assert state.attributes["installed_version"] == "7.3.1.0" + assert state.attributes["latest_version"] is None + assert state.attributes["release_summary"] is None + assert state.attributes["release_url"] is None + + +async def test_update_entity_graceful_firmware_type_callback_errors( + hass: HomeAssistant, + update_config_entry: ConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test firmware update entity handling of firmware type callback errors.""" + + session = async_get_clientsession(hass) + update_entity = MockFirmwareUpdateEntity( + device=TEST_DEVICE, + config_entry=update_config_entry, + update_coordinator=FirmwareUpdateCoordinator( + hass, + session, + TEST_FIRMWARE_RELEASES_URL, + ), + entity_description=TEST_FIRMWARE_ENTITY_DESCRIPTIONS[ApplicationType.EZSP], + ) + update_entity.hass = hass + await update_entity.async_added_to_hass() + + callback = Mock(side_effect=RuntimeError("Callback failed")) + unregister_callback = update_entity.add_firmware_type_changed_callback(callback) + + with caplog.at_level(logging.WARNING): + await async_notify_firmware_info( + hass, + "some_integration", + FirmwareInfo( + device=TEST_DEVICE, + firmware_type=ApplicationType.SPINEL, + firmware_version="SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4; EFR32; Oct 21 2024 14:40:57", + owners=[], + source="probe", + ), + ) + + unregister_callback() + assert "Failed to call firmware type changed callback" in caplog.text diff --git 
a/tests/components/homeassistant_hardware/test_util.py b/tests/components/homeassistant_hardware/test_util.py index b467380c431..1b7bfe4a8ac 100644 --- a/tests/components/homeassistant_hardware/test_util.py +++ b/tests/components/homeassistant_hardware/test_util.py @@ -205,6 +205,93 @@ async def test_owning_addon(hass: HomeAssistant) -> None: assert (await owning_addon.is_running(hass)) is False +async def test_owning_addon_temporarily_stop_info_error(hass: HomeAssistant) -> None: + """Test `OwningAddon` temporarily stopping with an info error.""" + + owning_addon = OwningAddon(slug="some-addon-slug") + mock_manager = AsyncMock() + mock_manager.async_get_addon_info.side_effect = AddonError() + + with patch( + "homeassistant.components.homeassistant_hardware.util.WaitingAddonManager", + return_value=mock_manager, + ): + async with owning_addon.temporarily_stop(hass): + pass + + # We never restart it + assert len(mock_manager.async_get_addon_info.mock_calls) == 1 + assert len(mock_manager.async_stop_addon.mock_calls) == 0 + assert len(mock_manager.async_wait_until_addon_state.mock_calls) == 0 + assert len(mock_manager.async_start_addon_waiting.mock_calls) == 0 + + +async def test_owning_addon_temporarily_stop_not_running(hass: HomeAssistant) -> None: + """Test `OwningAddon` temporarily stopping when the addon is not running.""" + + owning_addon = OwningAddon(slug="some-addon-slug") + + mock_manager = AsyncMock() + mock_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname="core_some_addon_slug", + options={}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + with patch( + "homeassistant.components.homeassistant_hardware.util.WaitingAddonManager", + return_value=mock_manager, + ): + async with owning_addon.temporarily_stop(hass): + pass + + # We never restart it + assert len(mock_manager.async_get_addon_info.mock_calls) == 1 + assert len(mock_manager.async_stop_addon.mock_calls) == 0 + assert len(mock_manager.async_wait_until_addon_state.mock_calls) == 0 + assert len(mock_manager.async_start_addon_waiting.mock_calls) == 0 + + +async def test_owning_addon_temporarily_stop(hass: HomeAssistant) -> None: + """Test `OwningAddon` temporarily stopping when the addon is running.""" + + owning_addon = OwningAddon(slug="some-addon-slug") + + mock_manager = AsyncMock() + mock_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname="core_some_addon_slug", + options={}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + mock_manager.async_stop_addon = AsyncMock() + mock_manager.async_wait_until_addon_state = AsyncMock() + mock_manager.async_start_addon_waiting = AsyncMock() + + # The error is propagated but it doesn't affect restarting the addon + with ( + patch( + "homeassistant.components.homeassistant_hardware.util.WaitingAddonManager", + return_value=mock_manager, + ), + pytest.raises(RuntimeError), + ): + async with owning_addon.temporarily_stop(hass): + raise RuntimeError("Some error") + + # We restart it + assert len(mock_manager.async_get_addon_info.mock_calls) == 1 + assert len(mock_manager.async_stop_addon.mock_calls) == 1 + assert len(mock_manager.async_wait_until_addon_state.mock_calls) == 1 + assert len(mock_manager.async_start_addon_waiting.mock_calls) == 1 + + async def test_owning_integration(hass: HomeAssistant) -> None: """Test `OwningIntegration`.""" config_entry = MockConfigEntry(domain="mock_domain", unique_id="some_unique_id") @@ -225,6 +312,67 @@ async def 
test_owning_integration(hass: HomeAssistant) -> None: assert (await owning_integration2.is_running(hass)) is False +async def test_owning_integration_temporarily_stop_missing_entry( + hass: HomeAssistant, +) -> None: + """Test temporarily stopping the integration when the config entry doesn't exist.""" + missing_integration = OwningIntegration(config_entry_id="missing_entry_id") + + with ( + patch.object(hass.config_entries, "async_unload") as mock_unload, + patch.object(hass.config_entries, "async_setup") as mock_setup, + ): + async with missing_integration.temporarily_stop(hass): + pass + + # Because there's no matching entry, no unload or setup calls are made + assert len(mock_unload.mock_calls) == 0 + assert len(mock_setup.mock_calls) == 0 + + +async def test_owning_integration_temporarily_stop_not_loaded( + hass: HomeAssistant, +) -> None: + """Test temporarily stopping the integration when the config entry is not loaded.""" + entry = MockConfigEntry(domain="test_domain") + entry.add_to_hass(hass) + entry.mock_state(hass, ConfigEntryState.NOT_LOADED) + + integration = OwningIntegration(config_entry_id=entry.entry_id) + + with ( + patch.object(hass.config_entries, "async_unload") as mock_unload, + patch.object(hass.config_entries, "async_setup") as mock_setup, + ): + async with integration.temporarily_stop(hass): + pass + + # Since the entry was not loaded, we never unload or re-setup + assert len(mock_unload.mock_calls) == 0 + assert len(mock_setup.mock_calls) == 0 + + +async def test_owning_integration_temporarily_stop_loaded(hass: HomeAssistant) -> None: + """Test temporarily stopping the integration when the config entry is loaded.""" + entry = MockConfigEntry(domain="test_domain") + entry.add_to_hass(hass) + entry.mock_state(hass, ConfigEntryState.LOADED) + + integration = OwningIntegration(config_entry_id=entry.entry_id) + + with ( + patch.object(hass.config_entries, "async_unload") as mock_unload, + patch.object(hass.config_entries, "async_setup") as mock_setup, + pytest.raises(RuntimeError), + ): + async with integration.temporarily_stop(hass): + raise RuntimeError("Some error during the temporary stop") + + # We expect one unload followed by one setup call + mock_unload.assert_called_once_with(entry.entry_id) + mock_setup.assert_called_once_with(entry.entry_id) + + async def test_firmware_info(hass: HomeAssistant) -> None: """Test `FirmwareInfo`.""" diff --git a/tests/components/homeassistant_sky_connect/common.py b/tests/components/homeassistant_sky_connect/common.py new file mode 100644 index 00000000000..335fd6d2e12 --- /dev/null +++ b/tests/components/homeassistant_sky_connect/common.py @@ -0,0 +1,21 @@ +"""Common constants for the SkyConnect integration tests.""" + +from homeassistant.helpers.service_info.usb import UsbServiceInfo + +USB_DATA_SKY = UsbServiceInfo( + device="/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + vid="10C4", + pid="EA60", + serial_number="9e2adbd75b8beb119fe564a0f320645d", + manufacturer="Nabu Casa", + description="SkyConnect v1.0", +) + +USB_DATA_ZBT1 = UsbServiceInfo( + device="/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + vid="10C4", + pid="EA60", + serial_number="9e2adbd75b8beb119fe564a0f320645d", + manufacturer="Nabu Casa", + description="Home Assistant Connect ZBT-1", +) diff --git a/tests/components/homeassistant_sky_connect/conftest.py b/tests/components/homeassistant_sky_connect/conftest.py index c5bfa4bd609..89ec292d879 100644 
--- a/tests/components/homeassistant_sky_connect/conftest.py +++ b/tests/components/homeassistant_sky_connect/conftest.py @@ -47,3 +47,13 @@ def mock_zha_get_last_network_settings() -> Generator[None]: AsyncMock(return_value=None), ): yield + + +@pytest.fixture(autouse=True) +def mock_usb_path_exists() -> Generator[None]: + """Mock os.path.exists to allow the ZBT-1 integration to load.""" + with patch( + "homeassistant.components.homeassistant_sky_connect.os.path.exists", + return_value=True, + ): + yield diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index d8542002ae8..44a5e0029c3 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -22,26 +22,10 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.service_info.usb import UsbServiceInfo +from .common import USB_DATA_SKY, USB_DATA_ZBT1 + from tests.common import MockConfigEntry -USB_DATA_SKY = UsbServiceInfo( - device="/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", - vid="10C4", - pid="EA60", - serial_number="9e2adbd75b8beb119fe564a0f320645d", - manufacturer="Nabu Casa", - description="SkyConnect v1.0", -) - -USB_DATA_ZBT1 = UsbServiceInfo( - device="/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_9e2adbd75b8beb119fe564a0f320645d-if00-port0", - vid="10C4", - pid="EA60", - serial_number="9e2adbd75b8beb119fe564a0f320645d", - manufacturer="Nabu Casa", - description="Home Assistant Connect ZBT-1", -) - @pytest.mark.parametrize( ("usb_data", "model"), @@ -76,7 +60,7 @@ async def test_config_flow( return_value=FirmwareInfo( device=usb_data.device, firmware_type=ApplicationType.EZSP, - firmware_version=None, + firmware_version="7.4.4.0 build 0", owners=[], source="probe", ), @@ -92,6 +76,7 @@ async def test_config_flow( config_entry = result["result"] assert config_entry.data == { "firmware": "ezsp", + "firmware_version": "7.4.4.0 build 0", "device": usb_data.device, "manufacturer": usb_data.manufacturer, "pid": usb_data.pid, @@ -161,7 +146,7 @@ async def test_options_flow( return_value=FirmwareInfo( device=usb_data.device, firmware_type=ApplicationType.EZSP, - firmware_version=None, + firmware_version="7.4.4.0 build 0", owners=[], source="probe", ), @@ -177,6 +162,7 @@ async def test_options_flow( assert config_entry.data == { "firmware": "ezsp", + "firmware_version": "7.4.4.0 build 0", "device": usb_data.device, "manufacturer": usb_data.manufacturer, "pid": usb_data.pid, diff --git a/tests/components/homeassistant_sky_connect/test_init.py b/tests/components/homeassistant_sky_connect/test_init.py index 8e90039a4fc..f027a6d2fb8 100644 --- a/tests/components/homeassistant_sky_connect/test_init.py +++ b/tests/components/homeassistant_sky_connect/test_init.py @@ -1,15 +1,36 @@ """Test the Home Assistant SkyConnect integration.""" +from datetime import timedelta from unittest.mock import patch +import pytest + from homeassistant.components.homeassistant_hardware.util import ( ApplicationType, FirmwareInfo, ) -from homeassistant.components.homeassistant_sky_connect.const import DOMAIN +from homeassistant.components.homeassistant_sky_connect.const import ( + DESCRIPTION, + DOMAIN, + MANUFACTURER, + PID, + PRODUCT, + SERIAL_NUMBER, + VID, +) +from homeassistant.components.usb import USBDevice +from homeassistant.config_entries 
import ConfigEntryState +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.components.usb import ( + async_request_scan, + force_usb_polling_watcher, # noqa: F401 + patch_scanned_serial_ports, +) async def test_config_entry_migration_v2(hass: HomeAssistant) -> None: @@ -44,7 +65,7 @@ async def test_config_entry_migration_v2(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.version == 1 - assert config_entry.minor_version == 2 + assert config_entry.minor_version == 4 assert config_entry.data == { "description": "SkyConnect v1.0", "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", @@ -54,6 +75,222 @@ async def test_config_entry_migration_v2(hass: HomeAssistant) -> None: "manufacturer": "Nabu Casa", "product": "SkyConnect v1.0", # `description` has been copied to `product` "firmware": "spinel", # new key + "firmware_version": None, # new key } await hass.config_entries.async_unload(config_entry.entry_id) + + +async def test_setup_fails_on_missing_usb_port(hass: HomeAssistant) -> None: + """Test setup failing when the USB port is missing.""" + + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id", + data={ + "description": "SkyConnect v1.0", + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "product": "SkyConnect v1.0", + "firmware": "ezsp", + "firmware_version": "7.4.4.0", + }, + version=1, + minor_version=3, + ) + + config_entry.add_to_hass(hass) + + # Set up the config entry + with patch( + "homeassistant.components.homeassistant_sky_connect.os.path.exists" + ) as mock_exists: + mock_exists.return_value = False + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # Failed to set up, the device is missing + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + mock_exists.return_value = True + async_fire_time_changed(hass, dt_util.now() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + # Now it's ready + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.usefixtures("force_usb_polling_watcher") +async def test_usb_device_reactivity(hass: HomeAssistant) -> None: + """Test setting up USB monitoring.""" + assert await async_setup_component(hass, "usb", {"usb": {}}) + + await hass.async_block_till_done() + hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) + await hass.async_block_till_done() + + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id", + data={ + "description": "SkyConnect v1.0", + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "product": "SkyConnect v1.0", + "firmware": "ezsp", + "firmware_version": "7.4.4.0", + }, + version=1, + minor_version=3, + ) + + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.homeassistant_sky_connect.os.path.exists" + ) as 
mock_exists: + mock_exists.return_value = False + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # Failed to set up, the device is missing + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + # Now we make it available but do not wait + mock_exists.return_value = True + + with patch_scanned_serial_ports( + return_value=[ + USBDevice( + device="/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + vid="10C4", + pid="EA60", + serial_number="3c0ed67c628beb11b1cd64a0f320645d", + manufacturer="Nabu Casa", + description="SkyConnect v1.0", + ) + ], + ): + await async_request_scan(hass) + + # It loads immediately + await hass.async_block_till_done(wait_background_tasks=True) + assert config_entry.state is ConfigEntryState.LOADED + + # Wait for a bit for the USB scan debouncer to cool off + async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=5)) + + # Unplug the stick + mock_exists.return_value = False + + with patch_scanned_serial_ports(return_value=[]): + await async_request_scan(hass) + + # The integration has reloaded and is now in a failed state + await hass.async_block_till_done(wait_background_tasks=True) + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_bad_config_entry_fixing(hass: HomeAssistant) -> None: + """Test fixing/deleting config entries with bad data.""" + + # Newly-added ZBT-1 + new_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id-9e2adbd75b8beb119fe564a0f320645d", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9e2adbd75b8beb119fe564a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "9e2adbd75b8beb119fe564a0f320645d", + "manufacturer": "Nabu Casa", + "product": "SkyConnect v1.0", + "firmware": "ezsp", + "firmware_version": "7.4.4.0 (build 123)", + }, + version=1, + minor_version=3, + ) + + new_entry.add_to_hass(hass) + + # Old config entry, without firmware info + old_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id-3c0ed67c628beb11b1cd64a0f320645d", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_3c0ed67c628beb11b1cd64a0f320645d-if00-port0", + "vid": "10C4", + "pid": "EA60", + "serial_number": "3c0ed67c628beb11b1cd64a0f320645d", + "manufacturer": "Nabu Casa", + "description": "SkyConnect v1.0", + }, + version=1, + minor_version=1, + ) + + old_entry.add_to_hass(hass) + + # Bad config entry, missing most keys + bad_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id-9f6c4bba657cc9a4f0cea48bc5948562", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_9f6c4bba657cc9a4f0cea48bc5948562-if00-port0", + }, + version=1, + minor_version=2, + ) + + bad_entry.add_to_hass(hass) + + # Bad config entry, missing most keys, but fixable since the device is present + fixable_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="some_unique_id-4f5f3b26d59f8714a78b599690741999", + data={ + "device": "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_4f5f3b26d59f8714a78b599690741999-if00-port0", + }, + version=1, + minor_version=2, + ) + + fixable_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.homeassistant_sky_connect.scan_serial_ports", + return_value=[ + USBDevice( + device="/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_4f5f3b26d59f8714a78b599690741999-if00-port0", + vid="10C4", + pid="EA60", + serial_number="4f5f3b26d59f8714a78b599690741999", + manufacturer="Nabu Casa", + 
description="SkyConnect v1.0", + ) + ], + ): + await async_setup_component(hass, "homeassistant_sky_connect", {}) + + assert hass.config_entries.async_get_entry(new_entry.entry_id) is not None + assert hass.config_entries.async_get_entry(old_entry.entry_id) is not None + assert hass.config_entries.async_get_entry(fixable_entry.entry_id) is not None + + updated_entry = hass.config_entries.async_get_entry(fixable_entry.entry_id) + assert updated_entry is not None + assert updated_entry.data[VID] == "10C4" + assert updated_entry.data[PID] == "EA60" + assert updated_entry.data[SERIAL_NUMBER] == "4f5f3b26d59f8714a78b599690741999" + assert updated_entry.data[MANUFACTURER] == "Nabu Casa" + assert updated_entry.data[PRODUCT] == "SkyConnect v1.0" + assert updated_entry.data[DESCRIPTION] == "SkyConnect v1.0" + + untouched_bad_entry = hass.config_entries.async_get_entry(bad_entry.entry_id) + assert untouched_bad_entry.minor_version == 3 diff --git a/tests/components/homeassistant_sky_connect/test_update.py b/tests/components/homeassistant_sky_connect/test_update.py new file mode 100644 index 00000000000..b6c7291e0af --- /dev/null +++ b/tests/components/homeassistant_sky_connect/test_update.py @@ -0,0 +1,132 @@ +"""Test SkyConnect firmware update entity.""" + +import pytest + +from homeassistant.components.homeassistant_hardware.helpers import ( + async_notify_firmware_info, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .common import USB_DATA_ZBT1 + +from tests.common import MockConfigEntry + +UPDATE_ENTITY_ID = "update.home_assistant_connect_zbt_1_9e2adbd7_firmware" + + +async def test_zbt1_update_entity(hass: HomeAssistant) -> None: + """Test the ZBT-1 firmware update entity.""" + await async_setup_component(hass, "homeassistant", {}) + + # Set up the ZBT-1 integration + zbt1_config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": "ezsp", + "firmware_version": "7.3.1.0 build 0", + "device": USB_DATA_ZBT1.device, + "manufacturer": USB_DATA_ZBT1.manufacturer, + "pid": USB_DATA_ZBT1.pid, + "product": USB_DATA_ZBT1.description, + "serial_number": USB_DATA_ZBT1.serial_number, + "vid": USB_DATA_ZBT1.vid, + }, + version=1, + minor_version=3, + ) + zbt1_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(zbt1_config_entry.entry_id) + await hass.async_block_till_done() + + # Pretend ZHA loaded and notified hardware of the running firmware + await async_notify_firmware_info( + hass, + "zha", + FirmwareInfo( + device=USB_DATA_ZBT1.device, + firmware_type=ApplicationType.EZSP, + firmware_version="7.3.1.0 build 0", + owners=[], + source="zha", + ), + ) + await hass.async_block_till_done() + + state_ezsp = hass.states.get(UPDATE_ENTITY_ID) + assert state_ezsp is not None + assert state_ezsp.state == "unknown" + assert state_ezsp.attributes["title"] == "EmberZNet Zigbee" + assert state_ezsp.attributes["installed_version"] == "7.3.1.0" + assert state_ezsp.attributes["latest_version"] is None + + # Now, have OTBR push some info + await async_notify_firmware_info( + hass, + "otbr", + FirmwareInfo( + device=USB_DATA_ZBT1.device, + firmware_type=ApplicationType.SPINEL, + firmware_version="SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4; EFR32; Oct 21 2024 14:40:57", + owners=[], + source="otbr", + ), + ) + await hass.async_block_till_done() + + # After the firmware update, the entity has the 
new version and the correct state + state_spinel = hass.states.get(UPDATE_ENTITY_ID) + assert state_spinel is not None + assert state_spinel.state == "unknown" + assert state_spinel.attributes["title"] == "OpenThread RCP" + assert state_spinel.attributes["installed_version"] == "2.4.4.0" + assert state_spinel.attributes["latest_version"] is None + + +@pytest.mark.parametrize( + ("firmware", "version", "expected"), + [ + ("ezsp", "7.3.1.0 build 0", "EmberZNet Zigbee 7.3.1.0"), + ("spinel", "SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4", "OpenThread RCP 2.4.4.0"), + ("bootloader", "2.4.2", "Gecko Bootloader 2.4.2"), + ("cpc", "4.3.2", "Multiprotocol 4.3.2"), + ("router", "1.2.3.4", "Unknown 1.2.3.4"), # Not supported but still shown + ], +) +async def test_zbt1_update_entity_state( + hass: HomeAssistant, firmware: str, version: str, expected: str +) -> None: + """Test the ZBT-1 firmware update entity with different firmware types.""" + await async_setup_component(hass, "homeassistant", {}) + + zbt1_config_entry = MockConfigEntry( + domain="homeassistant_sky_connect", + data={ + "firmware": firmware, + "firmware_version": version, + "device": USB_DATA_ZBT1.device, + "manufacturer": USB_DATA_ZBT1.manufacturer, + "pid": USB_DATA_ZBT1.pid, + "product": USB_DATA_ZBT1.description, + "serial_number": USB_DATA_ZBT1.serial_number, + "vid": USB_DATA_ZBT1.vid, + }, + version=1, + minor_version=3, + ) + zbt1_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(zbt1_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(UPDATE_ENTITY_ID) + assert state is not None + assert ( + f"{state.attributes['title']} {state.attributes['installed_version']}" + == expected + ) diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index 78fd45c6b5b..46fec0a1f30 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -350,7 +350,7 @@ async def test_firmware_options_flow(hass: HomeAssistant) -> None: return_value=FirmwareInfo( device=RADIO_DEVICE, firmware_type=ApplicationType.EZSP, - firmware_version=None, + firmware_version="7.4.4.0 build 0", owners=[], source="probe", ), @@ -366,6 +366,7 @@ async def test_firmware_options_flow(hass: HomeAssistant) -> None: assert config_entry.data == { "firmware": "ezsp", + "firmware_version": "7.4.4.0 build 0", } diff --git a/tests/components/homeassistant_yellow/test_update.py b/tests/components/homeassistant_yellow/test_update.py new file mode 100644 index 00000000000..66404dc2176 --- /dev/null +++ b/tests/components/homeassistant_yellow/test_update.py @@ -0,0 +1,144 @@ +"""Test Yellow firmware update entity.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.homeassistant_hardware.helpers import ( + async_notify_firmware_info, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareInfo, +) +from homeassistant.components.homeassistant_yellow.const import RADIO_DEVICE +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + +UPDATE_ENTITY_ID = "update.home_assistant_yellow_radio_firmware" + + +async def test_yellow_update_entity(hass: HomeAssistant) -> None: + """Test the Yellow firmware update entity.""" + await async_setup_component(hass, "homeassistant", {}) + + # Set up the Yellow integration + 
yellow_config_entry = MockConfigEntry( + title="Home Assistant Yellow", + domain="homeassistant_yellow", + data={ + "firmware": "ezsp", + "firmware_version": "7.3.1.0 build 0", + "device": RADIO_DEVICE, + }, + version=1, + minor_version=3, + ) + yellow_config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.homeassistant_yellow.is_hassio", return_value=True + ), + patch( + "homeassistant.components.homeassistant_yellow.get_os_info", + return_value={"board": "yellow"}, + ), + ): + assert await hass.config_entries.async_setup(yellow_config_entry.entry_id) + await hass.async_block_till_done() + + # Pretend ZHA loaded and notified hardware of the running firmware + await async_notify_firmware_info( + hass, + "zha", + FirmwareInfo( + device=RADIO_DEVICE, + firmware_type=ApplicationType.EZSP, + firmware_version="7.3.1.0 build 0", + owners=[], + source="zha", + ), + ) + await hass.async_block_till_done() + + state_ezsp = hass.states.get(UPDATE_ENTITY_ID) + assert state_ezsp is not None + assert state_ezsp.state == "unknown" + assert state_ezsp.attributes["title"] == "EmberZNet Zigbee" + assert state_ezsp.attributes["installed_version"] == "7.3.1.0" + assert state_ezsp.attributes["latest_version"] is None + + # Now, have OTBR push some info + await async_notify_firmware_info( + hass, + "otbr", + FirmwareInfo( + device=RADIO_DEVICE, + firmware_type=ApplicationType.SPINEL, + firmware_version="SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4; EFR32; Oct 21 2024 14:40:57", + owners=[], + source="otbr", + ), + ) + await hass.async_block_till_done() + + # After the firmware update, the entity has the new version and the correct state + state_spinel = hass.states.get(UPDATE_ENTITY_ID) + assert state_spinel is not None + assert state_spinel.state == "unknown" + assert state_spinel.attributes["title"] == "OpenThread RCP" + assert state_spinel.attributes["installed_version"] == "2.4.4.0" + assert state_spinel.attributes["latest_version"] is None + + +@pytest.mark.parametrize( + ("firmware", "version", "expected"), + [ + ("ezsp", "7.3.1.0 build 0", "EmberZNet Zigbee 7.3.1.0"), + ("spinel", "SL-OPENTHREAD/2.4.4.0_GitHub-7074a43e4", "OpenThread RCP 2.4.4.0"), + ("bootloader", "2.4.2", "Gecko Bootloader 2.4.2"), + ("cpc", "4.3.2", "Multiprotocol 4.3.2"), + ("router", "1.2.3.4", "Unknown 1.2.3.4"), # Not supported but still shown + ], +) +async def test_yellow_update_entity_state( + hass: HomeAssistant, firmware: str, version: str, expected: str +) -> None: + """Test the Yellow firmware update entity with different firmware types.""" + await async_setup_component(hass, "homeassistant", {}) + + # Set up the Yellow integration + yellow_config_entry = MockConfigEntry( + title="Home Assistant Yellow", + domain="homeassistant_yellow", + data={ + "firmware": firmware, + "firmware_version": version, + "device": RADIO_DEVICE, + }, + version=1, + minor_version=3, + ) + yellow_config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.homeassistant_yellow.is_hassio", return_value=True + ), + patch( + "homeassistant.components.homeassistant_yellow.get_os_info", + return_value={"board": "yellow"}, + ), + ): + assert await hass.config_entries.async_setup(yellow_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(UPDATE_ENTITY_ID) + assert state is not None + assert ( + f"{state.attributes['title']} {state.attributes['installed_version']}" + == expected + ) diff --git a/tests/components/homee/fixtures/binary_sensors.json 
b/tests/components/homee/fixtures/binary_sensors.json new file mode 100644 index 00000000000..5ced5dc51da --- /dev/null +++ b/tests/components/homee/fixtures/binary_sensors.json @@ -0,0 +1,891 @@ +{ + "id": 1, + "name": "Test Binary Sensor", + "profile": 4026, + "image": "default", + "favorite": 0, + "order": 20, + "protocol": 1, + "routing": 0, + "state": 1, + "state_changed": 1709379826, + "added": 1676199446, + "history": 1, + "cube_type": 1, + "note": "", + "services": 5, + "phonetic_name": "", + "owner": 2, + "security": 0, + "attributes": [ + { + "id": 1, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 69, + "state": 1, + "last_changed": 1706461181, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 2, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 17, + "state": 1, + "last_changed": 1691668428, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 3, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 132, + "state": 1, + "last_changed": 1691668428, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 4, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 1.0, + "target_value": 1.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 228, + "state": 1, + "last_changed": 1691668428, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 5, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 12, + "state": 1, + "last_changed": 1699456267, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 6, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 52, + "state": 1, + "last_changed": 1694176210, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 7, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + 
"unit": "", + "step_value": 1.0, + "editable": 0, + "type": 68, + "state": 1, + "last_changed": 1694176210, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 8, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 1.0, + "target_value": 1.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 139, + "state": 1, + "last_changed": 1650402359, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 9, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 232, + "state": 1, + "last_changed": 1711897362, + "changed_by": 4, + "changed_by_id": 5, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 10, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 1.0, + "target_value": 1.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 54, + "state": 1, + "last_changed": 1650402359, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 11, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 70, + "state": 1, + "last_changed": 1738231378, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 4, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 12, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 78, + "state": 1, + "last_changed": 1738231378, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 4, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 13, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 77, + "state": 1, + "last_changed": 1735964135, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 14, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 25, + "state": 1, + "last_changed": 1709933563, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + 
"automations": ["reset"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 15, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 1.0, + "target_value": 1.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 330, + "state": 1, + "last_changed": 1709933563, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 16, + "node_id": 1, + "instance": 2, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 1, + "state": 1, + "last_changed": 1694024544, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "can_observe": [300], + "automations": ["reset"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 17, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 14, + "state": 1, + "last_changed": 1739320320, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 18, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 143, + "state": 1, + "last_changed": 1694992768, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 19, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 140, + "state": 1, + "last_changed": 1718900928, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 20, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 76, + "state": 1, + "last_changed": 1718900928, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 21, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 182, + "state": 1, + "last_changed": 1718900928, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 22, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + 
"target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 101, + "state": 1, + "last_changed": 1700056646, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + }, + { + "id": 23, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "n%2Fa", + "step_value": 1.0, + "editable": 0, + "type": 289, + "state": 1, + "last_changed": 1736106312, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 24, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 2, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 16, + "state": 1, + "last_changed": 1616314530, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 25, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 2, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 181, + "state": 1, + "last_changed": 1616314530, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 26, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 138, + "state": 1, + "last_changed": 1700747644, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 27, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 30, + "state": 1, + "last_changed": 1709933563, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 28, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 141, + "state": 1, + "last_changed": 1700747644, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + }, + { + "id": 29, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 0.0, + "unit": "", + "step_value": 1.0, + "editable": 0, + "type": 80, + "state": 1, + "last_changed": 1700747644, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "", + 
"options": { + "automations": ["reset"], + "history": { + "day": 182, + "week": 26, + "month": 6, + "stepped": true + } + } + } + ] +} diff --git a/tests/components/homee/fixtures/lock.json b/tests/components/homee/fixtures/lock.json new file mode 100644 index 00000000000..79fd53e0311 --- /dev/null +++ b/tests/components/homee/fixtures/lock.json @@ -0,0 +1,52 @@ +{ + "id": 1, + "name": "Test Lock", + "profile": 2007, + "image": "default", + "favorite": 0, + "order": 31, + "protocol": 1, + "routing": 0, + "state": 1, + "state_changed": 1711799526, + "added": 1645036891, + "history": 1, + "cube_type": 1, + "note": "", + "services": 3, + "phonetic_name": "", + "owner": 2, + "security": 0, + "attributes": [ + { + "id": 1, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 1, + "current_value": 0.0, + "target_value": 0.0, + "last_value": 1.0, + "unit": "", + "step_value": 1.0, + "editable": 1, + "type": 232, + "state": 1, + "last_changed": 1711897362, + "changed_by": 4, + "changed_by_id": 5, + "based_on": 1, + "data": "", + "name": "", + "options": { + "automations": ["toggle"], + "history": { + "day": 35, + "week": 5, + "month": 1, + "stepped": true + } + } + } + ] +} diff --git a/tests/components/homee/fixtures/selects.json b/tests/components/homee/fixtures/selects.json new file mode 100644 index 00000000000..27adcf07298 --- /dev/null +++ b/tests/components/homee/fixtures/selects.json @@ -0,0 +1,43 @@ +{ + "id": 1, + "name": "Test Select", + "profile": 33, + "image": "nodeicon_dimmablebulb", + "favorite": 0, + "order": 27, + "protocol": 3, + "routing": 0, + "state": 1, + "state_changed": 1736188706, + "added": 1610308228, + "history": 1, + "cube_type": 3, + "note": "", + "services": 7, + "phonetic_name": "", + "owner": 2, + "security": 0, + "attributes": [ + { + "id": 1, + "node_id": 1, + "instance": 0, + "minimum": 0, + "maximum": 2, + "current_value": 1.0, + "target_value": 1.0, + "last_value": 1.0, + "unit": "n/a", + "step_value": 1.0, + "editable": 1, + "type": 226, + "state": 1, + "last_changed": 1680027880, + "changed_by": 1, + "changed_by_id": 0, + "based_on": 1, + "data": "", + "name": "" + } + ] +} diff --git a/tests/components/homee/snapshots/test_binary_sensor.ambr b/tests/components/homee/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..4926c048f5b --- /dev/null +++ b/tests/components/homee/snapshots/test_binary_sensor.ambr @@ -0,0 +1,1392 @@ +# serializer version: 1 +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery', + 'unique_id': '00055511EECC-1-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Binary Sensor Battery', + }), + 'context': , + 'entity_id': 
'binary_sensor.test_binary_sensor_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_blackout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_blackout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Blackout', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'blackout_alarm', + 'unique_id': '00055511EECC-1-2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_blackout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Blackout', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_blackout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_carbon_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_carbon_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon dioxide', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'carbon_dioxide', + 'unique_id': '00055511EECC-1-4', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_carbon_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Carbon dioxide', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_carbon_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_carbon_monoxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_carbon_monoxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon monoxide', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'carbon_monoxide', + 'unique_id': '00055511EECC-1-3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_carbon_monoxide-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'carbon_monoxide', + 'friendly_name': 'Test Binary Sensor Carbon monoxide', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_carbon_monoxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_flood-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_flood', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flood', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flood', + 'unique_id': '00055511EECC-1-5', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_flood-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Test Binary Sensor Flood', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_flood', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_high_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_high_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'High temperature', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'high_temperature', + 'unique_id': '00055511EECC-1-6', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_high_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Binary Sensor High temperature', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_high_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_leak-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_leak', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Leak', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'leak_alarm', + 'unique_id': '00055511EECC-1-7', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_sensor_snapshot[binary_sensor.test_binary_sensor_leak-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Leak', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_leak', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_load-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_load', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Load', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'load_alarm', + 'unique_id': '00055511EECC-1-8', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_load-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Binary Sensor Load', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_load', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lock', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lock', + 'unique_id': '00055511EECC-1-9', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'lock', + 'friendly_name': 'Test Binary Sensor Lock', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_low_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_low_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Low temperature', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'low_temperature', + 'unique_id': '00055511EECC-1-10', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor_snapshot[binary_sensor.test_binary_sensor_low_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'cold', + 'friendly_name': 'Test Binary Sensor Low temperature', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_low_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_malfunction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_malfunction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Malfunction', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'malfunction', + 'unique_id': '00055511EECC-1-11', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_malfunction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Malfunction', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_malfunction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_maximum_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_maximum_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Maximum level', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'maximum', + 'unique_id': '00055511EECC-1-12', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_maximum_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Maximum level', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_maximum_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_minimum_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_minimum_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Minimum level', + 'platform': 'homee', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'minimum', + 'unique_id': '00055511EECC-1-13', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_minimum_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Minimum level', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_minimum_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motion', + 'unique_id': '00055511EECC-1-14', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'Test Binary Sensor Motion', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_motor_blocked-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_motor_blocked', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motor blocked', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'motor_blocked', + 'unique_id': '00055511EECC-1-15', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_motor_blocked-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Motor blocked', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_motor_blocked', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_opening-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_opening', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Opening', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'opening', + 'unique_id': '00055511EECC-1-17', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_opening-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Test Binary Sensor Opening', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_opening', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_overcurrent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_overcurrent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overcurrent', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overcurrent', + 'unique_id': '00055511EECC-1-18', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_overcurrent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Overcurrent', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_overcurrent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_overload-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_overload', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overload', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overload', + 'unique_id': '00055511EECC-1-19', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_overload-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Overload', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_overload', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_plug-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_plug', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Plug', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'plug', + 'unique_id': '00055511EECC-1-16', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_plug-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'plug', + 'friendly_name': 'Test Binary Sensor Plug', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_plug', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power', + 'unique_id': '00055511EECC-1-21', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Binary Sensor Power', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_presence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'presence', + 'unique_id': '00055511EECC-1-20', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'presence', + 'friendly_name': 'Test Binary Sensor Presence', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_rain-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_rain', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rain', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rain', + 'unique_id': '00055511EECC-1-22', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_rain-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Test Binary Sensor Rain', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_rain', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_replace_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_replace_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Replace filter', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'replace_filter', + 'unique_id': '00055511EECC-1-23', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_replace_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Replace filter', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_replace_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_binary_sensor_smoke', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Smoke', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smoke', + 'unique_id': '00055511EECC-1-24', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'Test Binary Sensor Smoke', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_smoke', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_storage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_storage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Storage', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage', + 'unique_id': '00055511EECC-1-25', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_storage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Storage', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_storage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_surge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_surge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Surge', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'surge', + 'unique_id': '00055511EECC-1-26', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_surge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Surge', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_surge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_tamper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_tamper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tamper', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tamper', + 'unique_id': '00055511EECC-1-27', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_tamper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Test Binary Sensor Tamper', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_tamper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_voltage_drop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_voltage_drop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , 
+ 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage drop', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_drop', + 'unique_id': '00055511EECC-1-28', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_voltage_drop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Binary Sensor Voltage drop', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_voltage_drop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_water-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_binary_sensor_water', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water', + 'unique_id': '00055511EECC-1-29', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_snapshot[binary_sensor.test_binary_sensor_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Test Binary Sensor Water', + }), + 'context': , + 'entity_id': 'binary_sensor.test_binary_sensor_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/homee/snapshots/test_lock.ambr b/tests/components/homee/snapshots/test_lock.ambr new file mode 100644 index 00000000000..d055039cca4 --- /dev/null +++ b/tests/components/homee/snapshots/test_lock.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_lock_snapshot[lock.test_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.test_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00055511EECC-1-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_lock_snapshot[lock.test_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': 'unknown-5', + 'friendly_name': 'Test Lock', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.test_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unlocked', + }) +# --- diff --git a/tests/components/homee/snapshots/test_select.ambr b/tests/components/homee/snapshots/test_select.ambr new file mode 100644 index 00000000000..9fa831230c2 --- /dev/null +++ b/tests/components/homee/snapshots/test_select.ambr @@ -0,0 +1,59 @@ +# serializer 
version: 1 +# name: test_select_snapshot[select.test_select_repeater_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'level1', + 'level2', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.test_select_repeater_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Repeater mode', + 'platform': 'homee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'repeater_mode', + 'unique_id': '00055511EECC-1-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_select_snapshot[select.test_select_repeater_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Select Repeater mode', + 'options': list([ + 'off', + 'level1', + 'level2', + ]), + }), + 'context': , + 'entity_id': 'select.test_select_repeater_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'level1', + }) +# --- diff --git a/tests/components/homee/test_binary_sensor.py b/tests/components/homee/test_binary_sensor.py new file mode 100644 index 00000000000..50662616379 --- /dev/null +++ b/tests/components/homee/test_binary_sensor.py @@ -0,0 +1,29 @@ +"""Test homee binary sensors.""" + +from unittest.mock import MagicMock, patch + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import build_mock_node, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_sensor_snapshot( + hass: HomeAssistant, + mock_homee: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the multisensor snapshot.""" + mock_homee.nodes = [build_mock_node("binary_sensors.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + with patch("homeassistant.components.homee.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/homee/test_lock.py b/tests/components/homee/test_lock.py new file mode 100644 index 00000000000..3e6ff3f8ec6 --- /dev/null +++ b/tests/components/homee/test_lock.py @@ -0,0 +1,125 @@ +"""Test Homee locks.""" + +from unittest.mock import MagicMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.lock import ( + DOMAIN as LOCK_DOMAIN, + SERVICE_LOCK, + SERVICE_UNLOCK, + LockState, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import build_mock_node, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def setup_lock( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homee: MagicMock +) -> None: + """Setups the integration lock tests.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + await setup_integration(hass, mock_config_entry) + + +@pytest.mark.parametrize( + ("service", "target_value"), + [ + (SERVICE_LOCK, 1), + (SERVICE_UNLOCK, 0), + ], +) +async def test_lock_services( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + service: str, + target_value: int, +) -> None: + """Test lock services.""" + await setup_lock(hass, mock_config_entry, mock_homee) + + await hass.services.async_call( + LOCK_DOMAIN, + service, + {ATTR_ENTITY_ID: "lock.test_lock"}, + ) + mock_homee.set_value.assert_called_once_with(1, 1, target_value) + + +@pytest.mark.parametrize( + ("target_value", "current_value", "expected"), + [ + (1.0, 1.0, LockState.LOCKED), + (0.0, 0.0, LockState.UNLOCKED), + (1.0, 0.0, LockState.LOCKING), + (0.0, 1.0, LockState.UNLOCKING), + ], +) +async def test_lock_state( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + target_value: float, + current_value: float, + expected: LockState, +) -> None: + """Test lock state.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + attribute = mock_homee.nodes[0].attributes[0] + attribute.target_value = target_value + attribute.current_value = current_value + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("lock.test_lock").state == expected + + +@pytest.mark.parametrize( + ("attr_changed_by", "changed_by_id", "expected"), + [ + (1, 0, "itself-0"), + (2, 1, "user-testuser"), + (3, 54, "homeegram-54"), + (6, 0, "ai-0"), + ], +) +async def test_lock_changed_by( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + attr_changed_by: int, + changed_by_id: int, + expected: str, +) -> None: + """Test lock changed by entries.""" + mock_homee.nodes = [build_mock_node("lock.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + mock_homee.get_user_by_id.return_value = MagicMock(username="testuser") + attribute = mock_homee.nodes[0].attributes[0] + attribute.changed_by = attr_changed_by + attribute.changed_by_id = changed_by_id + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("lock.test_lock").attributes["changed_by"] == expected + + +async def test_lock_snapshot( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homee: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the lock snapshots.""" + with patch("homeassistant.components.homee.PLATFORMS", [Platform.LOCK]): + await setup_lock(hass, mock_config_entry, mock_homee) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/homee/test_select.py b/tests/components/homee/test_select.py new file mode 100644 index 00000000000..c0dec2234d6 --- /dev/null +++ b/tests/components/homee/test_select.py @@ -0,0 +1,106 @@ +"""Test homee selects.""" + +from unittest.mock import MagicMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + 
SERVICE_SELECT_FIRST, + SERVICE_SELECT_LAST, + SERVICE_SELECT_NEXT, + SERVICE_SELECT_OPTION, + SERVICE_SELECT_PREVIOUS, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . import build_mock_node, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def setup_select( + hass: HomeAssistant, mock_homee: MagicMock, mock_config_entry: MockConfigEntry +) -> None: + """Setups the integration for select tests.""" + mock_homee.nodes = [build_mock_node("selects.json")] + mock_homee.get_node_by_id.return_value = mock_homee.nodes[0] + await setup_integration(hass, mock_config_entry) + + +@pytest.mark.parametrize( + ("service", "extra_options", "expected"), + [ + (SERVICE_SELECT_FIRST, {}, 0), + (SERVICE_SELECT_LAST, {}, 2), + (SERVICE_SELECT_NEXT, {}, 2), + (SERVICE_SELECT_PREVIOUS, {}, 0), + ( + SERVICE_SELECT_OPTION, + { + "option": "level2", + }, + 2, + ), + ], +) +async def test_select_services( + hass: HomeAssistant, + mock_homee: MagicMock, + mock_config_entry: MockConfigEntry, + service: str, + extra_options: dict[str, str], + expected: int, +) -> None: + """Test the select services.""" + await setup_select(hass, mock_homee, mock_config_entry) + + OPTIONS = {ATTR_ENTITY_ID: "select.test_select_repeater_mode"} + OPTIONS.update(extra_options) + + await hass.services.async_call( + SELECT_DOMAIN, + service, + OPTIONS, + blocking=True, + ) + + mock_homee.set_value.assert_called_once_with(1, 1, expected) + + +async def test_select_option_service_error( + hass: HomeAssistant, + mock_homee: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the select_option service called with invalid option.""" + await setup_select(hass, mock_homee, mock_config_entry) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.test_select_repeater_mode", + "option": "invalid", + }, + blocking=True, + ) + + +async def test_select_snapshot( + hass: HomeAssistant, + mock_homee: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the select entity snapshot.""" + with patch("homeassistant.components.homee.PLATFORMS", [Platform.SELECT]): + await setup_select(hass, mock_homee, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/homekit/test_type_switches.py b/tests/components/homekit/test_type_switches.py index 141141e7f15..6a30877a795 100644 --- a/tests/components/homekit/test_type_switches.py +++ b/tests/components/homekit/test_type_switches.py @@ -12,6 +12,7 @@ from homeassistant.components.homekit.const import ( TYPE_VALVE, ) from homeassistant.components.homekit.type_switches import ( + LawnMower, Outlet, SelectSwitch, Switch, @@ -19,6 +20,13 @@ from homeassistant.components.homekit.type_switches import ( Valve, ValveSwitch, ) +from homeassistant.components.lawn_mower import ( + DOMAIN as LAWN_MOWER_DOMAIN, + SERVICE_DOCK, + SERVICE_START_MOWING, + LawnMowerActivity, + LawnMowerEntityFeature, +) from homeassistant.components.select import ATTR_OPTIONS from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, @@ -383,6 +391,73 @@ async def test_vacuum_set_state_without_returnhome_and_start_support( assert 
events[-1].data[ATTR_VALUE] is None +async def test_lawn_mower_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: + """Test if Lawn mower accessory and HA are updated accordingly.""" + entity_id = "lawn_mower.mower" + + hass.states.async_set( + entity_id, + None, + { + ATTR_SUPPORTED_FEATURES: LawnMowerEntityFeature.DOCK + | LawnMowerEntityFeature.START_MOWING + }, + ) + await hass.async_block_till_done() + + acc = LawnMower(hass, hk_driver, "LawnMower", entity_id, 2, None) + acc.run() + await hass.async_block_till_done() + assert acc.aid == 2 + assert acc.category == 8 # Switch + + assert acc.char_on.value == 0 + + hass.states.async_set( + entity_id, + LawnMowerActivity.MOWING, + { + ATTR_SUPPORTED_FEATURES: LawnMowerEntityFeature.DOCK + | LawnMowerEntityFeature.START_MOWING + }, + ) + await hass.async_block_till_done() + assert acc.char_on.value == 1 + + hass.states.async_set( + entity_id, + LawnMowerActivity.DOCKED, + { + ATTR_SUPPORTED_FEATURES: LawnMowerEntityFeature.DOCK + | LawnMowerEntityFeature.START_MOWING + }, + ) + await hass.async_block_till_done() + assert acc.char_on.value == 0 + + # Set from HomeKit + call_turn_on = async_mock_service(hass, LAWN_MOWER_DOMAIN, SERVICE_START_MOWING) + call_turn_off = async_mock_service(hass, LAWN_MOWER_DOMAIN, SERVICE_DOCK) + + acc.char_on.client_update_value(1) + await hass.async_block_till_done() + assert acc.char_on.value == 1 + assert call_turn_on + assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 1 + assert events[-1].data[ATTR_VALUE] is None + + acc.char_on.client_update_value(0) + await hass.async_block_till_done() + assert acc.char_on.value == 0 + assert call_turn_off + assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 2 + assert events[-1].data[ATTR_VALUE] is None + + async def test_reset_switch( hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index fc4cfa78ca4..69c347ef55a 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -69,7 +69,6 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, - CONF_TEMPERATURE_UNIT, EVENT_HOMEASSISTANT_START, STATE_UNAVAILABLE, STATE_UNKNOWN, @@ -77,6 +76,7 @@ from homeassistant.const import ( ) from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from tests.common import async_mock_service @@ -858,6 +858,7 @@ async def test_thermostat_fahrenheit( ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" + hass.config.units = US_CUSTOMARY_SYSTEM # support_ = True hass.states.async_set( @@ -869,10 +870,7 @@ async def test_thermostat_fahrenheit( }, ) await hass.async_block_till_done() - with patch.object( - hass.config.units, CONF_TEMPERATURE_UNIT, new=UnitOfTemperature.FAHRENHEIT - ): - acc = Thermostat(hass, hk_driver, "Climate", entity_id, 1, None) + acc = Thermostat(hass, hk_driver, "Climate", entity_id, 1, None) hk_driver.add_accessory(acc) acc.run() await hass.async_block_till_done() @@ -1786,13 +1784,11 @@ async def test_water_heater_fahrenheit( ) -> None: """Test if accessory and HA are update accordingly.""" entity_id = "water_heater.test" + hass.config.units = US_CUSTOMARY_SYSTEM 
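# Editor's sketch (not part of the diff): the Fahrenheit tests here now switch the whole
# unit system via `hass.config.units = US_CUSTOMARY_SYSTEM` instead of patching the
# temperature unit. HomeKit itself always works in Celsius, so Fahrenheit state values go
# through Home Assistant's standard temperature conversion, roughly like this
# (the 71.6 reading is an example value only, not taken from these tests):
from homeassistant.const import UnitOfTemperature
from homeassistant.util.unit_conversion import TemperatureConverter

fahrenheit_reading = 71.6
celsius = TemperatureConverter.convert(
    fahrenheit_reading, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS
)
assert round(celsius, 1) == 22.0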
hass.states.async_set(entity_id, HVACMode.HEAT) await hass.async_block_till_done() - with patch.object( - hass.config.units, CONF_TEMPERATURE_UNIT, new=UnitOfTemperature.FAHRENHEIT - ): - acc = WaterHeater(hass, hk_driver, "WaterHeater", entity_id, 2, None) + acc = WaterHeater(hass, hk_driver, "WaterHeater", entity_id, 2, None) acc.run() await hass.async_block_till_done() diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index a41964d98cc..62b53df33f2 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -14352,7 +14352,7 @@ 'original_name': 'LG webOS TV AF80', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_48', 'unit_of_measurement': None, @@ -14371,7 +14371,7 @@ 'AV', 'HDMI 4', ]), - 'supported_features': , + 'supported_features': , }), 'entity_id': 'media_player.lg_webos_tv_af80', 'state': 'on', diff --git a/tests/components/homekit_controller/specific_devices/test_lg_tv.py b/tests/components/homekit_controller/specific_devices/test_lg_tv.py new file mode 100644 index 00000000000..48d1fc3ebdc --- /dev/null +++ b/tests/components/homekit_controller/specific_devices/test_lg_tv.py @@ -0,0 +1,56 @@ +"""Test against characteristics captured from an LG TV.""" + +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE_LIST, + MediaPlayerEntityFeature, +) +from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_ON +from homeassistant.core import HomeAssistant + +from ..common import ( + HUB_TEST_ACCESSORY_ID, + DeviceTestInfo, + assert_devices_and_entities_created, + setup_accessories_from_file, + setup_test_accessories, +) + + +async def test_lg_tv_setup(hass: HomeAssistant) -> None: + """Test that a LG TV can be correctly setup in HA.""" + accessories = await setup_accessories_from_file(hass, "lg_tv.json") + await setup_test_accessories(hass, accessories) + + await assert_devices_and_entities_created( + hass, + DeviceTestInfo( + unique_id=HUB_TEST_ACCESSORY_ID, + name="LG webOS TV AF80", + model="OLED55B9PUA", + manufacturer="LG Electronics", + sw_version="04.71.04", + hw_version="1", + serial_number="A0000A000000000A", + devices=[], + entities=[], + ), + ) + + state = hass.states.get("media_player.lg_webos_tv_af80") + assert state is not None + assert state.state == STATE_ON + assert state.attributes[ATTR_INPUT_SOURCE_LIST] == [ + "AirPlay", + "Live TV", + "HDMI 1", + "Sony", + "Apple", + "AV", + "HDMI 4", + ] + features = state.attributes[ATTR_SUPPORTED_FEATURES] + assert features & MediaPlayerEntityFeature.TURN_ON + assert features & MediaPlayerEntityFeature.TURN_OFF + assert features & MediaPlayerEntityFeature.SELECT_SOURCE + assert features & MediaPlayerEntityFeature.PLAY + assert features & MediaPlayerEntityFeature.PAUSE diff --git a/tests/components/homekit_controller/test_media_player.py b/tests/components/homekit_controller/test_media_player.py index d1d280ef265..e00dde92a81 100644 --- a/tests/components/homekit_controller/test_media_player.py +++ b/tests/components/homekit_controller/test_media_player.py @@ -10,6 +10,11 @@ from aiohomekit.model.characteristics import ( from aiohomekit.model.services import Service, ServicesTypes import pytest +from homeassistant.components.media_player import ( + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_OFF, + 
SERVICE_TURN_ON, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -408,3 +413,57 @@ async def test_migrate_unique_id( entity_registry.async_get(media_player_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_8" ) + + +async def test_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: + """Test that we can turn on a media player.""" + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) + + await helper.async_update( + ServicesTypes.TELEVISION, + { + CharacteristicsTypes.CURRENT_MEDIA_STATE: 0, + }, + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_ON, + {"entity_id": "media_player.testdevice"}, + blocking=True, + ) + helper.async_assert_service_values( + ServicesTypes.TELEVISION, + { + CharacteristicsTypes.ACTIVE: 1, + }, + ) + + +async def test_turn_off(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: + """Test that we can turn off a media player.""" + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) + + await helper.async_update( + ServicesTypes.TELEVISION, + { + CharacteristicsTypes.CURRENT_MEDIA_STATE: 0, + }, + ) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_OFF, + {"entity_id": "media_player.testdevice"}, + blocking=True, + ) + helper.async_assert_service_values( + ServicesTypes.TELEVISION, + { + CharacteristicsTypes.ACTIVE: 0, + }, + ) diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py index c831d40d261..f4a6fcfba93 100644 --- a/tests/components/hue/test_light_v2.py +++ b/tests/components/hue/test_light_v2.py @@ -2,9 +2,14 @@ from unittest.mock import Mock -from homeassistant.components.light import ColorMode +from homeassistant.components.light import ( + ATTR_EFFECT, + DOMAIN as LIGHT_DOMAIN, + ColorMode, +) +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.util.json import JsonArrayType from .conftest import setup_platform @@ -42,8 +47,8 @@ async def test_lights( assert light_1.attributes["min_mireds"] == 153 assert light_1.attributes["max_mireds"] == 500 assert light_1.attributes["dynamics"] == "dynamic_palette" - assert light_1.attributes["effect_list"] == ["None", "candle", "fire"] - assert light_1.attributes["effect"] == "None" + assert light_1.attributes["effect_list"] == ["off", "candle", "fire"] + assert light_1.attributes["effect"] == "off" # test light which supports color temperature only light_2 = hass.states.get("light.hue_light_with_color_temperature_only") @@ -57,7 +62,7 @@ async def test_lights( assert light_2.attributes["min_mireds"] == 153 assert light_2.attributes["max_mireds"] == 454 assert light_2.attributes["dynamics"] == "none" - assert light_2.attributes["effect_list"] == ["None", "candle", "sunrise"] + assert light_2.attributes["effect_list"] == ["off", "candle", "sunrise"] # test light which supports color only light_3 = hass.states.get("light.hue_light_with_color_only") @@ -201,7 +206,7 @@ async def test_light_turn_on_service( await hass.services.async_call( "light", "turn_on", - {"entity_id": test_light_id, "effect": "None"}, + {"entity_id": test_light_id, "effect": "off"}, blocking=True, ) assert len(mock_bridge_v2.mock_requests) == 8 @@ 
-216,14 +221,14 @@ async def test_light_turn_on_service( await hass.async_block_till_done() test_light = hass.states.get(test_light_id) assert test_light is not None - assert test_light.attributes["effect"] == "None" + assert test_light.attributes["effect"] == "off" # test turn on with useless effect # it should send a effect in the request if the device has no effect active await hass.services.async_call( "light", "turn_on", - {"entity_id": test_light_id, "effect": "None"}, + {"entity_id": test_light_id, "effect": "off"}, blocking=True, ) assert len(mock_bridge_v2.mock_requests) == 9 @@ -639,3 +644,38 @@ async def test_grouped_lights( mock_bridge_v2.mock_requests[index]["json"]["identify"]["action"] == "identify" ) + + +async def test_light_turn_on_service_deprecation( + hass: HomeAssistant, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, + issue_registry: ir.IssueRegistry, +) -> None: + """Test calling the turn on service on a light.""" + await mock_bridge_v2.api.load_test_data(v2_resources_test_data) + + test_light_id = "light.hue_light_with_color_temperature_only" + + await setup_platform(hass, mock_bridge_v2, "light") + + event = { + "id": "3a6710fa-4474-4eba-b533-5e6e72968feb", + "type": "light", + "effects": {"status": "candle"}, + } + mock_bridge_v2.api.emit_event("update", event) + await hass.async_block_till_done() + + # test disable effect + # it should send a request with effect set to "no_effect" + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: test_light_id, + ATTR_EFFECT: "None", + }, + blocking=True, + ) + assert mock_bridge_v2.mock_requests[0]["json"]["effects"]["effect"] == "no_effect" diff --git a/tests/components/husqvarna_automower/fixtures/mower.json b/tests/components/husqvarna_automower/fixtures/mower.json index 8ab2f96e42f..06e11ec1252 100644 --- a/tests/components/husqvarna_automower/fixtures/mower.json +++ b/tests/components/husqvarna_automower/fixtures/mower.json @@ -176,13 +176,15 @@ ], "statistics": { "cuttingBladeUsageTime": 123, + "downTime": 3600, "numberOfChargingCycles": 1380, "numberOfCollisions": 11396, "totalChargingTime": 4334400, "totalCuttingTime": 4194000, "totalDriveDistance": 1780272, "totalRunningTime": 4564800, - "totalSearchingTime": 370800 + "totalSearchingTime": 370800, + "upTime": 7200 }, "stayOutZones": { "dirty": false, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index 2dab82451a6..d5546b0d2af 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -95,6 +95,7 @@ }), 'statistics': dict({ 'cutting_blade_usage_time': 123, + 'downtime': 3600, 'number_of_charging_cycles': 1380, 'number_of_collisions': 11396, 'total_charging_time': 4334400, @@ -102,6 +103,7 @@ 'total_drive_distance': 1780272, 'total_running_time': 4564800, 'total_searching_time': 370800, + 'uptime': 7200, }), 'stay_out_zones': dict({ 'dirty': False, diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index 02a64718276..92320de6fdb 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -106,6 +106,64 @@ 'state': '0.034', }) # --- +# name: test_sensor_snapshot[sensor.test_mower_1_downtime-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_downtime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Downtime', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'downtime', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_downtime', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_downtime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Mower 1 Downtime', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_downtime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- # name: test_sensor_snapshot[sensor.test_mower_1_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1208,6 +1266,64 @@ 'state': '103.000', }) # --- +# name: test_sensor_snapshot[sensor.test_mower_1_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_mower_1_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'husqvarna_automower', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_uptime', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_snapshot[sensor.test_mower_1_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Mower 1 Uptime', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_mower_1_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- # name: test_sensor_snapshot[sensor.test_mower_1_work_area-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index 55bf5dda7eb..814846ae1c6 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -79,7 +79,7 @@ async def test_number_workarea_commands( freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) async_fire_time_changed(hass) await hass.async_block_till_done() - mocked_method.assert_called_once_with(TEST_MOWER_ID, 75, 123456) + mocked_method.assert_called_once_with(TEST_MOWER_ID, 123456, cutting_height=75) state = hass.states.get(entity_id) 
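# Editor's note (standalone illustrative sketch, not automower code): the assertion in
# test_number_workarea_commands changed from positional `(TEST_MOWER_ID, 75, 123456)` to
# keyword `(TEST_MOWER_ID, 123456, cutting_height=75)`; assert_called_once_with only
# passes when positional/keyword usage matches the actual call exactly:
import asyncio
from unittest.mock import AsyncMock


async def _demo_keyword_assertion() -> None:
    mocked_method = AsyncMock()
    await mocked_method("mower-1", 123456, cutting_height=75)
    # Passing 75 positionally here would make the assertion fail,
    # even though the underlying values are the same.
    mocked_method.assert_called_once_with("mower-1", 123456, cutting_height=75)


asyncio.run(_demo_keyword_assertion())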
assert state.state is not None assert state.state == "75" diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 08ed5251344..85d20178e73 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -110,6 +110,18 @@ async def test_work_area_sensor( state = hass.states.get("sensor.test_mower_1_work_area") assert state.state == "my_lawn" + # Test EPOS mower, which returns work_area_id = 0, when no + # work area is active and has no default work_area_id=0 + values[TEST_MOWER_ID].mower.work_area_id = 0 + del values[TEST_MOWER_ID].work_areas[0] + del values[TEST_MOWER_ID].work_area_dict[0] + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_mower_1_work_area") + assert state.state == "no_work_area_active" + @pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( diff --git a/tests/components/igloohome/conftest.py b/tests/components/igloohome/conftest.py index d630f5af7cb..6c4eb4904ae 100644 --- a/tests/components/igloohome/conftest.py +++ b/tests/components/igloohome/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch -from igloohome_api import GetDeviceInfoResponse, GetDevicesResponse +from igloohome_api import GetDeviceInfoResponse, GetDevicesResponse, LinkedDevice import pytest from homeassistant.components.igloohome.const import DOMAIN @@ -23,6 +23,28 @@ GET_DEVICE_INFO_RESPONSE_LOCK = GetDeviceInfoResponse( batteryLevel=100, ) +GET_DEVICE_INFO_RESPONSE_BRIDGE_LINKED_LOCK = GetDeviceInfoResponse( + id="001", + type="Bridge", + deviceId="EB1X04eeeeee", + deviceName="Home Bridge", + pairedAt="2024-11-09T12:19:25+00:00", + homeId=[], + linkedDevices=[LinkedDevice(type="Lock", deviceId="OE1X123cbb11")], + batteryLevel=None, +) + +GET_DEVICE_INFO_RESPONSE_BRIDGE_NO_LINKED_DEVICE = GetDeviceInfoResponse( + id="001", + type="Bridge", + deviceId="EB1X04eeeeee", + deviceName="Home Bridge", + pairedAt="2024-11-09T12:19:25+00:00", + homeId=[], + linkedDevices=[], + batteryLevel=None, +) + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -66,7 +88,10 @@ def mock_api() -> Generator[AsyncMock]: api = api_mock.return_value api.get_devices.return_value = GetDevicesResponse( nextCursor="", - payload=[GET_DEVICE_INFO_RESPONSE_LOCK], + payload=[ + GET_DEVICE_INFO_RESPONSE_LOCK, + GET_DEVICE_INFO_RESPONSE_BRIDGE_LINKED_LOCK, + ], ) api.get_device_info.return_value = GET_DEVICE_INFO_RESPONSE_LOCK yield api diff --git a/tests/components/igloohome/snapshots/test_lock.ambr b/tests/components/igloohome/snapshots/test_lock.ambr new file mode 100644 index 00000000000..5d94cf27c6b --- /dev/null +++ b/tests/components/igloohome/snapshots/test_lock.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_lock[lock.front_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'lock', + 'entity_category': None, + 'entity_id': 'lock.front_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 
None, + 'platform': 'igloohome', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lock_OE1X123cbb11', + 'unit_of_measurement': None, + }) +# --- +# name: test_lock[lock.front_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assumed_state': True, + 'friendly_name': 'Front Door', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lock.front_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/igloohome/test_lock.py b/tests/components/igloohome/test_lock.py new file mode 100644 index 00000000000..324a4ab231a --- /dev/null +++ b/tests/components/igloohome/test_lock.py @@ -0,0 +1,26 @@ +"""Test lock module for igloohome integration.""" + +from unittest.mock import patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_lock( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test lock entity created.""" + with patch("homeassistant.components.igloohome.PLATFORMS", [Platform.LOCK]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/igloohome/test_utils.py b/tests/components/igloohome/test_utils.py new file mode 100644 index 00000000000..a6262076eed --- /dev/null +++ b/tests/components/igloohome/test_utils.py @@ -0,0 +1,31 @@ +"""Test functions in utils module.""" + +from homeassistant.components.igloohome.utils import get_linked_bridge + +from .conftest import ( + GET_DEVICE_INFO_RESPONSE_BRIDGE_LINKED_LOCK, + GET_DEVICE_INFO_RESPONSE_BRIDGE_NO_LINKED_DEVICE, + GET_DEVICE_INFO_RESPONSE_LOCK, +) + + +def test_get_linked_bridge_expect_bridge_id_returned() -> None: + """Test that get_linked_bridge returns the bridge ID.""" + assert ( + get_linked_bridge( + GET_DEVICE_INFO_RESPONSE_LOCK.deviceId, + [GET_DEVICE_INFO_RESPONSE_BRIDGE_LINKED_LOCK], + ) + == GET_DEVICE_INFO_RESPONSE_BRIDGE_LINKED_LOCK.deviceId + ) + + +def test_get_linked_bridge_expect_none_returned() -> None: + """Test that get_linked_bridge returns None.""" + assert ( + get_linked_bridge( + GET_DEVICE_INFO_RESPONSE_LOCK.deviceId, + [GET_DEVICE_INFO_RESPONSE_BRIDGE_NO_LINKED_DEVICE], + ) + is None + ) diff --git a/tests/components/improv_ble/test_config_flow.py b/tests/components/improv_ble/test_config_flow.py index 4536c64349c..9d883502d28 100644 --- a/tests/components/improv_ble/test_config_flow.py +++ b/tests/components/improv_ble/test_config_flow.py @@ -10,6 +10,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.bluetooth import BluetoothChange from homeassistant.components.improv_ble.const import DOMAIN +from homeassistant.config_entries import SOURCE_IGNORE from homeassistant.const import CONF_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType @@ -21,6 +22,8 @@ from . 
import ( PROVISIONED_IMPROV_BLE_DISCOVERY_INFO, ) +from tests.common import MockConfigEntry + IMPROV_BLE = "homeassistant.components.improv_ble" @@ -118,6 +121,32 @@ async def test_async_step_user_takes_precedence_over_discovery( assert not hass.config_entries.flow.async_progress(DOMAIN) +async def test_user_setup_removes_ignored_entry(hass: HomeAssistant) -> None: + """Test the user initiated form can replace an ignored device.""" + ignored_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=IMPROV_BLE_DISCOVERY_INFO.address, + source=SOURCE_IGNORE, + ) + ignored_entry.add_to_hass(hass) + with patch( + f"{IMPROV_BLE}.config_flow.bluetooth.async_discovered_service_info", + return_value=[NOT_IMPROV_BLE_DISCOVERY_INFO, IMPROV_BLE_DISCOVERY_INFO], + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + await _test_common_success_wo_identify( + hass, result, IMPROV_BLE_DISCOVERY_INFO.address + ) + # Check the ignored entry is removed + assert not hass.config_entries.async_entries(DOMAIN) + + async def test_bluetooth_step_provisioned_device(hass: HomeAssistant) -> None: """Test bluetooth step when device is already provisioned.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/intent/test_temperature.py b/tests/components/intent/test_temperature.py index 0279fa44b28..622e55fe24a 100644 --- a/tests/components/intent/test_temperature.py +++ b/tests/components/intent/test_temperature.py @@ -14,10 +14,16 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.components.homeassistant.exposed_entities import async_expose_entity +from homeassistant.components.sensor import SensorDeviceClass from homeassistant.config_entries import ConfigEntry, ConfigFlow -from homeassistant.const import Platform, UnitOfTemperature +from homeassistant.const import ATTR_DEVICE_CLASS, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant -from homeassistant.helpers import area_registry as ar, entity_registry as er, intent +from homeassistant.helpers import ( + area_registry as ar, + entity_registry as er, + floor_registry as fr, + intent, +) from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback from homeassistant.setup import async_setup_component @@ -131,6 +137,7 @@ async def test_get_temperature( hass: HomeAssistant, area_registry: ar.AreaRegistry, entity_registry: er.EntityRegistry, + floor_registry: fr.FloorRegistry, ) -> None: """Test HassClimateGetTemperature intent.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -157,29 +164,133 @@ async def test_get_temperature( # Add climate entities to different areas: # climate_1 => living room # climate_2 => bedroom - # nothing in office + # nothing in bathroom + # nothing in office yet + # nothing in attic yet living_room_area = area_registry.async_create(name="Living Room") bedroom_area = area_registry.async_create(name="Bedroom") office_area = area_registry.async_create(name="Office") + attic_area = area_registry.async_create(name="Attic") + bathroom_area = area_registry.async_create(name="Bathroom") entity_registry.async_update_entity( climate_1.entity_id, area_id=living_room_area.id ) entity_registry.async_update_entity(climate_2.entity_id, area_id=bedroom_area.id) - # First climate entity will be selected (no area) + # Put areas on different floors: + # first 
floor => living room and office + # 2nd floor => bedroom + # 3rd floor => attic + floor_registry = fr.async_get(hass) + first_floor = floor_registry.async_create("First floor") + living_room_area = area_registry.async_update( + living_room_area.id, floor_id=first_floor.floor_id + ) + office_area = area_registry.async_update( + office_area.id, floor_id=first_floor.floor_id + ) + + second_floor = floor_registry.async_create("Second floor") + bedroom_area = area_registry.async_update( + bedroom_area.id, floor_id=second_floor.floor_id + ) + bathroom_area = area_registry.async_update( + bathroom_area.id, floor_id=second_floor.floor_id + ) + + third_floor = floor_registry.async_create("Third floor") + attic_area = area_registry.async_update( + attic_area.id, floor_id=third_floor.floor_id + ) + + # Add temperature sensors to each area that should *not* be selected + for area in (living_room_area, office_area, bedroom_area, attic_area): + wrong_temperature_entry = entity_registry.async_get_or_create( + "sensor", "test", f"wrong_temperature_{area.id}" + ) + hass.states.async_set( + wrong_temperature_entry.entity_id, + "10.0", + { + ATTR_TEMPERATURE: "Temperature", + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + }, + ) + entity_registry.async_update_entity( + wrong_temperature_entry.entity_id, area_id=area.id + ) + + # Create temperature sensor and assign them to the office/attic + office_temperature_id = "sensor.office_temperature" + attic_temperature_id = "sensor.attic_temperature" + hass.states.async_set( + office_temperature_id, + "15.5", + { + ATTR_TEMPERATURE: "Temperature", + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + }, + ) + office_area = area_registry.async_update( + office_area.id, temperature_entity_id=office_temperature_id + ) + + hass.states.async_set( + attic_temperature_id, + "18.1", + { + ATTR_TEMPERATURE: "Temperature", + ATTR_DEVICE_CLASS: SensorDeviceClass.TEMPERATURE, + }, + ) + attic_area = area_registry.async_update( + attic_area.id, temperature_entity_id=attic_temperature_id + ) + + # Multiple climate entities match (error) + with pytest.raises(intent.MatchFailedError) as error: + await intent.async_handle( + hass, + "test", + intent.INTENT_GET_TEMPERATURE, + {}, + assistant=conversation.DOMAIN, + ) + + # Exception should contain details of what we tried to match + assert isinstance(error.value, intent.MatchFailedError) + assert ( + error.value.result.no_match_reason == intent.MatchFailedReason.MULTIPLE_TARGETS + ) + + # Select by area (office_temperature) response = await intent.async_handle( hass, "test", intent.INTENT_GET_TEMPERATURE, - {}, + {"area": {"value": office_area.name}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.QUERY_ANSWER - assert response.matched_states - assert response.matched_states[0].entity_id == climate_1.entity_id + assert len(response.matched_states) == 1 + assert response.matched_states[0].entity_id == office_temperature_id state = response.matched_states[0] - assert state.attributes["current_temperature"] == 10.0 + assert state.state == "15.5" + + # Select by preferred area (attic_temperature) + response = await intent.async_handle( + hass, + "test", + intent.INTENT_GET_TEMPERATURE, + {"preferred_area_id": {"value": attic_area.id}}, + assistant=conversation.DOMAIN, + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(response.matched_states) == 1 + assert response.matched_states[0].entity_id == attic_temperature_id + state = 
response.matched_states[0] + assert state.state == "18.1" # Select by area (climate_2) response = await intent.async_handle( @@ -215,7 +326,7 @@ async def test_get_temperature( hass, "test", intent.INTENT_GET_TEMPERATURE, - {"area": {"value": office_area.name}}, + {"area": {"value": bathroom_area.name}}, assistant=conversation.DOMAIN, ) @@ -224,7 +335,7 @@ async def test_get_temperature( assert error.value.result.no_match_reason == intent.MatchFailedReason.AREA constraints = error.value.constraints assert constraints.name is None - assert constraints.area_name == office_area.name + assert constraints.area_name == bathroom_area.name assert constraints.domains and (set(constraints.domains) == {CLIMATE_DOMAIN}) assert constraints.device_classes is None @@ -262,6 +373,48 @@ async def test_get_temperature( assert constraints.domains and (set(constraints.domains) == {CLIMATE_DOMAIN}) assert constraints.device_classes is None + # Select by floor (climate_1) + response = await intent.async_handle( + hass, + "test", + intent.INTENT_GET_TEMPERATURE, + {"floor": {"value": first_floor.name}}, + assistant=conversation.DOMAIN, + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(response.matched_states) == 1 + assert response.matched_states[0].entity_id == climate_1.entity_id + state = response.matched_states[0] + assert state.attributes["current_temperature"] == 10.0 + + # Select by preferred area (climate_2) + response = await intent.async_handle( + hass, + "test", + intent.INTENT_GET_TEMPERATURE, + {"preferred_area_id": {"value": bedroom_area.id}}, + assistant=conversation.DOMAIN, + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(response.matched_states) == 1 + assert response.matched_states[0].entity_id == climate_2.entity_id + state = response.matched_states[0] + assert state.attributes["current_temperature"] == 22.0 + + # Select by preferred floor (climate_1) + response = await intent.async_handle( + hass, + "test", + intent.INTENT_GET_TEMPERATURE, + {"preferred_floor_id": {"value": first_floor.floor_id}}, + assistant=conversation.DOMAIN, + ) + assert response.response_type == intent.IntentResponseType.QUERY_ANSWER + assert len(response.matched_states) == 1 + assert response.matched_states[0].entity_id == climate_1.entity_id + state = response.matched_states[0] + assert state.attributes["current_temperature"] == 10.0 + async def test_get_temperature_no_entities( hass: HomeAssistant, diff --git a/tests/components/iometer/__init__.py b/tests/components/iometer/__init__.py index 5c08438925e..19fe2124f1f 100644 --- a/tests/components/iometer/__init__.py +++ b/tests/components/iometer/__init__.py @@ -1 +1,19 @@ """Tests for the IOmeter integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> MockConfigEntry: + """Fixture for setting up the IOmeter platform.""" + config_entry.add_to_hass(hass) + + with patch("homeassistant.components.iometer.PLATFORMS", platforms): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/iometer/conftest.py b/tests/components/iometer/conftest.py index ee45021952e..f8139c7c64c 100644 --- a/tests/components/iometer/conftest.py +++ b/tests/components/iometer/conftest.py @@ -54,4 
+54,5 @@ def mock_config_entry() -> MockConfigEntry: title="IOmeter-1ISK0000000000", data={CONF_HOST: "10.0.0.2"}, unique_id="658c2b34-2017-45f2-a12b-731235f8bb97", + entry_id="01JQ6G5395176MAAWKAAPEZHV6", ) diff --git a/tests/components/iometer/snapshots/test_binary_sensor.ambr b/tests/components/iometer/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..38aab735a14 --- /dev/null +++ b/tests/components/iometer/snapshots/test_binary_sensor.ambr @@ -0,0 +1,97 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_attachment_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_attachment_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core attachment status', + 'platform': 'iometer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'attachment_status', + 'unique_id': '01JQ6G5395176MAAWKAAPEZHV6_attachment_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_attachment_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'IOmeter-1ISK0000000000 Core attachment status', + }), + 'context': , + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_attachment_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_bridge_connection_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_bridge_connection_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core/Bridge connection status', + 'platform': 'iometer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connection_status', + 'unique_id': '01JQ6G5395176MAAWKAAPEZHV6_connection_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.iometer_1isk0000000000_core_bridge_connection_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'IOmeter-1ISK0000000000 Core/Bridge connection status', + }), + 'context': , + 'entity_id': 'binary_sensor.iometer_1isk0000000000_core_bridge_connection_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/iometer/test_binary_sensor.py b/tests/components/iometer/test_binary_sensor.py new file mode 100644 index 00000000000..e007084567e --- /dev/null +++ b/tests/components/iometer/test_binary_sensor.py @@ -0,0 +1,135 @@ +"""Test the IOmeter binary sensors.""" + +from datetime 
import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_platform + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_iometer_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensors.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_connection_status_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection status sensor.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_bridge_connection_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.connection_status = "disconnected" + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_bridge_connection_status" + ).state + == STATE_OFF + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_attachment_status_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection status sensor.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.attachment_status = "detached" + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_OFF + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_attachment_status_sensors_unkown( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iometer_client: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test connection status sensor.""" + await setup_platform(hass, mock_config_entry, [Platform.BINARY_SENSOR]) + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_ON + ) + + freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_iometer_client.get_current_status.return_value.device.core.attachment_status = None + + 
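# Editor's sketch (hypothetical helper, not part of the IOmeter tests): the polling checks
# in this file all follow the same pattern -- mutate the mocked client's return value,
# advance the frozen clock past the coordinator's update interval, fire the time event,
# and let the event loop settle before reading the entity state:
from datetime import timedelta

from tests.common import async_fire_time_changed


async def advance_and_refresh(hass, freezer, minutes: int = 1) -> None:
    """Tick the frozen clock and trigger a coordinator refresh (assumed helper name)."""
    freezer.tick(timedelta(minutes=minutes))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()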
freezer.tick(delta=timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get( + "binary_sensor.iometer_1isk0000000000_core_attachment_status" + ).state + == STATE_UNKNOWN + ) diff --git a/tests/components/iometer/test_init.py b/tests/components/iometer/test_init.py new file mode 100644 index 00000000000..9d8eadc5079 --- /dev/null +++ b/tests/components/iometer/test_init.py @@ -0,0 +1,44 @@ +"""Tests for the AirGradient integration.""" + +from datetime import timedelta +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.iometer.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_platform + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_new_firmware_version( + hass: HomeAssistant, + mock_iometer_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test device registry integration.""" + # await setup_integration(hass, mock_config_entry) + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry.sw_version == "build-58/build-65" + mock_iometer_client.get_current_status.return_value.device.core.version = "build-62" + mock_iometer_client.get_current_status.return_value.device.bridge.version = ( + "build-69" + ) + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry.sw_version == "build-62/build-69" diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index 63c7d129987..bf8c756ebee 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -20,6 +20,7 @@ from pynecil import ( ScrollSpeed, SettingsDataResponse, TempUnit, + TipType, ) import pytest @@ -164,7 +165,7 @@ def mock_pynecil() -> Generator[AsyncMock]: client = mock_client.return_value client.get_device_info.return_value = DeviceInfoResponse( - build="v2.22", + build="v2.23", device_id="c0ffeeC0", address="c0:ff:ee:c0:ff:ee", device_sn="0000c0ffeec0ffee", @@ -205,6 +206,8 @@ def mock_pynecil() -> Generator[AsyncMock]: display_invert=True, calibrate_cjc=True, usb_pd_mode=True, + hall_sleep_time=5, + tip_type=TipType.PINE_SHORT, ) client.get_live_data.return_value = LiveDataResponse( live_temp=298, diff --git a/tests/components/iron_os/snapshots/test_diagnostics.ambr b/tests/components/iron_os/snapshots/test_diagnostics.ambr index f8db1262254..49cb3878b87 100644 --- a/tests/components/iron_os/snapshots/test_diagnostics.ambr +++ b/tests/components/iron_os/snapshots/test_diagnostics.ambr @@ -6,7 +6,7 @@ }), 'device_info': dict({ '__type': "", - 'repr': "DeviceInfoResponse(build='v2.22', device_id='c0ffeeC0', address='c0:ff:ee:c0:ff:ee', device_sn='0000c0ffeec0ffee', name='Pinecil-C0FFEEE', is_synced=False)", + 'repr': "DeviceInfoResponse(build='v2.23', device_id='c0ffeeC0', address='c0:ff:ee:c0:ff:ee', device_sn='0000c0ffeec0ffee', name='Pinecil-C0FFEEE', is_synced=False)", }), 'live_data': 
dict({ '__type': "", diff --git a/tests/components/iron_os/snapshots/test_number.ambr b/tests/components/iron_os/snapshots/test_number.ambr index 62fcd120201..b2ec7a70a92 100644 --- a/tests/components/iron_os/snapshots/test_number.ambr +++ b/tests/components/iron_os/snapshots/test_number.ambr @@ -226,6 +226,63 @@ 'state': '7', }) # --- +# name: test_state[number.pinecil_hall_sensor_sleep_timeout-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 60, + 'min': 0, + 'mode': , + 'step': 5, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.pinecil_hall_sensor_sleep_timeout', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hall sensor sleep timeout', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_hall_effect_sleep_time', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_hall_sensor_sleep_timeout-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Hall sensor sleep timeout', + 'max': 60, + 'min': 0, + 'mode': , + 'step': 5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_hall_sensor_sleep_timeout', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- # name: test_state[number.pinecil_keep_awake_pulse_delay-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/iron_os/snapshots/test_select.ambr b/tests/components/iron_os/snapshots/test_select.ambr index 10aacc838df..540cab234a5 100644 --- a/tests/components/iron_os/snapshots/test_select.ambr +++ b/tests/components/iron_os/snapshots/test_select.ambr @@ -250,6 +250,7 @@ 'options': list([ 'off', 'on', + 'safe', ]), }), 'config_entry_id': , @@ -287,6 +288,7 @@ 'options': list([ 'off', 'on', + 'safe', ]), }), 'context': , @@ -415,6 +417,66 @@ 'state': 'fast', }) # --- +# name: test_state[select.pinecil_soldering_tip_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'auto', + 'ts100_long', + 'pine_short', + 'pts200', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.pinecil_soldering_tip_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Soldering tip type', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_state[select.pinecil_soldering_tip_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Soldering tip type', + 'options': list([ + 'auto', + 'ts100_long', + 'pine_short', + 'pts200', + ]), + }), + 'context': , + 'entity_id': 'select.pinecil_soldering_tip_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'pine_short', + }) +# --- # 
name: test_state[select.pinecil_start_up_behavior-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/iron_os/snapshots/test_update.ambr b/tests/components/iron_os/snapshots/test_update.ambr index f2db3246158..fcd7196a70c 100644 --- a/tests/components/iron_os/snapshots/test_update.ambr +++ b/tests/components/iron_os/snapshots/test_update.ambr @@ -45,7 +45,7 @@ 'entity_picture': 'https://brands.home-assistant.io/_/iron_os/icon.png', 'friendly_name': 'Pinecil Firmware', 'in_progress': False, - 'installed_version': 'v2.22', + 'installed_version': 'v2.23', 'latest_version': 'v2.22', 'release_summary': None, 'release_url': 'https://github.com/Ralim/IronOS/releases/tag/v2.22', diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py index 4749e1b6199..d1c596f4de5 100644 --- a/tests/components/iron_os/test_init.py +++ b/tests/components/iron_os/test_init.py @@ -4,13 +4,15 @@ from datetime import timedelta from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory -from pynecil import CommunicationError +from pynecil import CommunicationError, DeviceInfoResponse import pytest from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant +from .conftest import DEFAULT_NAME + from tests.common import MockConfigEntry, async_fire_time_changed @@ -89,3 +91,35 @@ async def test_settings_exception( assert (state := hass.states.get("number.pinecil_boost_temperature")) assert state.state == STATE_UNKNOWN + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" +) +async def test_v223_entities_not_loaded( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the new entities in IronOS v2.23 are not loaded on smaller versions.""" + + mock_pynecil.get_device_info.return_value = DeviceInfoResponse( + build="v2.22", + device_id="c0ffeeC0", + address="c0:ff:ee:c0:ff:ee", + device_sn="0000c0ffeec0ffee", + name=DEFAULT_NAME, + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert hass.states.get("number.pinecil_hall_sensor_sleep_timeout") is None + assert hass.states.get("select.pinecil_soldering_tip_type") is None + assert ( + state := hass.states.get("select.pinecil_power_delivery_3_1_epr") + ) is not None + + assert len(state.attributes["options"]) == 2 diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py index bdec922a88c..9a4ba53f338 100644 --- a/tests/components/iron_os/test_number.py +++ b/tests/components/iron_os/test_number.py @@ -138,6 +138,12 @@ async def test_state( ("number.pinecil_sleep_temperature", CharSetting.SLEEP_TEMP, 150, 150), ("number.pinecil_sleep_timeout", CharSetting.SLEEP_TIMEOUT, 5, 5), ("number.pinecil_voltage_divider", CharSetting.VOLTAGE_DIV, 600, 600), + ( + "number.pinecil_hall_sensor_sleep_timeout", + CharSetting.HALL_SLEEP_TIME, + 60, + 60, + ), ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") diff --git a/tests/components/iron_os/test_select.py b/tests/components/iron_os/test_select.py index 8cc848dd4cb..5590bfc2ba6 100644 --- a/tests/components/iron_os/test_select.py +++ b/tests/components/iron_os/test_select.py @@ -16,6 +16,7 @@ from pynecil import ( ScreenOrientationMode, ScrollSpeed, 
TempUnit, + TipType, USBPDMode, ) import pytest @@ -111,6 +112,11 @@ async def test_state( "on", (CharSetting.USB_PD_MODE, USBPDMode.ON), ), + ( + "select.pinecil_soldering_tip_type", + "auto", + (CharSetting.TIP_TYPE, TipType.AUTO), + ), ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") diff --git a/tests/components/jellyfin/fixtures/get-media-folders.json b/tests/components/jellyfin/fixtures/get-media-folders.json index ff87751a9da..f6b5c1e8d78 100644 --- a/tests/components/jellyfin/fixtures/get-media-folders.json +++ b/tests/components/jellyfin/fixtures/get-media-folders.json @@ -302,8 +302,6 @@ "Album": "string", "CollectionType": "tvshows", "DisplayOrder": "string", - "AlbumId": "21af9851-8e39-43a9-9c47-513d3b9e99fc", - "AlbumPrimaryImageTag": "string", "SeriesPrimaryImageTag": "string", "AlbumArtist": "string", "AlbumArtists": [ diff --git a/tests/components/jellyfin/fixtures/sessions.json b/tests/components/jellyfin/fixtures/sessions.json index 00a1f5265db..db2b691dff0 100644 --- a/tests/components/jellyfin/fixtures/sessions.json +++ b/tests/components/jellyfin/fixtures/sessions.json @@ -4346,6 +4346,7 @@ ], "Album": "ALBUM", "AlbumId": "ALBUM-UUID", + "AlbumPrimaryImageTag": "ALBUM-PRIMARY-IMAGE-TAG", "AlbumArtist": "Album Artist", "AlbumArtists": [ { "Name": "Album Artist", "Id": "9a65b2c222ddb34e51f5cae360fad3a1" } diff --git a/tests/components/jellyfin/fixtures/user-items-parent-id.json b/tests/components/jellyfin/fixtures/user-items-parent-id.json index 2e06c30894c..cd0232894bc 100644 --- a/tests/components/jellyfin/fixtures/user-items-parent-id.json +++ b/tests/components/jellyfin/fixtures/user-items-parent-id.json @@ -302,8 +302,6 @@ "Album": "string", "CollectionType": "string", "DisplayOrder": "string", - "AlbumId": "21af9851-8e39-43a9-9c47-513d3b9e99fc", - "AlbumPrimaryImageTag": "string", "SeriesPrimaryImageTag": "string", "AlbumArtist": "string", "AlbumArtists": [ diff --git a/tests/components/jellyfin/snapshots/test_diagnostics.ambr b/tests/components/jellyfin/snapshots/test_diagnostics.ambr index c992628f034..9d73ee6397c 100644 --- a/tests/components/jellyfin/snapshots/test_diagnostics.ambr +++ b/tests/components/jellyfin/snapshots/test_diagnostics.ambr @@ -1707,6 +1707,7 @@ }), ]), 'AlbumId': 'ALBUM-UUID', + 'AlbumPrimaryImageTag': 'ALBUM-PRIMARY-IMAGE-TAG', 'ArtistItems': list([ dict({ 'Id': '1d864900526d9a9513b489f1cc28f8ca', diff --git a/tests/components/jellyfin/test_media_player.py b/tests/components/jellyfin/test_media_player.py index 3263639a32f..c6f015e9bb4 100644 --- a/tests/components/jellyfin/test_media_player.py +++ b/tests/components/jellyfin/test_media_player.py @@ -27,6 +27,7 @@ from homeassistant.components.media_player import ( from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, + ATTR_ENTITY_PICTURE, ATTR_FRIENDLY_NAME, ATTR_ICON, ) @@ -124,6 +125,10 @@ async def test_media_player_music( assert state.attributes.get(ATTR_MEDIA_SERIES_TITLE) is None assert state.attributes.get(ATTR_MEDIA_SEASON) is None assert state.attributes.get(ATTR_MEDIA_EPISODE) is None + assert ( + state.attributes.get(ATTR_ENTITY_PICTURE) + == "http://localhost/Items/ALBUM-UUID/Images/Primary.jpg" + ) entry = entity_registry.async_get(state.entity_id) assert entry diff --git a/tests/components/jewish_calendar/__init__.py b/tests/components/jewish_calendar/__init__.py index ba0a2b4835e..dc66c1e0d7d 100644 --- a/tests/components/jewish_calendar/__init__.py +++ b/tests/components/jewish_calendar/__init__.py @@ -3,8 +3,6 @@ from 
collections import namedtuple from datetime import datetime -from freezegun import freeze_time as alter_time # noqa: F401 - from homeassistant.components import jewish_calendar from homeassistant.util import dt as dt_util @@ -49,7 +47,7 @@ def make_jerusalem_test_params(dtime, results, havdalah_offset=0): } return ( dtime, - jewish_calendar.DEFAULT_CANDLE_LIGHT, + 40, havdalah_offset, False, "Asia/Jerusalem", diff --git a/tests/components/jewish_calendar/test_binary_sensor.py b/tests/components/jewish_calendar/test_binary_sensor.py index 5cfaaedfc72..194e6fe9d01 100644 --- a/tests/components/jewish_calendar/test_binary_sensor.py +++ b/tests/components/jewish_calendar/test_binary_sensor.py @@ -3,6 +3,7 @@ from datetime import datetime as dt, timedelta import logging +from freezegun import freeze_time import pytest from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN @@ -18,7 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from . import alter_time, make_jerusalem_test_params, make_nyc_test_params +from . import make_jerusalem_test_params, make_nyc_test_params from tests.common import MockConfigEntry, async_fire_time_changed @@ -191,7 +192,7 @@ async def test_issur_melacha_sensor( hass.config.latitude = latitude hass.config.longitude = longitude - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry( title=DEFAULT_NAME, domain=DOMAIN, @@ -213,7 +214,7 @@ async def test_issur_melacha_sensor( == result["state"] ) - with alter_time(result["update"]): + with freeze_time(result["update"]): async_fire_time_changed(hass, result["update"]) await hass.async_block_till_done() assert ( @@ -264,7 +265,7 @@ async def test_issur_melacha_sensor_update( hass.config.latitude = latitude hass.config.longitude = longitude - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry( title=DEFAULT_NAME, domain=DOMAIN, @@ -286,7 +287,7 @@ async def test_issur_melacha_sensor_update( ) test_time += timedelta(microseconds=1) - with alter_time(test_time): + with freeze_time(test_time): async_fire_time_changed(hass, test_time) await hass.async_block_till_done() assert ( diff --git a/tests/components/jewish_calendar/test_init.py b/tests/components/jewish_calendar/test_init.py index cb982afec0f..6a4f57513fa 100644 --- a/tests/components/jewish_calendar/test_init.py +++ b/tests/components/jewish_calendar/test_init.py @@ -1 +1,44 @@ """Tests for the Jewish Calendar component's init.""" + +import pytest + +from homeassistant.components.jewish_calendar.const import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("old_key", "new_key"), + [ + ("first_light", "alot_hashachar"), + ("sunset", "shkia"), + ("havdalah", "havdalah"), # Test no change + ], +) +async def test_migrate_unique_id( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + old_key: str, + new_key: str, +) -> None: + """Test unique id migration.""" + entry = MockConfigEntry(domain=DOMAIN, data={}) + entry.add_to_hass(hass) + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + domain=SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=f"{entry.entry_id}-{old_key}", + config_entry=entry, + ) + assert entity.unique_id.endswith(f"-{old_key}") + + await 
hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_migrated = entity_registry.async_get(entity.entity_id) + assert entity_migrated + assert entity_migrated.unique_id == f"{entry.entry_id}-{new_key}" diff --git a/tests/components/jewish_calendar/test_sensor.py b/tests/components/jewish_calendar/test_sensor.py index aac0f583b05..bc9e69a9717 100644 --- a/tests/components/jewish_calendar/test_sensor.py +++ b/tests/components/jewish_calendar/test_sensor.py @@ -2,10 +2,11 @@ from datetime import datetime as dt, timedelta -from hdate import htables +from freezegun import freeze_time +from hdate.holidays import HolidayDatabase +from hdate.parasha import Parasha import pytest -from homeassistant.components.binary_sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, @@ -13,12 +14,13 @@ from homeassistant.components.jewish_calendar.const import ( DEFAULT_NAME, DOMAIN, ) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import CONF_LANGUAGE, CONF_PLATFORM from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from . import alter_time, make_jerusalem_test_params, make_nyc_test_params +from . import make_jerusalem_test_params, make_nyc_test_params from tests.common import MockConfigEntry, async_fire_time_changed @@ -92,8 +94,7 @@ TEST_PARAMS = [ "icon": "mdi:calendar-star", "id": "rosh_hashana_i", "type": "YOM_TOV", - "type_id": 1, - "options": htables.get_all_holidays("english"), + "options": HolidayDatabase(False).get_all_names("english"), }, ), ( @@ -111,8 +112,7 @@ TEST_PARAMS = [ "icon": "mdi:calendar-star", "id": "chanukah, rosh_chodesh", "type": "MELACHA_PERMITTED_HOLIDAY, ROSH_CHODESH", - "type_id": "4, 10", - "options": htables.get_all_holidays("english"), + "options": HolidayDatabase(False).get_all_names("english"), }, ), ( @@ -128,7 +128,7 @@ TEST_PARAMS = [ "device_class": "enum", "friendly_name": "Jewish Calendar Parshat Hashavua", "icon": "mdi:book-open-variant", - "options": [p.hebrew for p in htables.PARASHAOT], + "options": list(Parasha), }, ), ( @@ -139,7 +139,7 @@ TEST_PARAMS = [ "hebrew", "t_set_hakochavim", True, - dt(2018, 9, 8, 19, 45), + dt(2018, 9, 8, 19, 47), None, ), ( @@ -150,7 +150,7 @@ TEST_PARAMS = [ "hebrew", "t_set_hakochavim", False, - dt(2018, 9, 8, 19, 19), + dt(2018, 9, 8, 19, 21), None, ), ( @@ -185,9 +185,9 @@ TEST_PARAMS = [ False, "ו' מרחשוון ה' תשע\"ט", { - "hebrew_year": 5779, + "hebrew_year": "5779", "hebrew_month_name": "מרחשוון", - "hebrew_day": 6, + "hebrew_day": "6", "icon": "mdi:star-david", "friendly_name": "Jewish Calendar Date", }, @@ -245,7 +245,7 @@ async def test_jewish_calendar_sensor( hass.config.latitude = latitude hass.config.longitude = longitude - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry( title=DEFAULT_NAME, domain=DOMAIN, @@ -258,7 +258,7 @@ async def test_jewish_calendar_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=30) + future = test_time + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -424,9 +424,9 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2018, 9, 29, 21, 25), { - "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 7), + "english_upcoming_candle_lighting": dt(2018, 9, 30, 
17, 45), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 1), - "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 1), + "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 17, 39), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 54), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", @@ -437,22 +437,22 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2018, 9, 30, 21, 25), { - "english_upcoming_candle_lighting": dt(2018, 9, 30, 18, 7), + "english_upcoming_candle_lighting": dt(2018, 9, 30, 17, 45), "english_upcoming_havdalah": dt(2018, 10, 1, 19, 1), - "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 1), + "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 17, 39), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 54), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", - "english_holiday": "Shmini Atzeret", - "hebrew_holiday": "שמיני עצרת", + "english_holiday": "Shmini Atzeret, Simchat Torah", + "hebrew_holiday": "שמיני עצרת, שמחת תורה", }, ), make_jerusalem_test_params( dt(2018, 10, 1, 21, 25), { - "english_upcoming_candle_lighting": dt(2018, 10, 5, 18, 1), + "english_upcoming_candle_lighting": dt(2018, 10, 5, 17, 39), "english_upcoming_havdalah": dt(2018, 10, 6, 18, 54), - "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 18, 1), + "english_upcoming_shabbat_candle_lighting": dt(2018, 10, 5, 17, 39), "english_upcoming_shabbat_havdalah": dt(2018, 10, 6, 18, 54), "english_parshat_hashavua": "Bereshit", "hebrew_parshat_hashavua": "בראשית", @@ -487,9 +487,9 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2017, 9, 21, 8, 25), { - "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 20), + "english_upcoming_candle_lighting": dt(2017, 9, 20, 17, 58), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 11), - "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 12), + "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 17, 56), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 11), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", @@ -500,9 +500,9 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2017, 9, 22, 8, 25), { - "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 20), + "english_upcoming_candle_lighting": dt(2017, 9, 20, 17, 58), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 11), - "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 12), + "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 17, 56), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 11), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", @@ -513,9 +513,9 @@ SHABBAT_PARAMS = [ make_jerusalem_test_params( dt(2017, 9, 23, 8, 25), { - "english_upcoming_candle_lighting": dt(2017, 9, 20, 18, 20), + "english_upcoming_candle_lighting": dt(2017, 9, 20, 17, 58), "english_upcoming_havdalah": dt(2017, 9, 23, 19, 11), - "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 19, 12), + "english_upcoming_shabbat_candle_lighting": dt(2017, 9, 22, 17, 56), "english_upcoming_shabbat_havdalah": dt(2017, 9, 23, 19, 11), "english_parshat_hashavua": "Ha'Azinu", "hebrew_parshat_hashavua": "האזינו", @@ -587,7 +587,7 @@ async def test_shabbat_times_sensor( hass.config.latitude = latitude hass.config.longitude = longitude - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry( title=DEFAULT_NAME, domain=DOMAIN, @@ -604,7 +604,7 @@ async def test_shabbat_times_sensor( 
await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=30) + future = test_time + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -649,13 +649,13 @@ async def test_omer_sensor(hass: HomeAssistant, test_time, result) -> None: """Test Omer Count sensor output.""" test_time = test_time.replace(tzinfo=dt_util.get_time_zone(hass.config.time_zone)) - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry(title=DEFAULT_NAME, domain=DOMAIN) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=30) + future = test_time + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -684,13 +684,13 @@ async def test_dafyomi_sensor(hass: HomeAssistant, test_time, result) -> None: """Test Daf Yomi sensor output.""" test_time = test_time.replace(tzinfo=dt_util.get_time_zone(hass.config.time_zone)) - with alter_time(test_time): + with freeze_time(test_time): entry = MockConfigEntry(title=DEFAULT_NAME, domain=DOMAIN) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=30) + future = test_time + timedelta(seconds=30) async_fire_time_changed(hass, future) await hass.async_block_till_done() diff --git a/tests/components/jewish_calendar/test_service.py b/tests/components/jewish_calendar/test_service.py new file mode 100644 index 00000000000..9eb80e5e7f0 --- /dev/null +++ b/tests/components/jewish_calendar/test_service.py @@ -0,0 +1,55 @@ +"""Test jewish calendar service.""" + +import datetime as dt + +from hdate.translator import Language +import pytest + +from homeassistant.components.jewish_calendar.const import DOMAIN +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("test_date", "nusach", "language", "expected"), + [ + pytest.param(dt.date(2025, 3, 20), "sfarad", "he", "", id="no_blessing"), + pytest.param( + dt.date(2025, 5, 20), + "ashkenaz", + "he", + "היום שבעה ושלושים יום שהם חמישה שבועות ושני ימים בעומר", + id="ahskenaz-hebrew", + ), + pytest.param( + dt.date(2025, 5, 20), + "sfarad", + "en", + "Today is the thirty-seventh day, which are five weeks and two days of the Omer", + id="sefarad-english", + ), + ], +) +async def test_get_omer_blessing( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + test_date: dt.date, + nusach: str, + language: Language, + expected: str, +) -> None: + """Test get omer blessing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.services.async_call( + DOMAIN, + "count_omer", + {"date": test_date, "nusach": nusach, "language": language}, + blocking=True, + return_response=True, + ) + + assert result["message"] == expected diff --git a/tests/components/kitchen_sink/snapshots/test_init.ambr b/tests/components/kitchen_sink/snapshots/test_init.ambr index b91131eb2b0..fe22f19fb7a 100644 --- a/tests/components/kitchen_sink/snapshots/test_init.ambr +++ b/tests/components/kitchen_sink/snapshots/test_init.ambr @@ -48,5 +48,15 @@ 'type': 'no_state', }), ]), + 'sensor.statistics_issues_issue_5': list([ + dict({ + 'data': dict({ + 'metadata_mean_type': 1, + 'state_mean_type': 
2, + 'statistic_id': 'sensor.statistics_issues_issue_5', + }), + 'type': 'mean_type_changed', + }), + ]), }) # --- diff --git a/tests/components/kitchen_sink/snapshots/test_sensor.ambr b/tests/components/kitchen_sink/snapshots/test_sensor.ambr index 7b433c40170..6cd9aa2e855 100644 --- a/tests/components/kitchen_sink/snapshots/test_sensor.ambr +++ b/tests/components/kitchen_sink/snapshots/test_sensor.ambr @@ -29,6 +29,20 @@ 'last_updated': , 'state': '1500', }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'wind_direction', + 'friendly_name': 'Statistics issues Issue 5', + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.statistics_issues_issue_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Statistics issues Issue 1', @@ -99,6 +113,20 @@ 'last_updated': , 'state': '1500', }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'wind_direction', + 'friendly_name': 'Statistics issues Issue 5', + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.statistics_issues_issue_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Sensor test', diff --git a/tests/components/kitchen_sink/test_config_flow.py b/tests/components/kitchen_sink/test_config_flow.py index 1eea1c8036b..88bacc2cb0b 100644 --- a/tests/components/kitchen_sink/test_config_flow.py +++ b/tests/components/kitchen_sink/test_config_flow.py @@ -96,6 +96,15 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "options_1" + section_marker, section_schema = list(result["data_schema"].schema.items())[0] + assert section_marker == "section_1" + section_schema_markers = list(section_schema.schema.schema) + assert len(section_schema_markers) == 2 + assert section_schema_markers[0] == "bool" + assert section_schema_markers[0].description is None + assert section_schema_markers[1] == "int" + assert section_schema_markers[1].description == {"suggested_value": 10} + result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={"section_1": {"bool": True, "int": 15}}, diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index 50518f89107..526801aecfa 100644 --- a/tests/components/kitchen_sink/test_init.py +++ b/tests/components/kitchen_sink/test_init.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.statistics import ( + StatisticMeanType, async_add_external_statistics, get_last_statistics, list_statistic_ids, @@ -45,6 +46,7 @@ async def test_demo_statistics(hass: HomeAssistant) -> None: assert { "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": "Outdoor temperature", "source": DOMAIN, @@ -55,6 +57,7 @@ async def test_demo_statistics(hass: HomeAssistant) -> None: assert { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Energy consumption 1", "source": DOMAIN, diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index 0fd790a3e33..f7a3f4e94f2 100644 
--- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -348,19 +348,20 @@ async def test_expose_conversion_exception( ) -@freeze_time("2022-1-7 9:13:14") +@freeze_time("2022-1-7 9:13:14") # UTC -> +1h = Vienna in winter (9 -> 0xA) @pytest.mark.parametrize( ("time_type", "raw"), [ - ("time", (0xA9, 0x0D, 0x0E)), # localtime includes day of week + ("time", (0xAA, 0x0D, 0x0E)), # localtime includes day of week ("date", (0x07, 0x01, 0x16)), - ("datetime", (0x7A, 0x1, 0x7, 0xA9, 0xD, 0xE, 0x20, 0xC0)), + ("datetime", (0x7A, 0x1, 0x7, 0xAA, 0xD, 0xE, 0x20, 0xC0)), ], ) async def test_expose_with_date( hass: HomeAssistant, knx: KNXTestKit, time_type: str, raw: tuple[int, ...] ) -> None: """Test an expose with a date.""" + await hass.config.async_set_time_zone("Europe/Vienna") await knx.setup_integration( { CONF_KNX_EXPOSE: { diff --git a/tests/components/lektrico/snapshots/test_binary_sensor.ambr b/tests/components/lektrico/snapshots/test_binary_sensor.ambr index b365ff84187..7d812c0fc67 100644 --- a/tests/components/lektrico/snapshots/test_binary_sensor.ambr +++ b/tests/components/lektrico/snapshots/test_binary_sensor.ambr @@ -24,7 +24,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Ev diode short', + 'original_name': 'EV diode short', 'platform': 'lektrico', 'previous_unique_id': None, 'supported_features': 0, @@ -37,7 +37,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Ev diode short', + 'friendly_name': '1p7k_500006 EV diode short', }), 'context': , 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', @@ -72,7 +72,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Ev error', + 'original_name': 'EV error', 'platform': 'lektrico', 'previous_unique_id': None, 'supported_features': 0, @@ -85,7 +85,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Ev error', + 'friendly_name': '1p7k_500006 EV error', }), 'context': , 'entity_id': 'binary_sensor.1p7k_500006_ev_error', @@ -312,7 +312,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Rcd error', + 'original_name': 'RCD error', 'platform': 'lektrico', 'previous_unique_id': None, 'supported_features': 0, @@ -325,7 +325,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': '1p7k_500006 Rcd error', + 'friendly_name': '1p7k_500006 RCD error', }), 'context': , 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', diff --git a/tests/components/lektrico/snapshots/test_number.ambr b/tests/components/lektrico/snapshots/test_number.ambr index 57cf40567e7..368479cdd06 100644 --- a/tests/components/lektrico/snapshots/test_number.ambr +++ b/tests/components/lektrico/snapshots/test_number.ambr @@ -86,7 +86,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Led brightness', + 'original_name': 'LED brightness', 'platform': 'lektrico', 'previous_unique_id': None, 'supported_features': 0, @@ -98,7 +98,7 @@ # name: test_all_entities[number.1p7k_500006_led_brightness-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '1p7k_500006 Led brightness', + 'friendly_name': '1p7k_500006 LED brightness', 'max': 100, 'min': 0, 'mode': , diff --git a/tests/components/lg_thinq/test_climate.py b/tests/components/lg_thinq/test_climate.py index 4ac2fa55a21..e53b1c5ff39 100644 --- a/tests/components/lg_thinq/test_climate.py +++ 
b/tests/components/lg_thinq/test_climate.py @@ -5,9 +5,10 @@ from unittest.mock import AsyncMock, patch import pytest from syrupy import SnapshotAssertion -from homeassistant.const import Platform, UnitOfTemperature +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from . import setup_integration @@ -23,7 +24,7 @@ async def test_all_entities( entity_registry: er.EntityRegistry, ) -> None: """Test all entities.""" - hass.config.units.temperature_unit = UnitOfTemperature.FAHRENHEIT + hass.config.units = US_CUSTOMARY_SYSTEM with patch("homeassistant.components.lg_thinq.PLATFORMS", [Platform.CLIMATE]): await setup_integration(hass, mock_config_entry) diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index 5bc17ea3e24..29604ce7595 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -21,7 +21,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, Unauthorized -from homeassistant.helpers import frame from homeassistant.setup import async_setup_component from homeassistant.util import color as color_util @@ -2846,7 +2845,6 @@ def test_report_invalid_color_modes( ], ids=["with_kelvin", "with_mired_values", "with_mired_defaults"], ) -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) def test_missing_kelvin_property_warnings( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, diff --git a/tests/components/lovelace/test_init.py b/tests/components/lovelace/test_init.py index f35f7369f93..4c7cc96504b 100644 --- a/tests/components/lovelace/test_init.py +++ b/tests/components/lovelace/test_init.py @@ -13,6 +13,16 @@ from homeassistant.setup import async_setup_component from tests.typing import WebSocketGenerator +@pytest.fixture +def mock_onboarding_not_done() -> Generator[MagicMock]: + """Mock that Home Assistant is currently onboarding.""" + with patch( + "homeassistant.components.onboarding.async_is_onboarded", + return_value=False, + ) as mock_onboarding: + yield mock_onboarding + + @pytest.fixture def mock_onboarding_done() -> Generator[MagicMock]: """Mock that Home Assistant is currently onboarding.""" @@ -23,6 +33,15 @@ def mock_onboarding_done() -> Generator[MagicMock]: yield mock_onboarding +@pytest.fixture +def mock_add_onboarding_listener() -> Generator[MagicMock]: + """Mock that Home Assistant is currently onboarding.""" + with patch( + "homeassistant.components.onboarding.async_add_listener", + ) as mock_add_onboarding_listener: + yield mock_add_onboarding_listener + + async def test_create_dashboards_when_onboarded( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -41,6 +60,45 @@ async def test_create_dashboards_when_onboarded( assert response["result"] == [] +async def test_create_dashboards_when_not_onboarded( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + mock_add_onboarding_listener, + mock_onboarding_not_done, +) -> None: + """Test we automatically create dashboards when not onboarded.""" + client = await hass_ws_client(hass) + + assert await async_setup_component(hass, "lovelace", {}) + + # Call onboarding listener + mock_add_onboarding_listener.mock_calls[0][1][1]() + await hass.async_block_till_done() + + # List dashboards + await client.send_json_auto_id({"type": "lovelace/dashboards/list"}) + 
response = await client.receive_json() + assert response["success"] + assert response["result"] == [ + { + "icon": "mdi:map", + "id": "map", + "mode": "storage", + "require_admin": False, + "show_in_sidebar": True, + "title": "Map", + "url_path": "map", + } + ] + + # List map dashboard config + await client.send_json_auto_id({"type": "lovelace/config", "url_path": "map"}) + response = await client.receive_json() + assert response["success"] + assert response["result"] == {"strategy": {"type": "map"}} + + @pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) @pytest.mark.usefixtures("mock_integration_frame") async def test_hass_data_compatibility( diff --git a/tests/components/lutron/test_config_flow.py b/tests/components/lutron/test_config_flow.py index 47b2a4891cf..df861fafffe 100644 --- a/tests/components/lutron/test_config_flow.py +++ b/tests/components/lutron/test_config_flow.py @@ -6,11 +6,11 @@ from urllib.error import HTTPError import pytest -from homeassistant.components.lutron.const import DOMAIN +from homeassistant.components.lutron.const import CONF_DEFAULT_DIMMER_LEVEL, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.data_entry_flow import FlowResultType, InvalidData from tests.common import MockConfigEntry @@ -146,3 +146,41 @@ MOCK_DATA_IMPORT = { CONF_USERNAME: "lutron", CONF_PASSWORD: "integration", } + + +async def test_options_flow(hass: HomeAssistant) -> None: + """Test options flow.""" + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=MOCK_DATA_STEP, + unique_id="12345678901", + ) + config_entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # Try to set an out of range dimmer level (260) + out_of_range_level = 260 + + # The voluptuous validation will raise an exception before the handler processes it + with pytest.raises(InvalidData): + await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_DEFAULT_DIMMER_LEVEL: out_of_range_level}, + ) + + # Now try with a valid value + valid_level = 100 + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_DEFAULT_DIMMER_LEVEL: valid_level}, + ) + + # Verify that the flow finishes successfully with the valid value + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_DEFAULT_DIMMER_LEVEL: valid_level} diff --git a/tests/components/lutron_caseta/__init__.py b/tests/components/lutron_caseta/__init__.py index b27d30ac31f..5f146cd988a 100644 --- a/tests/components/lutron_caseta/__init__.py +++ b/tests/components/lutron_caseta/__init__.py @@ -1,5 +1,8 @@ """Tests for the Lutron Caseta integration.""" +import asyncio +from collections.abc import Callable +from typing import Any from unittest.mock import patch from homeassistant.components.lutron_caseta import DOMAIN @@ -84,25 +87,12 @@ _LEAP_DEVICE_TYPES = { } -async def async_setup_integration(hass: HomeAssistant, mock_bridge) -> MockConfigEntry: - """Set up a mock bridge.""" - mock_entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_MOCK_DATA) - mock_entry.add_to_hass(hass) - - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls" - ) as create_tls: - 
create_tls.return_value = mock_bridge(can_connect=True) - await hass.config_entries.async_setup(mock_entry.entry_id) - await hass.async_block_till_done() - return mock_entry - - class MockBridge: """Mock Lutron bridge that emulates configured connected status.""" - def __init__(self, can_connect=True) -> None: + def __init__(self, can_connect=True, timeout_on_connect=False) -> None: """Initialize MockBridge instance with configured mock connectivity.""" + self.timeout_on_connect = timeout_on_connect self.can_connect = can_connect self.is_currently_connected = False self.areas = self.load_areas() @@ -113,6 +103,8 @@ class MockBridge: async def connect(self): """Connect the mock bridge.""" + if self.timeout_on_connect: + await asyncio.Event().wait() # wait forever if self.can_connect: self.is_currently_connected = True @@ -320,3 +312,43 @@ class MockBridge: async def close(self): """Close the mock bridge connection.""" self.is_currently_connected = False + + +def make_mock_entry() -> MockConfigEntry: + """Create a mock config entry.""" + return MockConfigEntry(domain=DOMAIN, data=ENTRY_MOCK_DATA) + + +async def async_setup_integration( + hass: HomeAssistant, + mock_bridge: MockBridge, + config_entry_id: str | None = None, + can_connect: bool = True, + timeout_during_connect: bool = False, + timeout_during_configure: bool = False, +) -> MockConfigEntry: + """Set up a mock bridge.""" + if config_entry_id is None: + mock_entry = make_mock_entry() + mock_entry.add_to_hass(hass) + config_entry_id = mock_entry.entry_id + else: + mock_entry = hass.config_entries.async_get_entry(config_entry_id) + + def create_tls_factory( + *args: Any, on_connect_callback: Callable[[], None], **kwargs: Any + ) -> None: + """Return a mock bridge.""" + if not timeout_during_connect: + on_connect_callback() + return mock_bridge( + can_connect=can_connect, timeout_on_connect=timeout_during_configure + ) + + with patch( + "homeassistant.components.lutron_caseta.Smartbridge.create_tls", + create_tls_factory, + ): + await hass.config_entries.async_setup(config_entry_id) + await hass.async_block_till_done() + return mock_entry diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py index 1ab45bf7582..001bf86ad54 100644 --- a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -1,7 +1,5 @@ """The tests for Lutron Caséta device triggers.""" -from unittest.mock import patch - import pytest from pytest_unordered import unordered @@ -37,7 +35,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from . import MockBridge +from . 
import MockBridge, async_setup_integration from tests.common import MockConfigEntry, async_get_device_automations @@ -112,12 +110,7 @@ async def _async_setup_lutron_with_picos(hass: HomeAssistant) -> str: ) config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls", - return_value=MockBridge(can_connect=True), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry.entry_id) return config_entry.entry_id @@ -487,9 +480,7 @@ async def test_if_fires_on_button_event_late_setup( }, ) - with patch("homeassistant.components.lutron_caseta.Smartbridge.create_tls"): - await hass.config_entries.async_setup(config_entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry_id) message = { ATTR_SERIAL: device.get("serial"), diff --git a/tests/components/lutron_caseta/test_diagnostics.py b/tests/components/lutron_caseta/test_diagnostics.py index 5c7d20da208..45229918578 100644 --- a/tests/components/lutron_caseta/test_diagnostics.py +++ b/tests/components/lutron_caseta/test_diagnostics.py @@ -1,6 +1,6 @@ """Test the Lutron Caseta diagnostics.""" -from unittest.mock import ANY, patch +from unittest.mock import ANY from homeassistant.components.lutron_caseta import DOMAIN from homeassistant.components.lutron_caseta.const import ( @@ -11,7 +11,7 @@ from homeassistant.components.lutron_caseta.const import ( from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant -from . import MockBridge +from . import MockBridge, async_setup_integration from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -34,12 +34,7 @@ async def test_diagnostics( ) config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls", - return_value=MockBridge(can_connect=True), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry.entry_id) diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) assert diag == { diff --git a/tests/components/lutron_caseta/test_init.py b/tests/components/lutron_caseta/test_init.py new file mode 100644 index 00000000000..7e509acbf62 --- /dev/null +++ b/tests/components/lutron_caseta/test_init.py @@ -0,0 +1,54 @@ +"""Tests for the Lutron Caseta integration.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components import lutron_caseta +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import MockBridge, async_setup_integration, make_mock_entry + + +@pytest.mark.parametrize( + ("constant", "message", "timeout_during_connect", "timeout_during_configure"), + [ + ("CONNECT_TIMEOUT", "Timed out on connect", True, False), + ("CONFIGURE_TIMEOUT", "Timed out on configure", False, True), + ], +) +async def test_timeout_during_setup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + constant: str, + message: str, + timeout_during_connect: bool, + timeout_during_configure: bool, +) -> None: + """Test a timeout during setup.""" + mock_entry = make_mock_entry() + mock_entry.add_to_hass(hass) + with patch.object(lutron_caseta, constant, 0.001): + await async_setup_integration( + hass, + MockBridge, + config_entry_id=mock_entry.entry_id, + timeout_during_connect=timeout_during_connect, + timeout_during_configure=timeout_during_configure, + ) + assert mock_entry.state is ConfigEntryState.SETUP_RETRY + assert f"{message} for 1.1.1.1" in caplog.text + + +async def test_cannot_connect( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test failing to connect.""" + mock_entry = make_mock_entry() + mock_entry.add_to_hass(hass) + await async_setup_integration( + hass, MockBridge, config_entry_id=mock_entry.entry_id, can_connect=False + ) + assert mock_entry.state is ConfigEntryState.SETUP_RETRY + assert "Connection failed to 1.1.1.1" in caplog.text diff --git a/tests/components/lutron_caseta/test_logbook.py b/tests/components/lutron_caseta/test_logbook.py index 9a58838d65c..8b4a3e00fa9 100644 --- a/tests/components/lutron_caseta/test_logbook.py +++ b/tests/components/lutron_caseta/test_logbook.py @@ -1,7 +1,5 @@ """The tests for lutron caseta logbook.""" -from unittest.mock import patch - from homeassistant.components.lutron_caseta.const import ( ATTR_ACTION, ATTR_AREA_NAME, @@ -43,13 +41,7 @@ async def test_humanify_lutron_caseta_button_event(hass: HomeAssistant) -> None: unique_id="abc", ) config_entry.add_to_hass(hass) - - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls", - return_value=MockBridge(can_connect=True), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry.entry_id) await hass.async_block_till_done() @@ -104,15 +96,10 @@ async def test_humanify_lutron_caseta_button_event_integration_not_loaded( ) config_entry.add_to_hass(hass) - with patch( - "homeassistant.components.lutron_caseta.Smartbridge.create_tls", - return_value=MockBridge(can_connect=True), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await async_setup_integration(hass, MockBridge, config_entry.entry_id) - await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() for device in device_registry.devices.values(): if device.config_entries == {config_entry.entry_id}: diff --git a/tests/components/marytts/test_tts.py b/tests/components/marytts/test_tts.py index 0ad27cde29b..25231c15a32 100644 --- a/tests/components/marytts/test_tts.py +++ b/tests/components/marytts/test_tts.py @@ -155,7 +155,7 @@ async def test_service_say_http_error( await retrieve_media( hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) mock_speak.assert_called_once() diff --git 
a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py index ac23141be55..d8979083de9 100644 --- a/tests/components/mastodon/conftest.py +++ b/tests/components/mastodon/conftest.py @@ -3,12 +3,13 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from mastodon.Mastodon import Account, InstanceV2 import pytest from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET -from tests.common import MockConfigEntry, load_json_object_fixture +from tests.common import MockConfigEntry, load_fixture @pytest.fixture @@ -31,9 +32,11 @@ def mock_mastodon_client() -> Generator[AsyncMock]: ) as mock_client, ): client = mock_client.return_value - client.instance.return_value = load_json_object_fixture("instance.json", DOMAIN) - client.account_verify_credentials.return_value = load_json_object_fixture( - "account_verify_credentials.json", DOMAIN + client.instance.return_value = InstanceV2.from_json( + load_fixture("instance.json", DOMAIN) + ) + client.account_verify_credentials.return_value = Account.from_json( + load_fixture("account_verify_credentials.json", DOMAIN) ) client.status_post.return_value = None yield client diff --git a/tests/components/mastodon/fixtures/account_verify_credentials.json b/tests/components/mastodon/fixtures/account_verify_credentials.json index 401caa121ae..7806d280ab9 100644 --- a/tests/components/mastodon/fixtures/account_verify_credentials.json +++ b/tests/components/mastodon/fixtures/account_verify_credentials.json @@ -1,78 +1,60 @@ { - "id": "14715", - "username": "trwnh", - "acct": "trwnh", - "display_name": "infinite love ⴳ", - "locked": false, - "bot": false, - "created_at": "2016-11-24T10:02:12.085Z", - "note": "

i have approximate knowledge of many things. perpetual student. (nb/ace/they) xmpp/email: a@trwnh.com https://trwnh.com help me live: https://liberapay.com/at or https://paypal.me/trwnh - my triggers are moths and glitter - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - dm me if i did something wrong, so i can improve - purest person on fedi, do not lewd in my presence - #1 ami cole fan account :fatyoshi:
", - "url": "https://mastodon.social/@trwnh", - "avatar": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", - "avatar_static": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", - "header": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", - "header_static": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", - "followers_count": 821, - "following_count": 178, - "statuses_count": 33120, - "last_status_at": "2019-11-24T15:49:42.251Z", - "source": { - "privacy": "public", - "sensitive": false, - "language": "", - "note": "i have approximate knowledge of many things. perpetual student. (nb/ace/they)\r\n\r\nxmpp/email: a@trwnh.com\r\nhttps://trwnh.com\r\nhelp me live: https://liberapay.com/at or https://paypal.me/trwnh\r\n\r\n- my triggers are moths and glitter\r\n- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise\r\n- dm me if i did something wrong, so i can improve\r\n- purest person on fedi, do not lewd in my presence\r\n- #1 ami cole fan account\r\n\r\n:fatyoshi:", + "_mastopy_version": "2.0.0", + "_mastopy_type": "Account", + "_mastopy_data": { + "id": "14715", + "username": "trwnh", + "acct": "trwnh", + "display_name": "infinite love \u2d33", + "discoverable": true, + "group": false, + "locked": false, + "created_at": "2016-11-24T00:00:00+00:00", + "following_count": 328, + "followers_count": 3169, + "statuses_count": 69523, + "note": "

i have approximate knowledge of many things. perpetual student. (nb/ace/they) xmpp/email: a@trwnh.com https://trwnh.com help me live: - https://donate.stripe.com/4gwcPCaMpcQ19RC4gg - https://liberapay.com/trwnh notes: - my triggers are moths and glitter - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - dm me if i did something wrong, so i can improve - purest person on fedi, do not lewd in my presence
", + "url": "https://mastodon.social/@trwnh", + "uri": "https://mastodon.social/users/trwnh", + "avatar": "https://files.mastodon.social/accounts/avatars/000/014/715/original/051c958388818705.png", + "header": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", + "avatar_static": "https://files.mastodon.social/accounts/avatars/000/014/715/original/051c958388818705.png", + "header_static": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", + "moved_to_account": null, + "suspended": null, + "limited": null, + "bot": true, "fields": [ { "name": "Website", - "value": "https://trwnh.com", + "value": "https://trwnh.com", "verified_at": "2019-08-29T04:14:55.571+00:00" }, { - "name": "Sponsor", - "value": "https://liberapay.com/at", - "verified_at": "2019-11-15T10:06:15.557+00:00" + "name": "Portfolio", + "value": "https://abdullahtarawneh.com", + "verified_at": "2021-02-11T20:34:13.574+00:00" }, { "name": "Fan of:", - "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", "verified_at": null }, { - "name": "Main topics:", - "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", + "name": "What to expect:", + "value": "talking about various things i find interesting, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people! and to spill my thoughts.", "verified_at": null } ], - "follow_requests_count": 0 - }, - "emojis": [ - { - "shortcode": "fatyoshi", - "url": "https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png", - "static_url": "https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png", - "visible_in_picker": true - } - ], - "fields": [ - { - "name": "Website", - "value": "https://trwnh.com", - "verified_at": "2019-08-29T04:14:55.571+00:00" - }, - { - "name": "Sponsor", - "value": "https://liberapay.com/at", - "verified_at": "2019-11-15T10:06:15.557+00:00" - }, - { - "name": "Fan of:", - "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", - "verified_at": null - }, - { - "name": "Main topics:", - "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. 
i'm just here to hang out and talk to cool people!", - "verified_at": null - } - ] + "emojis": [], + "last_status_at": "2025-03-04T00:00:00", + "noindex": false, + "roles": [], + "role": null, + "source": null, + "mute_expires_at": null, + "indexable": false, + "hide_collections": true, + "memorial": null + } } diff --git a/tests/components/mastodon/fixtures/instance.json b/tests/components/mastodon/fixtures/instance.json index b0e904e80ef..2e3dfe2d46d 100644 --- a/tests/components/mastodon/fixtures/instance.json +++ b/tests/components/mastodon/fixtures/instance.json @@ -1,147 +1,18 @@ { - "domain": "mastodon.social", - "title": "Mastodon", - "version": "4.0.0rc1", - "source_url": "https://github.com/mastodon/mastodon", - "description": "The original server operated by the Mastodon gGmbH non-profit", - "usage": { - "users": { - "active_month": 123122 + "_mastopy_version": "2.0.0", + "_mastopy_type": "InstanceV2", + "_mastopy_data": { + "uri": "mastodon.social", + "domain": "mastodon.social", + "title": "Mastodon", + "version": "4.4.0-nightly.2025-02-07", + "source_url": "https://github.com/mastodon/mastodon", + + "description": "The original server operated by the Mastodon gGmbH non-profit", + "usage": { + "users": { + "active_month": 380143 + } } - }, - "thumbnail": { - "url": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", - "blurhash": "UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$", - "versions": { - "@1x": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", - "@2x": "https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png" - } - }, - "languages": ["en"], - "configuration": { - "urls": { - "streaming": "wss://mastodon.social" - }, - "vapid": { - "public_key": "BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=" - }, - "accounts": { - "max_featured_tags": 10, - "max_pinned_statuses": 4 - }, - "statuses": { - "max_characters": 500, - "max_media_attachments": 4, - "characters_reserved_per_url": 23 - }, - "media_attachments": { - "supported_mime_types": [ - "image/jpeg", - "image/png", - "image/gif", - "image/heic", - "image/heif", - "image/webp", - "video/webm", - "video/mp4", - "video/quicktime", - "video/ogg", - "audio/wave", - "audio/wav", - "audio/x-wav", - "audio/x-pn-wave", - "audio/vnd.wave", - "audio/ogg", - "audio/vorbis", - "audio/mpeg", - "audio/mp3", - "audio/webm", - "audio/flac", - "audio/aac", - "audio/m4a", - "audio/x-m4a", - "audio/mp4", - "audio/3gpp", - "video/x-ms-asf" - ], - "image_size_limit": 10485760, - "image_matrix_limit": 16777216, - "video_size_limit": 41943040, - "video_frame_rate_limit": 60, - "video_matrix_limit": 2304000 - }, - "polls": { - "max_options": 4, - "max_characters_per_option": 50, - "min_expiration": 300, - "max_expiration": 2629746 - }, - "translation": { - "enabled": true - } - }, - "registrations": { - "enabled": false, - "approval_required": false, - "message": null - }, - "contact": { - "email": "staff@mastodon.social", - "account": { - "id": "1", - "username": "Gargron", - "acct": "Gargron", - "display_name": "Eugen 💀", - "locked": false, - "bot": false, - "discoverable": true, - "group": false, - "created_at": "2016-03-16T00:00:00.000Z", - "note": "

Founder, CEO and lead developer @Mastodon, Germany.

", - "url": "https://mastodon.social/@Gargron", - "avatar": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", - "avatar_static": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", - "header": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", - "header_static": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", - "followers_count": 133026, - "following_count": 311, - "statuses_count": 72605, - "last_status_at": "2022-10-31", - "noindex": false, - "emojis": [], - "fields": [ - { - "name": "Patreon", - "value": "https://www.patreon.com/mastodon", - "verified_at": null - } - ] - } - }, - "rules": [ - { - "id": "1", - "text": "Sexually explicit or violent media must be marked as sensitive when posting" - }, - { - "id": "2", - "text": "No racism, sexism, homophobia, transphobia, xenophobia, or casteism" - }, - { - "id": "3", - "text": "No incitement of violence or promotion of violent ideologies" - }, - { - "id": "4", - "text": "No harassment, dogpiling or doxxing of other users" - }, - { - "id": "5", - "text": "No content illegal in Germany" - }, - { - "id": "7", - "text": "Do not share intentionally false or misleading information" - } - ] + } } diff --git a/tests/components/mastodon/snapshots/test_diagnostics.ambr b/tests/components/mastodon/snapshots/test_diagnostics.ambr index 982ecee7ee2..9198410f066 100644 --- a/tests/components/mastodon/snapshots/test_diagnostics.ambr +++ b/tests/components/mastodon/snapshots/test_diagnostics.ambr @@ -3,245 +3,82 @@ dict({ 'account': dict({ 'acct': 'trwnh', - 'avatar': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', - 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', - 'bot': False, - 'created_at': '2016-11-24T10:02:12.085Z', + 'avatar': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/051c958388818705.png', + 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/051c958388818705.png', + 'bot': True, + 'created_at': '2016-11-24T00:00:00+00:00', + 'discoverable': True, 'display_name': 'infinite love ⴳ', 'emojis': list([ - dict({ - 'shortcode': 'fatyoshi', - 'static_url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png', - 'url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png', - 'visible_in_picker': True, - }), ]), 'fields': list([ dict({ 'name': 'Website', - 'value': 'trwnh.com', + 'value': 'trwnh.com', 'verified_at': '2019-08-29T04:14:55.571+00:00', }), dict({ - 'name': 'Sponsor', - 'value': 'liberapay.com/at', - 'verified_at': '2019-11-15T10:06:15.557+00:00', + 'name': 'Portfolio', + 'value': 'abdullahtarawneh.com', + 'verified_at': '2021-02-11T20:34:13.574+00:00', }), dict({ 'name': 'Fan of:', - 'value': 'Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)', + 'value': 'Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)', 'verified_at': None, }), dict({ - 'name': 'Main topics:', 
- 'value': 'systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!', + 'name': 'What to expect:', + 'value': 'talking about various things i find interesting, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people! and to spill my thoughts.', 'verified_at': None, }), ]), - 'followers_count': 821, - 'following_count': 178, + 'followers_count': 3169, + 'following_count': 328, + 'group': False, 'header': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', 'header_static': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', + 'hide_collections': True, 'id': '14715', - 'last_status_at': '2019-11-24T15:49:42.251Z', + 'indexable': False, + 'last_status_at': '2025-03-04T00:00:00', + 'limited': None, 'locked': False, - 'note': '

i have approximate knowledge of many things. perpetual student. (nb/ace/they)

xmpp/email: a@trwnh.com
trwnh.com
help me live: liberapay.com/at or paypal.me/trwnh

- my triggers are moths and glitter
- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise
- dm me if i did something wrong, so i can improve
- purest person on fedi, do not lewd in my presence
- #1 ami cole fan account

:fatyoshi:

', - 'source': dict({ - 'fields': list([ - dict({ - 'name': 'Website', - 'value': 'https://trwnh.com', - 'verified_at': '2019-08-29T04:14:55.571+00:00', - }), - dict({ - 'name': 'Sponsor', - 'value': 'https://liberapay.com/at', - 'verified_at': '2019-11-15T10:06:15.557+00:00', - }), - dict({ - 'name': 'Fan of:', - 'value': "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", - 'verified_at': None, - }), - dict({ - 'name': 'Main topics:', - 'value': "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", - 'verified_at': None, - }), - ]), - 'follow_requests_count': 0, - 'language': '', - 'note': ''' - i have approximate knowledge of many things. perpetual student. (nb/ace/they) - - xmpp/email: a@trwnh.com - https://trwnh.com - help me live: https://liberapay.com/at or https://paypal.me/trwnh - - - my triggers are moths and glitter - - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise - - dm me if i did something wrong, so i can improve - - purest person on fedi, do not lewd in my presence - - #1 ami cole fan account - - :fatyoshi: - ''', - 'privacy': 'public', - 'sensitive': False, - }), - 'statuses_count': 33120, + 'memorial': None, + 'moved_to_account': None, + 'mute_expires_at': None, + 'noindex': False, + 'note': '

i have approximate knowledge of many things. perpetual student. (nb/ace/they)

xmpp/email: a@trwnh.com
trwnh.com
help me live:
- donate.stripe.com/4gwcPCaMpcQ1
- liberapay.com/trwnh

notes:
- my triggers are moths and glitter
- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise
- dm me if i did something wrong, so i can improve
- purest person on fedi, do not lewd in my presence

', + 'role': None, + 'roles': list([ + ]), + 'source': None, + 'statuses_count': 69523, + 'suspended': None, + 'uri': 'https://mastodon.social/users/trwnh', 'url': 'https://mastodon.social/@trwnh', 'username': 'trwnh', }), 'instance': dict({ - 'configuration': dict({ - 'accounts': dict({ - 'max_featured_tags': 10, - 'max_pinned_statuses': 4, - }), - 'media_attachments': dict({ - 'image_matrix_limit': 16777216, - 'image_size_limit': 10485760, - 'supported_mime_types': list([ - 'image/jpeg', - 'image/png', - 'image/gif', - 'image/heic', - 'image/heif', - 'image/webp', - 'video/webm', - 'video/mp4', - 'video/quicktime', - 'video/ogg', - 'audio/wave', - 'audio/wav', - 'audio/x-wav', - 'audio/x-pn-wave', - 'audio/vnd.wave', - 'audio/ogg', - 'audio/vorbis', - 'audio/mpeg', - 'audio/mp3', - 'audio/webm', - 'audio/flac', - 'audio/aac', - 'audio/m4a', - 'audio/x-m4a', - 'audio/mp4', - 'audio/3gpp', - 'video/x-ms-asf', - ]), - 'video_frame_rate_limit': 60, - 'video_matrix_limit': 2304000, - 'video_size_limit': 41943040, - }), - 'polls': dict({ - 'max_characters_per_option': 50, - 'max_expiration': 2629746, - 'max_options': 4, - 'min_expiration': 300, - }), - 'statuses': dict({ - 'characters_reserved_per_url': 23, - 'max_characters': 500, - 'max_media_attachments': 4, - }), - 'translation': dict({ - 'enabled': True, - }), - 'urls': dict({ - 'streaming': 'wss://mastodon.social', - }), - 'vapid': dict({ - 'public_key': 'BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=', - }), - }), - 'contact': dict({ - 'account': dict({ - 'acct': 'Gargron', - 'avatar': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', - 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', - 'bot': False, - 'created_at': '2016-03-16T00:00:00.000Z', - 'discoverable': True, - 'display_name': 'Eugen 💀', - 'emojis': list([ - ]), - 'fields': list([ - dict({ - 'name': 'Patreon', - 'value': 'patreon.com/mastodon', - 'verified_at': None, - }), - ]), - 'followers_count': 133026, - 'following_count': 311, - 'group': False, - 'header': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', - 'header_static': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', - 'id': '1', - 'last_status_at': '2022-10-31', - 'locked': False, - 'noindex': False, - 'note': '

Founder, CEO and lead developer @Mastodon, Germany.

', - 'statuses_count': 72605, - 'url': 'https://mastodon.social/@Gargron', - 'username': 'Gargron', - }), - 'email': 'staff@mastodon.social', - }), + 'api_versions': None, + 'configuration': None, + 'contact': None, 'description': 'The original server operated by the Mastodon gGmbH non-profit', 'domain': 'mastodon.social', - 'languages': list([ - 'en', - ]), - 'registrations': dict({ - 'approval_required': False, - 'enabled': False, - 'message': None, - }), - 'rules': list([ - dict({ - 'id': '1', - 'text': 'Sexually explicit or violent media must be marked as sensitive when posting', - }), - dict({ - 'id': '2', - 'text': 'No racism, sexism, homophobia, transphobia, xenophobia, or casteism', - }), - dict({ - 'id': '3', - 'text': 'No incitement of violence or promotion of violent ideologies', - }), - dict({ - 'id': '4', - 'text': 'No harassment, dogpiling or doxxing of other users', - }), - dict({ - 'id': '5', - 'text': 'No content illegal in Germany', - }), - dict({ - 'id': '7', - 'text': 'Do not share intentionally false or misleading information', - }), - ]), + 'icon': None, + 'languages': None, + 'registrations': None, + 'rules': None, 'source_url': 'https://github.com/mastodon/mastodon', - 'thumbnail': dict({ - 'blurhash': 'UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$', - 'url': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', - 'versions': dict({ - '@1x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', - '@2x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png', - }), - }), + 'thumbnail': None, 'title': 'Mastodon', + 'uri': 'mastodon.social', 'usage': dict({ 'users': dict({ - 'active_month': 123122, + 'active_month': 380143, }), }), - 'version': '4.0.0rc1', + 'version': '4.4.0-nightly.2025-02-07', }), }) # --- diff --git a/tests/components/mastodon/snapshots/test_init.ambr b/tests/components/mastodon/snapshots/test_init.ambr index 28157b9e6eb..46fb4c1d4e0 100644 --- a/tests/components/mastodon/snapshots/test_init.ambr +++ b/tests/components/mastodon/snapshots/test_init.ambr @@ -28,7 +28,7 @@ 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '4.0.0rc1', + 'sw_version': '4.4.0-nightly.2025-02-07', 'via_device_id': None, }) # --- diff --git a/tests/components/mastodon/snapshots/test_sensor.ambr b/tests/components/mastodon/snapshots/test_sensor.ambr index 22ac2671c36..40986210454 100644 --- a/tests/components/mastodon/snapshots/test_sensor.ambr +++ b/tests/components/mastodon/snapshots/test_sensor.ambr @@ -47,7 +47,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '821', + 'state': '3169', }) # --- # name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-entry] @@ -98,7 +98,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '178', + 'state': '328', }) # --- # name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-entry] @@ -149,6 +149,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '33120', + 'state': '69523', }) # --- diff --git a/tests/components/mastodon/test_services.py b/tests/components/mastodon/test_services.py index 4dafa9a8e5b..f51d39f8687 100644 --- a/tests/components/mastodon/test_services.py +++ b/tests/components/mastodon/test_services.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock, Mock, patch -from mastodon.Mastodon import MastodonAPIError +from mastodon.Mastodon import MastodonAPIError, MediaAttachment import pytest from 
homeassistant.components.mastodon.const import ( @@ -106,7 +106,9 @@ async def test_service_post( with ( patch.object(hass.config, "is_allowed_path", return_value=True), - patch.object(mock_mastodon_client, "media_post", return_value={"id": "1"}), + patch.object( + mock_mastodon_client, "media_post", return_value=MediaAttachment(id="1") + ), ): await hass.services.async_call( DOMAIN, @@ -163,7 +165,7 @@ async def test_post_service_failed( await hass.async_block_till_done() hass.config.is_allowed_path = Mock(return_value=True) - mock_mastodon_client.media_post.return_value = {"id": "1"} + mock_mastodon_client.media_post.return_value = MediaAttachment(id="1") mock_mastodon_client.status_post.side_effect = MastodonAPIError diff --git a/tests/components/matter/fixtures/nodes/generic_switch_multi.json b/tests/components/matter/fixtures/nodes/generic_switch_multi.json index 8923198c31e..4055c9dc336 100644 --- a/tests/components/matter/fixtures/nodes/generic_switch_multi.json +++ b/tests/components/matter/fixtures/nodes/generic_switch_multi.json @@ -72,7 +72,6 @@ "1/59/0": 2, "1/59/65533": 1, "1/59/1": 0, - "1/59/2": 2, "1/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "1/59/65532": 30, "1/59/65528": [], @@ -102,7 +101,7 @@ "2/59/0": 2, "2/59/65533": 1, "2/59/1": 0, - "2/59/2": 2, + "2/59/2": 4, "2/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], "2/59/65532": 30, "2/59/65528": [], diff --git a/tests/components/matter/snapshots/test_event.ambr b/tests/components/matter/snapshots/test_event.ambr index b0ddfaed8bf..153f5751f14 100644 --- a/tests/components/matter/snapshots/test_event.ambr +++ b/tests/components/matter/snapshots/test_event.ambr @@ -132,6 +132,8 @@ 'event_types': list([ 'multi_press_1', 'multi_press_2', + 'multi_press_3', + 'multi_press_4', 'long_press', 'long_release', ]), @@ -172,6 +174,8 @@ 'event_types': list([ 'multi_press_1', 'multi_press_2', + 'multi_press_3', + 'multi_press_4', 'long_press', 'long_release', ]), diff --git a/tests/components/matter/snapshots/test_number.ambr b/tests/components/matter/snapshots/test_number.ambr index dc35f6f2a69..d777b9d48d0 100644 --- a/tests/components/matter/snapshots/test_number.ambr +++ b/tests/components/matter/snapshots/test_number.ambr @@ -401,8 +401,8 @@ }), 'area_id': None, 'capabilities': dict({ - 'max': 25, - 'min': -25, + 'max': 50, + 'min': -50, 'mode': , 'step': 0.5, }), @@ -439,8 +439,8 @@ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', 'friendly_name': 'Eve Thermo Temperature offset', - 'max': 25, - 'min': -25, + 'max': 50, + 'min': -50, 'mode': , 'step': 0.5, 'unit_of_measurement': , diff --git a/tests/components/matter/snapshots/test_sensor.ambr b/tests/components/matter/snapshots/test_sensor.ambr index 9caa84bbf96..cb26f1d8e70 100644 --- a/tests/components/matter/snapshots/test_sensor.ambr +++ b/tests/components/matter/snapshots/test_sensor.ambr @@ -686,7 +686,7 @@ 'state': '20.0', }) # --- -# name: test_sensors[air_purifier][sensor.air_purifier_vocs-entry] +# name: test_sensors[air_purifier][sensor.air_purifier_volatile_organic_compounds_parts-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -701,7 +701,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.air_purifier_vocs', + 'entity_id': 'sensor.air_purifier_volatile_organic_compounds_parts', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -713,7 +713,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'VOCs', + 'original_name': 'Volatile organic compounds 
parts', 'platform': 'matter', 'previous_unique_id': None, 'supported_features': 0, @@ -722,16 +722,16 @@ 'unit_of_measurement': 'ppm', }) # --- -# name: test_sensors[air_purifier][sensor.air_purifier_vocs-state] +# name: test_sensors[air_purifier][sensor.air_purifier_volatile_organic_compounds_parts-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'volatile_organic_compounds_parts', - 'friendly_name': 'Air Purifier VOCs', + 'friendly_name': 'Air Purifier Volatile organic compounds parts', 'state_class': , 'unit_of_measurement': 'ppm', }), 'context': , - 'entity_id': 'sensor.air_purifier_vocs', + 'entity_id': 'sensor.air_purifier_volatile_organic_compounds_parts', 'last_changed': , 'last_reported': , 'last_updated': , @@ -1167,7 +1167,7 @@ 'state': '20.08', }) # --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_vocs-entry] +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_volatile_organic_compounds_parts-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1182,7 +1182,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_vocs', + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_volatile_organic_compounds_parts', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1194,7 +1194,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'VOCs', + 'original_name': 'Volatile organic compounds parts', 'platform': 'matter', 'previous_unique_id': None, 'supported_features': 0, @@ -1203,16 +1203,16 @@ 'unit_of_measurement': 'ppm', }) # --- -# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_vocs-state] +# name: test_sensors[air_quality_sensor][sensor.lightfi_aq1_air_quality_sensor_volatile_organic_compounds_parts-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'volatile_organic_compounds_parts', - 'friendly_name': 'lightfi-aq1-air-quality-sensor VOCs', + 'friendly_name': 'lightfi-aq1-air-quality-sensor Volatile organic compounds parts', 'state_class': , 'unit_of_measurement': 'ppm', }), 'context': , - 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_vocs', + 'entity_id': 'sensor.lightfi_aq1_air_quality_sensor_volatile_organic_compounds_parts', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/matter/test_event.py b/tests/components/matter/test_event.py index f3a318c4e8b..651c71a5dce 100644 --- a/tests/components/matter/test_event.py +++ b/tests/components/matter/test_event.py @@ -36,7 +36,7 @@ async def test_generic_switch_node( assert state assert state.state == "unknown" assert state.name == "Mock Generic Switch Button" - # check event_types from featuremap 30 + # check event_types from featuremap 14 (0b1110) assert state.attributes[ATTR_EVENT_TYPES] == [ "initial_press", "short_release", @@ -76,7 +76,7 @@ async def test_generic_switch_multi_node( assert state_button_1.state == "unknown" # name should be 'DeviceName Button (1)' due to the label set to just '1' assert state_button_1.name == "Mock Generic Switch Button (1)" - # check event_types from featuremap 14 + # check event_types from featuremap 30 (0b11110) and MultiPressMax unset (default 2) assert state_button_1.attributes[ATTR_EVENT_TYPES] == [ "multi_press_1", "multi_press_2", @@ -84,11 +84,20 @@ async def test_generic_switch_multi_node( "long_release", ] # check button 2 - state_button_1 = hass.states.get("event.mock_generic_switch_fancy_button") - assert 
state_button_1 - assert state_button_1.state == "unknown" + state_button_2 = hass.states.get("event.mock_generic_switch_fancy_button") + assert state_button_2 + assert state_button_2.state == "unknown" # name should be 'DeviceName Fancy Button' due to the label set to 'Fancy Button' - assert state_button_1.name == "Mock Generic Switch Fancy Button" + assert state_button_2.name == "Mock Generic Switch Fancy Button" + # check event_types from featuremap 30 (0b11110) and MultiPressMax 4 + assert state_button_2.attributes[ATTR_EVENT_TYPES] == [ + "multi_press_1", + "multi_press_2", + "multi_press_3", + "multi_press_4", + "long_press", + "long_release", + ] # trigger firing a multi press event await trigger_subscription_callback( diff --git a/tests/components/mcp_server/conftest.py b/tests/components/mcp_server/conftest.py index 5ec67fb6ce3..b5e25d9fe50 100644 --- a/tests/components/mcp_server/conftest.py +++ b/tests/components/mcp_server/conftest.py @@ -5,9 +5,10 @@ from unittest.mock import AsyncMock, patch import pytest -from homeassistant.components.mcp_server.const import DOMAIN, LLM_API +from homeassistant.components.mcp_server.const import DOMAIN from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm from tests.common import MockConfigEntry @@ -21,13 +22,19 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry +@pytest.fixture(name="llm_hass_api") +def llm_hass_api_fixture() -> str: + """Fixture for the config entry llm_hass_api.""" + return llm.LLM_API_ASSIST + + @pytest.fixture(name="config_entry") -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def mock_config_entry(hass: HomeAssistant, llm_hass_api: str) -> MockConfigEntry: """Fixture to load the integration.""" config_entry = MockConfigEntry( domain=DOMAIN, data={ - CONF_LLM_HASS_API: LLM_API, + CONF_LLM_HASS_API: llm_hass_api, }, ) config_entry.add_to_hass(hass) diff --git a/tests/components/mcp_server/test_http.py b/tests/components/mcp_server/test_http.py index 905bfaa11d7..70efd211b57 100644 --- a/tests/components/mcp_server/test_http.py +++ b/tests/components/mcp_server/test_http.py @@ -16,6 +16,7 @@ import pytest from homeassistant.components.conversation import DOMAIN as CONVERSATION_DOMAIN from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.mcp_server.const import STATELESS_LLM_API from homeassistant.components.mcp_server.http import MESSAGES_API, SSE_API from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_LLM_HASS_API, STATE_OFF, STATE_ON @@ -24,6 +25,7 @@ from homeassistant.helpers import ( area_registry as ar, device_registry as dr, entity_registry as er, + llm, ) from homeassistant.setup import async_setup_component @@ -297,6 +299,7 @@ async def mcp_session( yield session +@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST, STATELESS_LLM_API]) async def test_mcp_tools_list( hass: HomeAssistant, setup_integration: None, @@ -319,6 +322,7 @@ async def test_mcp_tools_list( assert properties.get("name") == {"type": "string"} +@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST, STATELESS_LLM_API]) async def test_mcp_tool_call( hass: HomeAssistant, setup_integration: None, @@ -371,6 +375,7 @@ async def test_mcp_tool_call_failed( assert "Error calling tool" in result.content[0].text 
+@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST, STATELESS_LLM_API]) async def test_prompt_list( hass: HomeAssistant, setup_integration: None, @@ -384,13 +389,11 @@ async def test_prompt_list( assert len(result.prompts) == 1 prompt = result.prompts[0] - assert prompt.name == "Stateless Assist" - assert ( - prompt.description - == "Default prompt for the Home Assistant LLM API Stateless Assist" - ) + assert prompt.name == "Assist" + assert prompt.description == "Default prompt for Home Assistant Assist API" +@pytest.mark.parametrize("llm_hass_api", [llm.LLM_API_ASSIST, STATELESS_LLM_API]) async def test_prompt_get( hass: HomeAssistant, setup_integration: None, @@ -400,12 +403,9 @@ async def test_prompt_get( """Test the get prompt endpoint.""" async with mcp_session(mcp_sse_url, hass_supervisor_access_token) as session: - result = await session.get_prompt(name="Stateless Assist") + result = await session.get_prompt(name="Assist") - assert ( - result.description - == "Default prompt for the Home Assistant LLM API Stateless Assist" - ) + assert result.description == "Default prompt for Home Assistant Assist API" assert len(result.messages) == 1 assert result.messages[0].role == "assistant" assert result.messages[0].content.type == "text" diff --git a/tests/components/media_player/test_async_helpers.py b/tests/components/media_player/test_async_helpers.py index 680603c097d..3ab79db73e1 100644 --- a/tests/components/media_player/test_async_helpers.py +++ b/tests/components/media_player/test_async_helpers.py @@ -69,6 +69,10 @@ class SimpleMediaPlayer(mp.MediaPlayerEntity): """Put device in standby.""" self._state = STATE_STANDBY + def idle(self): + """Put device in idle.""" + self._state = STATE_IDLE + class ExtendedMediaPlayer(SimpleMediaPlayer): """Media player test class.""" @@ -92,7 +96,7 @@ class ExtendedMediaPlayer(SimpleMediaPlayer): def toggle(self): """Toggle the power on the media player.""" - if self._state in [STATE_OFF, STATE_IDLE, STATE_STANDBY]: + if self._state in [STATE_OFF, STATE_STANDBY]: self._state = STATE_ON else: self._state = STATE_OFF @@ -187,3 +191,7 @@ async def test_toggle(player) -> None: assert player.state == STATE_STANDBY await player.async_toggle() assert player.state == STATE_ON + player.idle() + assert player.state == STATE_IDLE + await player.async_toggle() + assert player.state == STATE_OFF diff --git a/tests/components/media_source/test_init.py b/tests/components/media_source/test_init.py index c37e418020b..2c2952068ee 100644 --- a/tests/components/media_source/test_init.py +++ b/tests/components/media_source/test_init.py @@ -114,7 +114,6 @@ async def test_async_resolve_media(hass: HomeAssistant) -> None: assert media.mime_type == "audio/mpeg" -@patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()) async def test_async_resolve_media_no_entity( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/components/meteo_france/conftest.py b/tests/components/meteo_france/conftest.py index eb28ec0a838..82b220e331e 100644 --- a/tests/components/meteo_france/conftest.py +++ b/tests/components/meteo_france/conftest.py @@ -24,8 +24,8 @@ def patch_requests(): mock_data.get_rain.return_value = Rain( load_json_object_fixture("raw_rain.json", DOMAIN) ) - mock_data.get_warning_current_phenomenoms.return_value = CurrentPhenomenons( - load_json_object_fixture("raw_warning_current_phenomenoms.json", DOMAIN) + mock_data.get_warning_current_phenomenons.return_value = CurrentPhenomenons( + 
load_json_object_fixture("raw_warning_current_phenomenons.json", DOMAIN) ) yield mock_data diff --git a/tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json b/tests/components/meteo_france/fixtures/raw_warning_current_phenomenons.json similarity index 100% rename from tests/components/meteo_france/fixtures/raw_warning_current_phenomenoms.json rename to tests/components/meteo_france/fixtures/raw_warning_current_phenomenons.json diff --git a/tests/components/microsoft/test_tts.py b/tests/components/microsoft/test_tts.py index e10ec589113..38f1318a683 100644 --- a/tests/components/microsoft/test_tts.py +++ b/tests/components/microsoft/test_tts.py @@ -366,7 +366,7 @@ async def test_service_say_error( await retrieve_media( hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] ) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) assert len(mock_tts.mock_calls) == 2 diff --git a/tests/components/mobile_app/test_webhook.py b/tests/components/mobile_app/test_webhook.py index dda5f369ad5..b071caebd16 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -1081,6 +1081,7 @@ async def test_webhook_handle_conversation_process( }, }, "conversation_id": None, + "continue_conversation": False, } diff --git a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index 3c30efe9dce..54d4c5f6666 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -5,6 +5,7 @@ import pytest from homeassistant.components.climate import ( ATTR_FAN_MODE, ATTR_FAN_MODES, + ATTR_HVAC_ACTION, ATTR_HVAC_MODE, ATTR_HVAC_MODES, ATTR_SWING_MODE, @@ -31,6 +32,7 @@ from homeassistant.components.climate import ( SWING_OFF, SWING_ON, SWING_VERTICAL, + HVACAction, HVACMode, ) from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY @@ -47,6 +49,16 @@ from homeassistant.components.modbus.const import ( CONF_FAN_MODE_REGISTER, CONF_FAN_MODE_TOP, CONF_FAN_MODE_VALUES, + CONF_HVAC_ACTION_COOLING, + CONF_HVAC_ACTION_DEFROSTING, + CONF_HVAC_ACTION_DRYING, + CONF_HVAC_ACTION_FAN, + CONF_HVAC_ACTION_HEATING, + CONF_HVAC_ACTION_IDLE, + CONF_HVAC_ACTION_OFF, + CONF_HVAC_ACTION_PREHEATING, + CONF_HVAC_ACTION_REGISTER, + CONF_HVAC_ACTION_VALUES, CONF_HVAC_MODE_AUTO, CONF_HVAC_MODE_COOL, CONF_HVAC_MODE_DRY, @@ -224,6 +236,43 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_") } ], }, + { + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_TARGET_TEMP: 117, + CONF_ADDRESS: 117, + CONF_SLAVE: 10, + CONF_HVAC_ONOFF_REGISTER: 12, + CONF_HVAC_MODE_REGISTER: { + CONF_ADDRESS: 11, + CONF_WRITE_REGISTERS: True, + CONF_HVAC_MODE_VALUES: { + CONF_HVAC_MODE_OFF: 0, + CONF_HVAC_MODE_HEAT: 1, + CONF_HVAC_MODE_COOL: 2, + CONF_HVAC_MODE_HEAT_COOL: 3, + CONF_HVAC_MODE_DRY: 4, + CONF_HVAC_MODE_FAN_ONLY: 5, + CONF_HVAC_MODE_AUTO: 6, + }, + }, + CONF_HVAC_ACTION_REGISTER: { + CONF_ADDRESS: 14, + CONF_HVAC_ACTION_VALUES: { + CONF_HVAC_ACTION_COOLING: 0, + CONF_HVAC_ACTION_DEFROSTING: 1, + CONF_HVAC_ACTION_DRYING: 2, + CONF_HVAC_ACTION_FAN: 3, + CONF_HVAC_ACTION_HEATING: 4, + CONF_HVAC_ACTION_IDLE: 5, + CONF_HVAC_ACTION_OFF: 6, + CONF_HVAC_ACTION_PREHEATING: 7, + }, + }, + } + ], + }, ], ) async def test_config_climate(hass: HomeAssistant, mock_modbus) -> None: @@ -745,6 +794,95 @@ async def test_hvac_onoff_coil_update( assert state.state == result +@pytest.mark.parametrize( + ("do_config", "result", "register_words"), + [ + ( + { + CONF_CLIMATES: [ + { 
+ CONF_NAME: TEST_ENTITY_NAME, + CONF_TARGET_TEMP: 116, + CONF_ADDRESS: 117, + CONF_SLAVE: 10, + CONF_SCAN_INTERVAL: 0, + CONF_DATA_TYPE: DataType.INT32, + CONF_HVAC_ACTION_REGISTER: { + CONF_ADDRESS: 118, + CONF_HVAC_ACTION_VALUES: { + CONF_HVAC_ACTION_IDLE: 0, + CONF_HVAC_ACTION_HEATING: 1, + }, + }, + }, + ] + }, + HVACAction.HEATING, + [0x01], + ), + ( + { + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_TARGET_TEMP: 116, + CONF_ADDRESS: 117, + CONF_SLAVE: 10, + CONF_SCAN_INTERVAL: 0, + CONF_DATA_TYPE: DataType.INT32, + CONF_HVAC_ACTION_REGISTER: { + CONF_ADDRESS: 118, + CONF_HVAC_ACTION_VALUES: { + CONF_HVAC_ACTION_COOLING: 0, + CONF_HVAC_ACTION_HEATING: 1, + }, + }, + }, + ] + }, + HVACAction.COOLING, + [0x00], + ), + ( + { + CONF_CLIMATES: [ + { + CONF_NAME: TEST_ENTITY_NAME, + CONF_TARGET_TEMP: 116, + CONF_ADDRESS: 117, + CONF_SLAVE: 10, + CONF_SCAN_INTERVAL: 0, + CONF_DATA_TYPE: DataType.INT32, + CONF_HVAC_ACTION_REGISTER: { + CONF_ADDRESS: 118, + CONF_HVAC_ACTION_VALUES: { + CONF_HVAC_ACTION_OFF: 0, + CONF_HVAC_ACTION_DRYING: 1, + }, + }, + }, + ] + }, + HVACAction.DRYING, + [0x01], + ), + ], +) +async def test_service_climate_action_update( + hass: HomeAssistant, mock_modbus_ha, result, register_words +) -> None: + """Test HVAC action updates.""" + mock_modbus_ha.read_holding_registers.return_value = ReadResult(register_words) + await hass.services.async_call( + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + assert hass.states.get(ENTITY_ID).attributes[ATTR_HVAC_ACTION] == result + + @pytest.mark.parametrize( ("do_config", "result", "register_words"), [ diff --git a/tests/components/moehlenhoff_alpha2/__init__.py b/tests/components/moehlenhoff_alpha2/__init__.py index 50087794560..90d6d88fedc 100644 --- a/tests/components/moehlenhoff_alpha2/__init__.py +++ b/tests/components/moehlenhoff_alpha2/__init__.py @@ -19,7 +19,7 @@ async def mock_update_data(self): for _type in ("HEATAREA", "HEATCTRL", "IODEVICE"): if not isinstance(data["Devices"]["Device"][_type], list): data["Devices"]["Device"][_type] = [data["Devices"]["Device"][_type]] - self.static_data = data + self._static_data = data async def init_integration(hass: HomeAssistant) -> MockConfigEntry: diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/common.py similarity index 91% rename from tests/components/mqtt/test_common.py rename to tests/components/mqtt/common.py index 3bb8657e2f2..e4a368f0d71 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/common.py @@ -66,6 +66,212 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { "configuration_url": "http://example.com", } +MOCK_SUBENTRY_NOTIFY_COMPONENT1 = { + "363a7ecad6be4a19b939a016ea93e994": { + "platform": "notify", + "name": "Milkman alert", + "command_topic": "test-topic", + "command_template": "{{ value }}", + "entity_picture": "https://example.com/363a7ecad6be4a19b939a016ea93e994", + "retain": False, + }, +} +MOCK_SUBENTRY_NOTIFY_COMPONENT2 = { + "6494827dac294fa0827c54b02459d309": { + "platform": "notify", + "name": "The second notifier", + "command_topic": "test-topic2", + "entity_picture": "https://example.com/6494827dac294fa0827c54b02459d309", + }, +} +MOCK_SUBENTRY_NOTIFY_COMPONENT_NO_NAME = { + "5269352dd9534c908d22812ea5d714cd": { + "platform": "notify", + "command_topic": "test-topic", + "command_template": "{{ value }}", + "entity_picture": "https://example.com/5269352dd9534c908d22812ea5d714cd", + "retain": False, + }, +} + 
+MOCK_SUBENTRY_SENSOR_COMPONENT = { + "e9261f6feed443e7b7d5f3fbe2a47412": { + "platform": "sensor", + "name": "Energy", + "device_class": "enum", + "state_topic": "test-topic", + "options": ["low", "medium", "high"], + "expire_after": 30, + "value_template": "{{ value_json.value }}", + "entity_picture": "https://example.com/e9261f6feed443e7b7d5f3fbe2a47412", + }, +} +MOCK_SUBENTRY_SENSOR_COMPONENT_STATE_CLASS = { + "a0f85790a95d4889924602effff06b6e": { + "platform": "sensor", + "name": "Energy", + "state_class": "measurement", + "state_topic": "test-topic", + "entity_picture": "https://example.com/a0f85790a95d4889924602effff06b6e", + }, +} +MOCK_SUBENTRY_SENSOR_COMPONENT_LAST_RESET = { + "e9261f6feed443e7b7d5f3fbe2a47412": { + "platform": "sensor", + "name": "Energy", + "state_class": "total", + "last_reset_value_template": "{{ value_json.value }}", + "state_topic": "test-topic", + "entity_picture": "https://example.com/e9261f6feed443e7b7d5f3fbe2a47412", + }, +} +MOCK_SUBENTRY_SWITCH_COMPONENT = { + "3faf1318016c46c5aea26707eeb6f12e": { + "platform": "switch", + "name": "Outlet", + "device_class": "outlet", + "command_topic": "test-topic", + "state_topic": "test-topic", + "command_template": "{{ value }}", + "value_template": "{{ value_json.value }}", + "entity_picture": "https://example.com/3faf1318016c46c5aea26707eeb6f12e", + "optimistic": True, + }, +} + +# Bogus light component just for code coverage +# Note that light cannot be setup through the UI yet +# The test is for code coverage +MOCK_SUBENTRY_LIGHT_COMPONENT = { + "8131babc5e8d4f44b82e0761d39091a2": { + "platform": "light", + "name": "Test light", + "command_topic": "test-topic4", + "schema": "basic", + "entity_picture": "https://example.com/8131babc5e8d4f44b82e0761d39091a2", + }, +} +MOCK_SUBENTRY_NOTIFY_BAD_SCHEMA = { + "b10b531e15244425a74bb0abb1e9d2c6": { + "platform": "notify", + "name": "Test", + "command_topic": "bad#topic", + }, +} + +MOCK_SUBENTRY_AVAILABILITY_DATA = { + "availability": { + "availability_topic": "test/availability", + "availability_template": "{{ value_json.availability }}", + "payload_available": "online", + "payload_not_available": "offline", + } +} + +MOCK_NOTIFY_SUBENTRY_DATA_MULTI = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_NOTIFY_COMPONENT1 | MOCK_SUBENTRY_NOTIFY_COMPONENT2, +} | MOCK_SUBENTRY_AVAILABILITY_DATA + +MOCK_NOTIFY_SUBENTRY_DATA_SINGLE = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + "mqtt_settings": {"qos": 1}, + }, + "components": MOCK_SUBENTRY_NOTIFY_COMPONENT1, +} +MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_NOTIFY_COMPONENT_NO_NAME, +} +MOCK_SENSOR_SUBENTRY_DATA_SINGLE = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT, +} +MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + 
"model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT_STATE_CLASS, +} +MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE = { + "device": { + "name": "Test sensor", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SENSOR_COMPONENT_LAST_RESET, +} +MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS = { + "device": { + "name": "Test switch", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_SWITCH_COMPONENT, +} +MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_NOTIFY_BAD_SCHEMA, +} +MOCK_SUBENTRY_DATA_SET_MIX = { + "device": { + "name": "Milk notifier", + "sw_version": "1.0", + "hw_version": "2.1 rev a", + "model": "Model XL", + "model_id": "mn002", + "configuration_url": "https://example.com", + }, + "components": MOCK_SUBENTRY_NOTIFY_COMPONENT1 + | MOCK_SUBENTRY_NOTIFY_COMPONENT2 + | MOCK_SUBENTRY_LIGHT_COMPONENT + | MOCK_SUBENTRY_SWITCH_COMPONENT, +} | MOCK_SUBENTRY_AVAILABILITY_DATA _SENTINEL = object() DISCOVERY_COUNT = sum(len(discovery_topic) for discovery_topic in MQTT.values()) diff --git a/tests/components/mqtt/test_alarm_control_panel.py b/tests/components/mqtt/test_alarm_control_panel.py index b46829650f6..9241106496b 100644 --- a/tests/components/mqtt/test_alarm_control_panel.py +++ b/tests/components/mqtt/test_alarm_control_panel.py @@ -33,7 +33,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_binary_sensor.py b/tests/components/mqtt/test_binary_sensor.py index 8809f2201f2..169e1ab4c6b 100644 --- a/tests/components/mqtt/test_binary_sensor.py +++ b/tests/components/mqtt/test_binary_sensor.py @@ -23,7 +23,7 @@ from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_button.py b/tests/components/mqtt/test_button.py index f147b33c88b..f99c48a440f 100644 --- a/tests/components/mqtt/test_button.py +++ b/tests/components/mqtt/test_button.py @@ -10,7 +10,7 @@ from homeassistant.components import button, mqtt from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, diff --git a/tests/components/mqtt/test_camera.py b/tests/components/mqtt/test_camera.py index cda536dc19e..b5971adcb92 100644 --- a/tests/components/mqtt/test_camera.py +++ b/tests/components/mqtt/test_camera.py @@ -11,7 +11,7 @@ from 
homeassistant.components import camera, mqtt from homeassistant.components.mqtt.camera import MQTT_CAMERA_ATTRIBUTES_BLOCKED from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 0dbbff58026..c2cce3d1344 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -27,8 +27,8 @@ from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.util.dt import utcnow +from .common import help_all_subscribe_calls from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE -from .test_common import help_all_subscribe_calls from tests.common import ( MockConfigEntry, diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py index 3760b0226f5..fd0b95f2b13 100644 --- a/tests/components/mqtt/test_climate.py +++ b/tests/components/mqtt/test_climate.py @@ -33,11 +33,16 @@ from homeassistant.components.mqtt.climate import ( MQTT_CLIMATE_ATTRIBUTES_BLOCKED, VALUE_TEMPLATE_KEYS, ) -from homeassistant.const import ATTR_TEMPERATURE, STATE_UNKNOWN, UnitOfTemperature +from homeassistant.const import ATTR_TEMPERATURE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, @@ -1823,7 +1828,7 @@ async def test_temperature_unit( @pytest.mark.parametrize( - ("hass_config", "temperature_unit", "initial", "min", "max", "current"), + ("hass_config", "units", "initial", "min", "max", "current"), [ ( help_custom_config( @@ -1836,7 +1841,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.CELSIUS, + METRIC_SYSTEM, DEFAULT_INITIAL_TEMPERATURE, DEFAULT_MIN_TEMP, DEFAULT_MAX_TEMP, @@ -1854,7 +1859,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.CELSIUS, + METRIC_SYSTEM, 20.5, DEFAULT_MIN_TEMP, DEFAULT_MAX_TEMP, @@ -1871,24 +1876,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.KELVIN, - 294, - 280, - 308, - 298, - ), - ( - help_custom_config( - climate.DOMAIN, - DEFAULT_CONFIG, - ( - { - "temperature_unit": "F", - "current_temperature_topic": "current_temperature", - }, - ), - ), - UnitOfTemperature.FAHRENHEIT, + US_CUSTOMARY_SYSTEM, 70, 45, 95, @@ -1899,25 +1887,25 @@ async def test_temperature_unit( async def test_alt_temperature_unit( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - temperature_unit: UnitOfTemperature, + units: UnitSystem, initial: float, min: float, max: float, current: float, ) -> None: """Test deriving the systems temperature unit.""" - with patch.object(hass.config.units, "temperature_unit", temperature_unit): - await mqtt_mock_entry() + hass.config.units = units + await mqtt_mock_entry() - state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == initial - assert state.attributes.get("min_temp") == min - assert state.attributes.get("max_temp") == max + state = hass.states.get(ENTITY_CLIMATE) + assert state.attributes.get("temperature") == initial + assert 
state.attributes.get("min_temp") == min + assert state.attributes.get("max_temp") == max - async_fire_mqtt_message(hass, "current_temperature", "77") + async_fire_mqtt_message(hass, "current_temperature", "77") - state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("current_temperature") == current + state = hass.states.get(ENTITY_CLIMATE) + assert state.attributes.get("current_temperature") == current async def test_setting_attribute_via_mqtt_json_message( diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index de70fd32763..c94d692b374 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -2,6 +2,7 @@ from collections.abc import Generator, Iterator from contextlib import contextmanager +from copy import deepcopy from pathlib import Path from ssl import SSLError from typing import Any @@ -17,6 +18,8 @@ from homeassistant import config_entries from homeassistant.components import mqtt from homeassistant.components.hassio import AddonError from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED +from homeassistant.components.mqtt.util import learn_more_url +from homeassistant.config_entries import ConfigSubentry, ConfigSubentryData from homeassistant.const import ( CONF_CLIENT_ID, CONF_PASSWORD, @@ -26,8 +29,19 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.service_info.hassio import HassioServiceInfo +from .common import ( + MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME, + MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE, + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS, + MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS, +) + from tests.common import MockConfigEntry, MockMqttReasonCode from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient @@ -40,8 +54,37 @@ ADD_ON_DISCOVERY_INFO = { "protocol": "3.1.1", "ssl": False, } -MOCK_CLIENT_CERT = b"## mock client certificate file ##" -MOCK_CLIENT_KEY = b"## mock key file ##" + +MOCK_CA_CERT = ( + b"-----BEGIN CERTIFICATE-----\n" + b"## mock CA certificate file ##" + b"\n-----END CERTIFICATE-----\n" +) +MOCK_GENERIC_CERT = ( + b"-----BEGIN CERTIFICATE-----\n" + b"## mock generic certificate file ##" + b"\n-----END CERTIFICATE-----\n" +) +MOCK_CA_CERT_DER = b"## mock DER formatted CA certificate file ##\n" +MOCK_CLIENT_CERT = ( + b"-----BEGIN CERTIFICATE-----\n" + b"## mock client certificate file ##" + b"\n-----END CERTIFICATE-----\n" +) +MOCK_CLIENT_CERT_DER = b"## mock DER formatted client certificate file ##\n" +MOCK_CLIENT_KEY = ( + b"-----BEGIN PRIVATE KEY-----\n" + b"## mock client key file ##" + b"\n-----END PRIVATE KEY-----" +) +MOCK_ENCRYPTED_CLIENT_KEY = ( + b"-----BEGIN ENCRYPTED PRIVATE KEY-----\n" + b"## mock client key file ##\n" + b"-----END ENCRYPTED PRIVATE KEY-----" +) +MOCK_CLIENT_KEY_DER = b"## mock DER formatted key file ##\n" +MOCK_ENCRYPTED_CLIENT_KEY_DER = b"## mock DER formatted encrypted key file ##\n" + MOCK_ENTRY_DATA = { mqtt.CONF_BROKER: "test-broker", @@ -102,15 +145,27 @@ def mock_ssl_context() -> Generator[dict[str, MagicMock]]: patch("homeassistant.components.mqtt.config_flow.SSLContext") as mock_context, patch( "homeassistant.components.mqtt.config_flow.load_pem_private_key" - ) 
as mock_key_check, + ) as mock_pem_key_check, + patch( + "homeassistant.components.mqtt.config_flow.load_der_private_key" + ) as mock_der_key_check, patch( "homeassistant.components.mqtt.config_flow.load_pem_x509_certificate" - ) as mock_cert_check, + ) as mock_pem_cert_check, + patch( + "homeassistant.components.mqtt.config_flow.load_der_x509_certificate" + ) as mock_der_cert_check, ): + mock_pem_key_check().private_bytes.return_value = MOCK_CLIENT_KEY + mock_pem_cert_check().public_bytes.return_value = MOCK_GENERIC_CERT + mock_der_key_check().private_bytes.return_value = MOCK_CLIENT_KEY + mock_der_cert_check().public_bytes.return_value = MOCK_GENERIC_CERT yield { "context": mock_context, - "load_pem_x509_certificate": mock_cert_check, - "load_pem_private_key": mock_key_check, + "load_der_private_key": mock_der_key_check, + "load_der_x509_certificate": mock_der_cert_check, + "load_pem_private_key": mock_pem_key_check, + "load_pem_x509_certificate": mock_pem_cert_check, } @@ -180,9 +235,31 @@ def mock_try_connection_time_out() -> Generator[MagicMock]: yield mock_client() +@pytest.fixture +def mock_ca_cert() -> bytes: + """Mock the CA certificate.""" + return MOCK_CA_CERT + + +@pytest.fixture +def mock_client_cert() -> bytes: + """Mock the client certificate.""" + return MOCK_CLIENT_CERT + + +@pytest.fixture +def mock_client_key() -> bytes: + """Mock the client key.""" + return MOCK_CLIENT_KEY + + @pytest.fixture def mock_process_uploaded_file( - tmp_path: Path, mock_temp_dir: str + tmp_path: Path, + mock_ca_cert: bytes, + mock_client_cert: bytes, + mock_client_key: bytes, + mock_temp_dir: str, ) -> Generator[MagicMock]: """Mock upload certificate files.""" file_id_ca = str(uuid4()) @@ -195,15 +272,15 @@ def mock_process_uploaded_file( ) -> Iterator[Path | None]: if file_id == file_id_ca: with open(tmp_path / "ca.crt", "wb") as cafile: - cafile.write(b"## mock CA certificate file ##") + cafile.write(mock_ca_cert) yield tmp_path / "ca.crt" elif file_id == file_id_cert: with open(tmp_path / "client.crt", "wb") as certfile: - certfile.write(b"## mock client certificate file ##") + certfile.write(mock_client_cert) yield tmp_path / "client.crt" elif file_id == file_id_key: with open(tmp_path / "client.key", "wb") as keyfile: - keyfile.write(b"## mock key file ##") + keyfile.write(mock_client_key) yield tmp_path / "client.key" else: pytest.fail(f"Unexpected file_id: {file_id}") @@ -1024,12 +1101,37 @@ async def test_option_flow( assert yaml_mock.await_count +@pytest.mark.parametrize( + ("mock_ca_cert", "mock_client_cert", "mock_client_key", "client_key_password"), + [ + (MOCK_GENERIC_CERT, MOCK_GENERIC_CERT, MOCK_CLIENT_KEY, ""), + ( + MOCK_GENERIC_CERT, + MOCK_GENERIC_CERT, + MOCK_ENCRYPTED_CLIENT_KEY, + "very*secret", + ), + (MOCK_CA_CERT_DER, MOCK_CLIENT_CERT_DER, MOCK_CLIENT_KEY_DER, ""), + ( + MOCK_CA_CERT_DER, + MOCK_CLIENT_CERT_DER, + MOCK_ENCRYPTED_CLIENT_KEY_DER, + "very*secret", + ), + ], + ids=[ + "pem_certs_private_key_no_password", + "pem_certs_private_key_with_password", + "der_certs_private_key_no_password", + "der_certs_private_key_with_password", + ], +) @pytest.mark.parametrize( "test_error", [ "bad_certificate", "bad_client_cert", - "bad_client_key", + "client_key_error", "bad_client_cert_key", "invalid_inclusion", None, @@ -1042,31 +1144,54 @@ async def test_bad_certificate( mock_ssl_context: dict[str, MagicMock], mock_process_uploaded_file: MagicMock, test_error: str | None, + client_key_password: str, + mock_ca_cert: bytes, ) -> None: """Test bad certificate tests.""" + + 
def _side_effect_on_client_cert(data: bytes) -> MagicMock: + """Raise on client cert only. + + The function is called twice, once for the CA chain + and once for the client cert. We only want to raise on a client cert. + """ + if data == MOCK_CLIENT_CERT_DER: + raise ValueError + mock_certificate_side_effect = MagicMock() + mock_certificate_side_effect().public_bytes.return_value = MOCK_GENERIC_CERT + return mock_certificate_side_effect + # Mock certificate files file_id = mock_process_uploaded_file.file_id + set_ca_cert = "custom" + set_client_cert = True + tls_insecure = False test_input = { mqtt.CONF_BROKER: "another-broker", CONF_PORT: 2345, mqtt.CONF_CERTIFICATE: file_id[mqtt.CONF_CERTIFICATE], mqtt.CONF_CLIENT_CERT: file_id[mqtt.CONF_CLIENT_CERT], mqtt.CONF_CLIENT_KEY: file_id[mqtt.CONF_CLIENT_KEY], - "set_ca_cert": True, + "client_key_password": client_key_password, + "set_ca_cert": set_ca_cert, "set_client_cert": True, } - set_client_cert = True - set_ca_cert = "custom" - tls_insecure = False if test_error == "bad_certificate": # CA chain is not loading mock_ssl_context["context"]().load_verify_locations.side_effect = SSLError + # Fail on the CA cert if DER encoded + mock_ssl_context["load_der_x509_certificate"].side_effect = ValueError elif test_error == "bad_client_cert": # Client certificate is invalid mock_ssl_context["load_pem_x509_certificate"].side_effect = ValueError - elif test_error == "bad_client_key": + # Fail on the client cert if DER encoded + mock_ssl_context[ + "load_der_x509_certificate" + ].side_effect = _side_effect_on_client_cert + elif test_error == "client_key_error": # Client key file is invalid mock_ssl_context["load_pem_private_key"].side_effect = ValueError + mock_ssl_context["load_der_private_key"].side_effect = ValueError elif test_error == "bad_client_cert_key": # Client key file file and certificate do not pair mock_ssl_context["context"]().load_cert_chain.side_effect = SSLError @@ -2078,8 +2203,8 @@ async def test_setup_with_advanced_settings( CONF_USERNAME: "user", CONF_PASSWORD: "secret", mqtt.CONF_KEEPALIVE: 30, - mqtt.CONF_CLIENT_CERT: "## mock client certificate file ##", - mqtt.CONF_CLIENT_KEY: "## mock key file ##", + mqtt.CONF_CLIENT_CERT: MOCK_CLIENT_CERT.decode(encoding="utf-8"), + mqtt.CONF_CLIENT_KEY: MOCK_CLIENT_KEY.decode(encoding="utf-8"), "tls_insecure": True, mqtt.CONF_TRANSPORT: "websockets", mqtt.CONF_WS_PATH: "/custom_path/", @@ -2091,6 +2216,155 @@ async def test_setup_with_advanced_settings( } +@pytest.mark.usefixtures("mock_ssl_context") +@pytest.mark.parametrize( + ("mock_ca_cert", "mock_client_cert", "mock_client_key", "client_key_password"), + [ + (MOCK_GENERIC_CERT, MOCK_GENERIC_CERT, MOCK_CLIENT_KEY, ""), + ( + MOCK_GENERIC_CERT, + MOCK_GENERIC_CERT, + MOCK_ENCRYPTED_CLIENT_KEY, + "very*secret", + ), + (MOCK_CA_CERT_DER, MOCK_CLIENT_CERT_DER, MOCK_CLIENT_KEY_DER, ""), + ( + MOCK_CA_CERT_DER, + MOCK_CLIENT_CERT_DER, + MOCK_ENCRYPTED_CLIENT_KEY_DER, + "very*secret", + ), + ], + ids=[ + "pem_certs_private_key_no_password", + "pem_certs_private_key_with_password", + "der_certs_private_key_no_password", + "der_certs_private_key_with_password", + ], +) +async def test_setup_with_certificates( + hass: HomeAssistant, + mock_try_connection: MagicMock, + mock_process_uploaded_file: MagicMock, + client_key_password: str, +) -> None: + """Test config flow setup with PEM and DER encoded certificates.""" + file_id = mock_process_uploaded_file.file_id + + config_entry = MockConfigEntry( + domain=mqtt.DOMAIN, + 
version=mqtt.CONFIG_ENTRY_VERSION, + minor_version=mqtt.CONFIG_ENTRY_MINOR_VERSION, + ) + config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( + config_entry, + data={ + mqtt.CONF_BROKER: "test-broker", + CONF_PORT: 1234, + }, + ) + + mock_try_connection.return_value = True + + result = await config_entry.start_reconfigure_flow(hass, show_advanced_options=True) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "broker" + assert result["data_schema"].schema["advanced_options"] + + # first iteration, basic settings + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_BROKER: "test-broker", + CONF_PORT: 2345, + CONF_USERNAME: "user", + CONF_PASSWORD: "secret", + "advanced_options": True, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "broker" + assert "advanced_options" not in result["data_schema"].schema + assert result["data_schema"].schema[CONF_CLIENT_ID] + assert result["data_schema"].schema[mqtt.CONF_KEEPALIVE] + assert result["data_schema"].schema["set_client_cert"] + assert result["data_schema"].schema["set_ca_cert"] + assert result["data_schema"].schema[mqtt.CONF_TLS_INSECURE] + assert result["data_schema"].schema[CONF_PROTOCOL] + assert result["data_schema"].schema[mqtt.CONF_TRANSPORT] + assert mqtt.CONF_CLIENT_CERT not in result["data_schema"].schema + assert mqtt.CONF_CLIENT_KEY not in result["data_schema"].schema + + # second iteration, advanced settings with request for client cert + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_BROKER: "test-broker", + CONF_PORT: 2345, + CONF_USERNAME: "user", + CONF_PASSWORD: "secret", + mqtt.CONF_KEEPALIVE: 30, + "set_ca_cert": "custom", + "set_client_cert": True, + mqtt.CONF_TLS_INSECURE: False, + CONF_PROTOCOL: "3.1.1", + mqtt.CONF_TRANSPORT: "tcp", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "broker" + assert "advanced_options" not in result["data_schema"].schema + assert result["data_schema"].schema[CONF_CLIENT_ID] + assert result["data_schema"].schema[mqtt.CONF_KEEPALIVE] + assert result["data_schema"].schema["set_client_cert"] + assert result["data_schema"].schema["set_ca_cert"] + assert result["data_schema"].schema["client_key_password"] + assert result["data_schema"].schema[mqtt.CONF_TLS_INSECURE] + assert result["data_schema"].schema[CONF_PROTOCOL] + assert result["data_schema"].schema[mqtt.CONF_CERTIFICATE] + assert result["data_schema"].schema[mqtt.CONF_CLIENT_CERT] + assert result["data_schema"].schema[mqtt.CONF_CLIENT_KEY] + assert result["data_schema"].schema[mqtt.CONF_TRANSPORT] + + # third iteration, advanced settings with client cert and key and CA certificate + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + mqtt.CONF_BROKER: "test-broker", + CONF_PORT: 2345, + CONF_USERNAME: "user", + CONF_PASSWORD: "secret", + mqtt.CONF_KEEPALIVE: 30, + "set_ca_cert": "custom", + "set_client_cert": True, + "client_key_password": client_key_password, + mqtt.CONF_CERTIFICATE: file_id[mqtt.CONF_CERTIFICATE], + mqtt.CONF_CLIENT_CERT: file_id[mqtt.CONF_CLIENT_CERT], + mqtt.CONF_CLIENT_KEY: file_id[mqtt.CONF_CLIENT_KEY], + mqtt.CONF_TLS_INSECURE: False, + mqtt.CONF_TRANSPORT: "tcp", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check config entry result + assert config_entry.data == { + 
mqtt.CONF_BROKER: "test-broker", + CONF_PORT: 2345, + CONF_USERNAME: "user", + CONF_PASSWORD: "secret", + mqtt.CONF_KEEPALIVE: 30, + mqtt.CONF_CLIENT_CERT: MOCK_GENERIC_CERT.decode(encoding="utf-8"), + mqtt.CONF_CLIENT_KEY: MOCK_CLIENT_KEY.decode(encoding="utf-8"), + "tls_insecure": False, + mqtt.CONF_TRANSPORT: "tcp", + mqtt.CONF_CERTIFICATE: MOCK_GENERIC_CERT.decode(encoding="utf-8"), + } + + @pytest.mark.usefixtures("mock_ssl_context", "mock_process_uploaded_file") async def test_change_websockets_transport_to_tcp( hass: HomeAssistant, mock_try_connection: MagicMock @@ -2338,3 +2612,1194 @@ async def test_migrate_of_incompatible_config_entry( await mqtt_mock_entry() assert config_entry.state is config_entries.ConfigEntryState.MIGRATION_ERROR + + +@pytest.mark.parametrize( + ( + "config_subentries_data", + "mock_device_user_input", + "mock_entity_user_input", + "mock_entity_details_user_input", + "mock_entity_details_failed_user_input", + "mock_mqtt_user_input", + "mock_failed_mqtt_user_input", + "entity_name", + ), + [ + ( + MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + {"name": "Milk notifier", "mqtt_settings": {"qos": 1}}, + {"name": "Milkman alert"}, + None, + None, + { + "command_topic": "test-topic", + "command_template": "{{ value }}", + "retain": False, + }, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": "invalid_publish_topic"}, + ), + ), + "Milk notifier Milkman alert", + ), + ( + MOCK_NOTIFY_SUBENTRY_DATA_NO_NAME, + {"name": "Milk notifier", "mqtt_settings": {"qos": 0}}, + {}, + None, + None, + { + "command_topic": "test-topic", + "command_template": "{{ value }}", + "retain": False, + }, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": "invalid_publish_topic"}, + ), + ), + "Milk notifier", + ), + ( + MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + {"name": "Test sensor", "mqtt_settings": {"qos": 0}}, + {"name": "Energy"}, + {"device_class": "enum", "options": ["low", "medium", "high"]}, + ( + ( + { + "device_class": "energy", + "unit_of_measurement": "ppm", + }, + {"unit_of_measurement": "invalid_uom"}, + ), + # Trigger options to be shown on the form + ( + {"device_class": "enum"}, + {"options": "options_with_enum_device_class"}, + ), + # Test options are only allowed with device_class enum + ( + { + "device_class": "energy", + "options": ["less", "more"], + }, + { + "device_class": "options_device_class_enum", + "unit_of_measurement": "uom_required_for_device_class", + }, + ), + # Include options again to allow flow with valid data + ( + {"device_class": "enum"}, + {"options": "options_with_enum_device_class"}, + ), + ( + { + "device_class": "enum", + "state_class": "measurement", + "options": ["less", "more"], + }, + {"options": "options_not_allowed_with_state_class_or_uom"}, + ), + ), + { + "state_topic": "test-topic", + "value_template": "{{ value_json.value }}", + "advanced_settings": {"expire_after": 30}, + }, + ( + ( + {"state_topic": "test-topic#invalid"}, + {"state_topic": "invalid_subscribe_topic"}, + ), + ), + "Test sensor Energy", + ), + ( + MOCK_SENSOR_SUBENTRY_DATA_SINGLE_STATE_CLASS, + {"name": "Test sensor", "mqtt_settings": {"qos": 0}}, + {"name": "Energy"}, + { + "state_class": "measurement", + }, + (), + { + "state_topic": "test-topic", + }, + (), + "Test sensor Energy", + ), + ( + MOCK_SWITCH_SUBENTRY_DATA_SINGLE_STATE_CLASS, + {"name": "Test switch", "mqtt_settings": {"qos": 0}}, + {"name": "Outlet"}, + {"device_class": "outlet"}, + (), + { + "command_topic": "test-topic", + "command_template": "{{ value }}", + "state_topic": 
"test-topic", + "value_template": "{{ value_json.value }}", + "optimistic": True, + }, + ( + ( + {"command_topic": "test-topic#invalid"}, + {"command_topic": "invalid_publish_topic"}, + ), + ( + { + "command_topic": "test-topic", + "state_topic": "test-topic#invalid", + }, + {"state_topic": "invalid_subscribe_topic"}, + ), + ), + "Test switch Outlet", + ), + ], + ids=[ + "notify_with_entity_name", + "notify_no_entity_name", + "sensor_options", + "sensor_total", + "switch", + ], +) +async def test_subentry_configflow( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + config_subentries_data: dict[str, Any], + mock_device_user_input: dict[str, Any], + mock_entity_user_input: dict[str, Any], + mock_entity_details_user_input: dict[str, Any], + mock_entity_details_failed_user_input: tuple[ + tuple[dict[str, Any], dict[str, str]], + ], + mock_mqtt_user_input: dict[str, Any], + mock_failed_mqtt_user_input: tuple[tuple[dict[str, Any], dict[str, str]],], + entity_name: str, +) -> None: + """Test the subentry ConfigFlow.""" + device_name = mock_device_user_input["name"] + component = next(iter(config_subentries_data["components"].values())) + + await mqtt_mock_entry() + config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + + result = await hass.config_entries.subentries.async_init( + (config_entry.entry_id, "device"), + context={"source": config_entries.SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "device" + + # Test the URL validation + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "name": device_name, + "configuration_url": "http:/badurl.example.com", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "device" + assert result["errors"]["configuration_url"] == "invalid_url" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=mock_device_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + assert result["errors"] == {} + + # Process entity flow (initial step) + + # Test the entity picture URL validation + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "platform": component["platform"], + "entity_picture": "invalid url", + } + | mock_entity_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # Try again with valid data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "platform": component["platform"], + "entity_picture": component["entity_picture"], + } + | mock_entity_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["description_placeholders"] == { + "mqtt_device": device_name, + "platform": component["platform"], + "entity": entity_name, + "url": learn_more_url(component["platform"]), + } + + # Process extra step if the platform supports it + if mock_entity_details_user_input is not None: + # Extra entity details flow step + assert result["step_id"] == "entity_platform_config" + + # First test validators if set of test + for failed_user_input, failed_errors in mock_entity_details_failed_user_input: + # Test an invalid entity details user input case + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=failed_user_input, + ) + assert result["type"] is 
FlowResultType.FORM + assert result["errors"] == failed_errors + + # Now try again with valid data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=mock_entity_details_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["description_placeholders"] == { + "mqtt_device": device_name, + "platform": component["platform"], + "entity": entity_name, + "url": learn_more_url(component["platform"]), + } + else: + # No details form step + assert result["step_id"] == "mqtt_platform_config" + + # Process mqtt platform config flow + # Test an invalid mqtt user input case + for failed_user_input, failed_errors in mock_failed_mqtt_user_input: + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=failed_user_input, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == failed_errors + + # Try again with a valid configuration + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], user_input=mock_mqtt_user_input + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == device_name + + subentry_component = next( + iter(next(iter(config_entry.subentries.values())).data["components"].values()) + ) + assert subentry_component == next( + iter(config_subentries_data["components"].values()) + ) + + subentry_device_data = next(iter(config_entry.subentries.values())).data["device"] + for option, value in mock_device_user_input.items(): + assert subentry_device_data[option] == value + + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + subentry_type="device", + title="Mock subentry", + ), + ) + ], + ids=["notify"], +) +async def test_subentry_reconfigure_remove_entity( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the subentry ConfigFlow reconfigure removing an entity.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for all subentry components + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 2 + object_list = list(components) + component_list = list(components.values()) + entity_name_0 = ( + f"{device.name} {component_list[0]['name']} ({component_list[0]['platform']})" + ) + entity_name_1 = ( + f"{device.name} {component_list[1]['name']} ({component_list[1]['platform']})" + ) + + for key, component in components.items(): + unique_entity_id = f"{subentry_id}_{key}" + entity_id = entity_registry.async_get_entity_id( + domain=component["platform"], + platform=mqtt.DOMAIN, + unique_id=unique_entity_id, + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert 
entity_entry.config_subentry_id == subentry_id + + # assert menu options, we have the option to delete one entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "delete_entity", + "device", + "availability", + ] + + # assert we can delete an entity + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "delete_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "delete_entity" + assert result["data_schema"].schema["component"].config["options"] == [ + {"value": object_list[0], "label": entity_name_0}, + {"value": object_list[1], "label": entity_name_1}, + ] + # remove notify_the_second_notifier + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "component": object_list[1], + }, + ) + + # assert menu options, we have only one item left, we cannot delete it + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + assert result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + "save_changes", + ] + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # check if the second entity was removed from the subentry and entity registry + unique_entity_id = f"{subentry_id}_{object_list[1]}" + entity_id = entity_registry.async_get_entity_id( + domain=components[object_list[1]]["platform"], + platform=mqtt.DOMAIN, + unique_id=unique_entity_id, + ) + assert entity_id is None + new_components = deepcopy(dict(subentry.data))["components"] + assert object_list[0] in new_components + assert object_list[1] not in new_components + + +@pytest.mark.parametrize( + ("mqtt_config_subentries_data", "user_input_mqtt"), + [ + ( + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + subentry_type="device", + title="Mock subentry", + ), + ), + {"command_topic": "test-topic2-updated"}, + ) + ], + ids=["notify"], +) +async def test_subentry_reconfigure_edit_entity_multi_entities( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_mqtt: dict[str, Any], +) -> None: + """Test the subentry ConfigFlow reconfigure with multiple entities.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for all subentry components + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 2 + object_list = list(components) + component_list = list(components.values()) + entity_name_0 = ( + f"{device.name} {component_list[0]['name']} ({component_list[0]['platform']})" + ) + entity_name_1 = ( + f"{device.name} {component_list[1]['name']} ({component_list[1]['platform']})" + )
+ + for key in components: + unique_entity_id = f"{subentry_id}_{key}" + entity_id = entity_registry.async_get_entity_id( + domain="notify", platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we have the option to delete one entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "delete_entity", + "device", + "availability", + ] + + # assert we can update an entity + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "update_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "update_entity" + assert result["data_schema"].schema["component"].config["options"] == [ + {"value": object_list[0], "label": entity_name_0}, + {"value": object_list[1], "label": entity_name_1}, + ] + # select second entity + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "component": object_list[1], + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the common entity data with changed entity_picture + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "entity_picture": "https://example.com", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # submit the new platform specific entity data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have our components + new_components = deepcopy(dict(subentry.data))["components"] + + # Check the second component was updated + assert new_components[object_list[0]] == components[object_list[0]] + for key, value in user_input_mqtt.items(): + assert new_components[object_list[1]][key] == value + + +@pytest.mark.parametrize( + ( + "mqtt_config_subentries_data", + "user_input_platform_config_validation", + "user_input_platform_config", + "user_input_mqtt", + "removed_options", + ), + [ + ( + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ), + (), + None, + { + "command_topic": "test-topic1-updated", + "command_template": "{{ value }}", + "retain": True, + }, + {"entity_picture"}, + ), + ( + ( + ConfigSubentryData( + data=MOCK_SENSOR_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ), + ( + ( + { + "device_class": "battery", + "options": [], + "state_class": "measurement", + "unit_of_measurement": "invalid", + }, + # Allow to accept options are being removed + { + "device_class": "options_device_class_enum", + "options": "options_not_allowed_with_state_class_or_uom", + "unit_of_measurement": "invalid_uom", + }, + ), + ), + { + "device_class": "battery", + "state_class": "measurement", + "unit_of_measurement": "%", + "advanced_settings": {"suggested_display_precision": 1}, + 
}, + { + "state_topic": "test-topic1-updated", + "value_template": "{{ value_json.value }}", + }, + {"options", "expire_after", "entity_picture"}, + ), + ], + ids=["notify", "sensor"], +) +async def test_subentry_reconfigure_edit_entity_single_entity( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_platform_config_validation: tuple[ + tuple[dict[str, Any], dict[str, str] | None], ... + ] + | None, + user_input_platform_config: dict[str, Any] | None, + user_input_mqtt: dict[str, Any], + removed_options: tuple[str, ...], +) -> None: + """Test the subentry ConfigFlow reconfigure with single entity.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for the subentry component + # Check we have "notify_milkman_alert" in our mock data + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 1 + + component_id, component = next(iter(components.items())) + + unique_entity_id = f"{subentry_id}_{component_id}" + entity_id = entity_registry.async_get_entity_id( + domain=component["platform"], platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we do not have the option to delete an entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + ] + + # assert we can update the entity, there is no select step + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "update_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the new common entity data, reset entity_picture + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.FORM + + if user_input_platform_config is None: + # Skip entity flow step + assert result["step_id"] == "mqtt_platform_config" + else: + # Additional entity flow step + assert result["step_id"] == "entity_platform_config" + for entity_validation_config, errors in user_input_platform_config_validation: + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=entity_validation_config, + ) + assert result["step_id"] == "entity_platform_config" + assert result.get("errors") == errors + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_platform_config, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # submit the new platform specific entity data, + result = await hass.config_entries.subentries.async_configure( 
+ result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have out components + new_components = deepcopy(dict(subentry.data))["components"] + assert len(new_components) == 1 + + # Check our update was successful + assert "entity_picture" not in new_components[component_id] + + # Check the second component was updated + for key, value in user_input_mqtt.items(): + assert new_components[component_id][key] == value + + assert set(component) - set(new_components[component_id]) == removed_options + + +@pytest.mark.parametrize( + ( + "mqtt_config_subentries_data", + "user_input_entity_details", + "user_input_mqtt", + "filtered_out_fields", + ), + [ + ( + ( + ConfigSubentryData( + data=MOCK_SENSOR_SUBENTRY_DATA_SINGLE_LAST_RESET_TEMPLATE, + subentry_type="device", + title="Mock subentry", + ), + ), + { + "state_class": "measurement", + }, + { + "state_topic": "test-topic", + }, + ("last_reset_value_template",), + ), + ], + ids=["sensor_last_reset_template"], +) +async def test_subentry_reconfigure_edit_entity_reset_fields( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_entity_details: dict[str, Any], + user_input_mqtt: dict[str, Any], + filtered_out_fields: tuple[str, ...], +) -> None: + """Test the subentry ConfigFlow reconfigure resets filtered out fields.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for the subentry component + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 1 + + component_id, component = next(iter(components.items())) + for field in filtered_out_fields: + assert field in component + + unique_entity_id = f"{subentry_id}_{component_id}" + entity_id = entity_registry.async_get_entity_id( + domain=component["platform"], platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we do not have the option to delete an entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + ] + + # assert we can update the entity, there is no select step + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "update_entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the new common entity data, reset entity_picture + result = await 
hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity_platform_config" + + # submit the new entity platform config + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_entity_details, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # submit the new platform specific mqtt data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have our components + new_components = deepcopy(dict(subentry.data))["components"] + assert len(new_components) == 1 + + # Check our update was successful + assert "entity_picture" not in new_components[component_id] + + # Check the component was updated + for key, value in user_input_mqtt.items(): + assert new_components[component_id][key] == value + + # Check fields are filtered out correctly + for field in filtered_out_fields: + assert field not in new_components[component_id] + + +@pytest.mark.parametrize( + ("mqtt_config_subentries_data", "user_input_entity", "user_input_mqtt"), + [ + ( + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + subentry_type="device", + title="Mock subentry", + ), + ), + { + "platform": "notify", + "name": "The second notifier", + "entity_picture": "https://example.com", + }, + { + "command_topic": "test-topic2", + }, + ) + ], + ids=["notify_notify"], +) +async def test_subentry_reconfigure_add_entity( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + user_input_entity: dict[str, Any], + user_input_mqtt: dict[str, Any], +) -> None: + """Test the subentry ConfigFlow reconfigure and add an entity.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for the subentry component + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 1 + component_id_1, component1 = next(iter(components.items())) + unique_entity_id = f"{subentry_id}_{component_id_1}" + entity_id = entity_registry.async_get_entity_id( + domain=component1["platform"], platform=mqtt.DOMAIN, unique_id=unique_entity_id + ) + assert entity_id is not None + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry is not None + assert entity_entry.config_subentry_id == subentry_id + + # assert menu options, we do not have the option to delete an entity + # we have no option to save and finish yet + assert
result["menu_options"] == [ + "entity", + "update_entity", + "device", + "availability", + ] + + # assert we can update the entity, there is no select step + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "entity"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "entity" + + # submit the new common entity data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_entity, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "mqtt_platform_config" + + # submit the new platform specific entity data + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input=user_input_mqtt, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # Finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check we still have out components + new_components = deepcopy(dict(subentry.data))["components"] + assert len(new_components) == 2 + + component_id_2 = next(iter(set(new_components) - {component_id_1})) + + # Check our new entity was added correctly + expected_component_config = user_input_entity | user_input_mqtt + for key, value in expected_component_config.items(): + assert new_components[component_id_2][key] == value + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_subentry_reconfigure_update_device_properties( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + device_registry: dr.DeviceRegistry, +) -> None: + """Test the subentry ConfigFlow reconfigure and update device properties.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we have a device for the subentry + device = device_registry.async_get_device(identifiers={(mqtt.DOMAIN, subentry_id)}) + assert device is not None + + # assert we have an entity for all subentry components + components = deepcopy(dict(subentry.data))["components"] + assert len(components) == 2 + + # Assert initial data + device = deepcopy(dict(subentry.data))["device"] + assert device["name"] == "Milk notifier" + assert device["sw_version"] == "1.0" + assert device["hw_version"] == "2.1 rev a" + assert device["model"] == "Model XL" + assert device["model_id"] == "mn002" + + # assert menu options, we have the option to delete one entity + # we have no option to save and finish yet + assert result["menu_options"] == [ + "entity", + "update_entity", + "delete_entity", + "device", + "availability", + ] + + # assert we can update the device properties + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "device"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "device" + + # Update the 
device details + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "name": "Beer notifier", + "sw_version": "1.1", + "model": "Beer bottle XL", + "model_id": "bn003", + "configuration_url": "https://example.com", + }, + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check our device was updated + device = deepcopy(dict(subentry.data))["device"] + assert device["name"] == "Beer notifier" + assert "hw_version" not in device + assert device["model"] == "Beer bottle XL" + assert device["model_id"] == "bn003" + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_MULTI, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_subentry_reconfigure_availability( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test the subentry ConfigFlow reconfigure and update the availability settings.""" + await mqtt_mock_entry() + config_entry: MockConfigEntry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id: str + subentry: ConfigSubentry + subentry_id, subentry = next(iter(config_entry.subentries.items())) + + expected_availability = { + "availability_topic": "test/availability", + "availability_template": "{{ value_json.availability }}", + "payload_available": "online", + "payload_not_available": "offline", + } + assert subentry.data.get("availability") == expected_availability + + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + + # assert we can set the availability config + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "availability"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "availability" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "availability_topic": "test/new_availability#invalid_topic", + "payload_available": "1", + "payload_not_available": "0", + }, + ) + assert result["errors"] == {"availability_topic": "invalid_subscribe_topic"} + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "availability_topic": "test/new_availability", + "payload_available": "1", + "payload_not_available": "0", + }, + ) + + # finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check the availability was updated + expected_availability = { + "availability_topic": "test/new_availability", + "payload_available": "1", + "payload_not_available": "0", + } + assert subentry.data.get("availability") == expected_availability + + # Assert we can reset the availability config + result = await config_entry.start_subentry_reconfigure_flow( + hass, "device", subentry_id + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "summary_menu" + result = await
hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "availability"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "availability" + + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + user_input={ + "payload_available": "1", + "payload_not_available": "0", + }, + ) + + # Finish reconfigure flow + result = await hass.config_entries.subentries.async_configure( + result["flow_id"], + {"next_step_id": "save_changes"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Check the availability was updated + assert subentry.data.get("availability") == { + "payload_available": "1", + "payload_not_available": "0", + } diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index ee74b78be81..81530758de7 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -37,6 +37,7 @@ from homeassistant.const import ( SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, + SERVICE_STOP_COVER_TILT, SERVICE_TOGGLE, SERVICE_TOGGLE_COVER_TILT, STATE_CLOSED, @@ -45,7 +46,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, @@ -936,6 +937,63 @@ async def test_send_stop_cover_command( assert state.state == STATE_UNKNOWN +@pytest.mark.parametrize( + ("hass_config", "payload_stop"), + [ + ( + { + mqtt.DOMAIN: { + cover.DOMAIN: { + "name": "test", + "state_topic": "state-topic", + "tilt_command_topic": "tilt-command-topic", + "payload_stop_tilt": "TILT_STOP", + "qos": 2, + } + } + }, + "TILT_STOP", + ), + ( + { + mqtt.DOMAIN: { + cover.DOMAIN: { + "name": "test", + "state_topic": "state-topic", + "tilt_command_topic": "tilt-command-topic", + "qos": 2, + } + } + }, + "STOP", + ), + ], +) +async def test_send_stop_tilt_command( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + payload_stop: str, +) -> None: + """Test the sending of stop_cover_tilt.""" + mqtt_mock = await mqtt_mock_entry() + + state = hass.states.get("cover.test") + assert state.state == STATE_UNKNOWN + + await hass.services.async_call( + cover.DOMAIN, + SERVICE_STOP_COVER_TILT, + {ATTR_ENTITY_ID: "cover.test"}, + blocking=True, + ) + + mqtt_mock.async_publish.assert_called_once_with( + "tilt-command-topic", payload_stop, 2, False + ) + state = hass.states.get("cover.test") + assert state.state == STATE_UNKNOWN + + @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_device_tracker.py b/tests/components/mqtt/test_device_tracker.py index 00e88860299..02289c8e476 100644 --- a/tests/components/mqtt/test_device_tracker.py +++ b/tests/components/mqtt/test_device_tracker.py @@ -12,7 +12,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from .test_common import ( +from .common import ( help_custom_config, help_test_reloadable, help_test_setting_blocked_attribute_via_mqtt_json_message, diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 5cdfb14a5cf..ecf922e54a1 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -16,7 +16,7 
@@ from homeassistant.helpers import device_registry as dr from homeassistant.helpers.trigger import async_initialize_triggers from homeassistant.setup import async_setup_component -from .test_common import help_test_unload_config_entry +from .common import help_test_unload_config_entry from tests.common import async_fire_mqtt_message, async_get_device_automations from tests.typing import MqttMockHAClientGenerator, WebSocketGenerator diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 47c3a1e1988..ee33cbcbaa1 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -46,8 +46,8 @@ from homeassistant.helpers.service_info.mqtt import MqttServiceInfo from homeassistant.setup import async_setup_component from homeassistant.util.signal_type import SignalTypeFormat +from .common import help_all_subscribe_calls, help_test_unload_config_entry from .conftest import ENTRY_DEFAULT_BIRTH_MESSAGE -from .test_common import help_all_subscribe_calls, help_test_unload_config_entry from .test_tag import DEFAULT_TAG_ID, DEFAULT_TAG_SCAN from tests.common import ( diff --git a/tests/components/mqtt/test_event.py b/tests/components/mqtt/test_event.py index 41049ed0887..a7f00a1d1a8 100644 --- a/tests/components/mqtt/test_event.py +++ b/tests/components/mqtt/test_event.py @@ -13,7 +13,7 @@ from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 6c8afe8c1b4..36b5032e282 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -36,7 +36,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_humidifier.py b/tests/components/mqtt/test_humidifier.py index 20ca89181eb..435531182ed 100644 --- a/tests/components/mqtt/test_humidifier.py +++ b/tests/components/mqtt/test_humidifier.py @@ -36,7 +36,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_image.py b/tests/components/mqtt/test_image.py index 6f0eb8edf49..9b64a8836a0 100644 --- a/tests/components/mqtt/test_image.py +++ b/tests/components/mqtt/test_image.py @@ -14,7 +14,7 @@ from homeassistant.components import image, mqtt from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index 0bef4196ef2..c58402c4f5c 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -19,7 +19,7 @@ from homeassistant.components.mqtt.lawn_mower import MQTT_LAWN_MOWER_ATTRIBUTES_ from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, 
STATE_UNKNOWN from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index f8c66a3de1d..a8be259c1c9 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -210,7 +210,7 @@ from homeassistant.components.mqtt.models import PublishPayloadType from homeassistant.const import ATTR_ASSUMED_STATE, STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 67d382826ae..f3264858095 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -102,7 +102,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, State from homeassistant.util.json import json_loads -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 568d86f8bd9..e2cc801e97d 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -45,7 +45,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, @@ -1545,3 +1545,109 @@ async def test_rgb_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +@pytest.mark.parametrize( + "hass_config", + [ + help_custom_config( + light.DOMAIN, + DEFAULT_CONFIG, + ( + { + "effect_list": ["rainbow", "colorloop"], + "state_topic": "test-topic", + "state_template": "{{ value_json.state }}", + "brightness_template": "{{ value_json.brightness }}", + "color_temp_template": "{{ value_json.color_temp }}", + "red_template": "{{ value_json.color.red }}", + "green_template": "{{ value_json.color.green }}", + "blue_template": "{{ value_json.color.blue }}", + "effect_template": "{{ value_json.effect }}", + }, + ), + ) + ], +) +async def test_state_templates_ignore_missing_values( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, +) -> None: + """Test that rendering of MQTT value template ignores missing values.""" + await mqtt_mock_entry() + + # turn on the light + async_fire_mqtt_message(hass, "test-topic", '{"state": "on"}') + state = hass.states.get("light.test") + assert state.state == STATE_ON + assert state.attributes.get("rgb_color") is None + assert state.attributes.get("brightness") is None + assert state.attributes.get("color_temp_kelvin") is None + assert state.attributes.get("effect") is None + + # update brightness and color temperature (with no state) + async_fire_mqtt_message( + hass, "test-topic", '{"brightness": 255, "color_temp": 145}' + ) + state = hass.states.get("light.test") + assert state.state == STATE_ON + assert state.attributes.get("rgb_color") == ( + 246, + 244, + 255, + ) # temp converted to color + assert 
state.attributes.get("brightness") == 255 + assert state.attributes.get("color_temp_kelvin") == 6896 + assert state.attributes.get("effect") is None + assert state.attributes.get("xy_color") == (0.317, 0.317) # temp converted to color + assert state.attributes.get("hs_color") == ( + 251.249, + 4.253, + ) # temp converted to color + + # update color + async_fire_mqtt_message( + hass, "test-topic", '{"color": {"red": 255, "green": 128, "blue": 64}}' + ) + state = hass.states.get("light.test") + assert state.state == STATE_ON + assert state.attributes.get("rgb_color") == (255, 128, 64) + assert state.attributes.get("brightness") == 255 + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority + assert state.attributes.get("effect") is None + + # update brightness + async_fire_mqtt_message(hass, "test-topic", '{"brightness": 128}') + state = hass.states.get("light.test") + assert state.state == STATE_ON + assert state.attributes.get("rgb_color") == (255, 128, 64) + assert state.attributes.get("brightness") == 128 + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority + assert state.attributes.get("effect") is None + + # update effect + async_fire_mqtt_message(hass, "test-topic", '{"effect": "rainbow"}') + state = hass.states.get("light.test") + assert state.state == STATE_ON + assert state.attributes.get("rgb_color") == (255, 128, 64) + assert state.attributes.get("brightness") == 128 + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority + assert state.attributes.get("effect") == "rainbow" + + # invalid effect + async_fire_mqtt_message(hass, "test-topic", '{"effect": "invalid"}') + state = hass.states.get("light.test") + assert state.state == STATE_ON + assert state.attributes.get("rgb_color") == (255, 128, 64) + assert state.attributes.get("brightness") == 128 + assert state.attributes.get("color_temp_kelvin") is None # rgb color has priority + assert state.attributes.get("effect") == "rainbow" + + # turn off the light + async_fire_mqtt_message(hass, "test-topic", '{"state": "off"}') + state = hass.states.get("light.test") + assert state.state == STATE_OFF + assert state.attributes.get("rgb_color") is None + assert state.attributes.get("brightness") is None + assert state.attributes.get("color_temp_kelvin") is None + assert state.attributes.get("effect") is None diff --git a/tests/components/mqtt/test_lock.py b/tests/components/mqtt/test_lock.py index 034f9b5ff6e..4aa6ecd03ef 100644 --- a/tests/components/mqtt/test_lock.py +++ b/tests/components/mqtt/test_lock.py @@ -23,7 +23,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_mixins.py b/tests/components/mqtt/test_mixins.py index d65f1a4d661..fa30283962b 100644 --- a/tests/components/mqtt/test_mixins.py +++ b/tests/components/mqtt/test_mixins.py @@ -1,18 +1,31 @@ """The tests for shared code of the MQTT platform.""" -from unittest.mock import patch +from typing import Any +from unittest.mock import call, patch import pytest from homeassistant.components import mqtt, sensor from homeassistant.components.mqtt.sensor import DEFAULT_NAME as DEFAULT_SENSOR_NAME +from homeassistant.config_entries import ConfigSubentryData from homeassistant.const import ( ATTR_FRIENDLY_NAME, EVENT_HOMEASSISTANT_STARTED, EVENT_STATE_CHANGED, ) from 
homeassistant.core import CoreState, HomeAssistant, callback -from homeassistant.helpers import device_registry as dr, issue_registry as ir +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) +from homeassistant.util import slugify + +from .common import ( + MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA, + MOCK_SUBENTRY_DATA_SET_MIX, +) from tests.common import MockConfigEntry, async_capture_events, async_fire_mqtt_message from tests.typing import MqttMockHAClientGenerator @@ -453,3 +466,124 @@ async def test_value_template_fails( "TypeError: unsupported operand type(s) for *: 'NoneType' and 'int' rendering template" in caplog.text ) + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_SUBENTRY_DATA_SET_MIX, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_loading_subentries( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_config_subentries_data: tuple[dict[str, Any]], + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test loading subentries.""" + await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id = next(iter(entry.subentries)) + # Each subentry has one device + device = device_registry.async_get_device({("mqtt", subentry_id)}) + assert device is not None + for object_id, component in mqtt_config_subentries_data[0]["data"][ + "components" + ].items(): + platform = component["platform"] + entity_id = f"{platform}.{slugify(device.name)}_{slugify(component['name'])}" + entity_entry_entity_id = entity_registry.async_get_entity_id( + platform, mqtt.DOMAIN, f"{subentry_id}_{object_id}" + ) + assert entity_entry_entity_id == entity_id + state = hass.states.get(entity_id) + assert state is not None + assert ( + state.attributes.get("entity_picture") == f"https://example.com/{object_id}" + ) + # Availability was configured, so entities are unavailable + assert state.state == "unavailable" + + # Make entities available + async_fire_mqtt_message(hass, "test/availability", '{"availability": "online"}') + for component in mqtt_config_subentries_data[0]["data"]["components"].values(): + platform = component["platform"] + entity_id = f"{platform}.{slugify(device.name)}_{slugify(component['name'])}" + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "unknown" + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_SUBENTRY_DATA_BAD_COMPONENT_SCHEMA, + subentry_type="device", + title="Mock subentry", + ), + ) + ], +) +async def test_loading_subentry_with_bad_component_schema( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_config_subentries_data: tuple[dict[str, Any]], + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test loading subentries.""" + await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id = next(iter(entry.subentries)) + # Each subentry has one device + device = device_registry.async_get_device({("mqtt", subentry_id)}) + assert device is None + assert ( + "Schema violation occurred when trying to set up entity from subentry" + in caplog.text + ) + + +@pytest.mark.parametrize( + "mqtt_config_subentries_data", + [ + ( + ConfigSubentryData( + data=MOCK_NOTIFY_SUBENTRY_DATA_SINGLE, + subentry_type="device", + 
title="Mock subentry", + ), + ) + ], +) +async def test_qos_on_mqt_device_from_subentry( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_config_subentries_data: tuple[dict[str, Any]], + device_registry: dr.DeviceRegistry, +) -> None: + """Test QoS is set correctly on entities from MQTT device.""" + mqtt_mock = await mqtt_mock_entry() + entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0] + subentry_id = next(iter(entry.subentries)) + # Each subentry has one device + device = device_registry.async_get_device({("mqtt", subentry_id)}) + assert device is not None + assert hass.states.get("notify.milk_notifier_milkman_alert") is not None + await hass.services.async_call( + "notify", + "send_message", + {"entity_id": "notify.milk_notifier_milkman_alert", "message": "Test message"}, + ) + await hass.async_block_till_done() + assert len(mqtt_mock.async_publish.mock_calls) == 1 + mqtt_mock.async_publish.mock_calls[0] = call("test-topic", "Test message", 1, False) diff --git a/tests/components/mqtt/test_notify.py b/tests/components/mqtt/test_notify.py index 4837ee214c4..56da809d1b6 100644 --- a/tests/components/mqtt/test_notify.py +++ b/tests/components/mqtt/test_notify.py @@ -11,7 +11,7 @@ from homeassistant.components.notify import ATTR_MESSAGE from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, diff --git a/tests/components/mqtt/test_number.py b/tests/components/mqtt/test_number.py index 7bdd39e81a7..f391236aca4 100644 --- a/tests/components/mqtt/test_number.py +++ b/tests/components/mqtt/test_number.py @@ -31,7 +31,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, State from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_scene.py b/tests/components/mqtt/test_scene.py index d78dbe5c003..1650fe74601 100644 --- a/tests/components/mqtt/test_scene.py +++ b/tests/components/mqtt/test_scene.py @@ -10,7 +10,7 @@ from homeassistant.components import mqtt, scene from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_test_availability_when_connection_lost, help_test_availability_without_topic, help_test_custom_availability_payload, diff --git a/tests/components/mqtt/test_select.py b/tests/components/mqtt/test_select.py index 8d79a3ce609..a880368fa51 100644 --- a/tests/components/mqtt/test_select.py +++ b/tests/components/mqtt/test_select.py @@ -21,7 +21,7 @@ from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_UNKNOW from homeassistant.core import HomeAssistant, State from homeassistant.helpers.typing import ConfigType -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index 9226b03a7d2..74dc94de21e 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -21,11 +21,11 @@ from homeassistant.const import ( 
UnitOfTemperature, ) from homeassistant.core import Event, HomeAssistant, State, callback -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, @@ -71,6 +71,7 @@ from .test_common import ( from tests.common import ( MockConfigEntry, + async_capture_events, async_fire_mqtt_message, async_fire_time_changed, mock_restore_cache_with_extra_data, @@ -870,6 +871,71 @@ async def test_invalid_device_class( assert "expected SensorDeviceClass or one of" in caplog.text +@pytest.mark.parametrize( + "hass_config", + [ + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "device_class": "energy", + "unit_of_measurement": "ppm", + } + } + } + ], +) +async def test_invalid_unit_of_measurement( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test device_class with invalid unit of measurement.""" + events = async_capture_events(hass, ir.EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED) + assert await mqtt_mock_entry() + assert ( + "The unit of measurement `ppm` is not valid together with device class `energy`" + in caplog.text + ) + # A repair issue was logged + assert len(events) == 1 + assert events[0].data["issue_id"] == "sensor.test" + # Assert the sensor works + async_fire_mqtt_message(hass, "test-topic", "100") + await hass.async_block_till_done() + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == "100" + + caplog.clear() + + discovery_payload = { + "name": "bla", + "state_topic": "test-topic2", + "device_class": "temperature", + "unit_of_measurement": "C", + } + # Now discover an other invalid sensor + async_fire_mqtt_message( + hass, "homeassistant/sensor/bla/config", json.dumps(discovery_payload) + ) + await hass.async_block_till_done() + assert ( + "The unit of measurement `C` is not valid together with device class `temperature`" + in caplog.text + ) + # Assert the sensor works + async_fire_mqtt_message(hass, "test-topic2", "21") + await hass.async_block_till_done() + state = hass.states.get("sensor.bla") + assert state is not None + assert state.state == "21" + + # No new issue was registered for the discovered entity + assert len(events) == 1 + + @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_siren.py b/tests/components/mqtt/test_siren.py index 58a5cb735f9..5d82708e242 100644 --- a/tests/components/mqtt/test_siren.py +++ b/tests/components/mqtt/test_siren.py @@ -20,7 +20,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_switch.py b/tests/components/mqtt/test_switch.py index dceeff07377..d834595afe0 100644 --- a/tests/components/mqtt/test_switch.py +++ b/tests/components/mqtt/test_switch.py @@ -16,7 +16,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git 
a/tests/components/mqtt/test_tag.py b/tests/components/mqtt/test_tag.py index 41c417fe3e9..95326382dcc 100644 --- a/tests/components/mqtt/test_tag.py +++ b/tests/components/mqtt/test_tag.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from .test_common import help_test_unload_config_entry +from .common import help_test_unload_config_entry from tests.common import ( MockConfigEntry, diff --git a/tests/components/mqtt/test_text.py b/tests/components/mqtt/test_text.py index 96924030279..050b2b59590 100644 --- a/tests/components/mqtt/test_text.py +++ b/tests/components/mqtt/test_text.py @@ -11,7 +11,7 @@ from homeassistant.components import mqtt, text from homeassistant.const import ATTR_ASSUMED_STATE, ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_update.py b/tests/components/mqtt/test_update.py index 4ca10cbe8b2..87eb381db03 100644 --- a/tests/components/mqtt/test_update.py +++ b/tests/components/mqtt/test_update.py @@ -1,6 +1,7 @@ """The tests for mqtt update component.""" import json +from typing import Any from unittest.mock import patch import pytest @@ -10,7 +11,7 @@ from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INS from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, @@ -225,6 +226,71 @@ async def test_value_template( assert state.attributes.get("latest_version") == "2.0.0" +@pytest.mark.parametrize( + "hass_config", + [ + { + mqtt.DOMAIN: { + update.DOMAIN: { + "state_topic": "test/update", + "value_template": ( + "{\"latest_version\":\"{{ value_json['update']['latest_version'] }}\"," + "\"installed_version\":\"{{ value_json['update']['installed_version'] }}\"," + "\"update_percentage\":{{ value_json['update'].get('progress', 'null') }}}" + ), + "name": "Test Update", + } + } + } + ], +) +async def test_erroneous_value_template( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that it fetches the given payload with a template or handles the exception.""" + state_topic = "test/update" + await mqtt_mock_entry() + + # Simulate a template rendering error with a payload + # without "update" mapping + example_payload: dict[str, Any] = { + "child_lock": "UNLOCK", + "current": 0.02, + "energy": 212.92, + "indicator_mode": "off/on", + "linkquality": 65, + "power": 0, + "power_outage_memory": "off", + "state": "ON", + "voltage": 232, + } + + async_fire_mqtt_message(hass, state_topic, json.dumps(example_payload)) + await hass.async_block_till_done() + assert hass.states.get("update.test_update") is not None + assert "Unable to process payload '" in caplog.text + + # Add update info + example_payload["update"] = { + "latest_version": "2.0.0", + "installed_version": "1.9.0", + "progress": 20, + } + + async_fire_mqtt_message(hass, state_topic, json.dumps(example_payload)) + await hass.async_block_till_done() + + state = hass.states.get("update.test_update") + assert state is not None + + assert state.state == STATE_ON + 
assert state.attributes.get("installed_version") == "1.9.0" + assert state.attributes.get("latest_version") == "2.0.0" + assert state.attributes.get("update_percentage") == 20 + + @pytest.mark.parametrize( "hass_config", [ diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index c1c662048d7..ba404e2dff0 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -33,7 +33,7 @@ from homeassistant.const import CONF_NAME, ENTITY_MATCH_ALL, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_valve.py b/tests/components/mqtt/test_valve.py index 6dd0102b8a3..10387a5b19e 100644 --- a/tests/components/mqtt/test_valve.py +++ b/tests/components/mqtt/test_valve.py @@ -27,7 +27,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, diff --git a/tests/components/mqtt/test_water_heater.py b/tests/components/mqtt/test_water_heater.py index 02ae54c1a85..21969ad7788 100644 --- a/tests/components/mqtt/test_water_heater.py +++ b/tests/components/mqtt/test_water_heater.py @@ -33,8 +33,13 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.util.unit_conversion import TemperatureConverter +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) -from .test_common import ( +from .common import ( help_custom_config, help_test_availability_when_connection_lost, help_test_availability_without_topic, @@ -714,7 +719,7 @@ async def test_temperature_unit( @pytest.mark.parametrize( - ("hass_config", "temperature_unit", "initial", "min_temp", "max_temp", "current"), + ("hass_config", "units", "initial", "min_temp", "max_temp", "current"), [ ( help_custom_config( @@ -727,7 +732,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.CELSIUS, + METRIC_SYSTEM, _DEFAULT_MIN_TEMP_CELSIUS, _DEFAULT_MIN_TEMP_CELSIUS, _DEFAULT_MAX_TEMP_CELSIUS, @@ -744,24 +749,7 @@ async def test_temperature_unit( }, ), ), - UnitOfTemperature.KELVIN, - 316, - 316, - 333, - 322, - ), - ( - help_custom_config( - water_heater.DOMAIN, - DEFAULT_CONFIG, - ( - { - "temperature_unit": "F", - "current_temperature_topic": "current_temperature", - }, - ), - ), - UnitOfTemperature.FAHRENHEIT, + US_CUSTOMARY_SYSTEM, DEFAULT_MIN_TEMP, DEFAULT_MIN_TEMP, DEFAULT_MAX_TEMP, @@ -772,25 +760,25 @@ async def test_temperature_unit( async def test_alt_temperature_unit( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - temperature_unit: UnitOfTemperature, + units: UnitSystem, initial: float, min_temp: float, max_temp: float, current: float, ) -> None: """Test deriving the systems temperature unit.""" - with patch.object(hass.config.units, "temperature_unit", temperature_unit): - await mqtt_mock_entry() + hass.config.units = units + await mqtt_mock_entry() - state = hass.states.get(ENTITY_WATER_HEATER) - assert state.attributes.get("temperature") == initial - assert state.attributes.get("min_temp") == min_temp - assert state.attributes.get("max_temp") == max_temp + state = hass.states.get(ENTITY_WATER_HEATER) + assert 
state.attributes.get("temperature") == initial + assert state.attributes.get("min_temp") == min_temp + assert state.attributes.get("max_temp") == max_temp - async_fire_mqtt_message(hass, "current_temperature", "120") + async_fire_mqtt_message(hass, "current_temperature", "120") - state = hass.states.get(ENTITY_WATER_HEATER) - assert state.attributes.get("current_temperature") == current + state = hass.states.get(ENTITY_WATER_HEATER) + assert state.attributes.get("current_temperature") == current async def test_setting_attribute_via_mqtt_json_message( diff --git a/tests/components/music_assistant/fixtures/players.json b/tests/components/music_assistant/fixtures/players.json index 8a08a55dc45..e8978f17f86 100644 --- a/tests/components/music_assistant/fixtures/players.json +++ b/tests/components/music_assistant/fixtures/players.json @@ -34,12 +34,16 @@ "needs_poll": false, "poll_interval": 30, "enabled": true, - "hidden": false, "icon": "mdi-speaker", "group_volume": 20, "display_name": "Test Player 1", - "extra_data": {}, - "announcement_in_progress": false + "power_control": "native", + "volume_control": "native", + "mute_control": "native", + "hide_player_in_ui": ["when_unavailable"], + "expose_to_ha": true, + "can_group_with": ["00:00:00:00:00:02"], + "source_list": [] }, { "player_id": "00:00:00:00:00:02", @@ -83,15 +87,27 @@ }, "synced_to": null, "enabled_by_default": true, - "needs_poll": false, - "poll_interval": 30, "enabled": true, "hidden": false, "icon": "mdi-speaker", "group_volume": 20, "display_name": "My Super Test Player 2", - "extra_data": {}, - "announcement_in_progress": false + "power_control": "native", + "volume_control": "native", + "mute_control": "native", + "hide_player_in_ui": ["when_unavailable"], + "expose_to_ha": true, + "can_group_with": ["00:00:00:00:00:01"], + "source_list": [ + { + "id": "spotify", + "name": "Spotify Connect", + "passive": true, + "can_play_pause": false, + "can_seek": false, + "can_next_previous": false + } + ] }, { "player_id": "test_group_player_1", @@ -135,15 +151,17 @@ }, "synced_to": null, "enabled_by_default": true, - "needs_poll": true, - "poll_interval": 30, "enabled": true, - "hidden": false, "icon": "mdi-speaker-multiple", "group_volume": 6, "display_name": "Test Group Player 1", - "extra_data": {}, - "announcement_in_progress": false + "power_control": "native", + "volume_control": "native", + "mute_control": "native", + "hide_player_in_ui": ["when_unavailable"], + "expose_to_ha": true, + "can_group_with": [], + "source_list": [] } ] } diff --git a/tests/components/music_assistant/test_media_player.py b/tests/components/music_assistant/test_media_player.py index 44317d4977a..ad321a1cc29 100644 --- a/tests/components/music_assistant/test_media_player.py +++ b/tests/components/music_assistant/test_media_player.py @@ -694,19 +694,6 @@ async def test_media_player_supported_features( assert state assert state.attributes["supported_features"] == expected_features - # remove pause capability from player, trigger subscription callback - # and check if the supported features got updated - music_assistant_client.players._players[mass_player_id].supported_features.remove( - PlayerFeature.PAUSE - ) - await trigger_subscription_callback( - hass, music_assistant_client, EventType.PLAYER_CONFIG_UPDATED, mass_player_id - ) - expected_features &= ~MediaPlayerEntityFeature.PAUSE - state = hass.states.get(entity_id) - assert state - assert state.attributes["supported_features"] == expected_features - # remove grouping capability from player, 
trigger subscription callback # and check if the supported features got updated music_assistant_client.players._players[mass_player_id].supported_features.remove( diff --git a/tests/components/mysensors/conftest.py b/tests/components/mysensors/conftest.py index 1d407815db0..c2c110466e6 100644 --- a/tests/components/mysensors/conftest.py +++ b/tests/components/mysensors/conftest.py @@ -53,7 +53,7 @@ def gateway_nodes_fixture() -> dict[int, Sensor]: async def serial_transport_fixture( gateway_nodes: dict[int, Sensor], is_serial_port: MagicMock, -) -> AsyncGenerator[dict[int, Sensor]]: +) -> AsyncGenerator[MagicMock]: """Mock a serial transport.""" with ( patch( @@ -320,6 +320,21 @@ def hvac_node_heat( return nodes[1] +@pytest.fixture(name="hvac_node_only_hvac_state", scope="package") +def hvac_node_only_hvac_state_fixture() -> dict: + """Load the hvac node only hvac state.""" + return load_nodes_state("hvac_node_only_hvac_state.json") + + +@pytest.fixture +def hvac_node_only_hvac( + gateway_nodes: dict[int, Sensor], hvac_node_only_hvac_state: dict +) -> Sensor: + """Load the hvac only hvac child node.""" + nodes = update_gateway_nodes(gateway_nodes, deepcopy(hvac_node_only_hvac_state)) + return nodes[1] + + @pytest.fixture(name="power_sensor_state", scope="package") def power_sensor_state_fixture() -> dict: """Load the power sensor state.""" diff --git a/tests/components/mysensors/fixtures/hvac_node_only_hvac_state.json b/tests/components/mysensors/fixtures/hvac_node_only_hvac_state.json new file mode 100644 index 00000000000..b41470e6076 --- /dev/null +++ b/tests/components/mysensors/fixtures/hvac_node_only_hvac_state.json @@ -0,0 +1,22 @@ +{ + "1": { + "sensor_id": 1, + "children": { + "1": { + "id": 1, + "type": 29, + "description": "", + "values": { + "0": "20.0", + "21": "Off" + } + } + }, + "type": 17, + "sketch_name": "HVAC Node", + "sketch_version": "1.0", + "battery_level": 0, + "protocol_version": "2.3.2", + "heartbeat": 0 + } +} diff --git a/tests/components/mysensors/test_climate.py b/tests/components/mysensors/test_climate.py index 959f92ff512..b919287e046 100644 --- a/tests/components/mysensors/test_climate.py +++ b/tests/components/mysensors/test_climate.py @@ -38,6 +38,8 @@ async def test_hvac_node_auto( assert state assert state.state == HVACMode.OFF assert state.attributes[ATTR_BATTERY_LEVEL] == 0 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.0 + assert state.attributes["supported_features"] == 394 # Test set hvac mode auto await hass.services.async_call( @@ -153,6 +155,8 @@ async def test_hvac_node_heat( assert state assert state.state == HVACMode.OFF assert state.attributes[ATTR_BATTERY_LEVEL] == 0 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.0 + assert state.attributes["supported_features"] == 393 # Test set hvac mode heat await hass.services.async_call( @@ -263,8 +267,10 @@ async def test_hvac_node_cool( assert state assert state.state == HVACMode.OFF assert state.attributes[ATTR_BATTERY_LEVEL] == 0 + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.0 + assert state.attributes["supported_features"] == 393 - # Test set hvac mode heat + # Test set hvac mode cool await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, @@ -357,3 +363,83 @@ async def test_hvac_node_cool( assert state assert state.state == HVACMode.OFF + + +async def test_hvac_node_only_hvac( + hass: HomeAssistant, + hvac_node_only_hvac: Sensor, + receive_message: Callable[[str], None], + transport_write: MagicMock, +) -> None: + """Test a hvac only hvac node.""" 
+ entity_id = "climate.hvac_node_1_1" + + state = hass.states.get(entity_id) + + assert state + assert state.state == HVACMode.OFF + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.0 + assert state.attributes["supported_features"] == 384 + + # Test set hvac mode heat + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + + assert transport_write.call_count == 1 + assert transport_write.call_args == call("1;1;1;1;21;HeatOn\n") + + receive_message("1;1;1;0;21;HeatOn\n") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + + assert state + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.0 + + transport_write.reset_mock() + + # Test set hvac mode cool + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_HVAC_MODE: HVACMode.COOL}, + blocking=True, + ) + + assert transport_write.call_count == 1 + assert transport_write.call_args == call("1;1;1;1;21;CoolOn\n") + + receive_message("1;1;1;0;21;CoolOn\n") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + + assert state + assert state.state == HVACMode.COOL + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 20.0 + + transport_write.reset_mock() + + # Test set hvac mode off + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: entity_id, ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + + assert transport_write.call_count == 1 + assert transport_write.call_args == call("1;1;1;1;21;Off\n") + + receive_message("1;1;1;0;21;Off\n") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + + assert state + assert state.state == HVACMode.OFF diff --git a/tests/components/mysensors/test_init.py b/tests/components/mysensors/test_init.py index 7f6ea76d3e1..108f2d7e592 100644 --- a/tests/components/mysensors/test_init.py +++ b/tests/components/mysensors/test_init.py @@ -2,10 +2,15 @@ from __future__ import annotations +from collections.abc import Callable +from unittest.mock import MagicMock + from mysensors import BaseSyncGateway from mysensors.sensor import Sensor from homeassistant.components.mysensors import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -14,6 +19,50 @@ from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator +async def test_load_unload( + hass: HomeAssistant, + door_sensor: Sensor, + transport: MagicMock, + integration: MockConfigEntry, + receive_message: Callable[[str], None], +) -> None: + """Test loading and unloading the MySensors config entry.""" + config_entry = integration + + assert config_entry.state == ConfigEntryState.LOADED + + entity_id = "binary_sensor.door_sensor_1_1" + state = hass.states.get(entity_id) + + assert state + assert state.state != STATE_UNAVAILABLE + + receive_message("1;1;1;0;16;1\n") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + + assert state + assert state.state != STATE_UNAVAILABLE + + assert await hass.config_entries.async_unload(config_entry.entry_id) + + assert transport.return_value.disconnect.call_count == 1 + + state = hass.states.get(entity_id) + + assert 
state + assert state.state == STATE_UNAVAILABLE + + receive_message("1;1;1;0;16;1\n") + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + + assert state + assert state.state == STATE_UNAVAILABLE + + async def test_remove_config_entry_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/nam/fixtures/nam_data.json b/tests/components/nam/fixtures/nam_data.json index 82dacbefb34..47ebe099dc7 100644 --- a/tests/components/nam/fixtures/nam_data.json +++ b/tests/components/nam/fixtures/nam_data.json @@ -26,6 +26,7 @@ { "value_type": "temperature", "value": "6.26" }, { "value_type": "HECA_temperature", "value": "7.95" }, { "value_type": "HECA_humidity", "value": "49.97" }, + { "value_type": "ambient_light", "value": "298.45" }, { "value_type": "signal", "value": "-72" } ] } diff --git a/tests/components/nam/snapshots/test_diagnostics.ambr b/tests/components/nam/snapshots/test_diagnostics.ambr index e92e02fa1d8..c0009899d16 100644 --- a/tests/components/nam/snapshots/test_diagnostics.ambr +++ b/tests/components/nam/snapshots/test_diagnostics.ambr @@ -2,6 +2,7 @@ # name: test_entry_diagnostics dict({ 'data': dict({ + 'bh1750_illuminance': 298.45, 'bme280_humidity': 45.69, 'bme280_pressure': 1011.0117, 'bme280_temperature': 7.56, diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr index 429d069b741..c6c32737a31 100644 --- a/tests/components/nam/snapshots/test_sensor.ambr +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -1,4 +1,59 @@ # serializer version: 1 +# name: test_sensor[sensor.nettigo_air_monitor_bh1750_illuminance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nettigo_air_monitor_bh1750_illuminance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'BH1750 illuminance', + 'platform': 'nam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'bh1750_illuminance', + 'unique_id': 'aa:bb:cc:dd:ee:ff-bh1750_illuminance', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_sensor[sensor.nettigo_air_monitor_bh1750_illuminance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 'friendly_name': 'Nettigo Air Monitor BH1750 illuminance', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.nettigo_air_monitor_bh1750_illuminance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '298.45', + }) +# --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/nam/test_button.py b/tests/components/nam/test_button.py index 39c37d57f89..b410665911a 100644 --- a/tests/components/nam/test_button.py +++ b/tests/components/nam/test_button.py @@ -2,9 +2,20 @@ from unittest.mock import patch -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass +from aiohttp.client_exceptions import ClientError +from nettigo_air_monitor import ApiError, 
AuthFailedError +import pytest + +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + SERVICE_PRESS, + ButtonDeviceClass, +) +from homeassistant.components.nam import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util @@ -38,7 +49,7 @@ async def test_button_press(hass: HomeAssistant) -> None: ): await hass.services.async_call( BUTTON_DOMAIN, - "press", + SERVICE_PRESS, {ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"}, blocking=True, ) @@ -49,3 +60,55 @@ async def test_button_press(hass: HomeAssistant) -> None: state = hass.states.get("button.nettigo_air_monitor_restart") assert state assert state.state == now.isoformat() + + +@pytest.mark.parametrize(("exc"), [ApiError("API Error"), ClientError]) +async def test_button_press_exc(hass: HomeAssistant, exc: Exception) -> None: + """Test button press when exception occurs.""" + await init_integration(hass) + + with ( + patch( + "homeassistant.components.nam.NettigoAirMonitor.async_restart", + side_effect=exc, + ), + pytest.raises( + HomeAssistantError, + match="An error occurred while calling action for button.nettigo_air_monitor_restart", + ), + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"}, + blocking=True, + ) + + +async def test_button_press_auth_error(hass: HomeAssistant) -> None: + """Test button press when auth error occurs.""" + entry = await init_integration(hass) + + with patch( + "homeassistant.components.nam.NettigoAirMonitor.async_restart", + side_effect=AuthFailedError("auth error"), + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.nettigo_air_monitor_restart"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index 92d90a18a7e..b4b94efce5b 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -144,13 +144,14 @@ async def auth( return FakeAuth(aioclient_mock, create_device, device_access_project_id) -@pytest.fixture(autouse=True) -def cleanup_media_storage(hass: HomeAssistant) -> Generator[None]: +@pytest.fixture(autouse=True, name="media_path") +def cleanup_media_storage(hass: HomeAssistant) -> Generator[str]: """Test cleanup, remove any media storage persisted during the test.""" tmp_path = str(uuid.uuid4()) with patch("homeassistant.components.nest.media_source.MEDIA_PATH", new=tmp_path): - yield - shutil.rmtree(hass.config.path(tmp_path), ignore_errors=True) + full_path = hass.config.path(tmp_path) + yield full_path + shutil.rmtree(full_path, ignore_errors=True) @pytest.fixture diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index d009e1185da..0b0654fc69c 100644 --- a/tests/components/nest/test_media_source.py +++ 
b/tests/components/nest/test_media_source.py @@ -8,11 +8,13 @@ from collections.abc import Generator import datetime from http import HTTPStatus import io +import pathlib from typing import Any from unittest.mock import patch import aiohttp import av +from freezegun import freeze_time import numpy as np import pytest @@ -39,7 +41,7 @@ from .common import ( ) from .conftest import FakeAuth -from tests.common import MockUser, async_capture_events +from tests.common import MockUser, async_capture_events, async_fire_time_changed from tests.typing import ClientSessionGenerator DOMAIN = "nest" @@ -1574,3 +1576,80 @@ async def test_event_clip_media_attachment( response = await client.get(content_path) assert response.status == HTTPStatus.OK, f"Response not matched: {response}" await response.read() + + +@pytest.mark.parametrize(("device_traits", "cache_size"), [(BATTERY_CAMERA_TRAITS, 5)]) +async def test_remove_stale_media( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + auth, + mp4, + hass_client: ClientSessionGenerator, + subscriber, + setup_platform, + media_path: str, +) -> None: + """Test media files getting evicted from the cache.""" + await setup_platform() + + device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) + assert device + assert device.name == DEVICE_NAME + + # Publish a media event + auth.responses = [ + aiohttp.web.Response(body=mp4.getvalue()), + ] + event_timestamp = dt_util.now() + await subscriber.async_receive_event( + create_event_message( + create_battery_event_data(MOTION_EVENT), + timestamp=event_timestamp, + ) + ) + await hass.async_block_till_done() + + # The first subdirectory is the device id. Media for events are stored in the + # device subdirectory. First verify that the media was persisted. We will + # then add additional media files, then invoke the garbage collector, and + # then verify orphaned files are removed. + storage_path = pathlib.Path(media_path) + device_path = storage_path / device.id + media_files = list(device_path.glob("*")) + assert len(media_files) == 1 + event_media = media_files[0] + assert event_media.name.endswith(".mp4") + + event_time1 = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=8) + extra_media1 = ( + device_path / f"{int(event_time1.timestamp())}-camera_motion-test.mp4" + ) + extra_media1.write_bytes(mp4.getvalue()) + event_time2 = event_time1 + datetime.timedelta(hours=20) + extra_media2 = ( + device_path / f"{int(event_time2.timestamp())}-camera_motion-test.jpg" + ) + extra_media2.write_bytes(mp4.getvalue()) + # This event will not be garbage collected because it is too recent + event_time3 = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=3) + extra_media3 = ( + device_path / f"{int(event_time3.timestamp())}-camera_motion-test.mp4" + ) + extra_media3.write_bytes(mp4.getvalue()) + + assert len(list(device_path.glob("*"))) == 4 + + # Advance the clock to invoke the garbage collector. This will remove extra + # files that are not valid events that are old enough. + point_in_time = datetime.datetime.now(datetime.UTC) + datetime.timedelta(days=1) + with freeze_time(point_in_time): + async_fire_time_changed(hass, point_in_time) + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Verify that the event media is still present and that the extra files + # are removed. Newer media is not removed. 
+ assert event_media.exists() + assert not extra_media1.exists() + assert not extra_media2.exists() + assert extra_media3.exists() diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index b149e80fa5b..00285f565a6 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -1501,7 +1501,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -1520,7 +1520,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -1535,10 +1535,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home avg Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -1659,60 +1660,6 @@ 'state': '63.2', }) # --- -# name: test_entity[sensor.home_avg_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_avg_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-avg-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_avg_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home avg None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_avg_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17.0', - }) -# --- # name: test_entity[sensor.home_avg_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1939,6 +1886,61 @@ 'state': '22.7', }) # --- +# name: test_entity[sensor.home_avg_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_avg_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-avg-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_avg_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home avg Wind 
direction', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_avg_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17.0', + }) +# --- # name: test_entity[sensor.home_avg_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2061,7 +2063,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -2080,7 +2082,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -2095,10 +2097,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home max Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2219,60 +2222,6 @@ 'state': '76', }) # --- -# name: test_entity[sensor.home_max_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_max_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-max-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_max_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home max None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_max_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17', - }) -# --- # name: test_entity[sensor.home_max_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2499,6 +2448,61 @@ 'state': '27.4', }) # --- +# name: test_entity[sensor.home_max_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_max_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-max-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_max_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home max Wind direction', + 'latitude': 
32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_max_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- # name: test_entity[sensor.home_max_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2621,7 +2625,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -2640,7 +2644,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -2655,10 +2659,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Home min Gust angle', 'latitude': 32.17901225, 'longitude': -117.17901225, - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -2779,60 +2784,6 @@ 'state': '56', }) # --- -# name: test_entity[sensor.home_min_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.home_min_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'netatmo', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'Home-min-windangle_value', - 'unit_of_measurement': '°', - }) -# --- -# name: test_entity[sensor.home_min_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Netatmo', - 'friendly_name': 'Home min None', - 'latitude': 32.17901225, - 'longitude': -117.17901225, - 'state_class': , - 'unit_of_measurement': '°', - }), - 'context': , - 'entity_id': 'sensor.home_min_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17', - }) -# --- # name: test_entity[sensor.home_min_precipitation-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3059,6 +3010,61 @@ 'state': '19.8', }) # --- +# name: test_entity[sensor.home_min_wind_direction-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_wind_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind direction', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_min_wind_direction-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', + 'friendly_name': 'Home min Wind direction', + 'latitude': 32.17901225, + 'longitude': 
-117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_min_wind_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- # name: test_entity[sensor.home_min_wind_speed-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6253,7 +6259,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -6272,7 +6278,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Gust angle', 'platform': 'netatmo', @@ -6287,8 +6293,9 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Villa Garden Gust angle', - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , @@ -6524,7 +6531,7 @@ }), 'area_id': None, 'capabilities': dict({ - 'state_class': , + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -6543,7 +6550,7 @@ 'name': None, 'options': dict({ }), - 'original_device_class': None, + 'original_device_class': , 'original_icon': None, 'original_name': 'Wind angle', 'platform': 'netatmo', @@ -6558,8 +6565,9 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_direction', 'friendly_name': 'Villa Garden Wind angle', - 'state_class': , + 'state_class': , 'unit_of_measurement': '°', }), 'context': , diff --git a/tests/components/netatmo/test_init.py b/tests/components/netatmo/test_init.py index 5fdf4f8ea35..c1a687c6fa8 100644 --- a/tests/components/netatmo/test_init.py +++ b/tests/components/netatmo/test_init.py @@ -25,11 +25,7 @@ from .common import ( simulate_webhook, ) -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - async_get_persistent_notifications, -) +from tests.common import MockConfigEntry, async_fire_time_changed from tests.components.cloud import mock_cloud from tests.typing import WebSocketGenerator @@ -423,9 +419,8 @@ async def test_setup_component_invalid_token_scope(hass: HomeAssistant) -> None: assert config_entry.state is ConfigEntryState.SETUP_ERROR assert hass.config_entries.async_entries(DOMAIN) - notifications = async_get_persistent_notifications(hass) - - assert len(notifications) > 0 + # Test a reauth flow is initiated + assert len(list(config_entry.async_get_active_flows(hass, {"reauth"}))) == 1 for config_entry in hass.config_entries.async_entries("netatmo"): await hass.config_entries.async_remove(config_entry.entry_id) @@ -476,8 +471,9 @@ async def test_setup_component_invalid_token( assert config_entry.state is ConfigEntryState.SETUP_ERROR assert hass.config_entries.async_entries(DOMAIN) - notifications = async_get_persistent_notifications(hass) - assert len(notifications) > 0 + + # Test a reauth flow is initiated + assert len(list(config_entry.async_get_active_flows(hass, {"reauth"}))) == 1 for entry in hass.config_entries.async_entries("netatmo"): await hass.config_entries.async_remove(entry.entry_id) diff --git a/tests/components/nextcloud/snapshots/test_sensor.ambr b/tests/components/nextcloud/snapshots/test_sensor.ambr index 84c1d33f886..e6154841a28 100644 --- a/tests/components/nextcloud/snapshots/test_sensor.ambr +++ b/tests/components/nextcloud/snapshots/test_sensor.ambr @@ -1424,7 +1424,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Cache ttl', + 
'original_name': 'Cache TTL', 'platform': 'nextcloud', 'previous_unique_id': None, 'supported_features': 0, @@ -1436,7 +1436,7 @@ # name: test_async_setup_entry[sensor.my_nc_url_local_cache_ttl-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local Cache ttl', + 'friendly_name': 'my.nc_url.local Cache TTL', }), 'context': , 'entity_id': 'sensor.my_nc_url_local_cache_ttl', @@ -1474,7 +1474,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CPU Load last 15 minutes', + 'original_name': 'CPU load last 15 minutes', 'platform': 'nextcloud', 'previous_unique_id': None, 'supported_features': 0, @@ -1486,7 +1486,7 @@ # name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_15_minutes-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local CPU Load last 15 minutes', + 'friendly_name': 'my.nc_url.local CPU load last 15 minutes', 'unit_of_measurement': 'load', }), 'context': , @@ -1525,7 +1525,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CPU Load last 1 minute', + 'original_name': 'CPU load last 1 minute', 'platform': 'nextcloud', 'previous_unique_id': None, 'supported_features': 0, @@ -1537,7 +1537,7 @@ # name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_1_minute-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local CPU Load last 1 minute', + 'friendly_name': 'my.nc_url.local CPU load last 1 minute', 'unit_of_measurement': 'load', }), 'context': , @@ -1576,7 +1576,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'CPU Load last 5 minutes', + 'original_name': 'CPU load last 5 minutes', 'platform': 'nextcloud', 'previous_unique_id': None, 'supported_features': 0, @@ -1588,7 +1588,7 @@ # name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_5_minutes-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'my.nc_url.local CPU Load last 5 minutes', + 'friendly_name': 'my.nc_url.local CPU load last 5 minutes', 'unit_of_measurement': 'load', }), 'context': , diff --git a/tests/components/nextdns/test_button.py b/tests/components/nextdns/test_button.py index 51970b9bb48..3d2422c34a7 100644 --- a/tests/components/nextdns/test_button.py +++ b/tests/components/nextdns/test_button.py @@ -1,12 +1,19 @@ """Test button of NextDNS integration.""" -from unittest.mock import patch +from unittest.mock import Mock, patch +from aiohttp import ClientError +from aiohttp.client_exceptions import ClientConnectorError +from nextdns import ApiError, InvalidApiKeyError +import pytest from syrupy import SnapshotAssertion -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.nextdns.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util @@ -36,7 +43,7 @@ async def test_button_press(hass: HomeAssistant) -> None: ): await hass.services.async_call( BUTTON_DOMAIN, - "press", + SERVICE_PRESS, {ATTR_ENTITY_ID: "button.fake_profile_clear_logs"}, blocking=True, ) @@ -47,3 +54,60 @@ async def test_button_press(hass: HomeAssistant) -> None: state = 
hass.states.get("button.fake_profile_clear_logs") assert state assert state.state == now.isoformat() + + +@pytest.mark.parametrize( + "exc", + [ + ApiError(Mock()), + TimeoutError, + ClientConnectorError(Mock(), Mock()), + ClientError, + ], +) +async def test_button_failure(hass: HomeAssistant, exc: Exception) -> None: + """Tests that the press action throws HomeAssistantError.""" + await init_integration(hass) + + with ( + patch("homeassistant.components.nextdns.NextDns.clear_logs", side_effect=exc), + pytest.raises( + HomeAssistantError, + match="An error occurred while calling the NextDNS API method for button.fake_profile_clear_logs", + ), + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.fake_profile_clear_logs"}, + blocking=True, + ) + + +async def test_button_auth_error(hass: HomeAssistant) -> None: + """Tests that the press action starts re-auth flow.""" + entry = await init_integration(hass) + + with patch( + "homeassistant.components.nextdns.NextDns.clear_logs", + side_effect=InvalidApiKeyError, + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.fake_profile_clear_logs"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nextdns/test_switch.py b/tests/components/nextdns/test_switch.py index 6e344e34336..c85525ac457 100644 --- a/tests/components/nextdns/test_switch.py +++ b/tests/components/nextdns/test_switch.py @@ -5,12 +5,14 @@ from unittest.mock import Mock, patch from aiohttp import ClientError from aiohttp.client_exceptions import ClientConnectorError -from nextdns import ApiError +from nextdns import ApiError, InvalidApiKeyError import pytest from syrupy import SnapshotAssertion from tenacity import RetryError +from homeassistant.components.nextdns.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -158,3 +160,32 @@ async def test_switch_failure(hass: HomeAssistant, exc: Exception) -> None: {ATTR_ENTITY_ID: "switch.fake_profile_block_page"}, blocking=True, ) + + +async def test_switch_auth_error(hass: HomeAssistant) -> None: + """Tests that the turn on/off action starts re-auth flow.""" + entry = await init_integration(hass) + + with patch( + "homeassistant.components.nextdns.NextDns.set_setting", + side_effect=InvalidApiKeyError, + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "switch.fake_profile_block_page"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nmbs/__init__.py b/tests/components/nmbs/__init__.py index 91226950aba..3d284e5bb77 100644 --- a/tests/components/nmbs/__init__.py +++ 
b/tests/components/nmbs/__init__.py @@ -1,20 +1 @@ """Tests for the NMBS integration.""" - -import json -from typing import Any - -from tests.common import load_fixture - - -def mock_api_unavailable() -> dict[str, Any]: - """Mock for unavailable api.""" - return -1 - - -def mock_station_response() -> dict[str, Any]: - """Mock for valid station response.""" - dummy_stations_response: dict[str, Any] = json.loads( - load_fixture("stations.json", "nmbs") - ) - - return dummy_stations_response diff --git a/tests/components/nmbs/conftest.py b/tests/components/nmbs/conftest.py index 69200fc4c98..a39334ba62c 100644 --- a/tests/components/nmbs/conftest.py +++ b/tests/components/nmbs/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from pyrail.models import StationsApiResponse import pytest from homeassistant.components.nmbs.const import ( @@ -38,8 +39,8 @@ def mock_nmbs_client() -> Generator[AsyncMock]: ), ): client = mock_client.return_value - client.get_stations.return_value = load_json_object_fixture( - "stations.json", DOMAIN + client.get_stations.return_value = StationsApiResponse.from_dict( + load_json_object_fixture("stations.json", DOMAIN) ) yield client diff --git a/tests/components/nmbs/test_config_flow.py b/tests/components/nmbs/test_config_flow.py index ff4c5bdf72a..7e0f087607b 100644 --- a/tests/components/nmbs/test_config_flow.py +++ b/tests/components/nmbs/test_config_flow.py @@ -142,7 +142,7 @@ async def test_unavailable_api( hass: HomeAssistant, mock_nmbs_client: AsyncMock ) -> None: """Test starting a flow by user and api is unavailable.""" - mock_nmbs_client.get_stations.return_value = -1 + mock_nmbs_client.get_stations.return_value = None result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, @@ -203,7 +203,7 @@ async def test_unavailable_api_import( hass: HomeAssistant, mock_nmbs_client: AsyncMock ) -> None: """Test starting a flow by import and api is unavailable.""" - mock_nmbs_client.get_stations.return_value = -1 + mock_nmbs_client.get_stations.return_value = None result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, diff --git a/tests/components/nordpool/fixtures/delivery_period_today.json b/tests/components/nordpool/fixtures/delivery_period_today.json index 77d51dc9433..df48c32a9a9 100644 --- a/tests/components/nordpool/fixtures/delivery_period_today.json +++ b/tests/components/nordpool/fixtures/delivery_period_today.json @@ -162,7 +162,7 @@ "deliveryEnd": "2024-11-05T19:00:00Z", "entryPerArea": { "SE3": 1011.77, - "SE4": 1804.46 + "SE4": 0.0 } }, { diff --git a/tests/components/nordpool/snapshots/test_diagnostics.ambr b/tests/components/nordpool/snapshots/test_diagnostics.ambr index 76a3dd96405..d7f7c4041cd 100644 --- a/tests/components/nordpool/snapshots/test_diagnostics.ambr +++ b/tests/components/nordpool/snapshots/test_diagnostics.ambr @@ -519,7 +519,7 @@ 'deliveryStart': '2024-11-05T18:00:00Z', 'entryPerArea': dict({ 'SE3': 1011.77, - 'SE4': 1804.46, + 'SE4': 0.0, }), }), dict({ diff --git a/tests/components/nordpool/snapshots/test_sensor.ambr b/tests/components/nordpool/snapshots/test_sensor.ambr index 86aa49357c5..be2b04cc520 100644 --- a/tests/components/nordpool/snapshots/test_sensor.ambr +++ b/tests/components/nordpool/snapshots/test_sensor.ambr @@ -1332,7 +1332,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1.80446', + 'state': '0.0', }) # --- # name: 
test_sensor[sensor.nord_pool_se4_daily_average-entry] @@ -1580,9 +1580,9 @@ # name: test_sensor[sensor.nord_pool_se4_lowest_price-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'end': '2024-11-05T03:00:00+00:00', + 'end': '2024-11-05T19:00:00+00:00', 'friendly_name': 'Nord Pool SE4 Lowest price', - 'start': '2024-11-05T02:00:00+00:00', + 'start': '2024-11-05T18:00:00+00:00', 'unit_of_measurement': 'SEK/kWh', }), 'context': , @@ -1590,7 +1590,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.06519', + 'state': '0.0', }) # --- # name: test_sensor[sensor.nord_pool_se4_next_price-entry] diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py index 60be1ee3258..082684a2a02 100644 --- a/tests/components/nordpool/test_sensor.py +++ b/tests/components/nordpool/test_sensor.py @@ -33,6 +33,19 @@ async def test_sensor( await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_current_price_is_0( + hass: HomeAssistant, load_int: ConfigEntry +) -> None: + """Test the Nord Pool sensor working if price is 0.""" + + current_price = hass.states.get("sensor.nord_pool_se4_current_price") + + assert current_price is not None + assert current_price.state == "0.0" # SE4 2024-11-05T18:00:00Z + + @pytest.mark.freeze_time("2024-11-05T23:00:00+00:00") @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_no_next_price(hass: HomeAssistant, load_int: ConfigEntry) -> None: diff --git a/tests/components/nut/fixtures/EATON-EPDU-G3-AMBIENT-NOT-PRESENT.json b/tests/components/nut/fixtures/EATON-EPDU-G3-AMBIENT-NOT-PRESENT.json new file mode 100644 index 00000000000..96394e618c9 --- /dev/null +++ b/tests/components/nut/fixtures/EATON-EPDU-G3-AMBIENT-NOT-PRESENT.json @@ -0,0 +1,539 @@ +{ + "ambient.contacts.1.status": "opened", + "ambient.contacts.2.status": "opened", + "ambient.count": "0", + "ambient.humidity": "29.90", + "ambient.humidity.high": "90", + "ambient.humidity.high.critical": "90", + "ambient.humidity.high.warning": "65", + "ambient.humidity.low": "10", + "ambient.humidity.low.critical": "10", + "ambient.humidity.low.warning": "20", + "ambient.humidity.status": "good", + "ambient.present": "no", + "ambient.temperature": "28.9", + "ambient.temperature.high": "43.30", + "ambient.temperature.high.critical": "43.30", + "ambient.temperature.high.warning": "37.70", + "ambient.temperature.low": "5", + "ambient.temperature.low.critical": "5", + "ambient.temperature.low.warning": "10", + "ambient.temperature.status": "good", + "device.contact": "Contact Name", + "device.count": "1", + "device.description": "ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE", + "device.location": "Device Location", + "device.macaddr": "00 00 00 FF FF FF ", + "device.mfr": "EATON", + "device.model": "ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE", + "device.part": "EMA000-00", + "device.serial": "A000A00000", + "device.type": "pdu", + "driver.debug": "0", + "driver.flag.allow_killpower": "0", + "driver.name": "snmp-ups", + "driver.parameter.pollinterval": "2", + "driver.parameter.port": "eaton-pdu", + "driver.parameter.synchronous": "auto", + "driver.state": "dumping", + "driver.version": "2.8.2.882-882-g63d90ebcb", + "driver.version.data": "eaton_epdu MIB 0.69", + "driver.version.internal": "1.31", + "input.current": "4.30", + "input.current.high.critical": "16", + 
"input.current.high.warning": "12.80", + "input.current.low.warning": "0", + "input.current.nominal": "16", + "input.current.status": "good", + "input.feed.color": "0", + "input.feed.desc": "Feed A", + "input.frequency": "60", + "input.frequency.status": "good", + "input.L1.current": "4.30", + "input.L1.current.high.critical": "16", + "input.L1.current.high.warning": "12.80", + "input.L1.current.low.warning": "0", + "input.L1.current.nominal": "16", + "input.L1.current.status": "good", + "input.L1.load": "26", + "input.L1.power": "529", + "input.L1.realpower": "482", + "input.L1.voltage": "122.91", + "input.L1.voltage.high.critical": "140", + "input.L1.voltage.high.warning": "130", + "input.L1.voltage.low.critical": "90", + "input.L1.voltage.low.warning": "95", + "input.L1.voltage.status": "good", + "input.load": "26", + "input.phases": "1", + "input.power": "532", + "input.realpower": "482", + "input.realpower.nominal": "1920", + "input.voltage": "122.91", + "input.voltage.high.critical": "140", + "input.voltage.high.warning": "130", + "input.voltage.low.critical": "90", + "input.voltage.low.warning": "95", + "input.voltage.status": "good", + "outlet.1.current": "0", + "outlet.1.current.high.critical": "16", + "outlet.1.current.high.warning": "12.80", + "outlet.1.current.low.warning": "0", + "outlet.1.current.status": "good", + "outlet.1.delay.shutdown": "120", + "outlet.1.delay.start": "1", + "outlet.1.desc": "Outlet A1", + "outlet.1.groupid": "1", + "outlet.1.id": "1", + "outlet.1.name": "A1", + "outlet.1.power": "0", + "outlet.1.realpower": "0", + "outlet.1.status": "on", + "outlet.1.switchable": "yes", + "outlet.1.timer.shutdown": "-1", + "outlet.1.timer.start": "-1", + "outlet.1.type": "nema520", + "outlet.10.current": "0.26", + "outlet.10.current.high.critical": "16", + "outlet.10.current.high.warning": "12.80", + "outlet.10.current.low.warning": "0", + "outlet.10.current.status": "good", + "outlet.10.delay.shutdown": "120", + "outlet.10.delay.start": "10", + "outlet.10.desc": "Outlet A10", + "outlet.10.groupid": "1", + "outlet.10.id": "10", + "outlet.10.name": "A10", + "outlet.10.power": "32", + "outlet.10.realpower": "15", + "outlet.10.status": "on", + "outlet.10.switchable": "yes", + "outlet.10.timer.shutdown": "-1", + "outlet.10.timer.start": "-1", + "outlet.10.type": "nema520", + "outlet.11.current": "0.24", + "outlet.11.current.high.critical": "16", + "outlet.11.current.high.warning": "12.80", + "outlet.11.current.low.warning": "0", + "outlet.11.current.status": "good", + "outlet.11.delay.shutdown": "120", + "outlet.11.delay.start": "11", + "outlet.11.desc": "Outlet A11", + "outlet.11.groupid": "1", + "outlet.11.id": "11", + "outlet.11.name": "A11", + "outlet.11.power": "29", + "outlet.11.realpower": "22", + "outlet.11.status": "on", + "outlet.11.switchable": "yes", + "outlet.11.timer.shutdown": "-1", + "outlet.11.timer.start": "-1", + "outlet.11.type": "nema520", + "outlet.12.current": "0", + "outlet.12.current.high.critical": "16", + "outlet.12.current.high.warning": "12.80", + "outlet.12.current.low.warning": "0", + "outlet.12.current.status": "good", + "outlet.12.delay.shutdown": "120", + "outlet.12.delay.start": "12", + "outlet.12.desc": "Outlet A12", + "outlet.12.groupid": "1", + "outlet.12.id": "12", + "outlet.12.name": "A12", + "outlet.12.power": "0", + "outlet.12.realpower": "0", + "outlet.12.status": "on", + "outlet.12.switchable": "yes", + "outlet.12.timer.shutdown": "-1", + "outlet.12.timer.start": "-1", + "outlet.12.type": "nema520", + "outlet.13.current": "0.23", 
+ "outlet.13.current.high.critical": "16", + "outlet.13.current.high.warning": "12.80", + "outlet.13.current.low.warning": "0", + "outlet.13.current.status": "good", + "outlet.13.delay.shutdown": "0", + "outlet.13.delay.start": "0", + "outlet.13.desc": "Outlet A13", + "outlet.13.groupid": "1", + "outlet.13.id": "0", + "outlet.13.name": "A13", + "outlet.13.power": "27", + "outlet.13.realpower": "9", + "outlet.13.status": "on", + "outlet.13.switchable": "yes", + "outlet.13.timer.shutdown": "-1", + "outlet.13.timer.start": "-1", + "outlet.13.type": "nema520", + "outlet.14.current": "0.10", + "outlet.14.current.high.critical": "16", + "outlet.14.current.high.warning": "12.80", + "outlet.14.current.low.warning": "0", + "outlet.14.current.status": "good", + "outlet.14.delay.shutdown": "120", + "outlet.14.delay.start": "14", + "outlet.14.desc": "Outlet A14", + "outlet.14.groupid": "1", + "outlet.14.id": "14", + "outlet.14.name": "A14", + "outlet.14.power": "12", + "outlet.14.realpower": "7", + "outlet.14.status": "on", + "outlet.14.switchable": "yes", + "outlet.14.timer.shutdown": "-1", + "outlet.14.timer.start": "-1", + "outlet.14.type": "nema520", + "outlet.15.current": "0.03", + "outlet.15.current.high.critical": "16", + "outlet.15.current.high.warning": "12.80", + "outlet.15.current.low.warning": "0", + "outlet.15.current.status": "good", + "outlet.15.delay.shutdown": "120", + "outlet.15.delay.start": "15", + "outlet.15.desc": "Outlet A15", + "outlet.15.groupid": "1", + "outlet.15.id": "15", + "outlet.15.name": "A15", + "outlet.15.power": "3", + "outlet.15.realpower": "1", + "outlet.15.status": "on", + "outlet.15.switchable": "yes", + "outlet.15.timer.shutdown": "-1", + "outlet.15.timer.start": "-1", + "outlet.15.type": "nema520", + "outlet.16.current": "0.04", + "outlet.16.current.high.critical": "16", + "outlet.16.current.high.warning": "12.80", + "outlet.16.current.low.warning": "0", + "outlet.16.current.status": "good", + "outlet.16.delay.shutdown": "120", + "outlet.16.delay.start": "16", + "outlet.16.desc": "Outlet A16", + "outlet.16.groupid": "1", + "outlet.16.id": "16", + "outlet.16.name": "A16", + "outlet.16.power": "4", + "outlet.16.realpower": "1", + "outlet.16.status": "on", + "outlet.16.switchable": "yes", + "outlet.16.timer.shutdown": "-1", + "outlet.16.timer.start": "-1", + "outlet.16.type": "nema520", + "outlet.17.current": "0.19", + "outlet.17.current.high.critical": "16", + "outlet.17.current.high.warning": "12.80", + "outlet.17.current.low.warning": "0", + "outlet.17.current.status": "good", + "outlet.17.delay.shutdown": "0", + "outlet.17.delay.start": "0", + "outlet.17.desc": "Outlet A17", + "outlet.17.groupid": "1", + "outlet.17.id": "0", + "outlet.17.name": "A17", + "outlet.17.power": "23", + "outlet.17.realpower": "5", + "outlet.17.status": "on", + "outlet.17.switchable": "yes", + "outlet.17.timer.shutdown": "-1", + "outlet.17.timer.start": "-1", + "outlet.17.type": "nema520", + "outlet.18.current": "0.35", + "outlet.18.current.high.critical": "16", + "outlet.18.current.high.warning": "12.80", + "outlet.18.current.low.warning": "0", + "outlet.18.current.status": "good", + "outlet.18.delay.shutdown": "0", + "outlet.18.delay.start": "0", + "outlet.18.desc": "Outlet A18", + "outlet.18.groupid": "1", + "outlet.18.id": "0", + "outlet.18.name": "A18", + "outlet.18.power": "42", + "outlet.18.realpower": "34", + "outlet.18.status": "on", + "outlet.18.switchable": "yes", + "outlet.18.timer.shutdown": "-1", + "outlet.18.timer.start": "-1", + "outlet.18.type": "nema520", + 
"outlet.19.current": "0.12", + "outlet.19.current.high.critical": "16", + "outlet.19.current.high.warning": "12.80", + "outlet.19.current.low.warning": "0", + "outlet.19.current.status": "good", + "outlet.19.delay.shutdown": "0", + "outlet.19.delay.start": "0", + "outlet.19.desc": "Outlet A19", + "outlet.19.groupid": "1", + "outlet.19.id": "0", + "outlet.19.name": "A19", + "outlet.19.power": "15", + "outlet.19.realpower": "6", + "outlet.19.status": "on", + "outlet.19.switchable": "yes", + "outlet.19.timer.shutdown": "-1", + "outlet.19.timer.start": "-1", + "outlet.19.type": "nema520", + "outlet.2.current": "0.39", + "outlet.2.current.high.critical": "16", + "outlet.2.current.high.warning": "12.80", + "outlet.2.current.low.warning": "0", + "outlet.2.current.status": "good", + "outlet.2.delay.shutdown": "120", + "outlet.2.delay.start": "2", + "outlet.2.desc": "Outlet A2", + "outlet.2.groupid": "1", + "outlet.2.id": "2", + "outlet.2.name": "A2", + "outlet.2.power": "47", + "outlet.2.realpower": "43", + "outlet.2.status": "on", + "outlet.2.switchable": "yes", + "outlet.2.timer.shutdown": "-1", + "outlet.2.timer.start": "-1", + "outlet.2.type": "nema520", + "outlet.20.current": "0", + "outlet.20.current.high.critical": "16", + "outlet.20.current.high.warning": "12.80", + "outlet.20.current.low.warning": "0", + "outlet.20.current.status": "good", + "outlet.20.delay.shutdown": "120", + "outlet.20.delay.start": "20", + "outlet.20.desc": "Outlet A20", + "outlet.20.groupid": "1", + "outlet.20.id": "20", + "outlet.20.name": "A20", + "outlet.20.power": "0", + "outlet.20.realpower": "0", + "outlet.20.status": "on", + "outlet.20.switchable": "yes", + "outlet.20.timer.shutdown": "-1", + "outlet.20.timer.start": "-1", + "outlet.20.type": "nema520", + "outlet.21.current": "0", + "outlet.21.current.high.critical": "16", + "outlet.21.current.high.warning": "12.80", + "outlet.21.current.low.warning": "0", + "outlet.21.current.status": "good", + "outlet.21.delay.shutdown": "120", + "outlet.21.delay.start": "21", + "outlet.21.desc": "Outlet A21", + "outlet.21.groupid": "1", + "outlet.21.id": "21", + "outlet.21.name": "A21", + "outlet.21.power": "0", + "outlet.21.realpower": "0", + "outlet.21.status": "on", + "outlet.21.switchable": "yes", + "outlet.21.timer.shutdown": "-1", + "outlet.21.timer.start": "-1", + "outlet.21.type": "nema520", + "outlet.22.current": "0", + "outlet.22.current.high.critical": "16", + "outlet.22.current.high.warning": "12.80", + "outlet.22.current.low.warning": "0", + "outlet.22.current.status": "good", + "outlet.22.delay.shutdown": "0", + "outlet.22.delay.start": "0", + "outlet.22.desc": "Outlet A22", + "outlet.22.groupid": "1", + "outlet.22.id": "0", + "outlet.22.name": "A22", + "outlet.22.power": "0", + "outlet.22.realpower": "0", + "outlet.22.status": "on", + "outlet.22.switchable": "yes", + "outlet.22.timer.shutdown": "-1", + "outlet.22.timer.start": "-1", + "outlet.22.type": "nema520", + "outlet.23.current": "0.34", + "outlet.23.current.high.critical": "16", + "outlet.23.current.high.warning": "12.80", + "outlet.23.current.low.warning": "0", + "outlet.23.current.status": "good", + "outlet.23.delay.shutdown": "120", + "outlet.23.delay.start": "23", + "outlet.23.desc": "Outlet A23", + "outlet.23.groupid": "1", + "outlet.23.id": "23", + "outlet.23.name": "A23", + "outlet.23.power": "41", + "outlet.23.realpower": "39", + "outlet.23.status": "on", + "outlet.23.switchable": "yes", + "outlet.23.timer.shutdown": "-1", + "outlet.23.timer.start": "-1", + "outlet.23.type": "nema520", + 
"outlet.24.current": "0.19", + "outlet.24.current.high.critical": "16", + "outlet.24.current.high.warning": "12.80", + "outlet.24.current.low.warning": "0", + "outlet.24.current.status": "good", + "outlet.24.delay.shutdown": "0", + "outlet.24.delay.start": "0", + "outlet.24.desc": "Outlet A24", + "outlet.24.groupid": "1", + "outlet.24.id": "0", + "outlet.24.name": "A24", + "outlet.24.power": "23", + "outlet.24.realpower": "11", + "outlet.24.status": "on", + "outlet.24.switchable": "yes", + "outlet.24.timer.shutdown": "-1", + "outlet.24.timer.start": "-1", + "outlet.24.type": "nema520", + "outlet.3.current": "0.46", + "outlet.3.current.high.critical": "16", + "outlet.3.current.high.warning": "12.80", + "outlet.3.current.low.warning": "0", + "outlet.3.current.status": "good", + "outlet.3.delay.shutdown": "120", + "outlet.3.delay.start": "3", + "outlet.3.desc": "Outlet A3", + "outlet.3.groupid": "1", + "outlet.3.id": "3", + "outlet.3.name": "A3", + "outlet.3.power": "56", + "outlet.3.realpower": "53", + "outlet.3.status": "on", + "outlet.3.switchable": "yes", + "outlet.3.timer.shutdown": "-1", + "outlet.3.timer.start": "-1", + "outlet.3.type": "nema520", + "outlet.4.current": "0.44", + "outlet.4.current.high.critical": "16", + "outlet.4.current.high.warning": "12.80", + "outlet.4.current.low.warning": "0", + "outlet.4.current.status": "good", + "outlet.4.delay.shutdown": "120", + "outlet.4.delay.start": "4", + "outlet.4.desc": "Outlet A4", + "outlet.4.groupid": "1", + "outlet.4.id": "4", + "outlet.4.name": "A4", + "outlet.4.power": "53", + "outlet.4.realpower": "48", + "outlet.4.status": "on", + "outlet.4.switchable": "yes", + "outlet.4.timer.shutdown": "-1", + "outlet.4.timer.start": "-1", + "outlet.4.type": "nema520", + "outlet.5.current": "0.43", + "outlet.5.current.high.critical": "16", + "outlet.5.current.high.warning": "12.80", + "outlet.5.current.low.warning": "0", + "outlet.5.current.status": "good", + "outlet.5.delay.shutdown": "120", + "outlet.5.delay.start": "5", + "outlet.5.desc": "Outlet A5", + "outlet.5.groupid": "1", + "outlet.5.id": "5", + "outlet.5.name": "A5", + "outlet.5.power": "52", + "outlet.5.realpower": "48", + "outlet.5.status": "on", + "outlet.5.switchable": "yes", + "outlet.5.timer.shutdown": "-1", + "outlet.5.timer.start": "-1", + "outlet.5.type": "nema520", + "outlet.6.current": "1.07", + "outlet.6.current.high.critical": "16", + "outlet.6.current.high.warning": "12.80", + "outlet.6.current.low.warning": "0", + "outlet.6.current.status": "good", + "outlet.6.delay.shutdown": "120", + "outlet.6.delay.start": "6", + "outlet.6.desc": "Outlet A6", + "outlet.6.groupid": "1", + "outlet.6.id": "6", + "outlet.6.name": "A6", + "outlet.6.power": "131", + "outlet.6.realpower": "118", + "outlet.6.status": "on", + "outlet.6.switchable": "yes", + "outlet.6.timer.shutdown": "-1", + "outlet.6.timer.start": "-1", + "outlet.6.type": "nema520", + "outlet.7.current": "0", + "outlet.7.current.high.critical": "16", + "outlet.7.current.high.warning": "12.80", + "outlet.7.current.low.warning": "0", + "outlet.7.current.status": "good", + "outlet.7.delay.shutdown": "120", + "outlet.7.delay.start": "7", + "outlet.7.desc": "Outlet A7", + "outlet.7.groupid": "1", + "outlet.7.id": "7", + "outlet.7.name": "A7", + "outlet.7.power": "0", + "outlet.7.realpower": "0", + "outlet.7.status": "on", + "outlet.7.switchable": "yes", + "outlet.7.timer.shutdown": "-1", + "outlet.7.timer.start": "-1", + "outlet.7.type": "nema520", + "outlet.8.current": "0", + "outlet.8.current.high.critical": "16", + 
"outlet.8.current.high.warning": "12.80", + "outlet.8.current.low.warning": "0", + "outlet.8.current.status": "good", + "outlet.8.delay.shutdown": "120", + "outlet.8.delay.start": "8", + "outlet.8.desc": "Outlet A8", + "outlet.8.groupid": "1", + "outlet.8.id": "8", + "outlet.8.name": "A8", + "outlet.8.power": "0", + "outlet.8.realpower": "0", + "outlet.8.status": "on", + "outlet.8.switchable": "yes", + "outlet.8.timer.shutdown": "-1", + "outlet.8.timer.start": "-1", + "outlet.8.type": "nema520", + "outlet.9.current": "0", + "outlet.9.current.high.critical": "16", + "outlet.9.current.high.warning": "12.80", + "outlet.9.current.low.warning": "0", + "outlet.9.current.status": "good", + "outlet.9.delay.shutdown": "120", + "outlet.9.delay.start": "9", + "outlet.9.desc": "Outlet A9", + "outlet.9.groupid": "1", + "outlet.9.id": "9", + "outlet.9.name": "A9", + "outlet.9.power": "0", + "outlet.9.realpower": "0", + "outlet.9.status": "on", + "outlet.9.switchable": "yes", + "outlet.9.timer.shutdown": "-1", + "outlet.9.timer.start": "-1", + "outlet.9.type": "nema520", + "outlet.count": "24", + "outlet.current": "43.05", + "outlet.desc": "All outlets", + "outlet.frequency": "60", + "outlet.group.1.color": "16051527", + "outlet.group.1.count": "24", + "outlet.group.1.desc": "Section A", + "outlet.group.1.id": "1", + "outlet.group.1.input": "1", + "outlet.group.1.name": "A", + "outlet.group.1.phase": "1", + "outlet.group.1.status": "on", + "outlet.group.1.type": "outlet-section", + "outlet.group.1.voltage": "122.83", + "outlet.group.1.voltage.high.critical": "140", + "outlet.group.1.voltage.high.warning": "130", + "outlet.group.1.voltage.low.critical": "90", + "outlet.group.1.voltage.low.warning": "95", + "outlet.group.1.voltage.status": "good", + "outlet.group.count": "1", + "outlet.id": "0", + "outlet.switchable": "yes", + "outlet.voltage": "122.91", + "ups.firmware": "05.01.0002", + "ups.mfr": "EATON", + "ups.model": "ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE", + "ups.serial": "A000A00000", + "ups.status": "", + "ups.type": "pdu" +} diff --git a/tests/components/nut/test_button.py b/tests/components/nut/test_button.py new file mode 100644 index 00000000000..bbcc521b7f3 --- /dev/null +++ b/tests/components/nut/test_button.py @@ -0,0 +1,102 @@ +"""Test the NUT button platform.""" + +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.nut.const import INTEGRATION_SUPPORTED_COMMANDS +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .util import async_init_integration + + +@pytest.mark.parametrize( + "model", + [ + "CP1350C", + "5E650I", + "5E850I", + "CP1500PFCLCD", + "DL650ELCD", + "EATON5P1550", + "blazer_usb", + ], +) +async def test_buttons_ups( + hass: HomeAssistant, entity_registry: er.EntityRegistry, model: str +) -> None: + """Tests that there are no standard buttons.""" + + list_commands_return_value = { + supported_command: supported_command + for supported_command in INTEGRATION_SUPPORTED_COMMANDS + } + + await async_init_integration( + hass, + model, + list_commands_return_value=list_commands_return_value, + ) + + button = hass.states.get("button.ups1_power_cycle_outlet_1") + assert not button + + +@pytest.mark.parametrize( + ("model", "unique_id_base"), + [ + ( + "EATON-EPDU-G3", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000_", + ), + ], +) +async def 
test_buttons_pdu_dynamic_outlets( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + model: str, + unique_id_base: str, +) -> None: + """Tests that the button entities are correct.""" + + list_commands_return_value = { + supported_command: supported_command + for supported_command in INTEGRATION_SUPPORTED_COMMANDS + } + + for num in range(1, 25): + command = f"outlet.{num!s}.load.cycle" + list_commands_return_value[command] = command + + await async_init_integration( + hass, + model, + list_commands_return_value=list_commands_return_value, + ) + + entity_id = "button.ups1_power_cycle_outlet_a1" + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{unique_id_base}outlet.1.load.cycle" + + button = hass.states.get(entity_id) + assert button + assert button.state == STATE_UNKNOWN + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + + button = hass.states.get(entity_id) + assert button.state != STATE_UNKNOWN + + button = hass.states.get("button.ups1_power_cycle_outlet_25") + assert not button + + button = hass.states.get("button.ups1_power_cycle_outlet_a25") + assert not button diff --git a/tests/components/nut/test_sensor.py b/tests/components/nut/test_sensor.py index eb171c39011..cdec6c5083b 100644 --- a/tests/components/nut/test_sensor.py +++ b/tests/components/nut/test_sensor.py @@ -12,6 +12,7 @@ from homeassistant.const import ( CONF_RESOURCES, PERCENTAGE, STATE_UNKNOWN, + UnitOfElectricCurrent, UnitOfElectricPotential, ) from homeassistant.core import HomeAssistant @@ -103,7 +104,7 @@ async def test_ups_devices_with_unique_ids( [ ( "EATON-EPDU-G3", - "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000_", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000", ), ], ) @@ -115,11 +116,13 @@ async def test_pdu_devices_with_unique_ids( ) -> None: """Test creation of device sensors with unique ids.""" - await _test_sensor_and_attributes( + await async_init_integration(hass, model) + + _test_sensor_and_attributes( hass, entity_registry, model, - unique_id=f"{unique_id_base}input.voltage", + unique_id=f"{unique_id_base}_input.voltage", device_id="sensor.ups1_input_voltage", state_value="122.91", expected_attributes={ @@ -130,11 +133,11 @@ async def test_pdu_devices_with_unique_ids( }, ) - await _test_sensor_and_attributes( + _test_sensor_and_attributes( hass, entity_registry, model, - unique_id=f"{unique_id_base}ambient.humidity.status", + unique_id=f"{unique_id_base}_ambient.humidity.status", device_id="sensor.ups1_ambient_humidity_status", state_value="good", expected_attributes={ @@ -143,11 +146,11 @@ async def test_pdu_devices_with_unique_ids( }, ) - await _test_sensor_and_attributes( + _test_sensor_and_attributes( hass, entity_registry, model, - unique_id=f"{unique_id_base}ambient.temperature.status", + unique_id=f"{unique_id_base}_ambient.temperature.status", device_id="sensor.ups1_ambient_temperature_status", state_value="good", expected_attributes={ @@ -241,3 +244,89 @@ async def test_stale_options( state = hass.states.get("sensor.ups1_battery_charge") assert state.state == "10" + + +@pytest.mark.parametrize( + ("model", "unique_id_base"), + [ + ( + "EATON-EPDU-G3-AMBIENT-NOT-PRESENT", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000", + ), + ], +) +async def test_pdu_devices_ambient_not_present( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + model: str, + unique_id_base: str, +) -> 
None: + """Test that ambient sensors not created.""" + + await async_init_integration(hass, model) + + entry = entity_registry.async_get("sensor.ups1_ambient_humidity") + assert not entry + + entry = entity_registry.async_get("sensor.ups1_ambient_humidity_status") + assert not entry + + entry = entity_registry.async_get("sensor.ups1_ambient_temperature") + assert not entry + + entry = entity_registry.async_get("sensor.ups1_ambient_temperature_status") + assert not entry + + +@pytest.mark.parametrize( + ("model", "unique_id_base"), + [ + ( + "EATON-EPDU-G3", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000", + ), + ], +) +async def test_pdu_dynamic_outlets( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + model: str, + unique_id_base: str, +) -> None: + """Test for dynamically created outlet sensors.""" + + await async_init_integration(hass, model) + + _test_sensor_and_attributes( + hass, + entity_registry, + model, + unique_id=f"{unique_id_base}_outlet.1.current", + device_id="sensor.ups1_outlet_a1_current", + state_value="0", + expected_attributes={ + "device_class": SensorDeviceClass.CURRENT, + "friendly_name": "Ups1 Outlet A1 current", + "unit_of_measurement": UnitOfElectricCurrent.AMPERE, + }, + ) + + _test_sensor_and_attributes( + hass, + entity_registry, + model, + unique_id=f"{unique_id_base}_outlet.24.current", + device_id="sensor.ups1_outlet_a24_current", + state_value="0.19", + expected_attributes={ + "device_class": SensorDeviceClass.CURRENT, + "friendly_name": "Ups1 Outlet A24 current", + "unit_of_measurement": UnitOfElectricCurrent.AMPERE, + }, + ) + + entry = entity_registry.async_get("sensor.ups1_outlet_25_current") + assert not entry + + entry = entity_registry.async_get("sensor.ups1_outlet_a25_current") + assert not entry diff --git a/tests/components/nut/test_switch.py b/tests/components/nut/test_switch.py new file mode 100644 index 00000000000..f2de5eeb5e6 --- /dev/null +++ b/tests/components/nut/test_switch.py @@ -0,0 +1,159 @@ +"""Test the NUT switch platform.""" + +import json +from unittest.mock import AsyncMock + +import pytest + +from homeassistant.components.nut.const import INTEGRATION_SUPPORTED_COMMANDS +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_ON, + STATE_UNKNOWN, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .util import async_init_integration + +from tests.common import load_fixture + + +@pytest.mark.parametrize( + "model", + [ + "CP1350C", + "5E650I", + "5E850I", + "CP1500PFCLCD", + "DL650ELCD", + "EATON5P1550", + "blazer_usb", + ], +) +async def test_switch_ups( + hass: HomeAssistant, entity_registry: er.EntityRegistry, model: str +) -> None: + """Tests that there are no standard switches.""" + + list_commands_return_value = { + supported_command: supported_command + for supported_command in INTEGRATION_SUPPORTED_COMMANDS + } + + await async_init_integration( + hass, + model, + list_commands_return_value=list_commands_return_value, + ) + + switch = hass.states.get("switch.ups1_power_outlet_1") + assert not switch + + +@pytest.mark.parametrize( + ("model", "unique_id_base"), + [ + ( + "EATON-EPDU-G3", + "EATON_ePDU MA 00U-C IN: TYPE 00A 0P OUT: 00xTYPE_A000A00000", + ), + ], +) +async def test_switch_pdu_dynamic_outlets( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + model: str, + unique_id_base: str, +) -> None: + """Tests 
that the switch entities are correct.""" + + list_commands_return_value = { + supported_command: supported_command + for supported_command in INTEGRATION_SUPPORTED_COMMANDS + } + + for num in range(1, 25): + command = f"outlet.{num!s}.load.on" + list_commands_return_value[command] = command + command = f"outlet.{num!s}.load.off" + list_commands_return_value[command] = command + + ups_fixture = f"nut/{model}.json" + list_vars = json.loads(load_fixture(ups_fixture)) + + run_command = AsyncMock() + + await async_init_integration( + hass, + model, + list_vars=list_vars, + list_commands_return_value=list_commands_return_value, + run_command=run_command, + ) + + entity_id = "switch.ups1_power_outlet_a1" + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{unique_id_base}_outlet.1.load.poweronoff" + + switch = hass.states.get(entity_id) + assert switch + assert switch.state == STATE_ON + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + run_command.assert_called_with("ups1", "outlet.1.load.off") + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + run_command.assert_called_with("ups1", "outlet.1.load.on") + + switch = hass.states.get("switch.ups1_power_outlet_25") + assert not switch + + switch = hass.states.get("switch.ups1_power_outlet_a25") + assert not switch + + +async def test_switch_pdu_dynamic_outlets_state_unknown( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch entity with missing status is reported as unknown.""" + + config_entry = await async_init_integration( + hass, + list_ups={"ups1": "UPS 1"}, + list_vars={ + "outlet.count": "1", + "outlet.1.switchable": "yes", + "outlet.1.name": "A1", + }, + list_commands_return_value={ + "outlet.1.load.on": None, + "outlet.1.load.off": None, + }, + ) + + entity_id = "switch.ups1_power_outlet_a1" + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"{config_entry.entry_id}_outlet.1.load.poweronoff" + + switch = hass.states.get(entity_id) + assert switch + assert switch.state == STATE_UNKNOWN diff --git a/tests/components/nut/util.py b/tests/components/nut/util.py index bd82ffdd6b4..07c073f0286 100644 --- a/tests/components/nut/util.py +++ b/tests/components/nut/util.py @@ -82,7 +82,7 @@ async def async_init_integration( return entry -async def _test_sensor_and_attributes( +def _test_sensor_and_attributes( hass: HomeAssistant, entity_registry: er.EntityRegistry, model: str, @@ -91,9 +91,8 @@ async def _test_sensor_and_attributes( state_value: str, expected_attributes: dict, ) -> None: - """Test creation of device sensors with unique ids.""" + """Test all of the sensor entry attributes.""" - await async_init_integration(hass, model) entry = entity_registry.async_get(device_id) assert entry assert entry.unique_id == unique_id diff --git a/tests/components/nws/const.py b/tests/components/nws/const.py index 39e954af15a..1de8f67fbdb 100644 --- a/tests/components/nws/const.py +++ b/tests/components/nws/const.py @@ -176,7 +176,7 @@ WEATHER_EXPECTED_OBSERVATION_METRIC = { ATTR_WEATHER_HUMIDITY: 10, } -NONE_OBSERVATION = {key: None for key in DEFAULT_OBSERVATION} +NONE_OBSERVATION = dict.fromkeys(DEFAULT_OBSERVATION) DEFAULT_FORECAST = [ { @@ -235,4 +235,4 @@ EXPECTED_FORECAST_METRIC = { ATTR_FORECAST_HUMIDITY: 75, } -NONE_FORECAST = [{key: None for key in DEFAULT_FORECAST[0]}] 
+NONE_FORECAST = [dict.fromkeys(DEFAULT_FORECAST[0])] diff --git a/tests/components/ohme/conftest.py b/tests/components/ohme/conftest.py index 01cc668ae32..e8a7d27b2c3 100644 --- a/tests/components/ohme/conftest.py +++ b/tests/components/ohme/conftest.py @@ -60,10 +60,14 @@ def mock_client(): client.preconditioning = 15 client.serial = "chargerid" client.ct_connected = True + client.cap_available = True + client.cap_enabled = True client.energy = 1000 client.device_info = { "name": "Ohme Home Pro", "model": "Home Pro", "sw_version": "v2.65", } + client.vehicles = ["Nissan Leaf", "Tesla Model 3"] + client.current_vehicle = "Nissan Leaf" yield client diff --git a/tests/components/ohme/snapshots/test_diagnostics.ambr b/tests/components/ohme/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..f51c701b71b --- /dev/null +++ b/tests/components/ohme/snapshots/test_diagnostics.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'cap_available': True, + 'ct_connected': True, + 'device_info': dict({ + 'model': 'Home Pro', + 'name': 'Ohme Home Pro', + 'sw_version': 'v2.65', + }), + 'vehicles': list([ + 'Nissan Leaf', + 'Tesla Model 3', + ]), + }) +# --- diff --git a/tests/components/ohme/snapshots/test_select.ambr b/tests/components/ohme/snapshots/test_select.ambr index 8eec0556889..063a9616588 100644 --- a/tests/components/ohme/snapshots/test_select.ambr +++ b/tests/components/ohme/snapshots/test_select.ambr @@ -57,3 +57,59 @@ 'state': 'unknown', }) # --- +# name: test_selects[select.ohme_home_pro_vehicle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Nissan Leaf', + 'Tesla Model 3', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.ohme_home_pro_vehicle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Vehicle', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle', + 'unique_id': 'chargerid_vehicle', + 'unit_of_measurement': None, + }) +# --- +# name: test_selects[select.ohme_home_pro_vehicle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohme Home Pro Vehicle', + 'options': list([ + 'Nissan Leaf', + 'Tesla Model 3', + ]), + }), + 'context': , + 'entity_id': 'select.ohme_home_pro_vehicle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Nissan Leaf', + }) +# --- diff --git a/tests/components/ohme/snapshots/test_switch.ambr b/tests/components/ohme/snapshots/test_switch.ambr index 49bf5d5709a..4790d96c551 100644 --- a/tests/components/ohme/snapshots/test_switch.ambr +++ b/tests/components/ohme/snapshots/test_switch.ambr @@ -46,6 +46,53 @@ 'state': 'on', }) # --- +# name: test_switches[switch.ohme_home_pro_price_cap-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.ohme_home_pro_price_cap', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Price cap', + 'platform': 'ohme', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'price_cap', + 'unique_id': 'chargerid_price_cap', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch.ohme_home_pro_price_cap-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ohme Home Pro Price cap', + }), + 'context': , + 'entity_id': 'switch.ohme_home_pro_price_cap', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_switches[switch.ohme_home_pro_require_approval-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ohme/test_config_flow.py b/tests/components/ohme/test_config_flow.py index bb7ecc00bdc..b8754711d76 100644 --- a/tests/components/ohme/test_config_flow.py +++ b/tests/components/ohme/test_config_flow.py @@ -182,3 +182,84 @@ async def test_reauth_fail( await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" + + +async def test_reconfigure_form(hass: HomeAssistant, mock_client: MagicMock) -> None: + """Test reconfigure form.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reconfigure", "entry_id": entry.entry_id} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + +@pytest.mark.parametrize( + ("test_exception", "expected_error"), + [(AuthException, "invalid_auth"), (ApiException, "unknown")], +) +async def test_reconfigure_fail( + hass: HomeAssistant, + mock_client: MagicMock, + test_exception: Exception, + expected_error: str, +) -> None: + """Test reconfigure errors.""" + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_EMAIL: "test@example.com", + CONF_PASSWORD: "hunter1", + }, + ) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "reconfigure", "entry_id": entry.entry_id} + ) + + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + # Simulate failed login attempt + mock_client.async_login.side_effect = test_exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter1"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + + # Retry with a successful login + mock_client.async_login.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "hunter2"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/ohme/test_diagnostics.py b/tests/components/ohme/test_diagnostics.py new file mode 100644 index 00000000000..6aab1262189 --- /dev/null +++ b/tests/components/ohme/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Tests for the 
diagnostics data provided by the Ohme integration.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/ohme/test_services.py b/tests/components/ohme/test_services.py index 76c7ce94b57..c228ddcd9a7 100644 --- a/tests/components/ohme/test_services.py +++ b/tests/components/ohme/test_services.py @@ -1,13 +1,16 @@ """Tests for services.""" -from unittest.mock import MagicMock +from datetime import datetime +from unittest.mock import AsyncMock, MagicMock +from ohme import ChargeSlot import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.ohme.const import DOMAIN from homeassistant.components.ohme.services import ( ATTR_CONFIG_ENTRY, + ATTR_PRICE_CAP, SERVICE_LIST_CHARGE_SLOTS, ) from homeassistant.core import HomeAssistant @@ -29,11 +32,11 @@ async def test_list_charge_slots( await setup_integration(hass, mock_config_entry) mock_client.slots = [ - { - "start": "2024-12-30T04:00:00+00:00", - "end": "2024-12-30T04:30:39+00:00", - "energy": 2.042, - } + ChargeSlot( + datetime.fromisoformat("2024-12-30T04:00:00+00:00"), + datetime.fromisoformat("2024-12-30T04:30:39+00:00"), + 2.042, + ) ] assert snapshot == await hass.services.async_call( @@ -47,6 +50,30 @@ async def test_list_charge_slots( ) +async def test_set_price_cap( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test set price cap service.""" + + await setup_integration(hass, mock_config_entry) + mock_client.async_change_price_cap = AsyncMock() + + await hass.services.async_call( + DOMAIN, + "set_price_cap", + { + ATTR_CONFIG_ENTRY: mock_config_entry.entry_id, + ATTR_PRICE_CAP: 10.0, + }, + blocking=True, + ) + + mock_client.async_change_price_cap.assert_called_once_with(cap=10.0) + + async def test_list_charge_slots_exception( hass: HomeAssistant, mock_config_entry: MockConfigEntry, diff --git a/tests/components/ohme/test_switch.py b/tests/components/ohme/test_switch.py index b16b70d67f8..8d82a5a3ea4 100644 --- a/tests/components/ohme/test_switch.py +++ b/tests/components/ohme/test_switch.py @@ -1,6 +1,6 @@ """Tests for switches.""" -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from syrupy import SnapshotAssertion @@ -32,7 +32,49 @@ async def test_switches( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) -async def test_switch_on( +async def test_cap_switch_on( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the switch turn_on action.""" + await setup_integration(hass, mock_config_entry) + mock_client.async_change_price_cap = AsyncMock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "switch.ohme_home_pro_price_cap", + }, + blocking=True, + 
) + + mock_client.async_change_price_cap.assert_called_once_with(True) + + +async def test_cap_switch_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_client: MagicMock, +) -> None: + """Test the switch turn_off action.""" + await setup_integration(hass, mock_config_entry) + mock_client.async_change_price_cap = AsyncMock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: "switch.ohme_home_pro_price_cap", + }, + blocking=True, + ) + + mock_client.async_change_price_cap.assert_called_once_with(False) + + +async def test_config_switch_on( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_client: MagicMock, @@ -52,7 +94,7 @@ async def test_switch_on( assert len(mock_client.async_set_configuration_value.mock_calls) == 1 -async def test_switch_off( +async def test_config_switch_off( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_client: MagicMock, diff --git a/tests/components/ollama/snapshots/test_conversation.ambr b/tests/components/ollama/snapshots/test_conversation.ambr index 93f3b03d9af..de414019317 100644 --- a/tests/components/ollama/snapshots/test_conversation.ambr +++ b/tests/components/ollama/snapshots/test_conversation.ambr @@ -1,6 +1,7 @@ # serializer version: 1 # name: test_unknown_hass_api dict({ + 'continue_conversation': False, 'conversation_id': '1234', 'response': IntentResponse( card=dict({ diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index db641ba703b..c718aab1e81 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -4,6 +4,7 @@ from collections.abc import AsyncGenerator from typing import Any from unittest.mock import AsyncMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory from ollama import Message, ResponseError import pytest from syrupy.assertion import SnapshotAssertion @@ -404,7 +405,10 @@ async def test_unknown_hass_api( async def test_message_history_trimming( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + freezer: FrozenDateTimeFactory, ) -> None: """Test that a single message history is trimmed according to the config.""" response_idx = 0 diff --git a/tests/components/onboarding/snapshots/test_views.ambr b/tests/components/onboarding/snapshots/test_views.ambr index 2d084bd9ade..48ddf30d1f2 100644 --- a/tests/components/onboarding/snapshots/test_views.ambr +++ b/tests/components/onboarding/snapshots/test_views.ambr @@ -62,7 +62,7 @@ 'with_automatic_settings': None, }), ]), - 'last_non_idle_event': None, + 'last_action_event': None, 'state': 'idle', }) # --- diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index b7189bda6cc..509dece7dd0 100644 --- a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -6,12 +6,16 @@ from http import HTTPStatus from io import StringIO import os from typing import Any -from unittest.mock import ANY, AsyncMock, Mock, patch +from unittest.mock import ANY, DEFAULT, AsyncMock, MagicMock, Mock, patch +from hass_nabucasa.auth import CognitoAuth +from hass_nabucasa.const import STATE_CONNECTED +from hass_nabucasa.iot import CloudIoT import pytest from syrupy import SnapshotAssertion from homeassistant.components import backup, onboarding +from homeassistant.components.cloud import DOMAIN as CLOUD_DOMAIN, 
CloudClient from homeassistant.components.onboarding import const, views from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -762,7 +766,7 @@ async def test_onboarding_backup_info( hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, ) -> None: - """Test returning installation type during onboarding.""" + """Test backup info.""" mock_storage(hass_storage, {"done": []}) assert await async_setup_component(hass, "onboarding", {}) @@ -879,7 +883,7 @@ async def test_onboarding_backup_restore( params: dict[str, Any], expected_kwargs: dict[str, Any], ) -> None: - """Test returning installation type during onboarding.""" + """Test restore backup.""" mock_storage(hass_storage, {"done": []}) assert await async_setup_component(hass, "onboarding", {}) @@ -976,7 +980,7 @@ async def test_onboarding_backup_restore_error( expected_json: str, restore_calls: int, ) -> None: - """Test returning installation type during onboarding.""" + """Test restore backup fails.""" mock_storage(hass_storage, {"done": []}) assert await async_setup_component(hass, "onboarding", {}) @@ -1020,7 +1024,7 @@ async def test_onboarding_backup_restore_unexpected_error( expected_message: str, restore_calls: int, ) -> None: - """Test returning installation type during onboarding.""" + """Test restore backup fails.""" mock_storage(hass_storage, {"done": []}) assert await async_setup_component(hass, "onboarding", {}) @@ -1046,7 +1050,7 @@ async def test_onboarding_backup_upload( hass_storage: dict[str, Any], hass_client: ClientSessionGenerator, ) -> None: - """Test returning installation type during onboarding.""" + """Test upload backup.""" mock_storage(hass_storage, {"done": []}) assert await async_setup_component(hass, "onboarding", {}) @@ -1067,3 +1071,139 @@ async def test_onboarding_backup_upload( assert resp.status == 201 assert await resp.json() == {"backup_id": "abc123"} mock_receive.assert_called_once_with(agent_ids=["backup.local"], contents=ANY) + + +@pytest.fixture(name="cloud") +async def cloud_fixture() -> AsyncGenerator[MagicMock]: + """Mock the cloud object. + + See the real hass_nabucasa.Cloud class for how to configure the mock. + """ + with patch( + "homeassistant.components.cloud.Cloud", autospec=True + ) as mock_cloud_class: + mock_cloud = mock_cloud_class.return_value + + mock_cloud.auth = MagicMock(spec=CognitoAuth) + mock_cloud.iot = MagicMock( + spec=CloudIoT, last_disconnect_reason=None, state=STATE_CONNECTED + ) + + def set_up_mock_cloud( + cloud_client: CloudClient, mode: str, **kwargs: Any + ) -> DEFAULT: + """Set up mock cloud with a mock constructor.""" + + # Attributes set in the constructor with parameters. + mock_cloud.client = cloud_client + + return DEFAULT + + mock_cloud_class.side_effect = set_up_mock_cloud + + # Attributes that we mock with default values. 
+ mock_cloud.id_token = None + mock_cloud.is_logged_in = False + + yield mock_cloud + + +@pytest.fixture(name="setup_cloud") +async def setup_cloud_fixture(hass: HomeAssistant, cloud: MagicMock) -> None: + """Fixture that sets up cloud.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, CLOUD_DOMAIN, {}) + await hass.async_block_till_done() + + +@pytest.mark.usefixtures("setup_cloud") +async def test_onboarding_cloud_forgot_password( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_client: ClientSessionGenerator, + cloud: MagicMock, +) -> None: + """Test cloud forgot password.""" + mock_storage(hass_storage, {"done": []}) + + assert await async_setup_component(hass, "onboarding", {}) + await hass.async_block_till_done() + + client = await hass_client() + + mock_cognito = cloud.auth + + req = await client.post( + "/api/onboarding/cloud/forgot_password", json={"email": "hello@bla.com"} + ) + + assert req.status == HTTPStatus.OK + assert mock_cognito.async_forgot_password.call_count == 1 + + +@pytest.mark.usefixtures("setup_cloud") +async def test_onboarding_cloud_login( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_client: ClientSessionGenerator, + cloud: MagicMock, +) -> None: + """Test logging in to cloud.""" + mock_storage(hass_storage, {"done": []}) + + assert await async_setup_component(hass, "onboarding", {}) + await hass.async_block_till_done() + + client = await hass_client() + req = await client.post( + "/api/onboarding/cloud/login", + json={"email": "my_username", "password": "my_password"}, + ) + + assert req.status == HTTPStatus.OK + data = await req.json() + assert data == {"cloud_pipeline": None, "success": True} + assert cloud.login.call_count == 1 + + +@pytest.mark.usefixtures("setup_cloud") +async def test_onboarding_cloud_logout( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_client: ClientSessionGenerator, + cloud: MagicMock, +) -> None: + """Test logging out from cloud.""" + mock_storage(hass_storage, {"done": []}) + + assert await async_setup_component(hass, "onboarding", {}) + await hass.async_block_till_done() + + client = await hass_client() + req = await client.post("/api/onboarding/cloud/logout") + + assert req.status == HTTPStatus.OK + data = await req.json() + assert data == {"message": "ok"} + assert cloud.logout.call_count == 1 + + +@pytest.mark.usefixtures("setup_cloud") +async def test_onboarding_cloud_status( + hass: HomeAssistant, + hass_storage: dict[str, Any], + hass_client: ClientSessionGenerator, + cloud: MagicMock, +) -> None: + """Test cloud status.""" + mock_storage(hass_storage, {"done": []}) + + assert await async_setup_component(hass, "onboarding", {}) + await hass.async_block_till_done() + + client = await hass_client() + req = await client.get("/api/onboarding/cloud/status") + + assert req.status == HTTPStatus.OK + data = await req.json() + assert data == {"logged_in": False} diff --git a/tests/components/ondilo_ico/conftest.py b/tests/components/ondilo_ico/conftest.py index d35e5ac0003..891f60eb549 100644 --- a/tests/components/ondilo_ico/conftest.py +++ b/tests/components/ondilo_ico/conftest.py @@ -7,6 +7,7 @@ from unittest.mock import MagicMock, patch import pytest from homeassistant.components.ondilo_ico.const import DOMAIN +from homeassistant.util.json import JsonArrayType from tests.common import ( MockConfigEntry, @@ -71,7 +72,7 @@ def ico_details2() -> dict[str, Any]: @pytest.fixture(scope="package") -def last_measures() -> 
list[dict[str, Any]]: +def last_measures() -> JsonArrayType: """Pool measurements.""" return load_json_array_fixture("last_measures.json", DOMAIN) diff --git a/tests/components/ondilo_ico/snapshots/test_sensor.ambr b/tests/components/ondilo_ico/snapshots/test_sensor.ambr index 84a2d3da4cb..7df2bfc22ce 100644 --- a/tests/components/ondilo_ico/snapshots/test_sensor.ambr +++ b/tests/components/ondilo_ico/snapshots/test_sensor.ambr @@ -13,7 +13,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.pool_1_battery', 'has_entity_name': True, 'hidden_by': None, @@ -167,7 +167,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.pool_1_rssi', 'has_entity_name': True, 'hidden_by': None, @@ -372,7 +372,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.pool_2_battery', 'has_entity_name': True, 'hidden_by': None, @@ -526,7 +526,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.pool_2_rssi', 'has_entity_name': True, 'hidden_by': None, diff --git a/tests/components/ondilo_ico/test_config_flow.py b/tests/components/ondilo_ico/test_config_flow.py index deab2a8e0b9..19407cecb9d 100644 --- a/tests/components/ondilo_ico/test_config_flow.py +++ b/tests/components/ondilo_ico/test_config_flow.py @@ -4,15 +4,13 @@ from unittest.mock import patch import pytest -from homeassistant import config_entries, setup +from homeassistant import config_entries from homeassistant.components.ondilo_ico.const import ( DOMAIN, OAUTH2_AUTHORIZE, - OAUTH2_CLIENTID, - OAUTH2_CLIENTSECRET, + OAUTH2_CLIENT_ID as CLIENT_ID, OAUTH2_TOKEN, ) -from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -21,13 +19,12 @@ from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator -CLIENT_ID = OAUTH2_CLIENTID -CLIENT_SECRET = OAUTH2_CLIENTSECRET - -async def test_abort_if_existing_entry(hass: HomeAssistant) -> None: +async def test_abort_if_existing_entry( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Check flow abort when an entry already exist.""" - MockConfigEntry(domain=DOMAIN).add_to_hass(hass) + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -43,15 +40,6 @@ async def test_full_flow( aioclient_mock: AiohttpClientMocker, ) -> None: """Check full flow.""" - assert await setup.async_setup_component( - hass, - DOMAIN, - { - DOMAIN: {CONF_CLIENT_ID: CLIENT_ID, CONF_CLIENT_SECRET: CLIENT_SECRET}, - "http": {"base_url": "https://example.com"}, - }, - ) - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) diff --git a/tests/components/ondilo_ico/test_sensor.py b/tests/components/ondilo_ico/test_sensor.py index 0043d22f6c0..c944353724e 100644 --- a/tests/components/ondilo_ico/test_sensor.py +++ b/tests/components/ondilo_ico/test_sensor.py @@ -45,7 +45,9 @@ async def test_no_ico_for_one_pool( # Only the second pool is created assert len(hass.states.async_all()) == 7 assert 
hass.states.get("sensor.pool_1_temperature") is None - assert hass.states.get("sensor.pool_2_rssi").state == next( + state = hass.states.get("sensor.pool_2_rssi") + assert state is not None + assert state.state == next( str(item["value"]) for item in last_measures if item["data_type"] == "rssi" ) diff --git a/tests/components/onedrive/test_services.py b/tests/components/onedrive/test_services.py new file mode 100644 index 00000000000..31d2d932cd0 --- /dev/null +++ b/tests/components/onedrive/test_services.py @@ -0,0 +1,280 @@ +"""Tests for OneDrive services.""" + +from collections.abc import Generator +from dataclasses import dataclass +import re +from typing import Any, cast +from unittest.mock import MagicMock, Mock, patch + +from onedrive_personal_sdk.exceptions import OneDriveException +import pytest + +from homeassistant.components.onedrive.const import DOMAIN +from homeassistant.components.onedrive.services import ( + CONF_CONFIG_ENTRY_ID, + CONF_DESTINATION_FOLDER, + UPLOAD_SERVICE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_FILENAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from . import setup_integration + +from tests.common import MockConfigEntry + +TEST_FILENAME = "doorbell_snapshot.jpg" +DESINATION_FOLDER = "TestFolder" + + +@dataclass +class MockUploadFile: + """Dataclass used to configure the test with a fake file behavior.""" + + content: bytes = b"image bytes" + exists: bool = True + is_allowed_path: bool = True + size: int | None = None + + +@pytest.fixture(name="upload_file") +def upload_file_fixture() -> MockUploadFile: + """Fixture to set up test configuration with a fake file.""" + return MockUploadFile() + + +@pytest.fixture(autouse=True) +def mock_upload_file( + hass: HomeAssistant, upload_file: MockUploadFile +) -> Generator[None]: + """Fixture that mocks out the file calls using the FakeFile fixture.""" + with ( + patch( + "homeassistant.components.onedrive.services.Path.read_bytes", + return_value=upload_file.content, + ), + patch( + "homeassistant.components.onedrive.services.Path.exists", + return_value=upload_file.exists, + ), + patch.object( + hass.config, "is_allowed_path", return_value=upload_file.is_allowed_path + ), + patch("pathlib.Path.stat") as mock_stat, + ): + mock_stat.return_value = Mock() + mock_stat.return_value.st_size = ( + upload_file.size if upload_file.size else len(upload_file.content) + ) + yield + + +async def test_upload_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test service call to upload content.""" + await setup_integration(hass, mock_config_entry) + + assert hass.services.has_service(DOMAIN, "upload") + + response = await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + return_response=True, + ) + + assert response + assert response["files"] + assert cast(list[dict[str, Any]], response["files"])[0]["id"] == "id" + + +async def test_upload_service_no_response( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test service call to upload content without response.""" + await setup_integration(hass, mock_config_entry) + + assert hass.services.has_service(DOMAIN, "upload") + + response = await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: 
mock_config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + ) + + assert response is None + + +async def test_upload_service_config_entry_not_found( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a config entry that does not exist.""" + await setup_integration(hass, mock_config_entry) + with pytest.raises(HomeAssistantError, match="not found in registry"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: "invalid-config-entry-id", + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + return_response=True, + ) + + +async def test_config_entry_not_loaded( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a config entry that is not loaded.""" + await setup_integration(hass, mock_config_entry) + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + with pytest.raises(HomeAssistantError, match="not found in registry"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: mock_config_entry.unique_id, + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize("upload_file", [MockUploadFile(is_allowed_path=False)]) +async def test_path_is_not_allowed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a filename path that is not allowed.""" + await setup_integration(hass, mock_config_entry) + with ( + pytest.raises(HomeAssistantError, match="no access to path"), + ): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize("upload_file", [MockUploadFile(exists=False)]) +async def test_filename_does_not_exist( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a filename path that does not exist.""" + await setup_integration(hass, mock_config_entry) + with pytest.raises(HomeAssistantError, match="does not exist"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + return_response=True, + ) + + +async def test_upload_service_fails_upload( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_onedrive_client: MagicMock, +) -> None: + """Test service call when the upload fails.""" + await setup_integration(hass, mock_config_entry) + mock_onedrive_client.upload_file.side_effect = OneDriveException("error") + + with pytest.raises(HomeAssistantError, match="Failed to upload"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize("upload_file", [MockUploadFile(size=260 * 1024 * 1024)]) +async def test_upload_size_limit( + hass: HomeAssistant, + 
mock_config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a file that exceeds the upload size limit.""" + await setup_integration(hass, mock_config_entry) + with pytest.raises( + HomeAssistantError, + match=re.escape(f"`{TEST_FILENAME}` is too large (272629760 > 262144000)"), + ): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + return_response=True, + ) + + +async def test_create_album_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_onedrive_client: MagicMock, +) -> None: + """Test service call when folder creation fails.""" + await setup_integration(hass, mock_config_entry) + assert hass.services.has_service(DOMAIN, "upload") + + mock_onedrive_client.create_folder.side_effect = OneDriveException() + + with pytest.raises(HomeAssistantError, match="Failed to create folder"): + await hass.services.async_call( + DOMAIN, + UPLOAD_SERVICE, + { + CONF_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + CONF_FILENAME: TEST_FILENAME, + CONF_DESTINATION_FOLDER: DESINATION_FOLDER, + }, + blocking=True, + return_response=True, + ) diff --git a/tests/components/onvif/__init__.py b/tests/components/onvif/__init__.py index 8a86538b977..868624fb2e4 100644 --- a/tests/components/onvif/__init__.py +++ b/tests/components/onvif/__init__.py @@ -123,7 +123,7 @@ def setup_mock_onvif_camera( mock_onvif_camera.side_effect = mock_constructor -def setup_mock_device(mock_device, capabilities=None): +def setup_mock_device(mock_device, capabilities=None, profiles=None): """Prepare mock ONVIFDevice.""" mock_device.async_setup = AsyncMock(return_value=True) mock_device.port = 80 @@ -145,7 +145,7 @@ def setup_mock_device(mock_device, capabilities=None): ptz=None, video_source_token=None, ) - mock_device.profiles = [profile1] + mock_device.profiles = profiles or [profile1] mock_device.events = MagicMock( webhook_manager=MagicMock(state=WebHookManagerState.STARTED), pullpoint_manager=MagicMock(state=PullPointManagerState.PAUSED), diff --git a/tests/components/onvif/test_init.py b/tests/components/onvif/test_init.py new file mode 100644 index 00000000000..c176bdcc112 --- /dev/null +++ b/tests/components/onvif/test_init.py @@ -0,0 +1,102 @@ +"""Tests for the ONVIF integration __init__ module.""" + +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MAC, setup_mock_device + +from tests.common import MockConfigEntry + + +@pytest.mark.asyncio +async def test_migrate_camera_entities_unique_ids(hass: HomeAssistant) -> None: + """Test that camera entity unique ids get migrated properly.""" + config_entry = MockConfigEntry(domain="onvif", unique_id=MAC) + config_entry.add_to_hass(hass) + + entity_registry = er.async_get(hass) + + entity_with_only_mac = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=MAC, + config_entry=config_entry, + ) + entity_with_index = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=f"{MAC}_1", + config_entry=config_entry, + ) + # This one should not be migrated (different domain) + entity_sensor = entity_registry.async_get_or_create( + domain="sensor", + platform="onvif", + unique_id=MAC, + config_entry=config_entry, + ) + # This one should not be migrated (already migrated) + entity_migrated = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=f"{MAC}#profile_token_2", + config_entry=config_entry, + ) + # Unparsable index + entity_unparsable_index = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=f"{MAC}_a", + config_entry=config_entry, + ) + # Non-existent index + entity_nonexistent_index = entity_registry.async_get_or_create( + domain="camera", + platform="onvif", + unique_id=f"{MAC}_9", + config_entry=config_entry, + ) + + with patch("homeassistant.components.onvif.ONVIFDevice") as mock_device: + setup_mock_device( + mock_device, + capabilities=None, + profiles=[ + MagicMock(token="profile_token_0"), + MagicMock(token="profile_token_1"), + MagicMock(token="profile_token_2"), + ], + ) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entity_with_only_mac = entity_registry.async_get(entity_with_only_mac.entity_id) + entity_with_index = entity_registry.async_get(entity_with_index.entity_id) + entity_sensor = entity_registry.async_get(entity_sensor.entity_id) + entity_migrated = entity_registry.async_get(entity_migrated.entity_id) + entity_unparsable_index = entity_registry.async_get(entity_unparsable_index.entity_id) + entity_nonexistent_index = entity_registry.async_get(entity_nonexistent_index.entity_id) + + assert entity_with_only_mac is not None + assert entity_with_only_mac.unique_id == f"{MAC}#profile_token_0" + + assert entity_with_index is not None + assert entity_with_index.unique_id == f"{MAC}#profile_token_1" + + # Make sure the sensor entity is unchanged + assert entity_sensor is not None + assert entity_sensor.unique_id == MAC + + # Make sure the already migrated entity is unchanged + assert entity_migrated is not None + assert entity_migrated.unique_id == f"{MAC}#profile_token_2" + + # Make sure the unparsable index entity is unchanged + assert entity_unparsable_index is not None + assert entity_unparsable_index.unique_id == f"{MAC}_a" + + # Make sure the non-existent index entity is unchanged + assert entity_nonexistent_index is not None + assert entity_nonexistent_index.unique_id == f"{MAC}_9" diff --git a/tests/components/onvif/test_parsers.py b/tests/components/onvif/test_parsers.py index 4f7e10abae6..8448a6e8195 100644 --- a/tests/components/onvif/test_parsers.py +++ b/tests/components/onvif/test_parsers.py @@ -5,6 +5,7 @@ import os import onvif import onvif.settings +import pytest from zeep import Client from zeep.transports import Transport @@ -732,25 +733,24 @@ async def test_tapo_intrusion(hass: HomeAssistant) -> None: async def test_tapo_missing_attributes(hass: HomeAssistant) -> None: """Tests async_parse_tplink_detector with missing fields.""" - event = 
await get_event( - { - "Message": { - "_value_1": { - "Data": { - "ElementItem": [], - "Extension": None, - "SimpleItem": [{"Name": "IsPeople", "Value": "true"}], - "_attr_1": None, - }, - } - }, - "Topic": { - "_value_1": "tns1:RuleEngine/PeopleDetector/People", - }, - } - ) - - assert event is None + with pytest.raises(AttributeError, match="SimpleItem"): + await get_event( + { + "Message": { + "_value_1": { + "Data": { + "ElementItem": [], + "Extension": None, + "SimpleItem": [{"Name": "IsPeople", "Value": "true"}], + "_attr_1": None, + }, + } + }, + "Topic": { + "_value_1": "tns1:RuleEngine/PeopleDetector/People", + }, + } + ) async def test_tapo_unknown_type(hass: HomeAssistant) -> None: @@ -789,3 +789,93 @@ async def test_tapo_unknown_type(hass: HomeAssistant) -> None: ) assert event is None + + +async def test_reolink_package(hass: HomeAssistant) -> None: + """Tests reolink package event.""" + event = await get_event( + { + "SubscriptionReference": None, + "Topic": { + "_value_1": "tns1:RuleEngine/MyRuleDetector/Package", + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + }, + "ProducerReference": None, + "Message": { + "_value_1": { + "Source": { + "SimpleItem": [{"Name": "Source", "Value": "000"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Key": None, + "Data": { + "SimpleItem": [{"Name": "State", "Value": "true"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Extension": None, + "UtcTime": datetime.datetime( + 2025, 3, 12, 9, 54, 27, tzinfo=datetime.UTC + ), + "PropertyOperation": "Initialized", + "_attr_1": {}, + } + }, + } + ) + + assert event is not None + assert event.name == "Package Detection" + assert event.platform == "binary_sensor" + assert event.device_class == "occupancy" + assert event.value + assert event.uid == (f"{TEST_UID}_tns1:RuleEngine/MyRuleDetector/Package_000") + + +async def test_hikvision_alarm(hass: HomeAssistant) -> None: + """Tests hikvision camera alarm event.""" + event = await get_event( + { + "SubscriptionReference": None, + "Topic": { + "_value_1": "tns1:Device/Trigger/tnshik:AlarmIn", + "Dialect": "http://www.onvif.org/ver10/tev/topicExpression/ConcreteSet", + "_attr_1": {}, + }, + "ProducerReference": None, + "Message": { + "_value_1": { + "Source": { + "SimpleItem": [{"Name": "AlarmInToken", "Value": "AlarmIn_1"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Key": None, + "Data": { + "SimpleItem": [{"Name": "State", "Value": "true"}], + "ElementItem": [], + "Extension": None, + "_attr_1": None, + }, + "Extension": None, + "UtcTime": datetime.datetime( + 2025, 3, 13, 22, 57, 26, tzinfo=datetime.UTC + ), + "PropertyOperation": "Initialized", + "_attr_1": {}, + } + }, + } + ) + + assert event is not None + assert event.name == "Motion Alarm" + assert event.platform == "binary_sensor" + assert event.device_class == "motion" + assert event.value + assert event.uid == (f"{TEST_UID}_tns1:Device/Trigger/tnshik:AlarmIn_AlarmIn_1") diff --git a/tests/components/openai_conversation/test_config_flow.py b/tests/components/openai_conversation/test_config_flow.py index 90a08471f39..17a5aad6478 100644 --- a/tests/components/openai_conversation/test_config_flow.py +++ b/tests/components/openai_conversation/test_config_flow.py @@ -1,9 +1,10 @@ """Test the OpenAI Conversation config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch -from httpx import Response +import httpx from openai import APIConnectionError, 
AuthenticationError, BadRequestError +from openai.types.responses import Response, ResponseOutputMessage, ResponseOutputText import pytest from homeassistant import config_entries @@ -16,6 +17,13 @@ from homeassistant.components.openai_conversation.const import ( CONF_RECOMMENDED, CONF_TEMPERATURE, CONF_TOP_P, + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, DOMAIN, RECOMMENDED_CHAT_MODEL, RECOMMENDED_MAX_TOKENS, @@ -117,13 +125,17 @@ async def test_options_unsupported_model( (APIConnectionError(request=None), "cannot_connect"), ( AuthenticationError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response(status_code=None, request=""), + body=None, + message=None, ), "invalid_auth", ), ( BadRequestError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response(status_code=None, request=""), + body=None, + message=None, ), "unknown", ), @@ -172,6 +184,9 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TOP_P: RECOMMENDED_TOP_P, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: False, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: False, }, ), ( @@ -183,6 +198,9 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non CONF_TOP_P: RECOMMENDED_TOP_P, CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: False, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: False, }, { CONF_RECOMMENDED: True, @@ -225,3 +243,105 @@ async def test_options_switching( await hass.async_block_till_done() assert options["type"] is FlowResultType.CREATE_ENTRY assert options["data"] == expected_options + + +async def test_options_web_search_user_location( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test fetching user location.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + hass.config.country = "US" + hass.config.time_zone = "America/Los_Angeles" + hass.states.async_set( + "zone.home", "0", {"latitude": 37.7749, "longitude": -122.4194} + ) + with patch( + "openai.resources.responses.AsyncResponses.create", + new_callable=AsyncMock, + ) as mock_create: + mock_create.return_value = Response( + object="response", + id="resp_A", + created_at=1700000000, + model="gpt-4o-mini", + parallel_tool_calls=True, + tool_choice="auto", + tools=[], + output=[ + ResponseOutputMessage( + type="message", + id="msg_A", + content=[ + ResponseOutputText( + type="output_text", + text='{"city": "San Francisco", "region": "California"}', + annotations=[], + ) + ], + role="assistant", + status="completed", + ) + ], + ) + + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 1.0, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_TOP_P: RECOMMENDED_TOP_P, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: True, + }, + ) + await hass.async_block_till_done() + assert ( + mock_create.call_args.kwargs["input"][0]["content"] == 
"Where are the following" + " coordinates located: (37.7749, -122.4194)?" + ) + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"] == { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 1.0, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_TOP_P: RECOMMENDED_TOP_P, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + CONF_REASONING_EFFORT: RECOMMENDED_REASONING_EFFORT, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "medium", + CONF_WEB_SEARCH_USER_LOCATION: True, + CONF_WEB_SEARCH_CITY: "San Francisco", + CONF_WEB_SEARCH_REGION: "California", + CONF_WEB_SEARCH_COUNTRY: "US", + CONF_WEB_SEARCH_TIMEZONE: "America/Los_Angeles", + } + + +async def test_options_web_search_unsupported_model( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test the options form giving error about web search not being available.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + result = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_CHAT_MODEL: "o1-pro", + CONF_LLM_HASS_API: "assist", + CONF_WEB_SEARCH: True, + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"web_search": "web_search_not_supported"} diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index 238fd5f2d7b..d6f09e0f30e 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -3,20 +3,52 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch -from httpx import Response +import httpx from openai import AuthenticationError, RateLimitError -from openai.types.chat.chat_completion_chunk import ( - ChatCompletionChunk, - Choice, - ChoiceDelta, - ChoiceDeltaToolCall, - ChoiceDeltaToolCallFunction, +from openai.types import ResponseFormatText +from openai.types.responses import ( + Response, + ResponseCompletedEvent, + ResponseContentPartAddedEvent, + ResponseContentPartDoneEvent, + ResponseCreatedEvent, + ResponseError, + ResponseErrorEvent, + ResponseFailedEvent, + ResponseFunctionCallArgumentsDeltaEvent, + ResponseFunctionCallArgumentsDoneEvent, + ResponseFunctionToolCall, + ResponseFunctionWebSearch, + ResponseIncompleteEvent, + ResponseInProgressEvent, + ResponseOutputItemAddedEvent, + ResponseOutputItemDoneEvent, + ResponseOutputMessage, + ResponseOutputText, + ResponseReasoningItem, + ResponseStreamEvent, + ResponseTextConfig, + ResponseTextDeltaEvent, + ResponseTextDoneEvent, + ResponseWebSearchCallCompletedEvent, + ResponseWebSearchCallInProgressEvent, + ResponseWebSearchCallSearchingEvent, ) +from openai.types.responses.response import IncompleteDetails import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components import conversation from homeassistant.components.homeassistant.exposed_entities import async_expose_entity +from homeassistant.components.openai_conversation.const import ( + CONF_WEB_SEARCH, + CONF_WEB_SEARCH_CITY, + CONF_WEB_SEARCH_CONTEXT_SIZE, + CONF_WEB_SEARCH_COUNTRY, + CONF_WEB_SEARCH_REGION, + CONF_WEB_SEARCH_TIMEZONE, + CONF_WEB_SEARCH_USER_LOCATION, +) from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.helpers import intent 
@@ -28,40 +60,88 @@ from tests.components.conversation import ( mock_chat_log, # noqa: F401 ) -ASSIST_RESPONSE_FINISH = ( - # Assistant message - ChatCompletionChunk( - id="chatcmpl-B", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[Choice(index=0, delta=ChoiceDelta(content="Cool"))], - ), - # Finish stream - ChatCompletionChunk( - id="chatcmpl-B", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[Choice(index=0, finish_reason="stop", delta=ChoiceDelta())], - ), -) - @pytest.fixture def mock_create_stream() -> Generator[AsyncMock]: """Mock stream response.""" - async def mock_generator(stream): - for value in stream: + async def mock_generator(events, **kwargs): + response = Response( + id="resp_A", + created_at=1700000000, + error=None, + incomplete_details=None, + instructions=kwargs.get("instructions"), + metadata=kwargs.get("metadata", {}), + model=kwargs.get("model", "gpt-4o-mini"), + object="response", + output=[], + parallel_tool_calls=kwargs.get("parallel_tool_calls", True), + temperature=kwargs.get("temperature", 1.0), + tool_choice=kwargs.get("tool_choice", "auto"), + tools=kwargs.get("tools"), + top_p=kwargs.get("top_p", 1.0), + max_output_tokens=kwargs.get("max_output_tokens", 100000), + previous_response_id=kwargs.get("previous_response_id"), + reasoning=kwargs.get("reasoning"), + status="in_progress", + text=kwargs.get( + "text", ResponseTextConfig(format=ResponseFormatText(type="text")) + ), + truncation=kwargs.get("truncation", "disabled"), + usage=None, + user=kwargs.get("user"), + store=kwargs.get("store", True), + ) + yield ResponseCreatedEvent( + response=response, + type="response.created", + ) + yield ResponseInProgressEvent( + response=response, + type="response.in_progress", + ) + response.status = "completed" + + for value in events: + if isinstance(value, ResponseOutputItemDoneEvent): + response.output.append(value.item) + elif isinstance(value, IncompleteDetails): + response.status = "incomplete" + response.incomplete_details = value + break + if isinstance(value, ResponseError): + response.status = "failed" + response.error = value + break + yield value + if isinstance(value, ResponseErrorEvent): + return + + if response.status == "incomplete": + yield ResponseIncompleteEvent( + response=response, + type="response.incomplete", + ) + elif response.status == "failed": + yield ResponseFailedEvent( + response=response, + type="response.failed", + ) + else: + yield ResponseCompletedEvent( + response=response, + type="response.completed", + ) + with patch( - "openai.resources.chat.completions.AsyncCompletions.create", + "openai.resources.responses.AsyncResponses.create", AsyncMock(), ) as mock_create: mock_create.side_effect = lambda **kwargs: mock_generator( - mock_create.return_value.pop(0) + mock_create.return_value.pop(0), **kwargs ) yield mock_create @@ -99,13 +179,17 @@ async def test_entity( [ ( RateLimitError( - response=Response(status_code=429, request=""), body=None, message=None + response=httpx.Response(status_code=429, request=""), + body=None, + message=None, ), "Rate limited or insufficient funds", ), ( AuthenticationError( - response=Response(status_code=401, request=""), body=None, message=None + response=httpx.Response(status_code=401, request=""), + body=None, + message=None, ), "Error talking to OpenAI", ), @@ -120,7 +204,7 @@ async def test_error_handling( ) -> None: """Test that we handle errors when calling completion API.""" with patch( - 
"openai.resources.chat.completions.AsyncCompletions.create", + "openai.resources.responses.AsyncResponses.create", new_callable=AsyncMock, side_effect=exception, ): @@ -132,6 +216,121 @@ async def test_error_handling( assert result.response.speech["plain"]["speech"] == message, result.response.speech +@pytest.mark.parametrize( + ("reason", "message"), + [ + ( + "max_output_tokens", + "max output tokens reached", + ), + ( + "content_filter", + "content filter triggered", + ), + ( + None, + "unknown reason", + ), + ], +) +async def test_incomplete_response( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + mock_create_stream: AsyncMock, + reason: str, + message: str, +) -> None: + """Test handling early model stop.""" + # Incomplete details received after some content is generated + mock_create_stream.return_value = [ + ( + # Start message + *create_message_item( + id="msg_A", + text=["Once upon", " a time, ", "there was "], + output_index=0, + ), + # Length limit or content filter + IncompleteDetails(reason=reason), + ) + ] + + result = await conversation.async_converse( + hass, + "Please tell me a big story", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert ( + result.response.speech["plain"]["speech"] + == f"OpenAI response incomplete: {message}" + ), result.response.speech + + # Incomplete details received before any content is generated + mock_create_stream.return_value = [ + ( + # Start generating response + *create_reasoning_item(id="rs_A", output_index=0), + # Length limit or content filter + IncompleteDetails(reason=reason), + ) + ] + + result = await conversation.async_converse( + hass, + "please tell me a big story", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert ( + result.response.speech["plain"]["speech"] + == f"OpenAI response incomplete: {message}" + ), result.response.speech + + +@pytest.mark.parametrize( + ("error", "message"), + [ + ( + ResponseError(code="rate_limit_exceeded", message="Rate limit exceeded"), + "OpenAI response failed: Rate limit exceeded", + ), + ( + ResponseErrorEvent(type="error", message="Some error"), + "OpenAI response error: Some error", + ), + ], +) +async def test_failed_response( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + mock_create_stream: AsyncMock, + error: ResponseError | ResponseErrorEvent, + message: str, +) -> None: + """Test handling failed and error responses.""" + mock_create_stream.return_value = [(error,)] + + result = await conversation.async_converse( + hass, + "next natural number please", + "mock-conversation-id", + Context(), + agent_id="conversation.openai", + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.speech["plain"]["speech"] == message, result.response.speech + + async def test_conversation_agent( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -144,6 +343,200 @@ async def test_conversation_agent( assert agent.supported_languages == "*" +def create_message_item( + id: str, text: str | list[str], output_index: int +) -> list[ResponseStreamEvent]: + """Create a message item.""" + if isinstance(text, str): + text = [text] + + content = ResponseOutputText(annotations=[], text="", type="output_text") + events = 
[ + ResponseOutputItemAddedEvent( + item=ResponseOutputMessage( + id=id, + content=[], + type="message", + role="assistant", + status="in_progress", + ), + output_index=output_index, + type="response.output_item.added", + ), + ResponseContentPartAddedEvent( + content_index=0, + item_id=id, + output_index=output_index, + part=content, + type="response.content_part.added", + ), + ] + + content.text = "".join(text) + events.extend( + ResponseTextDeltaEvent( + content_index=0, + delta=delta, + item_id=id, + output_index=output_index, + type="response.output_text.delta", + ) + for delta in text + ) + + events.extend( + [ + ResponseTextDoneEvent( + content_index=0, + item_id=id, + output_index=output_index, + text="".join(text), + type="response.output_text.done", + ), + ResponseContentPartDoneEvent( + content_index=0, + item_id=id, + output_index=output_index, + part=content, + type="response.content_part.done", + ), + ResponseOutputItemDoneEvent( + item=ResponseOutputMessage( + id=id, + content=[content], + role="assistant", + status="completed", + type="message", + ), + output_index=output_index, + type="response.output_item.done", + ), + ] + ) + + return events + + +def create_function_tool_call_item( + id: str, arguments: str | list[str], call_id: str, name: str, output_index: int +) -> list[ResponseStreamEvent]: + """Create a function tool call item.""" + if isinstance(arguments, str): + arguments = [arguments] + + events = [ + ResponseOutputItemAddedEvent( + item=ResponseFunctionToolCall( + id=id, + arguments="", + call_id=call_id, + name=name, + type="function_call", + status="in_progress", + ), + output_index=output_index, + type="response.output_item.added", + ) + ] + + events.extend( + ResponseFunctionCallArgumentsDeltaEvent( + delta=delta, + item_id=id, + output_index=output_index, + type="response.function_call_arguments.delta", + ) + for delta in arguments + ) + + events.append( + ResponseFunctionCallArgumentsDoneEvent( + arguments="".join(arguments), + item_id=id, + output_index=output_index, + type="response.function_call_arguments.done", + ) + ) + + events.append( + ResponseOutputItemDoneEvent( + item=ResponseFunctionToolCall( + id=id, + arguments="".join(arguments), + call_id=call_id, + name=name, + type="function_call", + status="completed", + ), + output_index=output_index, + type="response.output_item.done", + ) + ) + + return events + + +def create_reasoning_item(id: str, output_index: int) -> list[ResponseStreamEvent]: + """Create a reasoning item.""" + return [ + ResponseOutputItemAddedEvent( + item=ResponseReasoningItem( + id=id, + summary=[], + type="reasoning", + status=None, + ), + output_index=output_index, + type="response.output_item.added", + ), + ResponseOutputItemDoneEvent( + item=ResponseReasoningItem( + id=id, + summary=[], + type="reasoning", + status=None, + ), + output_index=output_index, + type="response.output_item.done", + ), + ] + + +def create_web_search_item(id: str, output_index: int) -> list[ResponseStreamEvent]: + """Create a web search call item.""" + return [ + ResponseOutputItemAddedEvent( + item=ResponseFunctionWebSearch( + id=id, status="in_progress", type="web_search_call" + ), + output_index=output_index, + type="response.output_item.added", + ), + ResponseWebSearchCallInProgressEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.in_progress", + ), + ResponseWebSearchCallSearchingEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.searching", + ), + 
ResponseWebSearchCallCompletedEvent( + item_id=id, + output_index=output_index, + type="response.web_search_call.completed", + ), + ResponseOutputItemDoneEvent( + item=ResponseFunctionWebSearch( + id=id, status="completed", type="web_search_call" + ), + output_index=output_index, + type="response.output_item.done", + ), + ] + + async def test_function_call( hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, @@ -156,111 +549,27 @@ async def test_function_call( mock_create_stream.return_value = [ # Initial conversation ( + # Wait for the model to think + *create_reasoning_item(id="rs_A", output_index=0), # First tool call - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - id="call_call_1", - index=0, - function=ChoiceDeltaToolCallFunction( - name="test_tool", - arguments=None, - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - index=0, - function=ChoiceDeltaToolCallFunction( - name=None, - arguments='{"para', - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - index=0, - function=ChoiceDeltaToolCallFunction( - name=None, - arguments='m1":"call1"}', - ), - ) - ] - ), - ) - ], + *create_function_tool_call_item( + id="fc_1", + arguments=['{"para', 'm1":"call1"}'], + call_id="call_call_1", + name="test_tool", + output_index=1, ), # Second tool call - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - id="call_call_2", - index=1, - function=ChoiceDeltaToolCallFunction( - name="test_tool", - arguments='{"param1":"call2"}', - ), - ) - ] - ), - ) - ], - ), - # Finish stream - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice(index=0, finish_reason="tool_calls", delta=ChoiceDelta()) - ], + *create_function_tool_call_item( + id="fc_2", + arguments='{"param1":"call2"}', + call_id="call_call_2", + name="test_tool", + output_index=2, ), ), # Response after tool responses - ASSIST_RESPONSE_FINISH, + create_message_item(id="msg_A", text="Cool", output_index=0), ] mock_chat_log.mock_tool_results( { @@ -288,99 +597,27 @@ async def test_function_call( ( "Test function call started with missing arguments", ( - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - id="call_call_1", - index=0, - function=ChoiceDeltaToolCallFunction( - name="test_tool", - arguments=None, - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-B", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[Choice(index=0, delta=ChoiceDelta(content="Cool"))], + *create_function_tool_call_item( + id="fc_1", + arguments=[], + call_id="call_call_1", + name="test_tool", + 
output_index=0, ), + *create_message_item(id="msg_A", text="Cool", output_index=1), ), ), ( "Test invalid JSON", ( - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - id="call_call_1", - index=0, - function=ChoiceDeltaToolCallFunction( - name="test_tool", - arguments=None, - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-A", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta( - tool_calls=[ - ChoiceDeltaToolCall( - index=0, - function=ChoiceDeltaToolCallFunction( - name=None, - arguments='{"para', - ), - ) - ] - ), - ) - ], - ), - ChatCompletionChunk( - id="chatcmpl-B", - created=1700000000, - model="gpt-4-1106-preview", - object="chat.completion.chunk", - choices=[ - Choice( - index=0, - delta=ChoiceDelta(content="Cool"), - finish_reason="tool_calls", - ) - ], + *create_function_tool_call_item( + id="fc_1", + arguments=['{"para'], + call_id="call_call_1", + name="test_tool", + output_index=0, ), + *create_message_item(id="msg_A", text="Cool", output_index=1), ), ), ], @@ -390,9 +627,8 @@ async def test_function_call_invalid( mock_config_entry_with_assist: MockConfigEntry, mock_init_component, mock_create_stream: AsyncMock, - mock_chat_log: MockChatLog, # noqa: F811 description: str, - messages: tuple[ChatCompletionChunk], + messages: tuple[ResponseStreamEvent], ) -> None: """Test function call containing invalid data.""" mock_create_stream.return_value = [messages] @@ -432,7 +668,9 @@ async def test_assist_api_tools_conversion( hass.states.async_set(f"{component}.test", "on") async_expose_entity(hass, "conversation", f"{component}.test", True) - mock_create_stream.return_value = [ASSIST_RESPONSE_FINISH] + mock_create_stream.return_value = [ + create_message_item(id="msg_A", text="Cool", output_index=0) + ] await conversation.async_converse( hass, "hello", None, Context(), agent_id="conversation.openai" @@ -440,3 +678,60 @@ async def test_assist_api_tools_conversion( tools = mock_create_stream.mock_calls[0][2]["tools"] assert tools + + +async def test_web_search( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + mock_create_stream, + mock_chat_log: MockChatLog, # noqa: F811 +) -> None: + """Test web_search_tool.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_WEB_SEARCH: True, + CONF_WEB_SEARCH_CONTEXT_SIZE: "low", + CONF_WEB_SEARCH_USER_LOCATION: True, + CONF_WEB_SEARCH_CITY: "San Francisco", + CONF_WEB_SEARCH_COUNTRY: "US", + CONF_WEB_SEARCH_REGION: "California", + CONF_WEB_SEARCH_TIMEZONE: "America/Los_Angeles", + }, + ) + await hass.config_entries.async_reload(mock_config_entry.entry_id) + + message = "Home Assistant now supports ChatGPT Search in Assist" + mock_create_stream.return_value = [ + # Initial conversation + ( + *create_web_search_item(id="ws_A", output_index=0), + *create_message_item(id="msg_A", text=message, output_index=1), + ) + ] + + result = await conversation.async_converse( + hass, + "What's on the latest news?", + mock_chat_log.conversation_id, + Context(), + agent_id="conversation.openai", + ) + + assert mock_create_stream.mock_calls[0][2]["tools"] == [ + { + "type": "web_search_preview", + "search_context_size": "low", + "user_location": { + "type": "approximate", + "city": "San 
Francisco", + "region": "California", + "country": "US", + "timezone": "America/Los_Angeles", + }, + } + ] + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.speech["plain"]["speech"] == message, result.response.speech diff --git a/tests/components/openai_conversation/test_init.py b/tests/components/openai_conversation/test_init.py index d78ce398c92..5aef68841ee 100644 --- a/tests/components/openai_conversation/test_init.py +++ b/tests/components/openai_conversation/test_init.py @@ -1,8 +1,8 @@ """Tests for the OpenAI integration.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, mock_open, patch -from httpx import Response +import httpx from openai import ( APIConnectionError, AuthenticationError, @@ -11,8 +11,10 @@ from openai import ( ) from openai.types.image import Image from openai.types.images_response import ImagesResponse +from openai.types.responses import Response, ResponseOutputMessage, ResponseOutputText import pytest +from homeassistant.components.openai_conversation import CONF_FILENAMES from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.setup import async_setup_component @@ -114,7 +116,9 @@ async def test_generate_image_service_error( patch( "openai.resources.images.AsyncImages.generate", side_effect=RateLimitError( - response=Response(status_code=None, request=""), + response=httpx.Response( + status_code=500, request=httpx.Request(method="GET", url="") + ), body=None, message="Reason", ), @@ -133,22 +137,60 @@ async def test_generate_image_service_error( ) +@pytest.mark.usefixtures("mock_init_component") +async def test_generate_content_service_with_image_not_allowed_path( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test generate content service with an image in a not allowed path.""" + with ( + patch("pathlib.Path.exists", return_value=True), + patch.object(hass.config, "is_allowed_path", return_value=False), + pytest.raises( + HomeAssistantError, + match=( + "Cannot read `doorbell_snapshot.jpg`, no access to path; " + "`allowlist_external_dirs` may need to be adjusted in " + "`configuration.yaml`" + ), + ), + ): + await hass.services.async_call( + "openai_conversation", + "generate_content", + { + "config_entry": mock_config_entry.entry_id, + "prompt": "Describe this image from my doorbell camera", + "filenames": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize( + ("service_name", "error"), + [ + ("generate_image", "Invalid config entry provided. Got invalid_entry"), + ("generate_content", "Invalid config entry provided. Got invalid_entry"), + ], +) async def test_invalid_config_entry( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component, + service_name: str, + error: str, ) -> None: """Assert exception when invalid config entry is provided.""" service_data = { "prompt": "Picture of a dog", "config_entry": "invalid_entry", } - with pytest.raises( - ServiceValidationError, match="Invalid config entry provided. 
Got invalid_entry" - ): + with pytest.raises(ServiceValidationError, match=error): await hass.services.async_call( "openai_conversation", - "generate_image", + service_name, service_data, blocking=True, return_response=True, @@ -158,18 +200,29 @@ async def test_invalid_config_entry( @pytest.mark.parametrize( ("side_effect", "error"), [ - (APIConnectionError(request=None), "Connection error"), + ( + APIConnectionError(request=httpx.Request(method="GET", url="test")), + "Connection error", + ), ( AuthenticationError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response( + status_code=500, request=httpx.Request(method="GET", url="test") + ), + body=None, + message="", ), "Invalid API key", ), ( BadRequestError( - response=Response(status_code=None, request=""), body=None, message=None + response=httpx.Response( + status_code=500, request=httpx.Request(method="GET", url="test") + ), + body=None, + message="", ), - "openai_conversation integration not ready yet: None", + "openai_conversation integration not ready yet", ), ], ) @@ -188,3 +241,253 @@ async def test_init_error( assert await async_setup_component(hass, "openai_conversation", {}) await hass.async_block_till_done() assert error in caplog.text + + +@pytest.mark.parametrize( + ("service_data", "expected_args", "number_of_files"), + [ + ( + {"prompt": "Picture of a dog", "filenames": []}, + { + "input": [ + { + "content": [ + { + "type": "input_text", + "text": "Picture of a dog", + }, + ], + }, + ], + }, + 0, + ), + ( + {"prompt": "Picture of a dog", "filenames": ["/a/b/c.jpg"]}, + { + "input": [ + { + "content": [ + { + "type": "input_text", + "text": "Picture of a dog", + }, + { + "type": "input_image", + "image_url": "data:image/jpeg;base64,BASE64IMAGE1", + "detail": "auto", + "file_id": "/a/b/c.jpg", + }, + ], + }, + ], + }, + 1, + ), + ( + { + "prompt": "Picture of a dog", + "filenames": ["/a/b/c.jpg", "d/e/f.jpg"], + }, + { + "input": [ + { + "content": [ + { + "type": "input_text", + "text": "Picture of a dog", + }, + { + "type": "input_image", + "image_url": "data:image/jpeg;base64,BASE64IMAGE1", + "detail": "auto", + "file_id": "/a/b/c.jpg", + }, + { + "type": "input_image", + "image_url": "data:image/jpeg;base64,BASE64IMAGE2", + "detail": "auto", + "file_id": "d/e/f.jpg", + }, + ], + }, + ], + }, + 2, + ), + ], +) +async def test_generate_content_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + service_data, + expected_args, + number_of_files, +) -> None: + """Test generate content service.""" + service_data["config_entry"] = mock_config_entry.entry_id + expected_args["model"] = "gpt-4o-mini" + expected_args["max_output_tokens"] = 150 + expected_args["top_p"] = 1.0 + expected_args["temperature"] = 1.0 + expected_args["user"] = None + expected_args["store"] = False + expected_args["input"][0]["type"] = "message" + expected_args["input"][0]["role"] = "user" + + with ( + patch( + "openai.resources.responses.AsyncResponses.create", + new_callable=AsyncMock, + ) as mock_create, + patch( + "base64.b64encode", side_effect=[b"BASE64IMAGE1", b"BASE64IMAGE2"] + ) as mock_b64encode, + patch("builtins.open", mock_open(read_data="ABC")) as mock_file, + patch("pathlib.Path.exists", return_value=True), + patch.object(hass.config, "is_allowed_path", return_value=True), + ): + mock_create.return_value = Response( + object="response", + id="resp_A", + created_at=1700000000, + model="gpt-4o-mini", + parallel_tool_calls=True, + tool_choice="auto", + 
tools=[], + output=[ + ResponseOutputMessage( + type="message", + id="msg_A", + content=[ + ResponseOutputText( + type="output_text", + text="This is the response", + annotations=[], + ) + ], + role="assistant", + status="completed", + ) + ], + ) + + response = await hass.services.async_call( + "openai_conversation", + "generate_content", + service_data, + blocking=True, + return_response=True, + ) + assert response == {"text": "This is the response"} + assert len(mock_create.mock_calls) == 1 + assert mock_create.mock_calls[0][2] == expected_args + assert mock_b64encode.call_count == number_of_files + for idx, file in enumerate(service_data[CONF_FILENAMES]): + assert mock_file.call_args_list[idx][0][0] == file + + +@pytest.mark.parametrize( + ( + "service_data", + "error", + "number_of_files", + "exists_side_effect", + "is_allowed_side_effect", + ), + [ + ( + {"prompt": "Picture of a dog", "filenames": ["/a/b/c.jpg"]}, + "`/a/b/c.jpg` does not exist", + 0, + [False], + [True], + ), + ( + { + "prompt": "Picture of a dog", + "filenames": ["/a/b/c.jpg", "d/e/f.png"], + }, + "Cannot read `d/e/f.png`, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`", + 1, + [True, True], + [True, False], + ), + ( + {"prompt": "Not a picture of a dog", "filenames": ["/a/b/c.pdf"]}, + "Only images are supported by the OpenAI API,`/a/b/c.pdf` is not an image file", + 1, + [True], + [True], + ), + ], +) +async def test_generate_content_service_invalid( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + service_data, + error, + number_of_files, + exists_side_effect, + is_allowed_side_effect, +) -> None: + """Test generate content service.""" + service_data["config_entry"] = mock_config_entry.entry_id + + with ( + patch( + "openai.resources.responses.AsyncResponses.create", + new_callable=AsyncMock, + ) as mock_create, + patch( + "base64.b64encode", side_effect=[b"BASE64IMAGE1", b"BASE64IMAGE2"] + ) as mock_b64encode, + patch("builtins.open", mock_open(read_data="ABC")), + patch("pathlib.Path.exists", side_effect=exists_side_effect), + patch.object( + hass.config, "is_allowed_path", side_effect=is_allowed_side_effect + ), + ): + with pytest.raises(HomeAssistantError, match=error): + await hass.services.async_call( + "openai_conversation", + "generate_content", + service_data, + blocking=True, + return_response=True, + ) + assert len(mock_create.mock_calls) == 0 + assert mock_b64encode.call_count == number_of_files + + +@pytest.mark.usefixtures("mock_init_component") +async def test_generate_content_service_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test generate content service handles errors.""" + with ( + patch( + "openai.resources.responses.AsyncResponses.create", + side_effect=RateLimitError( + response=httpx.Response( + status_code=417, request=httpx.Request(method="GET", url="") + ), + body=None, + message="Reason", + ), + ), + pytest.raises(HomeAssistantError, match="Error generating content: Reason"), + ): + await hass.services.async_call( + "openai_conversation", + "generate_content", + { + "config_entry": mock_config_entry.entry_id, + "prompt": "Image of an epic fail", + }, + blocking=True, + return_response=True, + ) diff --git a/tests/components/opentherm_gw/test_config_flow.py b/tests/components/opentherm_gw/test_config_flow.py index 57bea4e55dc..99a2dde4acc 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -54,30 
+54,6 @@ async def test_form_user( assert mock_pyotgw.return_value.disconnect.await_count == 1 -# Deprecated import from configuration.yaml, can be removed in 2025.4.0 -async def test_form_import( - hass: HomeAssistant, - mock_pyotgw: MagicMock, - mock_setup_entry: AsyncMock, -) -> None: - """Test import from existing config.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_ID: "legacy_gateway", CONF_DEVICE: "/dev/ttyUSB1"}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "legacy_gateway" - assert result["data"] == { - CONF_NAME: "legacy_gateway", - CONF_DEVICE: "/dev/ttyUSB1", - CONF_ID: "legacy_gateway", - } - assert mock_pyotgw.return_value.connect.await_count == 1 - assert mock_pyotgw.return_value.disconnect.await_count == 1 - - async def test_form_duplicate_entries( hass: HomeAssistant, mock_pyotgw: MagicMock, diff --git a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index 3e85afbf782..84629137ce1 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -4,18 +4,12 @@ from unittest.mock import MagicMock from pyotgw.vars import OTGW, OTGW_ABOUT -from homeassistant import setup from homeassistant.components.opentherm_gw.const import ( DOMAIN, OpenThermDeviceIdentifier, ) -from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant -from homeassistant.helpers import ( - device_registry as dr, - entity_registry as er, - issue_registry as ir, -) +from homeassistant.helpers import device_registry as dr from .conftest import MOCK_GATEWAY_ID, VERSION_TEST @@ -74,104 +68,3 @@ async def test_device_registry_update( ) assert gw_dev is not None assert gw_dev.sw_version == VERSION_NEW - - -# Device migration test can be removed in 2025.4.0 -async def test_device_migration( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, -) -> None: - """Test that the device registry is updated correctly.""" - mock_config_entry.add_to_hass(hass) - - device_registry.async_get_or_create( - config_entry_id=mock_config_entry.entry_id, - identifiers={ - (DOMAIN, MOCK_GATEWAY_ID), - }, - name="Mock Gateway", - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - sw_version=VERSION_TEST, - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert ( - device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) - is None - ) - - gw_dev = device_registry.async_get_device( - identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} - ) - assert gw_dev is not None - - assert ( - device_registry.async_get_device( - identifiers={ - (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.BOILER}") - } - ) - is not None - ) - - assert ( - device_registry.async_get_device( - identifiers={ - (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.THERMOSTAT}") - } - ) - is not None - ) - - -# Entity migration test can be removed in 2025.4.0 -async def test_climate_entity_migration( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, -) -> None: - """Test that the climate entity unique_id gets migrated correctly.""" - mock_config_entry.add_to_hass(hass) - entry = entity_registry.async_get_or_create( - domain="climate", - 
platform="opentherm_gw", - unique_id=mock_config_entry.data[CONF_ID], - ) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - updated_entry = entity_registry.async_get(entry.entity_id) - assert updated_entry is not None - assert ( - updated_entry.unique_id - == f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity" - ) - - -# Deprecation test, can be removed in 2025.4.0 -async def test_configuration_yaml_deprecation( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_config_entry: MockConfigEntry, - mock_pyotgw: MagicMock, -) -> None: - """Test that existing configuration in configuration.yaml creates an issue.""" - - await setup.async_setup_component( - hass, DOMAIN, {DOMAIN: {"legacy_gateway": {"device": "/dev/null"}}} - ) - - await hass.async_block_till_done() - assert ( - issue_registry.async_get_issue( - DOMAIN, "deprecated_import_from_configuration_yaml" - ) - is not None - ) diff --git a/tests/components/openweathermap/test_weather.py b/tests/components/openweathermap/test_weather.py index 5d3565d6ca9..e9817e739ac 100644 --- a/tests/components/openweathermap/test_weather.py +++ b/tests/components/openweathermap/test_weather.py @@ -6,7 +6,7 @@ from syrupy import SnapshotAssertion from homeassistant.components.openweathermap.const import ( DEFAULT_LANGUAGE, DOMAIN, - OWM_MODE_V25, + OWM_MODE_FREE_CURRENT, OWM_MODE_V30, ) from homeassistant.components.openweathermap.weather import SERVICE_GET_MINUTE_FORECAST @@ -52,9 +52,9 @@ def mock_config_entry(mode: str) -> MockConfigEntry: @pytest.fixture -def mock_config_entry_v25() -> MockConfigEntry: - """Create a mock OpenWeatherMap v2.5 config entry.""" - return mock_config_entry(OWM_MODE_V25) +def mock_config_entry_free_current() -> MockConfigEntry: + """Create a mock OpenWeatherMap FREE_CURRENT config entry.""" + return mock_config_entry(OWM_MODE_FREE_CURRENT) @pytest.fixture @@ -97,15 +97,15 @@ async def test_get_minute_forecast( @patch( - "pyopenweathermap.client.onecall_client.OWMOneCallClient.get_weather", + "pyopenweathermap.client.free_client.OWMFreeClient.get_weather", AsyncMock(return_value=static_weather_report), ) async def test_mode_fail( hass: HomeAssistant, - mock_config_entry_v25: MockConfigEntry, + mock_config_entry_free_current: MockConfigEntry, ) -> None: """Test that Minute forecasting fails when mode is not v3.0.""" - await setup_mock_config_entry(hass, mock_config_entry_v25) + await setup_mock_config_entry(hass, mock_config_entry_free_current) # Expect a ServiceValidationError when mode is not OWM_MODE_V30 with pytest.raises( diff --git a/tests/components/pglab/snapshots/test_sensor.ambr b/tests/components/pglab/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..71889b65183 --- /dev/null +++ b/tests/components/pglab/snapshots/test_sensor.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_sensors[mpu_voltage][initial_sensor_mpu_voltage] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'test MPU voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_mpu_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[mpu_voltage][updated_sensor_mpu_voltage] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'test MPU voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.test_mpu_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.31', + }) +# --- +# name: test_sensors[run_time][initial_sensor_run_time] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'test Run time', + 'icon': 'mdi:progress-clock', + }), + 'context': , + 'entity_id': 'sensor.test_run_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[run_time][updated_sensor_run_time] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'test Run time', + 'icon': 'mdi:progress-clock', + }), + 'context': , + 'entity_id': 'sensor.test_run_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-02-26T01:04:54+00:00', + }) +# --- +# name: test_sensors[temperature][initial_sensor_temperature] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'test Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[temperature][updated_sensor_temperature] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'test Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.4', + }) +# --- diff --git a/tests/components/pglab/test_cover.py b/tests/components/pglab/test_cover.py new file mode 100644 index 00000000000..ea4c7a7213e --- /dev/null +++ b/tests/components/pglab/test_cover.py @@ -0,0 +1,210 @@ +"""The tests for the PG LAB Electronics cover.""" + +import json + +from homeassistant.components import cover +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_STOP_COVER, +) +from homeassistant.const import ( + ATTR_ASSUMED_STATE, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + STATE_UNKNOWN, +) +from homeassistant.core import HomeAssistant + +from tests.common import async_fire_mqtt_message +from tests.typing import MqttMockHAClient + +COVER_FEATURES = ( + cover.CoverEntityFeature.OPEN + | cover.CoverEntityFeature.CLOSE + | cover.CoverEntityFeature.STOP +) + + +async def call_service(hass: HomeAssistant, entity_id, service, **kwargs): + """Call a service.""" + await hass.services.async_call( + COVER_DOMAIN, + service, + {"entity_id": entity_id, **kwargs}, + blocking=True, + ) + + +async def test_cover_features( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Test cover features.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 4, "boards": "10000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + assert len(hass.states.async_all("cover")) == 4 + + for i in range(4): + cover = hass.states.get(f"cover.test_shutter_{i}") + assert cover + assert cover.attributes["supported_features"] == COVER_FEATURES + + +async 
def test_cover_availability( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Check that covers are properly created.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 6, "boards": "11000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + # We are creating 6 covers using two E-RELAY devices connected to E-BOARD. + # Now we are going to check that all covers are created and their state is unknown. + for i in range(6): + cover = hass.states.get(f"cover.test_shutter_{i}") + assert cover.state == STATE_UNKNOWN + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + + # The cover with id 7 should not be created. + cover = hass.states.get("cover.test_shutter_7") + assert not cover + + +async def test_cover_change_state_via_mqtt( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Test state update via MQTT.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 2, "boards": "10000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + # Check initial state is unknown + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_UNKNOWN + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + + # Simulate the device responding with MQTT state messages and check that the cover + # state changes appropriately. 
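+    # Payload-to-state mapping asserted below: "OPEN" -> STATE_OPEN,
+    # "OPENING" -> STATE_OPENING, "CLOSING" -> STATE_CLOSING, "CLOSED" -> STATE_CLOSED.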
+ + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "OPEN") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + assert cover.state == STATE_OPEN + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "OPENING") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_OPENING + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "CLOSING") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_CLOSING + + async_fire_mqtt_message(hass, "pglab/test/shutter/0/state", "CLOSED") + await hass.async_block_till_done() + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_CLOSED + + +async def test_cover_mqtt_state_by_calling_service( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient, setup_pglab +) -> None: + """Calling service to OPEN/CLOSE cover and check mqtt state.""" + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 2, "boards": "10000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + cover = hass.states.get("cover.test_shutter_0") + assert cover.state == STATE_UNKNOWN + assert not cover.attributes.get(ATTR_ASSUMED_STATE) + + # Call HA covers services and verify that the MQTT messages are sent correctly + + await call_service(hass, "cover.test_shutter_0", SERVICE_OPEN_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "OPEN", 0, False + ) + mqtt_mock.async_publish.reset_mock() + + await call_service(hass, "cover.test_shutter_0", SERVICE_STOP_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "STOP", 0, False + ) + mqtt_mock.async_publish.reset_mock() + + await call_service(hass, "cover.test_shutter_0", SERVICE_CLOSE_COVER) + mqtt_mock.async_publish.assert_called_once_with( + "pglab/test/shutter/0/set", "CLOSE", 0, False + ) + mqtt_mock.async_publish.reset_mock() diff --git a/tests/components/pglab/test_sensor.py b/tests/components/pglab/test_sensor.py new file mode 100644 index 00000000000..ff20d1452a4 --- /dev/null +++ b/tests/components/pglab/test_sensor.py @@ -0,0 +1,71 @@ +"""The tests for the PG LAB Electronics sensor.""" + +import json + +from freezegun import freeze_time +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import async_fire_mqtt_message +from tests.typing import MqttMockHAClient + + +async def send_discovery_message(hass: HomeAssistant) -> None: + """Send mqtt discovery message.""" + + topic = "pglab/discovery/E-Board-DD53AC85/config" + payload = { + "ip": "192.168.1.16", + "mac": "80:34:28:1B:18:5A", + "name": "test", + "hw": "1.0.7", + "fw": "1.0.0", + "type": "E-Board", + "id": "E-Board-DD53AC85", + "manufacturer": "PG LAB Electronics", + "params": {"shutters": 0, "boards": "00000000"}, + } + + async_fire_mqtt_message( + hass, + topic, + json.dumps(payload), + ) + await hass.async_block_till_done() + + +@freeze_time("2024-02-26 01:21:34") +@pytest.mark.parametrize( + "sensor_suffix", + [ + "temperature", + "mpu_voltage", + "run_time", + ], +) +async def 
test_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mqtt_mock: MqttMockHAClient, + setup_pglab, + sensor_suffix: str, +) -> None: + """Check if sensors are properly created and updated.""" + + # send the discovery message to make E-BOARD device discoverable + await send_discovery_message(hass) + + # check initial sensors state + state = hass.states.get(f"sensor.test_{sensor_suffix}") + assert state == snapshot(name=f"initial_sensor_{sensor_suffix}") + + # update sensors value via mqtt + update_payload = {"temp": 33.4, "volt": 3.31, "rtime": 1000} + async_fire_mqtt_message(hass, "pglab/test/sensor/value", json.dumps(update_payload)) + await hass.async_block_till_done() + + # check updated sensors state + state = hass.states.get(f"sensor.test_{sensor_suffix}") + assert state == snapshot(name=f"updated_sensor_{sensor_suffix}") diff --git a/tests/components/ping/snapshots/test_sensor.ambr b/tests/components/ping/snapshots/test_sensor.ambr index bb811af6a34..6b86c327863 100644 --- a/tests/components/ping/snapshots/test_sensor.ambr +++ b/tests/components/ping/snapshots/test_sensor.ambr @@ -26,7 +26,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Round Trip Time Average', + 'original_name': 'Round-trip time average', 'platform': 'ping', 'previous_unique_id': None, 'supported_features': 0, @@ -38,7 +38,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': '10.10.10.10 Round Trip Time Average', + 'friendly_name': '10.10.10.10 Round-trip time average', 'state_class': , 'unit_of_measurement': , }), @@ -77,7 +77,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Round Trip Time Maximum', + 'original_name': 'Round-trip time maximum', 'platform': 'ping', 'previous_unique_id': None, 'supported_features': 0, @@ -89,7 +89,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': '10.10.10.10 Round Trip Time Maximum', + 'friendly_name': '10.10.10.10 Round-trip time maximum', 'state_class': , 'unit_of_measurement': , }), @@ -134,7 +134,7 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Round Trip Time Minimum', + 'original_name': 'Round-trip time minimum', 'platform': 'ping', 'previous_unique_id': None, 'supported_features': 0, @@ -146,7 +146,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': '10.10.10.10 Round Trip Time Minimum', + 'friendly_name': '10.10.10.10 Round-trip time minimum', 'state_class': , 'unit_of_measurement': , }), diff --git a/tests/components/pterodactyl/__init__.py b/tests/components/pterodactyl/__init__.py new file mode 100644 index 00000000000..a5b28d67ae3 --- /dev/null +++ b/tests/components/pterodactyl/__init__.py @@ -0,0 +1 @@ +"""Tests for the Pterodactyl integration.""" diff --git a/tests/components/pterodactyl/conftest.py b/tests/components/pterodactyl/conftest.py new file mode 100644 index 00000000000..62326e79207 --- /dev/null +++ b/tests/components/pterodactyl/conftest.py @@ -0,0 +1,155 @@ +"""Common fixtures for the Pterodactyl tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from pydactyl.responses import PaginatedResponse +import pytest + +from homeassistant.components.pterodactyl.const import DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_URL + +from tests.common import MockConfigEntry + +TEST_URL = "https://192.168.0.1:8080/" +TEST_API_KEY = "TestClientApiKey" +TEST_USER_INPUT = { + 
CONF_URL: TEST_URL, + CONF_API_KEY: TEST_API_KEY, +} +TEST_SERVER_LIST_DATA = { + "meta": {"pagination": {"total": 2, "count": 2, "per_page": 50, "current_page": 1}}, + "data": [ + { + "object": "server", + "attributes": { + "server_owner": True, + "identifier": "1", + "internal_id": 1, + "uuid": "1-1-1-1-1", + "name": "Test Server 1", + "node": "default_node", + "description": "Description of Test Server 1", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test_server1.jar", + "docker_image": "test_docker_image_1", + "egg_features": ["java_version"], + }, + }, + { + "object": "server", + "attributes": { + "server_owner": True, + "identifier": "2", + "internal_id": 2, + "uuid": "2-2-2-2-2", + "name": "Test Server 2", + "node": "default_node", + "description": "Description of Test Server 2", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test_server_2.jar", + "docker_image": "test_docker_image2", + "egg_features": ["java_version"], + }, + }, + ], +} +TEST_SERVER = { + "server_owner": True, + "identifier": "1", + "internal_id": 1, + "uuid": "1-1-1-1-1", + "name": "Test Server 1", + "node": "default_node", + "is_node_under_maintenance": False, + "sftp_details": {"ip": "192.168.0.1", "port": 2022}, + "description": "", + "limits": { + "memory": 2048, + "swap": 1024, + "disk": 10240, + "io": 500, + "cpu": 100, + "threads": None, + "oom_disabled": True, + }, + "invocation": "java -jar test.jar", + "docker_image": "test_docker_image", + "egg_features": ["eula", "java_version", "pid_limit"], + "feature_limits": {"databases": 0, "allocations": 0, "backups": 3}, + "status": None, + "is_suspended": False, + "is_installing": False, + "is_transferring": False, + "relationships": {"allocations": {...}, "variables": {...}}, +} +TEST_SERVER_UTILIZATION = { + "current_state": "running", + "is_suspended": False, + "resources": { + "memory_bytes": 1111, + "cpu_absolute": 22, + "disk_bytes": 3333, + "network_rx_bytes": 44, + "network_tx_bytes": 55, + "uptime": 6666, + }, +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.pterodactyl.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create Pterodactyl mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=None, + entry_id="01234567890123456789012345678901", + title=TEST_URL, + data={ + CONF_URL: TEST_URL, + CONF_API_KEY: TEST_API_KEY, + }, + version=1, + ) + + +@pytest.fixture +def mock_pterodactyl(): + """Mock the Pterodactyl API.""" + with patch( + "homeassistant.components.pterodactyl.api.PterodactylClient", autospec=True + ) as mock: + mock.return_value.client.servers.list_servers.return_value = PaginatedResponse( + mock.return_value, "client", TEST_SERVER_LIST_DATA + ) + mock.return_value.client.servers.get_server.return_value = TEST_SERVER + mock.return_value.client.servers.get_server_utilization.return_value = ( + TEST_SERVER_UTILIZATION + ) + + yield mock.return_value diff --git a/tests/components/pterodactyl/test_config_flow.py b/tests/components/pterodactyl/test_config_flow.py new file mode 100644 index 00000000000..14bb2d2f69f --- /dev/null +++ 
b/tests/components/pterodactyl/test_config_flow.py @@ -0,0 +1,129 @@ +"""Test the Pterodactyl config flow.""" + +from pydactyl import PterodactylClient +from pydactyl.exceptions import ClientConfigError, PterodactylApiError +import pytest + +from homeassistant.components.pterodactyl.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import TEST_URL, TEST_USER_INPUT + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("mock_pterodactyl", "mock_setup_entry") +async def test_full_flow(hass: HomeAssistant) -> None: + """Test full flow without errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + "exception_type", + [ + ClientConfigError, + PterodactylApiError, + ], +) +async def test_recovery_after_api_error( + hass: HomeAssistant, + exception_type, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test recovery after an API error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_pterodactyl.client.servers.list_servers.side_effect = exception_type + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_pterodactyl.reset_mock(side_effect=True) + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], user_input=TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_recovery_after_unknown_error( + hass: HomeAssistant, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test recovery after an API error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + mock_pterodactyl.client.servers.list_servers.side_effect = Exception + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], + user_input=TEST_USER_INPUT, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + mock_pterodactyl.reset_mock(side_effect=True) + + result = await hass.config_entries.flow.async_configure( + flow_id=result["flow_id"], user_input=TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == TEST_URL + assert result["data"] == TEST_USER_INPUT + + +@pytest.mark.usefixtures("mock_setup_entry") +async def 
test_service_already_configured( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_pterodactyl: PterodactylClient, +) -> None: + """Test config flow abort if the Pterodactyl server is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=TEST_USER_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py index 1809b16bd75..a9a51c22b7c 100644 --- a/tests/components/purpleair/conftest.py +++ b/tests/components/purpleair/conftest.py @@ -8,7 +8,7 @@ from aiopurpleair.endpoints.sensors import NearbySensorResult from aiopurpleair.models.sensors import GetSensorsResponse import pytest -from homeassistant.components.purpleair import DOMAIN +from homeassistant.components.purpleair.const import DOMAIN from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -20,7 +20,7 @@ TEST_SENSOR_INDEX2 = 567890 @pytest.fixture(name="api") def api_fixture(get_sensors_response: GetSensorsResponse) -> Mock: - """Define a fixture to return a mocked aiopurple API object.""" + """Define a fixture to return a mocked aiopurpleair API object.""" return Mock( async_check_api_key=AsyncMock(), get_map_url=Mock(return_value="http://example.com"), diff --git a/tests/components/purpleair/test_config_flow.py b/tests/components/purpleair/test_config_flow.py index 998cb2b7878..5ee15de4e6b 100644 --- a/tests/components/purpleair/test_config_flow.py +++ b/tests/components/purpleair/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from aiopurpleair.errors import InvalidApiKeyError, PurpleAirError import pytest -from homeassistant.components.purpleair import DOMAIN +from homeassistant.components.purpleair.const import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -288,6 +288,7 @@ async def test_options_remove_sensor( device_entry = device_registry.async_get_device( identifiers={(DOMAIN, str(TEST_SENSOR_INDEX1))} ) + assert device_entry is not None result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={"sensor_device_id": device_entry.id}, diff --git a/tests/components/pvoutput/test_sensor.py b/tests/components/pvoutput/test_sensor.py index fbcff94be60..36a37653efe 100644 --- a/tests/components/pvoutput/test_sensor.py +++ b/tests/components/pvoutput/test_sensor.py @@ -30,8 +30,8 @@ async def test_sensors( ) -> None: """Test the PVOutput sensors.""" - state = hass.states.get("sensor.frenck_s_solar_farm_energy_consumed") - entry = entity_registry.async_get("sensor.frenck_s_solar_farm_energy_consumed") + state = hass.states.get("sensor.frenck_s_solar_farm_energy_consumption") + entry = entity_registry.async_get("sensor.frenck_s_solar_farm_energy_consumption") assert entry assert state assert entry.unique_id == "12345_energy_consumption" @@ -40,14 +40,14 @@ async def test_sensors( assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY assert ( state.attributes.get(ATTR_FRIENDLY_NAME) - == "Frenck's Solar Farm Energy consumed" + == "Frenck's Solar Farm Energy consumption" ) assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL_INCREASING assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == 
UnitOfEnergy.WATT_HOUR assert ATTR_ICON not in state.attributes - state = hass.states.get("sensor.frenck_s_solar_farm_energy_generated") - entry = entity_registry.async_get("sensor.frenck_s_solar_farm_energy_generated") + state = hass.states.get("sensor.frenck_s_solar_farm_energy_generation") + entry = entity_registry.async_get("sensor.frenck_s_solar_farm_energy_generation") assert entry assert state assert entry.unique_id == "12345_energy_generation" @@ -56,7 +56,7 @@ async def test_sensors( assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY assert ( state.attributes.get(ATTR_FRIENDLY_NAME) - == "Frenck's Solar Farm Energy generated" + == "Frenck's Solar Farm Energy generation" ) assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.TOTAL_INCREASING assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.WATT_HOUR @@ -78,8 +78,8 @@ async def test_sensors( assert ATTR_DEVICE_CLASS not in state.attributes assert ATTR_ICON not in state.attributes - state = hass.states.get("sensor.frenck_s_solar_farm_power_consumed") - entry = entity_registry.async_get("sensor.frenck_s_solar_farm_power_consumed") + state = hass.states.get("sensor.frenck_s_solar_farm_power_consumption") + entry = entity_registry.async_get("sensor.frenck_s_solar_farm_power_consumption") assert entry assert state assert entry.unique_id == "12345_power_consumption" @@ -87,14 +87,15 @@ async def test_sensors( assert state.state == "2500.0" assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.POWER assert ( - state.attributes.get(ATTR_FRIENDLY_NAME) == "Frenck's Solar Farm Power consumed" + state.attributes.get(ATTR_FRIENDLY_NAME) + == "Frenck's Solar Farm Power consumption" ) assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT assert ATTR_ICON not in state.attributes - state = hass.states.get("sensor.frenck_s_solar_farm_power_generated") - entry = entity_registry.async_get("sensor.frenck_s_solar_farm_power_generated") + state = hass.states.get("sensor.frenck_s_solar_farm_power_generation") + entry = entity_registry.async_get("sensor.frenck_s_solar_farm_power_generation") assert entry assert state assert entry.unique_id == "12345_power_generation" @@ -103,7 +104,7 @@ async def test_sensors( assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.POWER assert ( state.attributes.get(ATTR_FRIENDLY_NAME) - == "Frenck's Solar Farm Power generated" + == "Frenck's Solar Farm Power generation" ) assert state.attributes.get(ATTR_STATE_CLASS) is SensorStateClass.MEASUREMENT assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT diff --git a/tests/components/pyload/conftest.py b/tests/components/pyload/conftest.py index 46144771cc1..9b410a5fdd6 100644 --- a/tests/components/pyload/conftest.py +++ b/tests/components/pyload/conftest.py @@ -12,6 +12,7 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_PORT, CONF_SSL, + CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL, ) @@ -19,10 +20,8 @@ from homeassistant.const import ( from tests.common import MockConfigEntry USER_INPUT = { - CONF_HOST: "pyload.local", + CONF_URL: "https://pyload.local:8000/prefix", CONF_PASSWORD: "test-password", - CONF_PORT: 8000, - CONF_SSL: True, CONF_USERNAME: "test-username", CONF_VERIFY_SSL: False, } @@ -33,10 +32,8 @@ REAUTH_INPUT = { } NEW_INPUT = { - CONF_HOST: "pyload.local", + CONF_URL: "https://pyload.local:8000/prefix", CONF_PASSWORD: "new-password", - CONF_PORT: 8000, - 
CONF_SSL: True, CONF_USERNAME: "new-username", CONF_VERIFY_SSL: False, } @@ -97,5 +94,28 @@ def mock_pyloadapi() -> Generator[MagicMock]: def mock_config_entry() -> MockConfigEntry: """Mock pyLoad configuration entry.""" return MockConfigEntry( - domain=DOMAIN, title=DEFAULT_NAME, data=USER_INPUT, entry_id="XXXXXXXXXXXXXX" + domain=DOMAIN, + title=DEFAULT_NAME, + data=USER_INPUT, + entry_id="XXXXXXXXXXXXXX", + ) + + +@pytest.fixture(name="config_entry_migrate") +def mock_config_entry_migrate() -> MockConfigEntry: + """Mock pyLoad configuration entry for migration.""" + return MockConfigEntry( + domain=DOMAIN, + title=DEFAULT_NAME, + data={ + CONF_HOST: "pyload.local", + CONF_PASSWORD: "test-password", + CONF_PORT: 8000, + CONF_SSL: True, + CONF_USERNAME: "test-username", + CONF_VERIFY_SSL: False, + }, + version=1, + minor_version=0, + entry_id="XXXXXXXXXXXXXX", ) diff --git a/tests/components/pyload/snapshots/test_diagnostics.ambr b/tests/components/pyload/snapshots/test_diagnostics.ambr index e2b51ad184a..d773804bf73 100644 --- a/tests/components/pyload/snapshots/test_diagnostics.ambr +++ b/tests/components/pyload/snapshots/test_diagnostics.ambr @@ -2,10 +2,8 @@ # name: test_diagnostics dict({ 'config_entry_data': dict({ - 'host': '**REDACTED**', 'password': '**REDACTED**', - 'port': 8000, - 'ssl': True, + 'url': 'https://**redacted**:8000/prefix', 'username': '**REDACTED**', 'verify_ssl': False, }), @@ -15,6 +13,7 @@ 'download': True, 'free_space': 99999999999, 'pause': False, + 'proxy': None, 'queue': 6, 'reconnect': False, 'speed': 5405963.0, diff --git a/tests/components/pyload/test_init.py b/tests/components/pyload/test_init.py index 00b1f0aa3a8..5c85979b9df 100644 --- a/tests/components/pyload/test_init.py +++ b/tests/components/pyload/test_init.py @@ -8,6 +8,7 @@ from pyloadapi.exceptions import CannotConnect, InvalidAuth, ParserError import pytest from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.const import CONF_PATH, CONF_URL from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, async_fire_time_changed @@ -88,3 +89,22 @@ async def test_coordinator_update_invalid_auth( await hass.async_block_till_done() assert any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) + + +@pytest.mark.usefixtures("mock_pyloadapi") +async def test_migration( + hass: HomeAssistant, + config_entry_migrate: MockConfigEntry, +) -> None: + """Test config entry migration.""" + + config_entry_migrate.add_to_hass(hass) + assert config_entry_migrate.data.get(CONF_PATH) is None + + await hass.config_entries.async_setup(config_entry_migrate.entry_id) + await hass.async_block_till_done() + + assert config_entry_migrate.state is ConfigEntryState.LOADED + assert config_entry_migrate.version == 1 + assert config_entry_migrate.minor_version == 1 + assert config_entry_migrate.data[CONF_URL] == "https://pyload.local:8000/" diff --git a/tests/components/qbus/conftest.py b/tests/components/qbus/conftest.py index 8268d091bda..f1fd96c321b 100644 --- a/tests/components/qbus/conftest.py +++ b/tests/components/qbus/conftest.py @@ -1,5 +1,7 @@ """Test fixtures for qbus.""" +import json + import pytest from homeassistant.components.qbus.const import CONF_SERIAL_NUMBER, DOMAIN @@ -7,9 +9,13 @@ from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonObjectType -from .const import FIXTURE_PAYLOAD_CONFIG +from .const import FIXTURE_PAYLOAD_CONFIG, TOPIC_CONFIG -from tests.common 
import MockConfigEntry, load_json_object_fixture +from tests.common import ( + MockConfigEntry, + async_fire_mqtt_message, + load_json_object_fixture, +) @pytest.fixture @@ -31,3 +37,18 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: def payload_config() -> JsonObjectType: """Return the config topic payload.""" return load_json_object_fixture(FIXTURE_PAYLOAD_CONFIG, DOMAIN) + + +@pytest.fixture +async def setup_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + payload_config: JsonObjectType, +) -> None: + """Set up the integration.""" + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) + await hass.async_block_till_done() diff --git a/tests/components/qbus/fixtures/payload_config.json b/tests/components/qbus/fixtures/payload_config.json index e2c7f463e4e..fc204c975ad 100644 --- a/tests/components/qbus/fixtures/payload_config.json +++ b/tests/components/qbus/fixtures/payload_config.json @@ -46,7 +46,7 @@ { "id": "UL15", "location": "Media room", - "locationId": 0, + "locationId": 1, "name": "MEDIA ROOM", "originalName": "MEDIA ROOM", "refId": "000001/28", @@ -65,6 +65,40 @@ "write": true } } + }, + { + "id": "UL20", + "location": "Living", + "locationId": 0, + "name": "LIVING TH", + "originalName": "LIVING TH", + "refId": "000001/120", + "type": "thermo", + "actions": {}, + "properties": { + "currRegime": { + "enumValues": ["MANUEEL", "VORST", "ECONOMY", "COMFORT", "NACHT"], + "read": true, + "type": "enumString", + "write": true + }, + "currTemp": { + "max": 35, + "min": 0, + "read": true, + "step": 0.5, + "type": "number", + "write": false + }, + "setTemp": { + "max": 35, + "min": 0, + "read": true, + "step": 0.5, + "type": "number", + "write": true + } + } } ] } diff --git a/tests/components/qbus/test_climate.py b/tests/components/qbus/test_climate.py new file mode 100644 index 00000000000..d521e310984 --- /dev/null +++ b/tests/components/qbus/test_climate.py @@ -0,0 +1,228 @@ +"""Test Qbus light entities.""" + +from datetime import timedelta +from unittest.mock import MagicMock, call + +import pytest + +from homeassistant.components.climate import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_ACTION, + ATTR_PRESET_MODE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + ClimateEntity, + HVACAction, + HVACMode, +) +from homeassistant.components.qbus.climate import STATE_REQUEST_DELAY +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.util import dt as dt_util + +from tests.common import async_fire_mqtt_message, async_fire_time_changed +from tests.typing import MqttMockHAClient + +_CURRENT_TEMPERATURE = 21.5 +_SET_TEMPERATURE = 20.5 +_REGIME = "COMFORT" + +_PAYLOAD_CLIMATE_STATE_TEMP = ( + f'{{"id":"UL20","properties":{{"setTemp":{_SET_TEMPERATURE}}},"type":"event"}}' +) +_PAYLOAD_CLIMATE_STATE_TEMP_FULL = f'{{"id":"UL20","properties":{{"currRegime":"MANUEEL","currTemp":{_CURRENT_TEMPERATURE},"setTemp":{_SET_TEMPERATURE}}},"type":"state"}}' + +_PAYLOAD_CLIMATE_STATE_PRESET = ( + f'{{"id":"UL20","properties":{{"currRegime":"{_REGIME}"}},"type":"event"}}' +) +_PAYLOAD_CLIMATE_STATE_PRESET_FULL = 
f'{{"id":"UL20","properties":{{"currRegime":"{_REGIME}","currTemp":{_CURRENT_TEMPERATURE},"setTemp":22.0}},"type":"state"}}' + +_PAYLOAD_CLIMATE_SET_TEMP = f'{{"id": "UL20", "type": "state", "properties": {{"setTemp": {_SET_TEMPERATURE}}}}}' +_PAYLOAD_CLIMATE_SET_PRESET = ( + '{"id": "UL20", "type": "state", "properties": {"currRegime": "COMFORT"}}' +) + +_TOPIC_CLIMATE_STATE = "cloudapp/QBUSMQTTGW/UL1/UL20/state" +_TOPIC_CLIMATE_SET_STATE = "cloudapp/QBUSMQTTGW/UL1/UL20/setState" +_TOPIC_GET_STATE = "cloudapp/QBUSMQTTGW/getState" + +_CLIMATE_ENTITY_ID = "climate.living_th" + + +async def test_climate( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate temperature & preset.""" + + # Set temperature + mqtt_mock.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_TEMPERATURE: _SET_TEMPERATURE, + }, + blocking=True, + ) + + mqtt_mock.async_publish.assert_called_once_with( + _TOPIC_CLIMATE_SET_STATE, _PAYLOAD_CLIMATE_SET_TEMP, 0, False + ) + + # Simulate a partial state response + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + + # Check state + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] is None + assert entity.attributes[ATTR_PRESET_MODE] == "MANUEEL" + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # After a delay, a full state request should've been sent + _wait_and_assert_state_request(hass, mqtt_mock) + + # Simulate a full state response + async_fire_mqtt_message( + hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP_FULL + ) + await hass.async_block_till_done() + + # Check state after full state response + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == "MANUEEL" + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # Set preset + mqtt_mock.reset_mock() + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_PRESET_MODE: _REGIME, + }, + blocking=True, + ) + + mqtt_mock.async_publish.assert_called_once_with( + _TOPIC_CLIMATE_SET_STATE, _PAYLOAD_CLIMATE_SET_PRESET, 0, False + ) + + # Simulate a partial state response + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_PRESET) + await hass.async_block_till_done() + + # Check state + entity = hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == _SET_TEMPERATURE + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == _REGIME + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE + assert entity.state == HVACMode.HEAT + + # After a delay, a full state request should've been sent + _wait_and_assert_state_request(hass, mqtt_mock) + + # Simulate a full state response + async_fire_mqtt_message( + hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_PRESET_FULL + ) + await hass.async_block_till_done() + + # Check state after full state response + entity = 
hass.states.get(_CLIMATE_ENTITY_ID) + assert entity + assert entity.attributes[ATTR_TEMPERATURE] == 22.0 + assert entity.attributes[ATTR_CURRENT_TEMPERATURE] == _CURRENT_TEMPERATURE + assert entity.attributes[ATTR_PRESET_MODE] == _REGIME + assert entity.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING + assert entity.state == HVACMode.HEAT + + +async def test_climate_when_invalid_state_received( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate when no valid state is received.""" + + platform: EntityPlatform = hass.data["entity_components"][CLIMATE_DOMAIN] + entity: ClimateEntity = next( + ( + entity + for entity in platform.entities + if entity.entity_id == _CLIMATE_ENTITY_ID + ), + None, + ) + + assert entity + entity.async_schedule_update_ha_state = MagicMock() + + # Simulate state response + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, "") + await hass.async_block_till_done() + + entity.async_schedule_update_ha_state.assert_not_called() + + +async def test_climate_with_fast_subsequent_changes( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate with fast subsequent changes.""" + + # Simulate two subsequent partial state responses + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + async_fire_mqtt_message(hass, _TOPIC_CLIMATE_STATE, _PAYLOAD_CLIMATE_STATE_TEMP) + await hass.async_block_till_done() + + # State request should be requested only once + _wait_and_assert_state_request(hass, mqtt_mock) + + +async def test_climate_with_unknown_preset( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + setup_integration: None, +) -> None: + """Test climate with passing an unknown preset value.""" + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + { + ATTR_ENTITY_ID: _CLIMATE_ENTITY_ID, + ATTR_PRESET_MODE: "What is cooler than being cool?", + }, + blocking=True, + ) + + +def _wait_and_assert_state_request( + hass: HomeAssistant, mqtt_mock: MqttMockHAClient +) -> None: + mqtt_mock.reset_mock() + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(STATE_REQUEST_DELAY)) + mqtt_mock.async_publish.assert_has_calls( + [call(_TOPIC_GET_STATE, '["UL20"]', 0, False)], + any_order=True, + ) diff --git a/tests/components/qbus/test_light.py b/tests/components/qbus/test_light.py index c64219f1269..2db2c622289 100644 --- a/tests/components/qbus/test_light.py +++ b/tests/components/qbus/test_light.py @@ -1,7 +1,5 @@ """Test Qbus light entities.""" -import json - from homeassistant.components.light import ( ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN, @@ -10,11 +8,8 @@ from homeassistant.components.light import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonObjectType -from .const import TOPIC_CONFIG - -from tests.common import MockConfigEntry, async_fire_mqtt_message +from tests.common import async_fire_mqtt_message from tests.typing import MqttMockHAClient # 186 = 73% (rounded) @@ -44,17 +39,10 @@ _LIGHT_ENTITY_ID = "light.media_room" async def test_light( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - mock_config_entry: MockConfigEntry, - payload_config: JsonObjectType, + setup_integration: None, ) -> None: """Test turning on and off.""" - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - 
await hass.async_block_till_done() - - async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) - await hass.async_block_till_done() - # Switch ON mqtt_mock.reset_mock() await hass.services.async_call( diff --git a/tests/components/qbus/test_switch.py b/tests/components/qbus/test_switch.py index 83bb667e4eb..ddb63e933da 100644 --- a/tests/components/qbus/test_switch.py +++ b/tests/components/qbus/test_switch.py @@ -1,7 +1,5 @@ """Test Qbus switch entities.""" -import json - from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -9,11 +7,8 @@ from homeassistant.components.switch import ( ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util.json import JsonObjectType -from .const import TOPIC_CONFIG - -from tests.common import MockConfigEntry, async_fire_mqtt_message +from tests.common import async_fire_mqtt_message from tests.typing import MqttMockHAClient _PAYLOAD_SWITCH_STATE_ON = '{"id":"UL10","properties":{"value":true},"type":"state"}' @@ -34,17 +29,10 @@ _SWITCH_ENTITY_ID = "switch.living" async def test_switch_turn_on_off( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - mock_config_entry: MockConfigEntry, - payload_config: JsonObjectType, + setup_integration: None, ) -> None: """Test turning on and off.""" - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - async_fire_mqtt_message(hass, TOPIC_CONFIG, json.dumps(payload_config)) - await hass.async_block_till_done() - # Switch ON mqtt_mock.reset_mock() await hass.services.async_call( diff --git a/tests/components/recorder/auto_repairs/statistics/test_schema.py b/tests/components/recorder/auto_repairs/statistics/test_schema.py index 352a2345052..99d6705e4a4 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_schema.py +++ b/tests/components/recorder/auto_repairs/statistics/test_schema.py @@ -87,6 +87,7 @@ async def test_validate_db_schema_fix_float_issue( "created_ts DOUBLE PRECISION", "start_ts DOUBLE PRECISION", "mean DOUBLE PRECISION", + "mean_weight DOUBLE PRECISION", "min DOUBLE PRECISION", "max DOUBLE PRECISION", "last_reset_ts DOUBLE PRECISION", diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index 28eb097f576..d381c225275 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -35,7 +35,8 @@ from homeassistant.components.recorder.db_schema import ( StatesMeta, ) from homeassistant.components.recorder.tasks import RecorderTask, StatisticsTask -from homeassistant.const import UnitOfTemperature +from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass +from homeassistant.const import DEGREE, UnitOfTemperature from homeassistant.core import Event, HomeAssistant, State from homeassistant.helpers import recorder as recorder_helper from homeassistant.util import dt as dt_util @@ -290,6 +291,7 @@ def record_states( sns2 = "sensor.test2" sns3 = "sensor.test3" sns4 = "sensor.test4" + sns5 = "sensor.wind_direction" sns1_attr = { "device_class": "temperature", "state_class": "measurement", @@ -302,6 +304,11 @@ def record_states( } sns3_attr = {"device_class": "temperature"} sns4_attr = {} + sns5_attr = { + "device_class": SensorDeviceClass.WIND_DIRECTION, + "state_class": SensorStateClass.MEASUREMENT_ANGLE, + "unit_of_measurement": DEGREE, + } def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -315,7 +322,7 
@@ def record_states( three = two + timedelta(seconds=30 * 5) four = three + timedelta(seconds=14 * 5) - states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []} + states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: [], sns5: []} with freeze_time(one) as freezer: states[mp].append( set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)}) @@ -324,6 +331,7 @@ def record_states( states[sns2].append(set_state(sns2, "10", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "10", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "10", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "10", attributes=sns5_attr)) freezer.move_to(one + timedelta(microseconds=1)) states[mp].append( @@ -335,12 +343,14 @@ def record_states( states[sns2].append(set_state(sns2, "15", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "15", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "15", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "350", attributes=sns5_attr)) freezer.move_to(three) states[sns1].append(set_state(sns1, "20", attributes=sns1_attr)) states[sns2].append(set_state(sns2, "20", attributes=sns2_attr)) states[sns3].append(set_state(sns3, "20", attributes=sns3_attr)) states[sns4].append(set_state(sns4, "20", attributes=sns4_attr)) + states[sns5].append(set_state(sns5, "5", attributes=sns5_attr)) return zero, four, states diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index daa7fb6977c..9c19a1c7405 100644 --- a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -583,6 +583,8 @@ class StatisticsBase: last_reset_ts = Column(TIMESTAMP_TYPE) state = Column(DOUBLE_TYPE) sum = Column(DOUBLE_TYPE) + # *** Not originally in v32, only added for tests. 
Added in v49 + mean_weight = Column(DOUBLE_TYPE) @classmethod def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: diff --git a/tests/components/recorder/test_backup.py b/tests/components/recorder/test_backup.py index bed9e88fcbf..a4362b1fa4c 100644 --- a/tests/components/recorder/test_backup.py +++ b/tests/components/recorder/test_backup.py @@ -17,7 +17,7 @@ async def test_async_pre_backup(recorder_mock: Recorder, hass: HomeAssistant) -> "homeassistant.components.recorder.core.Recorder.lock_database" ) as lock_mock: await async_pre_backup(hass) - assert lock_mock.called + assert lock_mock.called RAISES_HASS_NOT_RUNNING = pytest.raises( @@ -75,13 +75,17 @@ async def test_async_pre_backup_with_migration( ) -> None: """Test pre backup with migration.""" with ( + patch( + "homeassistant.components.recorder.core.Recorder.lock_database" + ) as lock_mock, patch( "homeassistant.components.recorder.backup.async_migration_in_progress", return_value=True, ), - pytest.raises(HomeAssistantError), + pytest.raises(HomeAssistantError, match="Database migration in progress"), ): await async_pre_backup(hass) + assert not lock_mock.called async def test_async_post_backup(recorder_mock: Recorder, hass: HomeAssistant) -> None: @@ -90,7 +94,7 @@ async def test_async_post_backup(recorder_mock: Recorder, hass: HomeAssistant) - "homeassistant.components.recorder.core.Recorder.unlock_database" ) as unlock_mock: await async_post_backup(hass) - assert unlock_mock.called + assert unlock_mock.called async def test_async_post_backup_failure( @@ -102,7 +106,9 @@ async def test_async_post_backup_failure( "homeassistant.components.recorder.core.Recorder.unlock_database", return_value=False, ) as unlock_mock, - pytest.raises(HomeAssistantError), + pytest.raises( + HomeAssistantError, match="Could not release database write lock" + ), ): await async_post_backup(hass) assert unlock_mock.called diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 012e227c11a..7fd73aaf735 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -1538,6 +1538,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": process_timestamp(one_year_ago).replace(tzinfo=None), @@ -1553,6 +1554,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1568,6 +1570,7 @@ async def test_stats_timestamp_conversion_is_reentrant( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": process_timestamp(one_month_ago).replace(tzinfo=None), @@ -1705,6 +1708,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1720,6 +1724,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1735,6 +1740,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": 
None, "metadata_id": 1000, "min": None, "start": None, @@ -1758,6 +1764,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1773,6 +1780,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1788,6 +1796,7 @@ async def test_stats_timestamp_with_one_by_one( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1932,6 +1941,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": one_year_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1947,6 +1957,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1962,6 +1973,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": one_month_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, @@ -1985,6 +1997,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "last_reset_ts": six_months_ago.timestamp(), "max": None, "mean": None, + "mean_weight": None, "metadata_id": 1000, "min": None, "start": None, diff --git a/tests/components/recorder/test_pool.py b/tests/components/recorder/test_pool.py index 3cca095399b..e391161c1ec 100644 --- a/tests/components/recorder/test_pool.py +++ b/tests/components/recorder/test_pool.py @@ -1,5 +1,6 @@ """Test pool.""" +import asyncio import threading import pytest @@ -8,6 +9,7 @@ from sqlalchemy.orm import sessionmaker from homeassistant.components.recorder.const import DB_WORKER_PREFIX from homeassistant.components.recorder.pool import RecorderPool +from homeassistant.core import HomeAssistant async def test_recorder_pool_called_from_event_loop() -> None: @@ -22,7 +24,9 @@ async def test_recorder_pool_called_from_event_loop() -> None: sessionmaker(bind=engine)().connection() -def test_recorder_pool(caplog: pytest.LogCaptureFixture) -> None: +async def test_recorder_pool( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Test RecorderPool gives the same connection in the creating thread.""" recorder_and_worker_thread_ids: set[int] = set() engine = create_engine( @@ -35,6 +39,8 @@ def test_recorder_pool(caplog: pytest.LogCaptureFixture) -> None: connections = [] add_thread = False + event = asyncio.Event() + def _get_connection_twice(): if add_thread: recorder_and_worker_thread_ids.add(threading.get_ident()) @@ -48,33 +54,42 @@ def test_recorder_pool(caplog: pytest.LogCaptureFixture) -> None: session = get_session() connections.append(session.connection().connection.driver_connection) session.close() + hass.loop.call_soon_threadsafe(event.set) caplog.clear() + event.clear() new_thread = threading.Thread(target=_get_connection_twice) new_thread.start() + await event.wait() new_thread.join() assert "accesses the database without the database executor" in caplog.text assert connections[0] != connections[1] add_thread = True caplog.clear() + event.clear() new_thread = threading.Thread(target=_get_connection_twice, name=DB_WORKER_PREFIX) 
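+    # Descriptive note (added for clarity): with shutdown flagged, the pool is expected
+    # to stop handing out the cached connection, so even a thread named as a DB worker
+    # should now receive two different connections (checked below via
+    # connections[6] != connections[7]).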
new_thread.start() + await event.wait() new_thread.join() assert "accesses the database without the database executor" not in caplog.text assert connections[2] == connections[3] caplog.clear() + event.clear() new_thread = threading.Thread(target=_get_connection_twice, name="Recorder") new_thread.start() + await event.wait() new_thread.join() assert "accesses the database without the database executor" not in caplog.text assert connections[4] == connections[5] shutdown = True caplog.clear() + event.clear() new_thread = threading.Thread(target=_get_connection_twice, name=DB_WORKER_PREFIX) new_thread.start() + await event.wait() new_thread.join() assert "accesses the database without the database executor" not in caplog.text assert connections[6] != connections[7] diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index ed883c5403e..ed754723426 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -12,6 +12,7 @@ from homeassistant.components import recorder from homeassistant.components.recorder import Recorder, history, statistics from homeassistant.components.recorder.db_schema import StatisticsShortTerm from homeassistant.components.recorder.models import ( + StatisticMeanType, datetime_to_timestamp_or_none, process_timestamp, ) @@ -123,32 +124,38 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} - for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}): + for kwargs in ({}, {"statistic_ids": ["sensor.test1", "sensor.wind_direction"]}): stats = statistics_during_period(hass, zero, period="5minute", **kwargs) assert stats == {} - stats = get_last_short_term_statistics( - hass, - 0, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {} + for sensor in ("sensor.test1", "sensor.wind_direction"): + stats = get_last_short_term_statistics( + hass, + 0, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {} do_adhoc_statistics(hass, start=zero) do_adhoc_statistics(hass, start=four) await async_wait_recording_done(hass) - metadata = get_metadata(hass, statistic_ids={"sensor.test1", "sensor.test2"}) - assert metadata["sensor.test1"][1]["has_mean"] is True - assert metadata["sensor.test1"][1]["has_sum"] is False - assert metadata["sensor.test2"][1]["has_mean"] is True - assert metadata["sensor.test2"][1]["has_sum"] is False + metadata = get_metadata( + hass, statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"} + ) + for sensor, mean_type in ( + ("sensor.test1", StatisticMeanType.ARITHMETIC), + ("sensor.test2", StatisticMeanType.ARITHMETIC), + ("sensor.wind_direction", StatisticMeanType.CIRCULAR), + ): + assert metadata[sensor][1]["mean_type"] is mean_type + assert metadata[sensor][1]["has_sum"] is False expected_1 = { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), @@ -168,11 +175,39 @@ async def test_compile_hourly_statistics( expected_stats1 = [expected_1, expected_2] expected_stats2 = [expected_1, expected_2] + expected_stats_wind_direction1 = { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + 
"min": None, + "max": None, + "last_reset": None, + } + expected_stats_wind_direction2 = { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(5), + "min": None, + "max": None, + "last_reset": None, + } + expected_stats_wind_direction = [ + expected_stats_wind_direction1, + expected_stats_wind_direction2, + ] + # Test statistics_during_period stats = statistics_during_period( - hass, zero, period="5minute", statistic_ids={"sensor.test1", "sensor.test2"} + hass, + zero, + period="5minute", + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Test statistics_during_period with a far future start and end date future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00")) @@ -181,7 +216,7 @@ async def test_compile_hourly_statistics( future, end_time=future, period="5minute", - statistic_ids={"sensor.test1", "sensor.test2"}, + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) assert stats == {} @@ -191,9 +226,13 @@ async def test_compile_hourly_statistics( zero, end_time=future, period="5minute", - statistic_ids={"sensor.test1", "sensor.test2"}, + statistic_ids={"sensor.test1", "sensor.test2", "sensor.wind_direction"}, ) - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } stats = statistics_during_period( hass, zero, statistic_ids={"sensor.test2"}, period="5minute" @@ -206,32 +245,39 @@ async def test_compile_hourly_statistics( assert stats == {} # Test get_last_short_term_statistics and get_latest_short_term_statistics - stats = get_last_short_term_statistics( - hass, - 0, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {} + for sensor, expected in ( + ("sensor.test1", expected_2), + ("sensor.wind_direction", expected_stats_wind_direction2), + ): + stats = get_last_short_term_statistics( + hass, + 0, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {} - stats = get_last_short_term_statistics( - hass, - 1, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {"sensor.test1": [expected_2]} + stats = get_last_short_term_statistics( + hass, + 1, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: [expected]} with session_scope(hass=hass, read_only=True) as session: stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) - assert stats == {"sensor.test1": [expected_2]} + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } # Now wipe the latest_short_term_statistics_ids table and test again # to make sure we can rebuild the missing data @@ -241,13 +287,15 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, 
{"last_reset", "max", "mean", "min", "state", "sum"}, ) - assert stats == {"sensor.test1": [expected_2]} + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } metadata = get_metadata(hass, statistic_ids={"sensor.test1"}) - with session_scope(hass=hass, read_only=True) as session: stats = get_latest_short_term_statistics_with_session( hass, @@ -258,23 +306,44 @@ async def test_compile_hourly_statistics( ) assert stats == {"sensor.test1": [expected_2]} - stats = get_last_short_term_statistics( - hass, - 2, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, + # Test with multiple metadata ids + metadata = get_metadata( + hass, statistic_ids={"sensor.test1", "sensor.wind_direction"} ) - assert stats == {"sensor.test1": expected_stats1[::-1]} + with session_scope(hass=hass, read_only=True) as session: + stats = get_latest_short_term_statistics_with_session( + hass, + session, + {"sensor.test1", "sensor.wind_direction"}, + {"last_reset", "max", "mean", "min", "state", "sum"}, + metadata=metadata, + ) + assert stats == { + "sensor.test1": [expected_2], + "sensor.wind_direction": [expected_stats_wind_direction2], + } - stats = get_last_short_term_statistics( - hass, - 3, - "sensor.test1", - True, - {"last_reset", "max", "mean", "min", "state", "sum"}, - ) - assert stats == {"sensor.test1": expected_stats1[::-1]} + for sensor, expected in ( + ("sensor.test1", expected_stats1[::-1]), + ("sensor.wind_direction", expected_stats_wind_direction[::-1]), + ): + stats = get_last_short_term_statistics( + hass, + 2, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: expected} + + stats = get_last_short_term_statistics( + hass, + 3, + sensor, + True, + {"last_reset", "max", "mean", "min", "state", "sum"}, + ) + assert stats == {sensor: expected} stats = get_last_short_term_statistics( hass, @@ -291,7 +360,7 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} @@ -306,7 +375,7 @@ async def test_compile_hourly_statistics( stats = get_latest_short_term_statistics_with_session( hass, session, - {"sensor.test1"}, + {"sensor.test1", "sensor.wind_direction"}, {"last_reset", "max", "mean", "min", "state", "sum"}, ) assert stats == {} @@ -460,15 +529,35 @@ async def test_rename_entity( expected_stats1 = [expected_1] expected_stats2 = [expected_1] expected_stats99 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } entity_registry.async_update_entity("sensor.test1", new_entity_id="sensor.test99") await async_wait_recording_done(hass) stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test99": 
expected_stats99, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } async def test_statistics_during_period_set_back_compat( @@ -544,9 +633,25 @@ async def test_rename_entity_collision( } expected_stats1 = [expected_1] expected_stats2 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Insert metadata for sensor.test99 metadata_1 = { @@ -567,7 +672,11 @@ async def test_rename_entity_collision( # Statistics failed to migrate due to the collision stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Verify the safeguard in the states meta manager was hit assert ( @@ -631,9 +740,25 @@ async def test_rename_entity_collision_states_meta_check_disabled( } expected_stats1 = [expected_1] expected_stats2 = [expected_1] + expected_stats_wind_direction = [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(358.6387003873801), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Insert metadata for sensor.test99 metadata_1 = { @@ -660,7 +785,11 @@ async def test_rename_entity_collision_states_meta_check_disabled( # Statistics failed to migrate due to the collision stats = statistics_during_period(hass, zero, period="5minute") - assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} + assert stats == { + "sensor.test1": expected_stats1, + "sensor.test2": expected_stats2, + "sensor.wind_direction": expected_stats_wind_direction, + } # Verify the filter_unique_constraint_integrity_error safeguard was hit assert "Blocked attempt to insert duplicated statistic rows" in caplog.text @@ -786,6 +915,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -800,6 +930,7 @@ async def test_import_statistics( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -876,6 +1007,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy renamed", @@ -890,6 +1022,7 @@ async def test_import_statistics( 1, { "has_mean": False, + 
"mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy renamed", "source": source, diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index a4e35bc8753..a4e4fe45db1 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -1,11 +1,14 @@ """The tests for sensor recorder platform.""" +from collections.abc import Iterable import datetime from datetime import timedelta +import math from statistics import fmean import sys from unittest.mock import ANY, patch +from _pytest.python_api import ApproxBase from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest @@ -13,7 +16,14 @@ import pytest from homeassistant.components import recorder from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.db_schema import Statistics, StatisticsShortTerm +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticMeanType, + StatisticMetaData, +) from homeassistant.components.recorder.statistics import ( + DEG_TO_RAD, + RAD_TO_DEG, async_add_external_statistics, get_last_statistics, get_latest_short_term_statistics_with_session, @@ -24,6 +34,7 @@ from homeassistant.components.recorder.statistics import ( from homeassistant.components.recorder.util import session_scope from homeassistant.components.recorder.websocket_api import UNIT_SCHEMA from homeassistant.components.sensor import UNIT_CONVERTERS +from homeassistant.const import DEGREE from homeassistant.core import HomeAssistant from homeassistant.helpers import recorder as recorder_helper from homeassistant.setup import async_setup_component @@ -247,12 +258,12 @@ async def test_statistics_during_period( @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") @pytest.mark.parametrize("offset", [0, 1, 2]) async def test_statistic_during_period( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - offset, + offset: int, ) -> None: """Test statistic_during_period.""" now = dt_util.utcnow() @@ -307,7 +318,7 @@ async def test_statistic_during_period( ) imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy", "source": "recorder", @@ -655,7 +666,7 @@ async def test_statistic_during_period( hass, session, {"sensor.test"}, - {"last_reset", "max", "mean", "min", "state", "sum"}, + {"last_reset", "state", "sum"}, ) start = imported_stats_5min[-1]["start"].timestamp() end = start + (5 * 60) @@ -672,18 +683,376 @@ async def test_statistic_during_period( } +def _circular_mean(values: Iterable[StatisticData]) -> dict[str, float]: + sin_sum = 0 + cos_sum = 0 + for x in values: + mean = x.get("mean") + assert mean is not None + sin_sum += math.sin(mean * DEG_TO_RAD) + cos_sum += math.cos(mean * DEG_TO_RAD) + + return { + "mean": (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360, + "mean_weight": math.sqrt(sin_sum**2 + cos_sum**2), + } + + +def _circular_mean_approx(values: Iterable[StatisticData]) -> ApproxBase: + return pytest.approx(_circular_mean(values)["mean"]) + + +@pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") +@pytest.mark.parametrize("offset", [0, 1, 2]) +async def test_statistic_during_period_circular_mean( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + offset: 
int, +) -> None: + """Test statistic_during_period.""" + now = dt_util.utcnow() + + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + zero = now + start = zero.replace(minute=offset * 5, second=0, microsecond=0) + timedelta( + hours=-3 + ) + + imported_stats_5min: list[StatisticData] = [ + { + "start": (start + timedelta(minutes=5 * i)), + "mean": (123.456 * i) % 360, + "mean_weight": 1, + } + for i in range(39) + ] + + imported_stats = [] + slice_end = 12 - offset + imported_stats.append( + { + "start": imported_stats_5min[0]["start"].replace(minute=0), + **_circular_mean(imported_stats_5min[0:slice_end]), + } + ) + for i in range(2): + slice_start = i * 12 + (12 - offset) + slice_end = (i + 1) * 12 + (12 - offset) + assert imported_stats_5min[slice_start]["start"].minute == 0 + imported_stats.append( + { + "start": imported_stats_5min[slice_start]["start"], + **_circular_mean(imported_stats_5min[slice_start:slice_end]), + } + ) + + imported_metadata: StatisticMetaData = { + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": "Wind direction", + "source": "recorder", + "statistic_id": "sensor.test", + "unit_of_measurement": DEGREE, + } + + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats, + Statistics, + ) + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats_5min, + StatisticsShortTerm, + ) + await async_wait_recording_done(hass) + + metadata = get_metadata(hass, statistic_ids={"sensor.test"}) + metadata_id = metadata["sensor.test"][0] + run_cache = get_short_term_statistics_run_cache(hass) + # Verify the import of the short term statistics + # also updates the run cache + assert run_cache.get_latest_ids({metadata_id}) is not None + + # No data for this period yet + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": now.isoformat(), + "end_time": now.isoformat(), + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "max": None, + "mean": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[:] + start_time = ( + dt_util.parse_datetime("2022-10-21T04:00:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + end_time = ( + dt_util.parse_datetime("2022-10-21T07:15:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[:] + start_time = ( + dt_util.parse_datetime("2022-10-21T04:00:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + end_time = ( + 
dt_util.parse_datetime("2022-10-21T08:20:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[26:] + start_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == start_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics_5min[26:] + start_time = ( + dt_util.parse_datetime("2022-10-21T06:09:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[:26] + end_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "end_time": end_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[:26]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics_5min[26:32] (less than a full hour) + start_time = ( + dt_util.parse_datetime("2022-10-21T06:10:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[26]["start"].isoformat() == start_time + end_time = ( + dt_util.parse_datetime("2022-10-21T06:40:00+00:00") + + timedelta(minutes=5 * offset) + ).isoformat() + assert imported_stats_5min[32]["start"].isoformat() == end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[26:32]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics[2:] + imported_statistics_5min[36:] + start_time = "2022-10-21T06:00:00+00:00" + assert imported_stats_5min[24 - offset]["start"].isoformat() == start_time + assert imported_stats[2]["start"].isoformat() == start_time + await client.send_json_auto_id( + { + "type": 
"recorder/statistic_during_period", + "fixed_period": { + "start_time": start_time, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[24 - offset :]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_statistics[2:] + imported_statistics_5min[36:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "rolling_window": { + "duration": {"hours": 1, "minutes": 25}, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[24 - offset :]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_statistics[2:3] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "rolling_window": { + "duration": {"hours": 1}, + "offset": {"minutes": -25}, + }, + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + slice_start = 24 - offset + slice_end = 36 - offset + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min[slice_start:slice_end]), + "max": None, + "min": None, + "change": None, + } + + # Test we can get only selected types + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "types": ["mean"], + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats_5min), + } + + @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) async def test_statistic_during_period_hole( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistic_during_period when there are holes in the data.""" - stat_id = 1 - - def next_id(): - nonlocal stat_id - stat_id += 1 - return stat_id - now = dt_util.utcnow() await async_recorder_block_till_done(hass) @@ -704,7 +1073,7 @@ async def test_statistic_during_period_hole( ] imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy", "source": "recorder", @@ -830,6 +1199,156 @@ async def test_statistic_during_period_hole( } +@pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC)) +@pytest.mark.usefixtures("recorder_mock") +async def test_statistic_during_period_hole_circular_mean( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> None: + """Test statistic_during_period when there are holes in the data.""" + now = dt_util.utcnow() + + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + zero = now + start = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=-18) + + imported_stats: list[StatisticData] = [ + { + "start": (start + timedelta(hours=3 * i)), + "mean": (123.456 * i) % 360, + "mean_weight": 1, + } + for i in range(6) + ] + + imported_metadata: StatisticMetaData = { + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": "Wind direction", + "source": "recorder", + "statistic_id": "sensor.test", + "unit_of_measurement": DEGREE, + } + + recorder.get_instance(hass).async_import_statistics( + imported_metadata, + imported_stats, + Statistics, + ) + await 
async_wait_recording_done(hass) + + # This should include imported_stats[:] + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_stats[:] + start_time = "2022-10-20T13:00:00+00:00" + end_time = "2022-10-21T05:00:00+00:00" + assert imported_stats[0]["start"].isoformat() == start_time + assert imported_stats[-1]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_stats[:] + start_time = "2022-10-20T13:00:00+00:00" + end_time = "2022-10-21T08:20:00+00:00" + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[:]), + "max": None, + "min": None, + "change": None, + } + + # This should include imported_stats[1:4] + start_time = "2022-10-20T16:00:00+00:00" + end_time = "2022-10-20T23:00:00+00:00" + assert imported_stats[1]["start"].isoformat() == start_time + assert imported_stats[3]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[1:4]), + "max": None, + "min": None, + "change": None, + } + + # This should also include imported_stats[1:4] + start_time = "2022-10-20T15:00:00+00:00" + end_time = "2022-10-21T00:00:00+00:00" + assert imported_stats[1]["start"].isoformat() > start_time + assert imported_stats[3]["start"].isoformat() < end_time + await client.send_json_auto_id( + { + "type": "recorder/statistic_during_period", + "statistic_id": "sensor.test", + "fixed_period": { + "start_time": start_time, + "end_time": end_time, + }, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["result"] == { + "mean": _circular_mean_approx(imported_stats[1:4]), + "max": None, + "min": None, + "change": None, + } + + @pytest.mark.parametrize( "frozen_time", [ @@ -897,7 +1416,7 @@ async def test_statistic_during_period_partial_overlap( statId = "sensor.test_overlapping" imported_metadata = { - "has_mean": False, + "has_mean": True, "has_sum": True, "name": "Total imported energy overlapping", "source": "recorder", @@ -1766,6 +2285,7 @@ async def test_list_statistic_ids( """Test list_statistic_ids.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE has_sum = not has_mean hass.config.units = 
units @@ -1791,6 +2311,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1813,6 +2334,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1838,6 +2360,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1859,6 +2382,7 @@ async def test_list_statistic_ids( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1939,6 +2463,7 @@ async def test_list_statistic_ids_unit_change( """Test list_statistic_ids.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean else StatisticMeanType.NONE has_sum = not has_mean await async_setup_component(hass, "sensor", {}) @@ -1966,6 +2491,7 @@ async def test_list_statistic_ids_unit_change( "statistic_id": "sensor.test", "display_unit_of_measurement": statistics_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -1987,6 +2513,7 @@ async def test_list_statistic_ids_unit_change( "statistic_id": "sensor.test", "display_unit_of_measurement": display_unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2208,6 +2735,7 @@ async def test_update_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2235,6 +2763,7 @@ async def test_update_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": new_display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2324,6 +2853,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2375,6 +2905,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2428,6 +2959,7 @@ async def test_change_statistics_unit( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2455,6 +2987,7 @@ async def test_change_statistics_unit_errors( "statistic_id": "sensor.test", "display_unit_of_measurement": "kW", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2774,6 +3307,7 @@ async def test_get_statistics_metadata( """Test get_statistics_metadata.""" now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" + mean_type = StatisticMeanType.ARITHMETIC if has_mean 
else StatisticMeanType.NONE has_sum = not has_mean hass.config.units = units @@ -2843,6 +3377,7 @@ async def test_get_statistics_metadata( "statistic_id": "test:total_gas", "display_unit_of_measurement": unit, "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": "Total imported energy", "source": "test", @@ -2874,6 +3409,7 @@ async def test_get_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": attributes["unit_of_measurement"], "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2901,6 +3437,7 @@ async def test_get_statistics_metadata( "statistic_id": "sensor.test", "display_unit_of_measurement": attributes["unit_of_measurement"], "has_mean": has_mean, + "mean_type": mean_type, "has_sum": has_sum, "name": None, "source": "recorder", @@ -2995,6 +3532,7 @@ async def test_import_statistics( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3009,6 +3547,7 @@ async def test_import_statistics( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3213,6 +3752,7 @@ async def test_adjust_sum_statistics_energy( { "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3227,6 +3767,7 @@ async def test_adjust_sum_statistics_energy( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3406,6 +3947,7 @@ async def test_adjust_sum_statistics_gas( { "display_unit_of_measurement": "m³", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3420,6 +3962,7 @@ async def test_adjust_sum_statistics_gas( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, @@ -3617,6 +4160,7 @@ async def test_adjust_sum_statistics_errors( { "display_unit_of_measurement": state_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "statistic_id": statistic_id, "name": "Total imported energy", @@ -3631,6 +4175,7 @@ async def test_adjust_sum_statistics_errors( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": "Total imported energy", "source": source, diff --git a/tests/components/remote_calendar/__init__.py b/tests/components/remote_calendar/__init__.py new file mode 100644 index 00000000000..2ffb157f072 --- /dev/null +++ b/tests/components/remote_calendar/__init__.py @@ -0,0 +1,11 @@ +"""Tests for the Remote Calendar integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/remote_calendar/conftest.py b/tests/components/remote_calendar/conftest.py new file mode 100644 index 00000000000..bf5184bbf54 --- /dev/null +++ b/tests/components/remote_calendar/conftest.py @@ -0,0 +1,89 @@ +"""Fixtures for Remote Calendar.""" + +from collections.abc import Awaitable, Callable +from 
http import HTTPStatus +import textwrap +from typing import Any +import urllib + +import pytest + +from homeassistant.components.remote_calendar.const import CONF_CALENDAR_NAME, DOMAIN +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + +CALENDAR_NAME = "Home Assistant Events" +TEST_ENTITY = "calendar.home_assistant_events" +CALENDER_URL = "https://some.calendar.com/calendar.ics" +FRIENDLY_NAME = "Home Assistant Events" + + +@pytest.fixture(name="time_zone") +def mock_time_zone() -> str: + """Fixture for time zone to use in tests.""" + # Set our timezone to CST/Regina so we can check calculations + # This keeps UTC-6 all year round + return "America/Regina" + + +@pytest.fixture(autouse=True) +async def set_time_zone(hass: HomeAssistant, time_zone: str): + """Set the time zone for the tests.""" + # Set our timezone to CST/Regina so we can check calculations + # This keeps UTC-6 all year round + await hass.config.async_set_time_zone(time_zone) + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Fixture for mock configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, data={CONF_CALENDAR_NAME: CALENDAR_NAME, CONF_URL: CALENDER_URL} + ) + + +type GetEventsFn = Callable[[str, str], Awaitable[list[dict[str, Any]]]] + + +@pytest.fixture(name="get_events") +def get_events_fixture(hass_client: ClientSessionGenerator) -> GetEventsFn: + """Fetch calendar events from the HTTP API.""" + + async def _fetch(start: str, end: str) -> list[dict[str, Any]]: + client = await hass_client() + response = await client.get( + f"/api/calendars/{TEST_ENTITY}?start={urllib.parse.quote(start)}&end={urllib.parse.quote(end)}" + ) + assert response.status == HTTPStatus.OK + return await response.json() + + return _fetch + + +def event_fields(data: dict[str, str]) -> dict[str, str]: + """Filter event API response to minimum fields.""" + return { + k: data[k] + for k in ("summary", "start", "end", "recurrence_id", "location") + if data.get(k) + } + + +@pytest.fixture(name="ics_content") +def mock_ics_content(request: pytest.FixtureRequest) -> str: + """Fixture to allow tests to set initial ics content for the calendar store.""" + default_content = textwrap.dedent( + """\ + BEGIN:VCALENDAR + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART:19970714T170000Z + DTEND:19970715T040000Z + END:VEVENT + END:VCALENDAR + """ + ) + return request.param if hasattr(request, "param") else default_content diff --git a/tests/components/remote_calendar/snapshots/test_diagnostics.ambr b/tests/components/remote_calendar/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..de955f8a2aa --- /dev/null +++ b/tests/components/remote_calendar/snapshots/test_diagnostics.ambr @@ -0,0 +1,17 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'ics': ''' + BEGIN:VCALENDAR + BEGIN:VEVENT + SUMMARY:*** + DTSTART:19970714T170000Z + DTEND:19970715T040000Z + END:VEVENT + END:VCALENDAR + ''', + 'now': '2023-06-04T18:00:00-06:00', + 'system_timezone': 'tzlocal()', + 'timezone': 'America/Regina', + }) +# --- diff --git a/tests/components/remote_calendar/test_calendar.py b/tests/components/remote_calendar/test_calendar.py new file mode 100644 index 00000000000..6ae817321c3 --- /dev/null +++ b/tests/components/remote_calendar/test_calendar.py @@ -0,0 +1,394 @@ +"""Tests for calendar platform of Remote Calendar.""" + +from datetime import datetime 
+import textwrap + +from httpx import Response +import pytest +import respx + +from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .conftest import ( + CALENDER_URL, + FRIENDLY_NAME, + TEST_ENTITY, + GetEventsFn, + event_fields, +) + +from tests.common import MockConfigEntry + + +@respx.mock +async def test_empty_calendar( + hass: HomeAssistant, + config_entry: MockConfigEntry, + get_events: GetEventsFn, +) -> None: + """Test querying the API and fetching events.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """BEGIN:VCALENDAR + VERSION:2.0 + PRODID:-//hacksw/handcal//NONSGML v1.0//EN + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + events = await get_events("1997-07-14T00:00:00", "1997-07-16T00:00:00") + assert len(events) == 0 + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.name == FRIENDLY_NAME + assert state.state == STATE_OFF + assert dict(state.attributes) == { + "friendly_name": FRIENDLY_NAME, + } + + +@pytest.mark.parametrize( + "ics_content", + [ + textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART;TZID=Europe/Berlin:19970714T190000 + DTEND;TZID=Europe/Berlin:19970715T060000 + END:VEVENT + END:VCALENDAR + """ + ), + textwrap.dedent( + """\ + BEGIN:VCALENDAR + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART:19970714T170000Z + DTEND:19970715T040000Z + END:VEVENT + END:VCALENDAR + """ + ), + textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART;TZID=America/Regina:19970714T110000 + DTEND;TZID=America/Regina:19970714T220000 + END:VEVENT + END:VCALENDAR + """ + ), + textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Bastille Day Party + DTSTART;TZID=America/Los_Angeles:19970714T100000 + DTEND;TZID=America/Los_Angeles:19970714T210000 + END:VEVENT + END:VCALENDAR + """ + ), + ], +) +@respx.mock +async def test_api_date_time_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, + ics_content: str, +) -> None: + """Test an event with a start/end date time.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + await setup_integration(hass, config_entry) + events = await get_events("1997-07-14T00:00:00Z", "1997-07-16T00:00:00Z") + assert list(map(event_fields, events)) == [ + { + "summary": "Bastille Day Party", + "start": {"dateTime": "1997-07-14T11:00:00-06:00"}, + "end": {"dateTime": "1997-07-14T22:00:00-06:00"}, + } + ] + + # Query events in UTC + + # Time range before event + events = await get_events("1997-07-13T00:00:00Z", "1997-07-14T16:00:00Z") + assert len(events) == 0 + # Time range after event + events = await get_events("1997-07-15T05:00:00Z", "1997-07-15T06:00:00Z") + assert len(events) == 0 + + # Overlap with event start + events = await get_events("1997-07-13T00:00:00Z", "1997-07-14T18:00:00Z") + assert len(events) == 1 + # Overlap with event end + events = await get_events("1997-07-15T03:00:00Z", "1997-07-15T06:00:00Z") + assert len(events) == 1 + + # Query events overlapping with start and end but in another timezone + events = await get_events("1997-07-12T23:00:00-01:00", "1997-07-14T17:00:00-01:00") + assert len(events) == 1 + events = await get_events("1997-07-15T02:00:00-01:00", "1997-07-15T05:00:00-01:00") + assert len(events) == 1 + 
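The four parametrized ICS payloads above intentionally spell the same instant using different TZID values, which is why every variant is expected to render as 1997-07-14T11:00:00-06:00 once converted to the configured America/Regina zone. A minimal standalone sketch of that conversion (standard library only, not part of the diff; names are illustrative):

```python
# Sketch: why all four DTSTART variants above are the same instant (17:00 UTC)
# and therefore render identically in the test's America/Regina time zone.
from datetime import datetime, timezone
from zoneinfo import ZoneInfo

utc_instant = datetime(1997, 7, 14, 17, 0, tzinfo=timezone.utc)

# Local wall-clock hours the ICS fixtures use for this same instant.
expected_local_hours = {
    "Europe/Berlin": 19,        # CEST, UTC+2 in July 1997
    "America/Regina": 11,       # CST year-round, UTC-6
    "America/Los_Angeles": 10,  # PDT, UTC-7 in July 1997
}
for tz_name, hour in expected_local_hours.items():
    assert utc_instant.astimezone(ZoneInfo(tz_name)).hour == hour

# Rendered in the test's configured time zone:
print(utc_instant.astimezone(ZoneInfo("America/Regina")).isoformat())
# -> 1997-07-14T11:00:00-06:00
```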
+ +@respx.mock +async def test_api_date_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test an event with a start/end date all day event.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Festival International de Jazz de Montreal + DTSTART:20070628 + DTEND:20070709 + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + events = await get_events("2007-06-20T00:00:00", "2007-07-20T00:00:00") + assert list(map(event_fields, events)) == [ + { + "summary": "Festival International de Jazz de Montreal", + "start": {"date": "2007-06-28"}, + "end": {"date": "2007-07-09"}, + } + ] + + # Time range before event (timezone is -6) + events = await get_events("2007-06-26T00:00:00Z", "2007-06-28T01:00:00Z") + assert len(events) == 0 + # Time range after event + events = await get_events("2007-07-10T00:00:00Z", "2007-07-11T00:00:00Z") + assert len(events) == 0 + + # Overlap with event start (timezone is -6) + events = await get_events("2007-06-26T00:00:00Z", "2007-06-28T08:00:00Z") + assert len(events) == 1 + # Overlap with event end + events = await get_events("2007-07-09T00:00:00Z", "2007-07-11T00:00:00Z") + assert len(events) == 1 + + +@pytest.mark.freeze_time(datetime(2007, 6, 28, 12)) +@respx.mock +async def test_active_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test an event with a start/end date time.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Festival International de Jazz de Montreal + LOCATION:Montreal + DTSTART:20070628 + DTEND:20070709 + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + state = hass.states.get(TEST_ENTITY) + assert state + assert state.name == FRIENDLY_NAME + assert state.state == STATE_ON + assert dict(state.attributes) == { + "friendly_name": FRIENDLY_NAME, + "message": "Festival International de Jazz de Montreal", + "all_day": True, + "description": "", + "location": "Montreal", + "start_time": "2007-06-28 00:00:00", + "end_time": "2007-07-09 00:00:00", + } + + +@pytest.mark.freeze_time(datetime(2007, 6, 27, 12)) +@respx.mock +async def test_upcoming_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test an event with a start/end date time.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + VERSION:2.0 + BEGIN:VEVENT + SUMMARY:Festival International de Jazz de Montreal + LOCATION:Montreal + DTSTART:20070628 + DTEND:20070709 + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + state = hass.states.get(TEST_ENTITY) + assert state + assert state.name == FRIENDLY_NAME + assert state.state == STATE_OFF + assert dict(state.attributes) == { + "friendly_name": FRIENDLY_NAME, + "message": "Festival International de Jazz de Montreal", + "all_day": True, + "description": "", + "location": "Montreal", + "start_time": "2007-06-28 00:00:00", + "end_time": "2007-07-09 00:00:00", + } + + +@respx.mock +async def test_recurring_event( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test an event with a recurrence rule.""" + 
respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + BEGIN:VEVENT + DTSTART:20220829T090000 + DTEND:20220829T100000 + SUMMARY:Monday meeting + RRULE:FREQ=WEEKLY;BYDAY=MO + END:VEVENT + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + + events = await get_events("2022-08-20T00:00:00", "2022-09-20T00:00:00") + assert list(map(event_fields, events)) == [ + { + "summary": "Monday meeting", + "start": {"dateTime": "2022-08-29T09:00:00-06:00"}, + "end": {"dateTime": "2022-08-29T10:00:00-06:00"}, + "recurrence_id": "20220829T090000", + }, + { + "summary": "Monday meeting", + "start": {"dateTime": "2022-09-05T09:00:00-06:00"}, + "end": {"dateTime": "2022-09-05T10:00:00-06:00"}, + "recurrence_id": "20220905T090000", + }, + { + "summary": "Monday meeting", + "start": {"dateTime": "2022-09-12T09:00:00-06:00"}, + "end": {"dateTime": "2022-09-12T10:00:00-06:00"}, + "recurrence_id": "20220912T090000", + }, + { + "summary": "Monday meeting", + "start": {"dateTime": "2022-09-19T09:00:00-06:00"}, + "end": {"dateTime": "2022-09-19T10:00:00-06:00"}, + "recurrence_id": "20220919T090000", + }, + ] + + +@respx.mock +@pytest.mark.parametrize( + ("time_zone", "event_order"), + [ + ("America/Los_Angeles", ["One", "Two", "All Day Event"]), + ("America/Regina", ["One", "Two", "All Day Event"]), + ("UTC", ["One", "All Day Event", "Two"]), + ("Asia/Tokyo", ["All Day Event", "One", "Two"]), + ], +) +async def test_all_day_iter_order( + get_events: GetEventsFn, + hass: HomeAssistant, + config_entry: MockConfigEntry, + event_order: list[str], +) -> None: + """Test the sort order of an all day events depending on the time zone.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=textwrap.dedent( + """\ + BEGIN:VCALENDAR + + BEGIN:VEVENT + DTSTART:20221008 + DTEND:20221009 + SUMMARY:All Day Event + END:VEVENT + + BEGIN:VEVENT + DTSTART:20221007T230000Z + DTEND:20221008T233000Z + SUMMARY:One + END:VEVENT + + BEGIN:VEVENT + DTSTART:20221008T010000Z + DTEND:20221008T020000Z + SUMMARY:Two + END:VEVENT + + END:VCALENDAR + """ + ), + ) + ) + await setup_integration(hass, config_entry) + + events = await get_events("2022-10-06T00:00:00Z", "2022-10-09T00:00:00Z") + assert [event["summary"] for event in events] == event_order diff --git a/tests/components/remote_calendar/test_config_flow.py b/tests/components/remote_calendar/test_config_flow.py new file mode 100644 index 00000000000..9aff1594db3 --- /dev/null +++ b/tests/components/remote_calendar/test_config_flow.py @@ -0,0 +1,314 @@ +"""Test the Remote Calendar config flow.""" + +from httpx import ConnectError, Response, UnsupportedProtocol +import pytest +import respx + +from homeassistant.components.remote_calendar.const import CONF_CALENDAR_NAME, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import setup_integration +from .conftest import CALENDAR_NAME, CALENDER_URL + +from tests.common import MockConfigEntry + + +@respx.mock +async def test_form_import_ics(hass: HomeAssistant, ics_content: str) -> None: + """Test we get the import form.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == CALENDAR_NAME + assert result2["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +@respx.mock +async def test_form_import_webcal(hass: HomeAssistant, ics_content: str) -> None: + """Test the import form with a webcal:// URL.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: "webcal://some.calendar.com/calendar.ics", + }, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == CALENDAR_NAME + assert result2["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +@pytest.mark.parametrize( + ("side_effect"), + [ + ConnectError("Connection failed"), + UnsupportedProtocol("Unsupported protocol"), + ], +) +@respx.mock +async def test_form_invalid_url( + hass: HomeAssistant, + side_effect: Exception, + ics_content: str, +) -> None: + """Test that an unreachable URL results in a cannot_connect form error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + respx.get("invalid-url.com").mock(side_effect=side_effect) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: "invalid-url.com", + }, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == CALENDAR_NAME + assert result3["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +@pytest.mark.parametrize( + ("url", "log_message"), + [ + ( + "unsupported://protocol.com", # Test for httpx.UnsupportedProtocol + "Request URL has an unsupported protocol 'unsupported://'", + ), + ( + "invalid-url", # Test for httpx.ProtocolError + "Request URL is missing an 'http://' or 'https://' protocol", + ), + ( + "https://example.com:abc/", # Test for httpx.InvalidURL + "Invalid port: 'abc'", + ), + ], +) +async def test_unsupported_inputs( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, url: str, log_message: str +) -> None: + """Test that unsupported inputs result in a form error.""" + result = await
hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: url, + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + assert log_message in caplog.text + ## A successful config flow isn't tested here because we'd need to mock httpx.get, + ## and then the exception is no longer raised. + + +@pytest.mark.parametrize( + ("http_status", "error"), + [ + (401, "cannot_connect"), + (403, "forbidden"), + ], +) +@respx.mock +async def test_form_http_status_error( + hass: HomeAssistant, ics_content: str, http_status: int, error: str +) -> None: + """Test we handle HTTP status errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=http_status, + ) + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": error} + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == CALENDAR_NAME + assert result3["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +@respx.mock +async def test_no_valid_calendar(hass: HomeAssistant, ics_content: str) -> None: + """Test invalid ICS content.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text="blabla", + ) + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_ics_file"} + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == CALENDAR_NAME + assert result3["data"] == { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: CALENDER_URL, + } + + +async def test_duplicate_name( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test two calendars cannot be added with the same name.""" + + await setup_integration(hass, config_entry) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result.get("errors") + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: CALENDAR_NAME, + CONF_URL: "http://other-calendar.com", + }, + ) + await hass.async_block_till_done() + + assert result2["type"]
is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +async def test_duplicate_url( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test two calendars cannot be added with the same url.""" + + await setup_integration(hass, config_entry) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result.get("errors") + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_CALENDAR_NAME: "new name", + CONF_URL: CALENDER_URL, + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" diff --git a/tests/components/remote_calendar/test_diagnostics.py b/tests/components/remote_calendar/test_diagnostics.py new file mode 100644 index 00000000000..428369b1180 --- /dev/null +++ b/tests/components/remote_calendar/test_diagnostics.py @@ -0,0 +1,39 @@ +"""Test the remote calendar diagnostics.""" + +import datetime + +from httpx import Response +import pytest +import respx +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .conftest import CALENDER_URL + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@respx.mock +@pytest.mark.freeze_time(datetime.datetime(2023, 6, 5)) +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, + ics_content: str, +) -> None: + """Test config entry diagnostics.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + await setup_integration(hass, config_entry) + await hass.async_block_till_done() + result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert result == snapshot diff --git a/tests/components/remote_calendar/test_init.py b/tests/components/remote_calendar/test_init.py new file mode 100644 index 00000000000..f4ca500b2e1 --- /dev/null +++ b/tests/components/remote_calendar/test_init.py @@ -0,0 +1,86 @@ +"""Tests for init platform of Remote Calendar.""" + +from httpx import ConnectError, Response, UnsupportedProtocol +import pytest +import respx + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF +from homeassistant.core import HomeAssistant + +from . 
import setup_integration +from .conftest import CALENDER_URL, TEST_ENTITY + +from tests.common import MockConfigEntry + + +@respx.mock +async def test_load_unload( + hass: HomeAssistant, config_entry: MockConfigEntry, ics_content: str +) -> None: + """Test loading and unloading a config entry.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=200, + text=ics_content, + ) + ) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.LOADED + + state = hass.states.get(TEST_ENTITY) + assert state + assert state.state == STATE_OFF + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +@respx.mock +async def test_raise_for_status( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test update failed using respx to simulate HTTP exceptions.""" + respx.get(CALENDER_URL).mock( + return_value=Response( + status_code=403, + ) + ) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize( + "side_effect", + [ + ConnectError("Connection failed"), + UnsupportedProtocol("Unsupported protocol"), + ValueError("Invalid response"), + ], +) +@respx.mock +async def test_update_failed( + hass: HomeAssistant, + config_entry: MockConfigEntry, + side_effect: Exception, +) -> None: + """Test update failed using respx to simulate different exceptions.""" + respx.get(CALENDER_URL).mock(side_effect=side_effect) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +@respx.mock +async def test_calendar_parse_error( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test CalendarParseError using respx.""" + respx.get(CALENDER_URL).mock( + return_value=Response(status_code=200, text="not a calendar") + ) + await setup_integration(hass, config_entry) + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index 2862aa55b4d..21acced3d1d 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -10,6 +10,7 @@ from reolink_aio.exceptions import ReolinkError from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL from homeassistant.components.reolink.const import ( + CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN, @@ -34,6 +35,7 @@ TEST_PASSWORD = "password" TEST_PASSWORD2 = "new_password" TEST_MAC = "aa:bb:cc:dd:ee:ff" TEST_MAC2 = "ff:ee:dd:cc:bb:aa" +TEST_MAC_CAM = "11:22:33:44:55:66" DHCP_FORMATTED_MAC = "aabbccddeeff" TEST_UID = "ABC1234567D89EFG" TEST_UID_CAM = "DEF7654321D89GHT" @@ -48,6 +50,7 @@ TEST_ITEM_NUMBER = "P000" TEST_CAM_MODEL = "RLC-123" TEST_DUO_MODEL = "Reolink Duo PoE" TEST_PRIVACY = True +TEST_BC_PORT = 5678 @pytest.fixture @@ -123,6 +126,8 @@ def reolink_connect_class() -> Generator[MagicMock]: "{'host':'TEST_RESPONSE','channel':'TEST_RESPONSE'}" ) + reolink_connect.chime_list = [] + # enums host_mock.whiteled_mode.return_value = 1 host_mock.whiteled_mode_list.return_value = ["off", "auto"] @@ -130,13 +135,28 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.doorbell_led_list.return_value = ["stayoff", "auto"] host_mock.auto_track_method.return_value = 3 host_mock.daynight_state.return_value = "Black&White" + host_mock.hub_alarm_tone_id.return_value = 1 + 
host_mock.hub_visitor_tone_id.return_value = 1 # Baichuan host_mock.baichuan = create_autospec(Baichuan) # Disable tcp push by default for tests + host_mock.baichuan.port = TEST_BC_PORT host_mock.baichuan.events_active = False + host_mock.baichuan.mac_address.return_value = TEST_MAC_CAM host_mock.baichuan.privacy_mode.return_value = False + host_mock.baichuan.day_night_state.return_value = "day" host_mock.baichuan.subscribe_events.side_effect = ReolinkError("Test error") + host_mock.baichuan.active_scene = "off" + host_mock.baichuan.scene_names = ["off", "home"] + host_mock.baichuan.abilities = { + 0: {"chnID": 0, "aitype": 34615}, + "Host": {"pushAlarm": 7}, + } + host_mock.baichuan.smart_location_list.return_value = [0] + host_mock.baichuan.smart_ai_type_list.return_value = ["people"] + host_mock.baichuan.smart_ai_index.return_value = 1 + host_mock.baichuan.smart_ai_name.return_value = "zone1" yield host_mock_class @@ -169,6 +189,7 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, diff --git a/tests/components/reolink/snapshots/test_diagnostics.ambr b/tests/components/reolink/snapshots/test_diagnostics.ambr index 71c5397fbd1..5eb80d16356 100644 --- a/tests/components/reolink/snapshots/test_diagnostics.ambr +++ b/tests/components/reolink/snapshots/test_diagnostics.ambr @@ -1,6 +1,27 @@ # serializer version: 1 # name: test_entry_diagnostics dict({ + 'BC_abilities': dict({ + '0': dict({ + 'aitype': 34615, + 'chnID': 0, + }), + 'Host': dict({ + 'pushAlarm': 7, + }), + }), + 'Chimes': dict({ + '12345678': dict({ + 'channel': 0, + 'event_types': list([ + 'md', + 'people', + 'visitor', + ]), + 'name': 'Test chime', + 'online': True, + }), + }), 'HTTP(S) port': 1234, 'HTTPS': True, 'IPC cams': dict({ @@ -41,6 +62,14 @@ 0, ]), 'cmd list': dict({ + '296': dict({ + '0': 1, + 'null': 1, + }), + 'DingDongOpt': dict({ + '0': 2, + 'null': 2, + }), 'GetAiAlarm': dict({ '0': 5, 'null': 5, @@ -81,6 +110,10 @@ '0': 2, 'null': 4, }), + 'GetDingDongCfg': dict({ + '0': 3, + 'null': 3, + }), 'GetEmail': dict({ '0': 1, 'null': 2, @@ -137,6 +170,9 @@ '0': 1, 'null': 2, }), + 'GetScene': dict({ + 'null': 1, + }), 'GetStateLight': dict({ 'null': 1, }), diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py index 71318c27b25..99c9efba002 100644 --- a/tests/components/reolink/test_binary_sensor.py +++ b/tests/components/reolink/test_binary_sensor.py @@ -51,6 +51,32 @@ async def test_motion_sensor( assert hass.states.get(entity_id).state == STATE_ON +async def test_smart_ai_sensor( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test smart ai binary sensor entity.""" + reolink_connect.model = TEST_HOST_MODEL + reolink_connect.baichuan.smart_ai_state.return_value = True + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_crossline_zone1_person" + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.baichuan.smart_ai_state.return_value = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + 
async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + async def test_tcp_callback( hass: HomeAssistant, config_entry: MockConfigEntry, diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 4fe671f8cca..e706af0d067 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -19,6 +19,7 @@ from homeassistant import config_entries from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL from homeassistant.components.reolink.const import ( + CONF_BC_PORT, CONF_SUPPORTS_PRIVACY_MODE, CONF_USE_HTTPS, DOMAIN, @@ -40,6 +41,7 @@ from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo from .conftest import ( DHCP_FORMATTED_MAC, + TEST_BC_PORT, TEST_HOST, TEST_HOST2, TEST_MAC, @@ -88,6 +90,7 @@ async def test_config_flow_manual_success( CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -140,6 +143,7 @@ async def test_config_flow_privacy_success( CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -290,6 +294,7 @@ async def test_config_flow_errors( CONF_HOST: TEST_HOST, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, ) @@ -302,6 +307,7 @@ async def test_config_flow_errors( CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -322,6 +328,7 @@ async def test_options_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: "rtsp", @@ -360,6 +367,7 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -405,6 +413,7 @@ async def test_reauth_abort_unique_id_mismatch( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -474,6 +483,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + CONF_BC_PORT: TEST_BC_PORT, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -496,6 +506,7 @@ async def test_dhcp_ip_update_aborted_if_wrong_mac( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -536,6 +547,7 @@ async def test_dhcp_ip_update_aborted_if_wrong_mac( protocol=DEFAULT_PROTOCOL, timeout=DEFAULT_TIMEOUT, aiohttp_get_session_callback=ANY, + bc_port=TEST_BC_PORT, ) assert expected_call in reolink_connect_class.call_args_list @@ -593,6 +605,7 @@ async def test_dhcp_ip_update( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, 
options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -635,6 +648,7 @@ async def test_dhcp_ip_update( protocol=DEFAULT_PROTOCOL, timeout=DEFAULT_TIMEOUT, aiohttp_get_session_callback=ANY, + bc_port=TEST_BC_PORT, ) assert expected_call in reolink_connect_class.call_args_list @@ -671,6 +685,7 @@ async def test_dhcp_ip_update_ingnored_if_still_connected( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -702,6 +717,7 @@ async def test_dhcp_ip_update_ingnored_if_still_connected( protocol=DEFAULT_PROTOCOL, timeout=DEFAULT_TIMEOUT, aiohttp_get_session_callback=ANY, + bc_port=TEST_BC_PORT, ) assert expected_call in reolink_connect_class.call_args_list @@ -731,6 +747,7 @@ async def test_reconfig(hass: HomeAssistant, mock_setup_entry: MagicMock) -> Non CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -777,6 +794,7 @@ async def test_reconfig_abort_unique_id_mismatch( CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, diff --git a/tests/components/reolink/test_diagnostics.py b/tests/components/reolink/test_diagnostics.py index 57b474c13ad..d45163d3cf0 100644 --- a/tests/components/reolink/test_diagnostics.py +++ b/tests/components/reolink/test_diagnostics.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock +from reolink_aio.api import Chime from syrupy.assertion import SnapshotAssertion from homeassistant.core import HomeAssistant @@ -15,6 +16,7 @@ async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, reolink_connect: MagicMock, + test_chime: Chime, config_entry: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 28d8c542f4f..4c4908dca6f 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -19,10 +19,14 @@ from homeassistant.components.reolink import ( FIRMWARE_UPDATE_INTERVAL, NUM_CRED_ERRORS, ) -from homeassistant.components.reolink.const import DOMAIN +from homeassistant.components.reolink.const import CONF_BC_PORT, DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( + CONF_HOST, + CONF_PASSWORD, CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, @@ -35,16 +39,25 @@ from homeassistant.helpers import ( entity_registry as er, issue_registry as ir, ) +from homeassistant.helpers.device_registry import format_mac from homeassistant.setup import async_setup_component from .conftest import ( + CONF_SUPPORTS_PRIVACY_MODE, + CONF_USE_HTTPS, + DEFAULT_PROTOCOL, + TEST_BC_PORT, TEST_CAM_MODEL, + TEST_HOST, TEST_HOST_MODEL, TEST_MAC, TEST_NVR_NAME, TEST_PORT, + TEST_PRIVACY, TEST_UID, TEST_UID_CAM, + TEST_USE_HTTPS, + TEST_USERNAME, ) from tests.common import MockConfigEntry, async_fire_time_changed @@ -722,6 +735,41 @@ async def test_firmware_repair_issue( await hass.async_block_till_done() assert (DOMAIN, "firmware_update_host") in issue_registry.issues + reolink_connect.camera_sw_version_update_required.return_value = False + + +async def test_password_too_long_repair_issue( + hass: HomeAssistant, + reolink_connect: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test password too long issue is 
raised.""" + reolink_connect.valid_password.return_value = False + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=format_mac(TEST_MAC), + data={ + CONF_HOST: TEST_HOST, + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: "too_longgggggggggggggggggggggggggggggggggggggggggggggggggg", + CONF_PORT: TEST_PORT, + CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_SUPPORTS_PRIVACY_MODE: TEST_PRIVACY, + }, + options={ + CONF_PROTOCOL: DEFAULT_PROTOCOL, + }, + title=TEST_NVR_NAME, + ) + config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + DOMAIN, + f"password_too_long_{config_entry.entry_id}", + ) in issue_registry.issues + reolink_connect.valid_password.return_value = True async def test_new_device_discovered( @@ -762,6 +810,21 @@ async def test_port_changed( assert config_entry.data[CONF_PORT] == 4567 +async def test_baichuan_port_changed( + hass: HomeAssistant, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test config_entry baichuan port update when it has changed during initial login.""" + assert config_entry.data[CONF_BC_PORT] == TEST_BC_PORT + reolink_connect.baichuan.port = 8901 + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.data[CONF_BC_PORT] == 8901 + + async def test_privacy_mode_on( hass: HomeAssistant, freezer: FrozenDateTimeFactory, diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index a5a34514598..7044ea53671 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -15,7 +15,7 @@ from homeassistant.components.media_source import ( async_resolve_media, ) from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL -from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN +from homeassistant.components.reolink.const import CONF_BC_PORT, CONF_USE_HTTPS, DOMAIN from homeassistant.components.stream import DOMAIN as MEDIA_STREAM_DOMAIN from homeassistant.const import ( CONF_HOST, @@ -31,6 +31,7 @@ from homeassistant.helpers.device_registry import format_mac from homeassistant.setup import async_setup_component from .conftest import ( + TEST_BC_PORT, TEST_HOST2, TEST_HOST_MODEL, TEST_MAC2, @@ -348,6 +349,7 @@ async def test_browsing_not_loaded( CONF_PASSWORD: TEST_PASSWORD2, CONF_PORT: TEST_PORT, CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_BC_PORT: TEST_BC_PORT, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, diff --git a/tests/components/reolink/test_number.py b/tests/components/reolink/test_number.py index c6507fa36c1..dd70376d658 100644 --- a/tests/components/reolink/test_number.py +++ b/tests/components/reolink/test_number.py @@ -67,6 +67,48 @@ async def test_number( reolink_connect.set_volume.reset_mock(side_effect=True) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_smart_ai_number( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test number entity with smart ai sensitivity.""" + reolink_connect.baichuan.smart_ai_sensitivity.return_value = 80 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = 
f"{Platform.NUMBER}.{TEST_NVR_NAME}_AI_crossline_zone1_sensitivity" + + assert hass.states.get(entity_id).state == "80" + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + reolink_connect.baichuan.set_smart_ai.assert_called_with( + 0, "crossline", 0, sensitivity=50 + ) + + reolink_connect.baichuan.set_smart_ai.side_effect = InvalidParameterError( + "Test error" + ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + reolink_connect.baichuan.set_smart_ai.reset_mock(side_effect=True) + + async def test_host_number( hass: HomeAssistant, config_entry: MockConfigEntry, diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 7910174380a..32bc5e4435e 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -104,6 +104,58 @@ async def test_play_quick_reply_message( reolink_connect.quick_reply_dict = MagicMock() +async def test_host_scene_select( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test host select entity with scene mode.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_scene_mode" + assert hass.states.get(entity_id).state == "off" + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "home"}, + blocking=True, + ) + reolink_connect.baichuan.set_scene.assert_called_once() + + reolink_connect.baichuan.set_scene.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "home"}, + blocking=True, + ) + + reolink_connect.baichuan.set_scene.side_effect = InvalidParameterError("Test error") + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "home"}, + blocking=True, + ) + + reolink_connect.baichuan.active_scene = "Invalid value" + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_UNKNOWN + + reolink_connect.baichuan.set_scene.reset_mock(side_effect=True) + reolink_connect.baichuan.active_scene = "off" + + async def test_chime_select( hass: HomeAssistant, freezer: FrozenDateTimeFactory, diff --git a/tests/components/reolink/test_update.py b/tests/components/reolink/test_update.py index a6cfe862963..d48362516b8 100644 --- a/tests/components/reolink/test_update.py +++ b/tests/components/reolink/test_update.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from reolink_aio.exceptions import ReolinkError +from reolink_aio.exceptions import ApiError, ReolinkError from reolink_aio.software_version import NewSoftwareVersion from homeassistant.components.reolink.update import POLL_AFTER_INSTALL, POLL_PROGRESS @@ -144,6 +144,17 @@ async def test_update_firm( blocking=True, ) 
+ reolink_connect.update_firmware.side_effect = ApiError( + "Test error", translation_key="firmware_rate_limit" + ) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + # test _async_update_future reolink_connect.camera_sw_version.return_value = "v3.3.0.226_23031644" reolink_connect.firmware_update_available.return_value = False diff --git a/tests/components/reolink/test_util.py b/tests/components/reolink/test_util.py index f66f4682b98..ef66d471801 100644 --- a/tests/components/reolink/test_util.py +++ b/tests/components/reolink/test_util.py @@ -38,51 +38,59 @@ from tests.common import MockConfigEntry [ ( ApiError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="api_error"), + ), + ( + ApiError("Test error", translation_key="firmware_rate_limit"), + HomeAssistantError(translation_key="firmware_rate_limit"), + ), + ( + ApiError("Test error", translation_key="not_in_strings.json"), + HomeAssistantError(translation_key="api_error"), ), ( CredentialsInvalidError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="invalid_credentials"), ), ( InvalidContentTypeError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="invalid_content_type"), ), ( InvalidParameterError("Test error"), - ServiceValidationError, + ServiceValidationError(translation_key="invalid_parameter"), ), ( LoginError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="login_error"), ), ( NoDataError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="no_data"), ), ( NotSupportedError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="not_supported"), ), ( ReolinkConnectionError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="connection_error"), ), ( ReolinkError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="unexpected"), ), ( ReolinkTimeoutError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="timeout"), ), ( SubscriptionError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="subscription_error"), ), ( UnexpectedDataError("Test error"), - HomeAssistantError, + HomeAssistantError(translation_key="unexpected_data"), ), ], ) @@ -91,7 +99,7 @@ async def test_try_function( config_entry: MockConfigEntry, reolink_connect: MagicMock, side_effect: ReolinkError, - expected: Exception, + expected: HomeAssistantError, ) -> None: """Test try_function error translations using number entity.""" reolink_connect.volume.return_value = 80 @@ -104,7 +112,7 @@ async def test_try_function( entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_volume" reolink_connect.set_volume.side_effect = side_effect - with pytest.raises(expected): + with pytest.raises(expected.__class__) as err: await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, @@ -112,4 +120,6 @@ async def test_try_function( blocking=True, ) + assert err.value.translation_key == expected.translation_key + reolink_connect.set_volume.reset_mock(side_effect=True) diff --git a/tests/components/roborock/conftest.py b/tests/components/roborock/conftest.py index 43e5148c9a8..1ec2b00263f 100644 --- a/tests/components/roborock/conftest.py +++ b/tests/components/roborock/conftest.py @@ -3,10 +3,9 @@ from collections.abc import Generator from copy import deepcopy import pathlib -import shutil +import tempfile from typing import Any from 
unittest.mock import Mock, patch -import uuid import pytest from roborock import RoborockCategory, RoomMapping @@ -19,7 +18,6 @@ from homeassistant.components.roborock.const import ( CONF_USER_DATA, DOMAIN, ) -from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_USERNAME, Platform from homeassistant.core import HomeAssistant @@ -30,6 +28,7 @@ from .mock_data import ( MULTI_MAP_LIST, NETWORK_INFO, PROP, + SCENES, USER_DATA, USER_EMAIL, ) @@ -67,8 +66,27 @@ class A01Mock(RoborockMqttClientA01): return {prot: self.protocol_responses[prot] for prot in dyad_data_protocols} +@pytest.fixture(name="bypass_api_client_fixture") +def bypass_api_client_fixture() -> None: + """Skip calls to the API client.""" + with ( + patch( + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", + return_value=HOME_DATA, + ), + patch( + "homeassistant.components.roborock.RoborockApiClient.get_scenes", + return_value=SCENES, + ), + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.load_multi_map" + ), + ): + yield + + @pytest.fixture(name="bypass_api_fixture") -def bypass_api_fixture() -> None: +def bypass_api_fixture(bypass_api_client_fixture: Any) -> None: """Skip calls to the API.""" with ( patch("homeassistant.components.roborock.RoborockMqttClientV1.async_connect"), @@ -76,10 +94,6 @@ def bypass_api_fixture() -> None: patch( "homeassistant.components.roborock.coordinator.RoborockMqttClientV1._send_command" ), - patch( - "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", - return_value=HOME_DATA, - ), patch( "homeassistant.components.roborock.RoborockMqttClientV1.get_networking", return_value=NETWORK_INFO, @@ -97,7 +111,7 @@ def bypass_api_fixture() -> None: return_value=MULTI_MAP_LIST, ), patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=MAP_DATA, ), patch( @@ -114,7 +128,7 @@ def bypass_api_fixture() -> None: "roborock.version_1_apis.AttributeCache.value", ), patch( - "homeassistant.components.roborock.image.MAP_SLEEP", + "homeassistant.components.roborock.coordinator.MAP_SLEEP", 0, ), patch( @@ -202,7 +216,6 @@ async def setup_entry( hass: HomeAssistant, bypass_api_fixture, mock_roborock_entry: MockConfigEntry, - cleanup_map_storage: pathlib.Path, platforms: list[Platform], ) -> Generator[MockConfigEntry]: """Set up the Roborock platform.""" @@ -212,21 +225,18 @@ async def setup_entry( yield mock_roborock_entry -@pytest.fixture -async def cleanup_map_storage( - hass: HomeAssistant, mock_roborock_entry: MockConfigEntry +@pytest.fixture(autouse=True, name="storage_path") +async def storage_path_fixture( + hass: HomeAssistant, ) -> Generator[pathlib.Path]: """Test cleanup, remove any map storage persisted during the test.""" - tmp_path = str(uuid.uuid4()) - with patch( - "homeassistant.components.roborock.roborock_storage.STORAGE_PATH", new=tmp_path - ): - storage_path = ( - pathlib.Path(hass.config.path(tmp_path)) / mock_roborock_entry.entry_id - ) - yield storage_path - # We need to first unload the config entry because unloading it will - # persist any unsaved maps to storage. 
- if mock_roborock_entry.state is ConfigEntryState.LOADED: - await hass.config_entries.async_unload(mock_roborock_entry.entry_id) - shutil.rmtree(str(storage_path), ignore_errors=True) + with tempfile.TemporaryDirectory() as tmp_path: + + def get_storage_path(_: HomeAssistant, entry_id: str) -> pathlib.Path: + return pathlib.Path(tmp_path) / entry_id + + with patch( + "homeassistant.components.roborock.roborock_storage._storage_path_prefix", + new=get_storage_path, + ): + yield pathlib.Path(tmp_path) diff --git a/tests/components/roborock/mock_data.py b/tests/components/roborock/mock_data.py index 6e3fb229aa9..82b51e67f8d 100644 --- a/tests/components/roborock/mock_data.py +++ b/tests/components/roborock/mock_data.py @@ -9,6 +9,7 @@ from roborock.containers import ( Consumable, DnDTimer, HomeData, + HomeDataScene, MultiMapsList, NetworkInfo, S7Status, @@ -1119,7 +1120,10 @@ PROP = DeviceProp( ) NETWORK_INFO = NetworkInfo( - ip="123.232.12.1", ssid="wifi", mac="ac:cc:cc:cc:cc", bssid="bssid", rssi=90 + ip="123.232.12.1", ssid="wifi", mac="ac:cc:cc:cc:cc:cc", bssid="bssid", rssi=90 +) +NETWORK_INFO_2 = NetworkInfo( + ip="123.232.12.2", ssid="wifi", mac="ac:cc:cc:cc:cd:cc", bssid="bssid", rssi=90 ) MULTI_MAP_LIST = MultiMapsList.from_dict( @@ -1150,3 +1154,20 @@ MAP_DATA = MapData(0, 0) MAP_DATA.image = ImageData( 100, 10, 10, 10, 10, ImageConfig(), Image.new("RGB", (1, 1)), lambda p: p ) +MAP_DATA.vacuum_room = 17 + + +SCENES = [ + HomeDataScene.from_dict( + { + "name": "sc1", + "id": 12, + }, + ), + HomeDataScene.from_dict( + { + "name": "sc2", + "id": 24, + }, + ), +] diff --git a/tests/components/roborock/test_binary_sensor.py b/tests/components/roborock/test_binary_sensor.py index 0e4b338f469..6a234d735e5 100644 --- a/tests/components/roborock/test_binary_sensor.py +++ b/tests/components/roborock/test_binary_sensor.py @@ -18,7 +18,7 @@ async def test_binary_sensors( hass: HomeAssistant, setup_entry: MockConfigEntry ) -> None: """Test binary sensors and check test values are correctly set.""" - assert len(hass.states.async_all("binary_sensor")) == 8 + assert len(hass.states.async_all("binary_sensor")) == 10 assert hass.states.get("binary_sensor.roborock_s7_maxv_mop_attached").state == "on" assert ( hass.states.get("binary_sensor.roborock_s7_maxv_water_box_attached").state @@ -28,3 +28,4 @@ async def test_binary_sensors( hass.states.get("binary_sensor.roborock_s7_maxv_water_shortage").state == "off" ) assert hass.states.get("binary_sensor.roborock_s7_maxv_cleaning").state == "off" + assert hass.states.get("binary_sensor.roborock_s7_maxv_charging").state == "on" diff --git a/tests/components/roborock/test_button.py b/tests/components/roborock/test_button.py index 0a7efe83513..77c5d4d7cb0 100644 --- a/tests/components/roborock/test_button.py +++ b/tests/components/roborock/test_button.py @@ -1,9 +1,10 @@ """Test Roborock Button platform.""" -from unittest.mock import patch +from unittest.mock import ANY, patch import pytest import roborock +from roborock import RoborockException from homeassistant.components.button import SERVICE_PRESS from homeassistant.const import Platform @@ -13,6 +14,18 @@ from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry +@pytest.fixture +def bypass_api_client_get_scenes_fixture(bypass_api_fixture) -> None: + """Fixture to raise when getting scenes.""" + with ( + patch( + "homeassistant.components.roborock.RoborockApiClient.get_scenes", + side_effect=RoborockException(), + ), + ): + yield + + @pytest.fixture def 
platforms() -> list[Platform]: """Fixture to set platforms used in the test.""" @@ -84,3 +97,85 @@ async def test_update_failure( ) assert mock_send_message.assert_called_once assert hass.states.get(entity_id).state == "2023-10-30T08:50:00+00:00" + + +@pytest.mark.parametrize( + ("entity_id"), + [ + ("button.roborock_s7_maxv_sc1"), + ("button.roborock_s7_maxv_sc2"), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_get_button_routines_failure( + hass: HomeAssistant, + bypass_api_client_get_scenes_fixture, + setup_entry: MockConfigEntry, + entity_id: str, +) -> None: + """Test that if routine retrieval fails, no entities are created.""" + # Ensure that the entity does not exist + assert hass.states.get(entity_id) is None + + +@pytest.mark.parametrize( + ("entity_id", "routine_id"), + [ + ("button.roborock_s7_maxv_sc1", 12), + ("button.roborock_s7_maxv_sc2", 24), + ], +) +@pytest.mark.freeze_time("2023-10-30 08:50:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_press_routine_button_success( + hass: HomeAssistant, + bypass_api_fixture, + setup_entry: MockConfigEntry, + entity_id: str, + routine_id: int, +) -> None: + """Test pressing the button entities.""" + with patch( + "homeassistant.components.roborock.RoborockApiClient.execute_scene" + ) as mock_execute_scene: + await hass.services.async_call( + "button", + SERVICE_PRESS, + blocking=True, + target={"entity_id": entity_id}, + ) + mock_execute_scene.assert_called_once_with(ANY, routine_id) + assert hass.states.get(entity_id).state == "2023-10-30T08:50:00+00:00" + + +@pytest.mark.parametrize( + ("entity_id", "routine_id"), + [ + ("button.roborock_s7_maxv_sc1", 12), + ], +) +@pytest.mark.freeze_time("2023-10-30 08:50:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_press_routine_button_failure( + hass: HomeAssistant, + bypass_api_fixture, + setup_entry: MockConfigEntry, + entity_id: str, + routine_id: int, +) -> None: + """Test failure while pressing the button entity.""" + with ( + patch( + "homeassistant.components.roborock.RoborockApiClient.execute_scene", + side_effect=RoborockException, + ) as mock_execute_scene, + pytest.raises(HomeAssistantError, match="Error while calling execute_scene"), + ): + await hass.services.async_call( + "button", + SERVICE_PRESS, + blocking=True, + target={"entity_id": entity_id}, + ) + mock_execute_scene.assert_called_once_with(ANY, routine_id) + assert hass.states.get(entity_id).state == "2023-10-30T08:50:00+00:00" diff --git a/tests/components/roborock/test_config_flow.py b/tests/components/roborock/test_config_flow.py index 13bc23e6e2b..441974dc15d 100644 --- a/tests/components/roborock/test_config_flow.py +++ b/tests/components/roborock/test_config_flow.py @@ -19,12 +19,19 @@ from homeassistant.components.roborock.const import CONF_ENTRY_CODE, DOMAIN, DRA from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo -from .mock_data import MOCK_CONFIG, USER_DATA, USER_EMAIL +from .mock_data import MOCK_CONFIG, NETWORK_INFO, USER_DATA, USER_EMAIL from tests.common import MockConfigEntry +@pytest.fixture +def cleanup_map_storage(): + """Override the map storage fixture as it is not relevant here.""" + return + + async def test_config_flow_success( hass: HomeAssistant, bypass_api_fixture, @@ -189,25 +196,31 @@ async def
test_config_flow_failures_code_login( async def test_options_flow_drawables( - hass: HomeAssistant, setup_entry: MockConfigEntry + hass: HomeAssistant, mock_roborock_entry: MockConfigEntry ) -> None: """Test that the options flow works.""" - result = await hass.config_entries.options.async_init(setup_entry.entry_id) - - assert result["type"] == FlowResultType.FORM - assert result["step_id"] == DRAWABLES - with patch( - "homeassistant.components.roborock.async_setup_entry", return_value=True - ) as mock_setup: - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={Drawable.PREDICTED_PATH: True}, - ) + with patch("homeassistant.components.roborock.roborock_storage"): + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) await hass.async_block_till_done() - assert result["type"] == FlowResultType.CREATE_ENTRY - assert setup_entry.options[DRAWABLES][Drawable.PREDICTED_PATH] is True - assert len(mock_setup.mock_calls) == 1 + result = await hass.config_entries.options.async_init( + mock_roborock_entry.entry_id + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == DRAWABLES + with patch( + "homeassistant.components.roborock.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={Drawable.PREDICTED_PATH: True}, + ) + await hass.async_block_till_done() + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert mock_roborock_entry.options[DRAWABLES][Drawable.PREDICTED_PATH] is True + assert len(mock_setup.mock_calls) == 1 async def test_reauth_flow( @@ -269,3 +282,67 @@ async def test_account_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured_account" + + +async def test_discovery_not_setup( + hass: HomeAssistant, + bypass_api_fixture, +) -> None: + """Handle the config flow and make sure it succeeds.""" + with ( + patch("homeassistant.components.roborock.async_setup_entry", return_value=True), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=DhcpServiceInfo( + ip=NETWORK_INFO.ip, + macaddress=NETWORK_INFO.mac.replace(":", ""), + hostname="roborock-vacuum-a72", + ), + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.request_code" + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_USERNAME: USER_EMAIL} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "code" + assert result["errors"] == {} + with patch( + "homeassistant.components.roborock.config_flow.RoborockApiClient.code_login", + return_value=USER_DATA, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_ENTRY_CODE: "123456"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == USER_EMAIL + assert result["data"] == MOCK_CONFIG + assert result["result"] + + +async def test_discovery_already_setup( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, +) -> None: + """Handle aborting if the device is already setup.""" + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": 
config_entries.SOURCE_DHCP}, + data=DhcpServiceInfo( + ip=NETWORK_INFO.ip, + macaddress=NETWORK_INFO.mac.replace(":", ""), + hostname="roborock-vacuum-a72", + ), + ) + + assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/roborock/test_coordinator.py b/tests/components/roborock/test_coordinator.py new file mode 100644 index 00000000000..94976ba92f5 --- /dev/null +++ b/tests/components/roborock/test_coordinator.py @@ -0,0 +1,107 @@ +"""Test Roborock Coordinator specific logic.""" + +import copy +from datetime import timedelta +from unittest.mock import patch + +import pytest +from roborock.exceptions import RoborockException + +from homeassistant.components.roborock.const import ( + V1_CLOUD_IN_CLEANING_INTERVAL, + V1_CLOUD_NOT_CLEANING_INTERVAL, + V1_LOCAL_IN_CLEANING_INTERVAL, + V1_LOCAL_NOT_CLEANING_INTERVAL, +) +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from .mock_data import PROP + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.parametrize( + ("interval", "in_cleaning"), + [ + (V1_CLOUD_IN_CLEANING_INTERVAL, 1), + (V1_CLOUD_NOT_CLEANING_INTERVAL, 0), + ], +) +async def test_dynamic_cloud_scan_interval( + hass: HomeAssistant, + mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, + interval: timedelta, + in_cleaning: int, +) -> None: + """Test dynamic scan interval.""" + prop = copy.deepcopy(PROP) + prop.status.in_cleaning = in_cleaning + with ( + # Force the system to use the cloud api. + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.ping", + side_effect=RoborockException(), + ), + patch( + "homeassistant.components.roborock.RoborockMqttClientV1.get_prop", + return_value=prop, + ), + ): + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" + prop = copy.deepcopy(prop) + prop.status.battery = 20 + with patch( + "homeassistant.components.roborock.RoborockMqttClientV1.get_prop", + return_value=prop, + ): + async_fire_time_changed( + hass, dt_util.utcnow() + interval - timedelta(seconds=5) + ) + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" + async_fire_time_changed(hass, dt_util.utcnow() + interval) + + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "20" + + +@pytest.mark.parametrize( + ("interval", "in_cleaning"), + [ + (V1_LOCAL_IN_CLEANING_INTERVAL, 1), + (V1_LOCAL_NOT_CLEANING_INTERVAL, 0), + ], +) +async def test_dynamic_local_scan_interval( + hass: HomeAssistant, + mock_roborock_entry: MockConfigEntry, + bypass_api_fixture_v1_only, + interval: timedelta, + in_cleaning: int, +) -> None: + """Test dynamic scan interval.""" + prop = copy.deepcopy(PROP) + prop.status.in_cleaning = in_cleaning + with ( + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", + return_value=prop, + ), + ): + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" + prop = copy.deepcopy(prop) + prop.status.battery = 20 + with patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", + return_value=prop, + ): + async_fire_time_changed( + hass, dt_util.utcnow() + interval - timedelta(seconds=5) + ) + assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" + + async_fire_time_changed(hass, dt_util.utcnow() + interval) + + assert 
hass.states.get("sensor.roborock_s7_maxv_battery").state == "20" diff --git a/tests/components/roborock/test_image.py b/tests/components/roborock/test_image.py index 7d79cf4f6ab..b7c811e0ce2 100644 --- a/tests/components/roborock/test_image.py +++ b/tests/components/roborock/test_image.py @@ -11,6 +11,7 @@ from roborock import RoborockException from vacuum_map_parser_base.map_data import ImageConfig, ImageData from homeassistant.components.roborock import DOMAIN +from homeassistant.components.roborock.const import V1_LOCAL_NOT_CLEANING_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -62,20 +63,26 @@ async def test_floorplan_image( return_value=prop, ), patch( - "homeassistant.components.roborock.image.dt_util.utcnow", return_value=now + "homeassistant.components.roborock.coordinator.dt_util.utcnow", + return_value=now, ), patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", - return_value=new_map_data, + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", + return_value=MAP_DATA, ) as parse_map, ): + # This should call parse_map twice as the both devices are in cleaning. async_fire_time_changed(hass, now) - await hass.async_block_till_done() resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") + assert resp.status == HTTPStatus.OK + resp = await client.get("/api/image_proxy/image.roborock_s7_2_upstairs") + assert resp.status == HTTPStatus.OK + resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_downstairs") assert resp.status == HTTPStatus.OK body = await resp.read() assert body is not None - assert parse_map.call_count == 1 + + assert parse_map.call_count == 2 async def test_floorplan_image_failed_parse( @@ -91,10 +98,11 @@ async def test_floorplan_image_failed_parse( # Copy the device prop so we don't override it prop = copy.deepcopy(PROP) prop.status.in_cleaning = 1 + previous_state = hass.states.get("image.roborock_s7_maxv_upstairs").state # Update image, but get none for parse image. with ( patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ), patch( @@ -102,12 +110,16 @@ async def test_floorplan_image_failed_parse( return_value=prop, ), patch( - "homeassistant.components.roborock.image.dt_util.utcnow", return_value=now + "homeassistant.components.roborock.coordinator.dt_util.utcnow", + return_value=now, ), ): async_fire_time_changed(hass, now) resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") - assert not resp.ok + # The map should load fine from the coordinator, but it should not update the + # last_updated timestamp. 
+ assert resp.ok + assert previous_state == hass.states.get("image.roborock_s7_maxv_upstairs").state async def test_fail_to_save_image( @@ -147,9 +159,6 @@ async def test_fail_to_load_image( ) -> None: """Test that we gracefully handle failing to load an image.""" with ( - patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", - ) as parse_map, patch( "homeassistant.components.roborock.roborock_storage.Path.exists", return_value=True, @@ -158,13 +167,14 @@ "homeassistant.components.roborock.roborock_storage.Path.read_bytes", side_effect=OSError, ) as read_bytes, + patch( + "homeassistant.components.roborock.coordinator.RoborockDataUpdateCoordinator.refresh_coordinator_map" + ), ): # Reload the config entry so that the map is saved in storage and entities exist. await hass.config_entries.async_reload(setup_entry.entry_id) await hass.async_block_till_done() assert read_bytes.call_count == 4 - # Ensure that we never updated the map manually since we couldn't load it. - assert parse_map.call_count == 0 assert "Unable to read map file" in caplog.text @@ -178,7 +188,7 @@ async def test_fail_parse_on_startup( map_data = copy.deepcopy(MAP_DATA) map_data.image = None with patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ): await async_setup_component(hass, DOMAIN, {}) @@ -224,9 +234,10 @@ async def test_fail_updating_image( prop = copy.deepcopy(PROP) prop.status.in_cleaning = 1 # Update image, but get none for parse image. + previous_state = hass.states.get("image.roborock_s7_maxv_upstairs").state with ( patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ), patch( @@ -234,7 +245,8 @@ async def test_fail_updating_image( return_value=prop, ), patch( - "homeassistant.components.roborock.image.dt_util.utcnow", return_value=now + "homeassistant.components.roborock.coordinator.dt_util.utcnow", + return_value=now, ), patch( "homeassistant.components.roborock.coordinator.RoborockMqttClientV1.get_map_v1", @@ -243,4 +255,91 @@ ): async_fire_time_changed(hass, now) resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") - assert not resp.ok + # The map should load fine from the coordinator, but it should not update the + # last_updated timestamp. + assert resp.ok + assert previous_state == hass.states.get("image.roborock_s7_maxv_upstairs").state + + +async def test_index_error_map( + hass: HomeAssistant, + setup_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test that we handle an IndexError when getting the image after it has already been set up.""" + client = await hass_client() + now = dt_util.utcnow() + timedelta(seconds=91) + # Copy the device prop so we don't override it + prop = copy.deepcopy(PROP) + prop.status.in_cleaning = 1 + previous_state = hass.states.get("image.roborock_s7_maxv_upstairs").state + # Update image, but get IndexError for image.
+ with ( + patch( + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", + side_effect=IndexError, + ), + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", + return_value=prop, + ), + patch( + "homeassistant.components.roborock.coordinator.dt_util.utcnow", + return_value=now, + ), + ): + async_fire_time_changed(hass, now) + resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") + # The map should load fine from the coordinator, but it should not update the + # last_updated timestamp. + assert resp.ok + assert previous_state == hass.states.get("image.roborock_s7_maxv_upstairs").state + + +async def test_map_status_change( + hass: HomeAssistant, + setup_entry: MockConfigEntry, + hass_client: ClientSessionGenerator, +) -> None: + """Test floor plan map image is correctly updated on status change.""" + assert len(hass.states.async_all("image")) == 4 + + assert hass.states.get("image.roborock_s7_maxv_upstairs") is not None + client = await hass_client() + resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") + assert resp.status == HTTPStatus.OK + old_body = await resp.read() + assert old_body[0:4] == b"\x89PNG" + + # Call a second time. This interval does not directly trigger a map update, but does + # trigger a status update which detects the state has changed and updates the map. + now = dt_util.utcnow() + V1_LOCAL_NOT_CLEANING_INTERVAL + + # Copy the device prop so we don't override it + prop = copy.deepcopy(PROP) + prop.status.state_name = "testing" + new_map_data = copy.deepcopy(MAP_DATA) + new_map_data.image = ImageData( + 100, 10, 10, 10, 10, ImageConfig(), Image.new("RGB", (2, 2)), lambda p: p + ) + with ( + patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", + return_value=prop, + ), + patch( + "homeassistant.components.roborock.coordinator.dt_util.utcnow", + return_value=now, + ), + patch( + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", + return_value=new_map_data, + ), + ): + async_fire_time_changed(hass, now) + resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") + assert resp.status == HTTPStatus.OK + assert resp.status == HTTPStatus.OK + body = await resp.read() + assert body is not None + assert body != old_body diff --git a/tests/components/roborock/test_init.py b/tests/components/roborock/test_init.py index 904a3af89d6..983e3d083f4 100644 --- a/tests/components/roborock/test_init.py +++ b/tests/components/roborock/test_init.py @@ -17,9 +17,10 @@ from homeassistant.components.roborock.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.setup import async_setup_component -from .mock_data import HOME_DATA +from .mock_data import HOME_DATA, NETWORK_INFO, NETWORK_INFO_2 from tests.common import MockConfigEntry from tests.typing import ClientSessionGenerator @@ -173,7 +174,7 @@ async def test_remove_from_hass( bypass_api_fixture, setup_entry: MockConfigEntry, hass_client: ClientSessionGenerator, - cleanup_map_storage: pathlib.Path, + storage_path: pathlib.Path, ) -> None: """Test that removing from hass removes any existing images.""" client = await hass_client() resp = await client.get("/api/image_proxy/image.roborock_s7_maxv_upstairs") assert resp.status ==
HTTPStatus.OK - assert not cleanup_map_storage.exists() + config_entry_storage = storage_path / setup_entry.entry_id + assert not config_entry_storage.exists() # Flush to disk await hass.config_entries.async_unload(setup_entry.entry_id) - assert cleanup_map_storage.exists() - paths = list(cleanup_map_storage.walk()) - assert len(paths) == 3 # One map image and two directories + assert config_entry_storage.exists() + paths = list(config_entry_storage.walk()) + assert len(paths) == 4 # Two map images and two directories await hass.config_entries.async_remove(setup_entry.entry_id) # After removal, directories should be empty. - assert not cleanup_map_storage.exists() + assert not config_entry_storage.exists() @@ -201,7 +203,7 @@ async def test_oserror_remove_image( hass: HomeAssistant, bypass_api_fixture, setup_entry: MockConfigEntry, - cleanup_map_storage: pathlib.Path, + storage_path: pathlib.Path, hass_client: ClientSessionGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -214,12 +216,13 @@ async def test_oserror_remove_image( assert resp.status == HTTPStatus.OK # Image content is saved when unloading - assert not cleanup_map_storage.exists() + config_entry_storage = storage_path / setup_entry.entry_id + assert not config_entry_storage.exists() await hass.config_entries.async_unload(setup_entry.entry_id) - assert cleanup_map_storage.exists() - paths = list(cleanup_map_storage.walk()) - assert len(paths) == 3 # One map image and two directories + assert config_entry_storage.exists() + paths = list(config_entry_storage.walk()) + assert len(paths) == 4 # Two map images and two directories with patch( "homeassistant.components.roborock.roborock_storage.shutil.rmtree", @@ -242,7 +245,7 @@ async def test_not_supported_protocol( "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", return_value=home_data_copy, ): - await async_setup_component(hass, DOMAIN, {}) + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) await hass.async_block_till_done() assert "because its protocol version random" in caplog.text @@ -295,3 +298,74 @@ async def test_no_user_agreement( await hass.config_entries.async_setup(mock_roborock_entry.entry_id) assert mock_roborock_entry.state is ConfigEntryState.SETUP_RETRY assert mock_roborock_entry.error_reason_translation_key == "no_user_agreement" + + +async def test_stale_device( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, + device_registry: DeviceRegistry, +) -> None: + """Test that we remove a device if it is no longer given by home_data.""" + with patch( + "homeassistant.components.roborock.RoborockMqttClientV1.get_networking", + side_effect=[NETWORK_INFO, NETWORK_INFO_2], + ): + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + assert mock_roborock_entry.state is ConfigEntryState.LOADED + existing_devices = device_registry.devices.get_devices_for_config_entry_id( + mock_roborock_entry.entry_id + ) + assert len(existing_devices) == 6 # 2 for each robot, 1 for A01, 1 for Zeo + hd = deepcopy(HOME_DATA) + hd.devices = [hd.devices[0]] + + with ( + patch( + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", + return_value=hd, + ), + patch( + "homeassistant.components.roborock.RoborockMqttClientV1.get_networking", + side_effect=[NETWORK_INFO, NETWORK_INFO_2], + ), + ): + await hass.config_entries.async_reload(mock_roborock_entry.entry_id) + await hass.async_block_till_done() + new_devices =
device_registry.devices.get_devices_for_config_entry_id( + mock_roborock_entry.entry_id + ) + assert ( + len(new_devices) == 4 + ) # 2 for the one remaining robot. 1 for both the A01s which are shared and + # therefore not deleted. + + +async def test_no_stale_device( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, + device_registry: DeviceRegistry, +) -> None: + """Test that we don't remove a device if it fails to set up.""" + with patch( + "homeassistant.components.roborock.RoborockMqttClientV1.get_networking", + side_effect=[NETWORK_INFO, NETWORK_INFO_2], + ): + await hass.config_entries.async_setup(mock_roborock_entry.entry_id) + assert mock_roborock_entry.state is ConfigEntryState.LOADED + existing_devices = device_registry.devices.get_devices_for_config_entry_id( + mock_roborock_entry.entry_id + ) + assert len(existing_devices) == 6 # 2 for each robot, 1 for A01, 1 for Zeo + + with patch( + "homeassistant.components.roborock.RoborockMqttClientV1.get_networking", + side_effect=[NETWORK_INFO, RoborockException], + ): + await hass.config_entries.async_reload(mock_roborock_entry.entry_id) + await hass.async_block_till_done() + new_devices = device_registry.devices.get_devices_for_config_entry_id( + mock_roborock_entry.entry_id + ) + assert len(new_devices) == 6 # 2 for each robot, 1 for A01, 1 for Zeo diff --git a/tests/components/roborock/test_sensor.py b/tests/components/roborock/test_sensor.py index e33d3aa78d5..719b398de94 100644 --- a/tests/components/roborock/test_sensor.py +++ b/tests/components/roborock/test_sensor.py @@ -29,7 +29,7 @@ def platforms() -> list[Platform]: async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> None: """Test sensors and check test values are correctly set.""" - assert len(hass.states.async_all("sensor")) == 40 + assert len(hass.states.async_all("sensor")) == 42 assert hass.states.get("sensor.roborock_s7_maxv_main_brush_time_left").state == str( MAIN_BRUSH_REPLACE_TIME - 74382 ) @@ -53,7 +53,7 @@ async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> Non assert hass.states.get("sensor.roborock_s7_maxv_cleaning_area").state == "21.0" assert hass.states.get("sensor.roborock_s7_maxv_vacuum_error").state == "none" assert hass.states.get("sensor.roborock_s7_maxv_battery").state == "100" - assert hass.states.get("sensor.roborock_s7_maxv_dock_error").state == "ok" + assert hass.states.get("sensor.roborock_s7_maxv_dock_dock_error").state == "ok" assert hass.states.get("sensor.roborock_s7_maxv_total_cleaning_count").state == "31" assert ( hass.states.get("sensor.roborock_s7_maxv_last_clean_begin").state @@ -63,6 +63,10 @@ async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> Non assert ( hass.states.get("sensor.roborock_s7_maxv_last_clean_end").state == "2023-01-01T03:43:58+00:00" ) + assert ( + hass.states.get("sensor.roborock_s7_maxv_current_room").state + == "Example room 2" + ) assert hass.states.get("sensor.dyad_pro_status").state == "drying" assert hass.states.get("sensor.dyad_pro_battery").state == "100" assert hass.states.get("sensor.dyad_pro_filter_time_left").state == "111" diff --git a/tests/components/roborock/test_switch.py b/tests/components/roborock/test_switch.py index e2df9a3498f..120c4fc4860 100644 --- a/tests/components/roborock/test_switch.py +++ b/tests/components/roborock/test_switch.py @@ -22,8 +22,8 @@ def platforms() -> list[Platform]: @pytest.mark.parametrize( ("entity_id"), [ - ("switch.roborock_s7_maxv_child_lock"), -
("switch.roborock_s7_maxv_status_indicator_light"), + ("switch.roborock_s7_maxv_dock_child_lock"), + ("switch.roborock_s7_maxv_dock_status_indicator_light"), ("switch.roborock_s7_maxv_do_not_disturb"), ], ) @@ -59,8 +59,8 @@ async def test_update_success( @pytest.mark.parametrize( ("entity_id", "service"), [ - ("switch.roborock_s7_maxv_status_indicator_light", SERVICE_TURN_ON), - ("switch.roborock_s7_maxv_status_indicator_light", SERVICE_TURN_OFF), + ("switch.roborock_s7_maxv_dock_status_indicator_light", SERVICE_TURN_ON), + ("switch.roborock_s7_maxv_dock_status_indicator_light", SERVICE_TURN_OFF), ], ) @pytest.mark.parametrize( diff --git a/tests/components/roborock/test_vacuum.py b/tests/components/roborock/test_vacuum.py index d9d4340ec83..5d6e7a599bd 100644 --- a/tests/components/roborock/test_vacuum.py +++ b/tests/components/roborock/test_vacuum.py @@ -117,6 +117,30 @@ async def test_commands( assert mock_send_command.call_args[0][1] == called_params +async def test_cloud_command( + hass: HomeAssistant, + bypass_api_fixture, + setup_entry: MockConfigEntry, +) -> None: + """Test sending commands to the vacuum.""" + + vacuum = hass.states.get(ENTITY_ID) + assert vacuum + + data = {ATTR_ENTITY_ID: ENTITY_ID, "command": "get_map_v1"} + with patch( + "homeassistant.components.roborock.coordinator.RoborockMqttClientV1.send_command" + ) as mock_send_command: + await hass.services.async_call( + Platform.VACUUM, + SERVICE_SEND_COMMAND, + data, + blocking=True, + ) + assert mock_send_command.call_count == 1 + assert mock_send_command.call_args[0][0] == RoborockCommand.GET_MAP_V1 + + @pytest.mark.parametrize( ("in_cleaning_int", "expected_command"), [ @@ -237,7 +261,7 @@ async def test_get_current_position( return_value=b"", ), patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ), ): @@ -267,7 +291,9 @@ async def test_get_current_position_no_map_data( "homeassistant.components.roborock.coordinator.RoborockMqttClientV1.get_map_v1", return_value=None, ), - pytest.raises(HomeAssistantError, match="Failed to retrieve map data."), + pytest.raises( + HomeAssistantError, match="Something went wrong creating the map" + ), ): await hass.services.async_call( DOMAIN, @@ -292,7 +318,7 @@ async def test_get_current_position_no_robot_position( return_value=b"", ), patch( - "homeassistant.components.roborock.image.RoborockMapDataParser.parse", + "homeassistant.components.roborock.coordinator.RoborockMapDataParser.parse", return_value=map_data, ), pytest.raises(HomeAssistantError, match="Robot position not found"), diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index b162200f95e..9666e29579b 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -24,6 +24,7 @@ from homeassistant.components.sensor import ( async_rounded_state, async_update_suggested_units, ) +from homeassistant.components.sensor.const import STATE_CLASS_UNITS from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, @@ -2005,6 +2006,7 @@ async def test_non_numeric_device_class_with_unit_of_measurement( SensorDeviceClass.VOLUME, SensorDeviceClass.WATER, SensorDeviceClass.WEIGHT, + SensorDeviceClass.WIND_DIRECTION, SensorDeviceClass.WIND_SPEED, ], ) @@ -2035,6 +2037,37 @@ async def test_device_classes_with_invalid_unit_of_measurement( ) in caplog.text 
+@pytest.mark.parametrize( + "state_class", + [SensorStateClass.MEASUREMENT_ANGLE], +) +async def test_state_classes_with_invalid_unit_of_measurement( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + state_class: SensorStateClass, +) -> None: + """Test error when unit of measurement is not valid for used state class.""" + entity0 = MockSensor( + name="Test", + native_value="1.0", + state_class=state_class, + native_unit_of_measurement="INVALID!", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + units = { + str(unit) if unit else "no unit of measurement" + for unit in STATE_CLASS_UNITS.get(state_class, set()) + } + assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}}) + await hass.async_block_till_done() + + assert ( + f"Sensor sensor.test ({entity0.__class__}) is using native unit of " + "measurement 'INVALID!' which is not a valid unit " + f"for the state class ('{state_class}') it is using; expected one of {units};" + ) in caplog.text + + @pytest.mark.parametrize( ("device_class", "state_class", "unit"), [ diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index a5b6a07dde5..962c0a0ef8f 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -1,7 +1,8 @@ """The tests for sensor recorder platform.""" -from collections.abc import Iterable +from collections.abc import Callable, Iterable from datetime import datetime, timedelta +import logging import math from statistics import mean from typing import Any, Literal @@ -26,17 +27,30 @@ from homeassistant.components.recorder.db_schema import ( ) from homeassistant.components.recorder.models import ( StatisticData, + StatisticMeanType, StatisticMetaData, process_timestamp, ) from homeassistant.components.recorder.statistics import ( + DEG_TO_RAD, + RAD_TO_DEG, async_import_statistics, get_metadata, list_statistic_ids, ) from homeassistant.components.recorder.util import get_instance, session_scope -from homeassistant.components.sensor import ATTR_OPTIONS, DOMAIN, SensorDeviceClass -from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE +from homeassistant.components.sensor import ( + ATTR_OPTIONS, + DOMAIN, + SensorDeviceClass, + SensorStateClass, +) +from homeassistant.components.sensor.recorder import ( + MEAN_TYPE_CHANGED_ISSUE, + STATE_CLASS_REMOVED_ISSUE, + UNITS_CHANGED_ISSUE, +) +from homeassistant.const import ATTR_FRIENDLY_NAME, DEGREE, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant, State from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -98,6 +112,13 @@ KW_SENSOR_ATTRIBUTES = { "state_class": "measurement", "unit_of_measurement": "kW", } +WIND_DIRECTION_ATTRIBUTES = { + "device_class": SensorDeviceClass.WIND_DIRECTION, + "state_class": SensorStateClass.MEASUREMENT_ANGLE, + "unit_of_measurement": DEGREE, +} +WIND_DIRECTION_STATES_SEQ = [350, 0, 15] +TEMP_STATES_SEQ = [-10, 15, 30, 60] @pytest.fixture @@ -281,6 +302,7 @@ async def test_compile_hourly_statistics( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -306,6 +328,64 @@ async def test_compile_hourly_statistics( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, 
+) -> None: + """Test compiling hourly statistics for measurement_angle.""" + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + with freeze_time(zero) as freezer: + four, states = await async_record_states( + hass, + freezer, + zero, + "sensor.test1", + WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.test1", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + stats = statistics_during_period(hass, zero, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(0.5802544), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + @pytest.mark.parametrize( ( "device_class", @@ -349,7 +429,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( "unit_of_measurement": state_unit, } attributes = dict(attributes) - seq = [-10, 15, 30, 60] + seq = TEMP_STATES_SEQ async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -395,6 +475,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -420,33 +501,167 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_with_some_same_last_updated_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics with the some of the same last updated value for measurement_angle. + + If the last updated value is the same we will have a zero duration. 
+ """ + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + entity_id = "sensor.test1" + seq = [350, 2, 15, 345] + + async def set_state(entity_id, state, **kwargs): + """Set the state.""" + hass.states.async_set(entity_id, state, **kwargs) + await async_wait_recording_done(hass) + return hass.states.get(entity_id) + + one = zero + timedelta(seconds=1 * 5) + two = one + timedelta(seconds=10 * 5) + three = two + timedelta(seconds=40 * 5) + four = three + timedelta(seconds=10 * 5) + + states = {entity_id: []} + with freeze_time(one) as freezer: + states[entity_id].append( + await set_state( + entity_id, str(seq[0]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + # Record two states at the exact same time + freezer.move_to(two) + states[entity_id].append( + await set_state( + entity_id, str(seq[1]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + states[entity_id].append( + await set_state( + entity_id, str(seq[2]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + freezer.move_to(three) + states[entity_id].append( + await set_state( + entity_id, str(seq[3]), attributes=WIND_DIRECTION_ATTRIBUTES + ) + ) + + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.test1", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + stats = statistics_during_period(hass, zero, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(6.274605), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + @pytest.mark.parametrize( ( - "device_class", - "state_unit", + "attributes", "display_unit", "statistics_unit", "unit_class", "mean", "min", "max", + "mean_type", + "seq", ), [ - ("temperature", "°C", "°C", "°C", "temperature", 60, -10, 60), - ("temperature", "°F", "°F", "°F", "temperature", 60, -10, 60), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°C", + }, + "°C", + "°C", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°F", + }, + "°F", + "°F", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + WIND_DIRECTION_ATTRIBUTES, + DEGREE, + DEGREE, + None, + 15, + None, + None, + StatisticMeanType.CIRCULAR, + [350, 0, 355, 15], + ), ], ) async def test_compile_hourly_statistics_with_all_same_last_updated( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - mean, - min, - max, + attributes: dict[str, Any], + display_unit: str, + statistics_unit: str, + unit_class: str | None, + mean: float | None, + min: 
float | None, + max: float | None, + mean_type: StatisticMeanType, + seq: list[float], ) -> None: """Test compiling hourly statistics with the all of the same last updated value. @@ -457,13 +672,6 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) entity_id = "sensor.test1" - attributes = { - "device_class": device_class, - "state_class": "measurement", - "unit_of_measurement": state_unit, - } - attributes = dict(attributes) - seq = [-10, 15, 30, 60] async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -503,7 +711,8 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": True, + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": False, "name": None, "source": "recorder", @@ -531,45 +740,79 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( @pytest.mark.parametrize( ( - "device_class", - "state_unit", + "attributes", "display_unit", "statistics_unit", "unit_class", "mean", "min", "max", + "mean_type", + "seq", ), [ - ("temperature", "°C", "°C", "°C", "temperature", 0, 60, 60), - ("temperature", "°F", "°F", "°F", "temperature", 0, 60, 60), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°C", + }, + "°C", + "°C", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + { + "device_class": "temperature", + "state_class": "measurement", + "unit_of_measurement": "°F", + }, + "°F", + "°F", + "temperature", + 60, + -10, + 60, + StatisticMeanType.ARITHMETIC, + TEMP_STATES_SEQ, + ), + ( + WIND_DIRECTION_ATTRIBUTES, + DEGREE, + DEGREE, + None, + 15, + None, + None, + StatisticMeanType.CIRCULAR, + [350, 0, 355, 15], + ), ], ) -async def test_compile_hourly_statistics_only_state_is_and_end_of_period( +async def test_compile_hourly_statistics_only_state_is_at_end_of_period( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - mean, - min, - max, + attributes: dict[str, Any], + display_unit: str, + statistics_unit: str, + unit_class: str | None, + mean: float | None, + min: float | None, + max: float | None, + mean_type: StatisticMeanType, + seq: list[float], ) -> None: - """Test compiling hourly statistics when the only state at end of period.""" + """Test compiling hourly statistics when the only states are at end of period.""" zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) entity_id = "sensor.test1" - attributes = { - "device_class": device_class, - "state_class": "measurement", - "unit_of_measurement": state_unit, - } - attributes = dict(attributes) - seq = [-10, 15, 30, 60] async def set_state(entity_id, state, **kwargs): """Set the state.""" @@ -604,13 +847,15 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period( assert_dict_of_states_equal_without_context_and_last_changed(states, hist) do_adhoc_statistics(hass, start=zero) + do_adhoc_statistics(hass, start=zero + timedelta(minutes=5)) await async_wait_recording_done(hass) statistic_ids = await async_list_statistic_ids(hass) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": 
display_unit, - "has_mean": True, + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": False, "name": None, "source": "recorder", @@ -622,8 +867,8 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period( assert stats == { "sensor.test1": [ { - "start": process_timestamp(zero).timestamp(), - "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "start": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=10)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), @@ -651,7 +896,10 @@ async def test_compile_hourly_statistics_purged_state_changes( statistics_unit, unit_class, ) -> None: - """Test compiling hourly statistics.""" + """Test compiling hourly statistics. + + This tests statistics falls back to the state machine when states are purged. + """ zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added @@ -691,6 +939,7 @@ async def test_compile_hourly_statistics_purged_state_changes( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -716,6 +965,94 @@ async def test_compile_hourly_statistics_purged_state_changes( assert "Error while processing event StatisticsTask" not in caplog.text +@pytest.mark.parametrize( + ( + "device_class", + "state_unit", + "display_unit", + "statistics_unit", + "unit_class", + "mean", + "min", + "max", + ), + [ + (None, "%", "%", "%", "unitless", 13.050847, -10, 30), + ], +) +async def test_compile_hourly_statistics_ignore_future_state( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + device_class, + state_unit, + display_unit, + statistics_unit, + unit_class, + mean, + min, + max, +) -> None: + """Test compiling hourly statistics. + + This tests statistics does not fall back to the state machine if the state + in the state machine is newer than the end of the statistics period. 
+ """ + zero = get_start_time(dt_util.utcnow() + timedelta(minutes=5)) + previous_period = zero - timedelta(minutes=5) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + attributes = { + "device_class": device_class, + "state_class": "measurement", + "unit_of_measurement": state_unit, + } + with freeze_time(zero) as freezer: + four, states = await async_record_states( + hass, freezer, zero, "sensor.test1", attributes + ) + await async_wait_recording_done(hass) + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=previous_period) + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.test1", + "display_unit_of_measurement": display_unit, + "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": statistics_unit, + "unit_class": unit_class, + } + ] + stats = statistics_during_period(hass, previous_period, period="5minute") + # Check we get no stats from the previous period + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(zero).timestamp(), + "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(mean), + "min": pytest.approx(min), + "max": pytest.approx(max), + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + @pytest.mark.parametrize("attributes", [TEMPERATURE_SENSOR_ATTRIBUTES]) async def test_compile_hourly_statistics_wrong_unit( hass: HomeAssistant, @@ -782,6 +1119,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test1", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -791,6 +1129,7 @@ async def test_compile_hourly_statistics_wrong_unit( { "display_unit_of_measurement": "invalid", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -801,6 +1140,7 @@ async def test_compile_hourly_statistics_wrong_unit( { "display_unit_of_measurement": None, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -812,6 +1152,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test6", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -822,6 +1163,7 @@ async def test_compile_hourly_statistics_wrong_unit( "statistic_id": "sensor.test7", "display_unit_of_measurement": "°C", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -993,6 +1335,7 @@ async def test_compile_hourly_sum_statistics_amount( "statistic_id": "sensor.test1", "display_unit_of_measurement": statistics_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1197,6 +1540,7 @@ async def test_compile_hourly_sum_statistics_amount_reset_every_state_change( "statistic_id": 
"sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1306,6 +1650,7 @@ async def test_compile_hourly_sum_statistics_amount_invalid_last_reset( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1402,6 +1747,7 @@ async def test_compile_hourly_sum_statistics_nan_inf_state( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1545,6 +1891,7 @@ async def test_compile_hourly_sum_statistics_negative_state( assert { "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1646,6 +1993,7 @@ async def test_compile_hourly_sum_statistics_total_no_reset( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1759,6 +2107,7 @@ async def test_compile_hourly_sum_statistics_total_increasing( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1885,6 +2234,7 @@ async def test_compile_hourly_sum_statistics_total_increasing_small_dip( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -1989,6 +2339,7 @@ async def test_compile_hourly_energy_statistics_unsupported( "statistic_id": "sensor.test1", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2091,6 +2442,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test1", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2101,6 +2453,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test2", "display_unit_of_measurement": "kWh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2111,6 +2464,7 @@ async def test_compile_hourly_energy_statistics_multiple( "statistic_id": "sensor.test3", "display_unit_of_measurement": "Wh", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -2293,8 +2647,64 @@ async def test_compile_hourly_statistics_unchanged( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_unchanged_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics, with no changes during the hour for measurement_angle.""" + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + with freeze_time(zero) as freezer: + four, states = await async_record_states( + hass, + freezer, + zero, + "sensor.test1", + 
WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=four) + await async_wait_recording_done(hass) + stats = statistics_during_period(hass, four, period="5minute") + assert stats == { + "sensor.test1": [ + { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(15), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + +@pytest.mark.parametrize( + ("attributes", "expected_mean", "expected_min", "expected_max"), + [ + (TEMPERATURE_SENSOR_ATTRIBUTES, 21.1864406779661, 10.0, 25.0), + (WIND_DIRECTION_ATTRIBUTES, 21.202479155239875, None, None), + ], +) async def test_compile_hourly_statistics_partially_unavailable( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + attributes: dict, + expected_mean: float, + expected_min: float | None, + expected_max: float | None, ) -> None: """Test compiling hourly statistics, with the sensor being partially unavailable.""" zero = get_start_time(dt_util.utcnow()) @@ -2302,7 +2712,7 @@ async def test_compile_hourly_statistics_partially_unavailable( # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) four, states = await async_record_states_partially_unavailable( - hass, zero, "sensor.test1", TEMPERATURE_SENSOR_ATTRIBUTES + hass, zero, "sensor.test1", attributes ) await async_wait_recording_done(hass) hist = history.get_significant_states( @@ -2318,9 +2728,9 @@ async def test_compile_hourly_statistics_partially_unavailable( { "start": process_timestamp(zero).timestamp(), "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(), - "mean": pytest.approx(21.1864406779661), - "min": pytest.approx(10.0), - "max": pytest.approx(25.0), + "mean": pytest.approx(expected_mean), + "min": pytest.approx(expected_min), + "max": pytest.approx(expected_max), "last_reset": None, "state": None, "sum": None, @@ -2411,6 +2821,58 @@ async def test_compile_hourly_statistics_unavailable( assert "Error while processing event StatisticsTask" not in caplog.text +async def test_compile_hourly_statistics_unavailable_angle( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test compiling hourly statistics, with one sensor being unavailable for measurement_angle. 
+ + sensor.test1 is unavailable and should not have statistics generated + sensor.test2 should have statistics generated + """ + zero = get_start_time(dt_util.utcnow()) + await async_setup_component(hass, "sensor", {}) + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + four, states = await async_record_states_partially_unavailable( + hass, zero, "sensor.test1", WIND_DIRECTION_ATTRIBUTES + ) + with freeze_time(zero) as freezer: + _, _states = await async_record_states( + hass, + freezer, + zero, + "sensor.test2", + WIND_DIRECTION_ATTRIBUTES, + seq=WIND_DIRECTION_STATES_SEQ, + ) + await async_wait_recording_done(hass) + states = {**states, **_states} + hist = history.get_significant_states( + hass, zero, four, hass.states.async_entity_ids() + ) + assert_dict_of_states_equal_without_context_and_last_changed(states, hist) + + do_adhoc_statistics(hass, start=four) + await async_wait_recording_done(hass) + stats = statistics_during_period(hass, four, period="5minute") + assert stats == { + "sensor.test2": [ + { + "start": process_timestamp(four).timestamp(), + "end": process_timestamp(four + timedelta(minutes=5)).timestamp(), + "mean": pytest.approx(15), + "min": None, + "max": None, + "last_reset": None, + "state": None, + "sum": None, + } + ] + } + assert "Error while processing event StatisticsTask" not in caplog.text + + async def test_compile_hourly_statistics_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -2439,59 +2901,267 @@ async def test_compile_hourly_statistics_fails( "statistic_type", ), [ - ("measurement", "area", "m²", "m²", "m²", "area", "mean"), - ("measurement", "area", "mi²", "mi²", "mi²", "area", "mean"), + ("measurement", "area", "m²", "m²", "m²", "area", StatisticMeanType.ARITHMETIC), + ( + "measurement", + "area", + "mi²", + "mi²", + "mi²", + "area", + StatisticMeanType.ARITHMETIC, + ), ("total", "area", "m²", "m²", "m²", "area", "sum"), ("total", "area", "mi²", "mi²", "mi²", "area", "sum"), - ("measurement", "battery", "%", "%", "%", "unitless", "mean"), - ("measurement", "battery", None, None, None, "unitless", "mean"), - ("measurement", "distance", "m", "m", "m", "distance", "mean"), - ("measurement", "distance", "mi", "mi", "mi", "distance", "mean"), + ( + "measurement", + "battery", + "%", + "%", + "%", + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "battery", + None, + None, + None, + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "distance", + "m", + "m", + "m", + "distance", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "distance", + "mi", + "mi", + "mi", + "distance", + StatisticMeanType.ARITHMETIC, + ), ("total", "distance", "m", "m", "m", "distance", "sum"), ("total", "distance", "mi", "mi", "mi", "distance", "sum"), ("total", "energy", "Wh", "Wh", "Wh", "energy", "sum"), ("total", "energy", "kWh", "kWh", "kWh", "energy", "sum"), - ("measurement", "energy", "Wh", "Wh", "Wh", "energy", "mean"), - ("measurement", "energy", "kWh", "kWh", "kWh", "energy", "mean"), - ("measurement", "humidity", "%", "%", "%", "unitless", "mean"), - ("measurement", "humidity", None, None, None, "unitless", "mean"), + ( + "measurement", + "energy", + "Wh", + "Wh", + "Wh", + "energy", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "energy", + "kWh", + "kWh", + "kWh", + "energy", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "humidity", + "%", + "%", + "%", + "unitless", + StatisticMeanType.ARITHMETIC, + ), + ( + 
"measurement", + "humidity", + None, + None, + None, + "unitless", + StatisticMeanType.ARITHMETIC, + ), ("total", "monetary", "USD", "USD", "USD", None, "sum"), ("total", "monetary", "None", "None", "None", None, "sum"), ("total", "gas", "m³", "m³", "m³", "volume", "sum"), ("total", "gas", "ft³", "ft³", "ft³", "volume", "sum"), - ("measurement", "monetary", "USD", "USD", "USD", None, "mean"), - ("measurement", "monetary", "None", "None", "None", None, "mean"), - ("measurement", "gas", "m³", "m³", "m³", "volume", "mean"), - ("measurement", "gas", "ft³", "ft³", "ft³", "volume", "mean"), - ("measurement", "pressure", "Pa", "Pa", "Pa", "pressure", "mean"), - ("measurement", "pressure", "hPa", "hPa", "hPa", "pressure", "mean"), - ("measurement", "pressure", "mbar", "mbar", "mbar", "pressure", "mean"), - ("measurement", "pressure", "inHg", "inHg", "inHg", "pressure", "mean"), - ("measurement", "pressure", "psi", "psi", "psi", "pressure", "mean"), - ("measurement", "speed", "m/s", "m/s", "m/s", "speed", "mean"), - ("measurement", "speed", "mph", "mph", "mph", "speed", "mean"), - ("measurement", "temperature", "°C", "°C", "°C", "temperature", "mean"), - ("measurement", "temperature", "°F", "°F", "°F", "temperature", "mean"), - ("measurement", "volume", "m³", "m³", "m³", "volume", "mean"), - ("measurement", "volume", "ft³", "ft³", "ft³", "volume", "mean"), + ( + "measurement", + "monetary", + "USD", + "USD", + "USD", + None, + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "monetary", + "None", + "None", + "None", + None, + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "gas", + "m³", + "m³", + "m³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "gas", + "ft³", + "ft³", + "ft³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "Pa", + "Pa", + "Pa", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "hPa", + "hPa", + "hPa", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "mbar", + "mbar", + "mbar", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "inHg", + "inHg", + "inHg", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "pressure", + "psi", + "psi", + "psi", + "pressure", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "speed", + "m/s", + "m/s", + "m/s", + "speed", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "speed", + "mph", + "mph", + "mph", + "speed", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "temperature", + "°C", + "°C", + "°C", + "temperature", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "temperature", + "°F", + "°F", + "°F", + "temperature", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "volume", + "m³", + "m³", + "m³", + "volume", + StatisticMeanType.ARITHMETIC, + ), + ( + "measurement", + "volume", + "ft³", + "ft³", + "ft³", + "volume", + StatisticMeanType.ARITHMETIC, + ), ("total", "volume", "m³", "m³", "m³", "volume", "sum"), ("total", "volume", "ft³", "ft³", "ft³", "volume", "sum"), - ("measurement", "weight", "g", "g", "g", "mass", "mean"), - ("measurement", "weight", "oz", "oz", "oz", "mass", "mean"), + ("measurement", "weight", "g", "g", "g", "mass", StatisticMeanType.ARITHMETIC), + ( + "measurement", + "weight", + "oz", + "oz", + "oz", + "mass", + StatisticMeanType.ARITHMETIC, + ), ("total", "weight", "g", "g", "g", "mass", "sum"), ("total", "weight", "oz", "oz", "oz", "mass", "sum"), 
+ ( + SensorStateClass.MEASUREMENT_ANGLE, + SensorDeviceClass.WIND_DIRECTION, + DEGREE, + DEGREE, + DEGREE, + None, + StatisticMeanType.CIRCULAR, + ), ], ) async def test_list_statistic_ids( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - state_class, - device_class, - state_unit, - display_unit, - statistics_unit, - unit_class, - statistic_type, + state_class: str | SensorStateClass, + device_class: str | SensorDeviceClass, + state_unit: str, + display_unit: str, + statistics_unit: str, + unit_class: str | None, + statistic_type: str | StatisticMeanType, ) -> None: """Test listing future statistic ids.""" await async_setup_component(hass, "sensor", {}) @@ -2505,11 +3175,20 @@ async def test_list_statistic_ids( } hass.states.async_set("sensor.test1", 0, attributes=attributes) statistic_ids = await async_list_statistic_ids(hass) + mean_type = ( + statistic_type + if isinstance(statistic_type, StatisticMeanType) + else StatisticMeanType.NONE + ) + statistic_type = ( + statistic_type if not isinstance(statistic_type, StatisticMeanType) else "mean" + ) assert statistic_ids == [ { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": statistic_type == "mean", + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": statistic_type == "sum", "name": None, "source": "recorder", @@ -2517,6 +3196,7 @@ async def test_list_statistic_ids( "unit_class": unit_class, }, ] + for stat_type in ("mean", "sum", "dogs"): statistic_ids = await async_list_statistic_ids(hass, statistic_type=stat_type) if statistic_type == stat_type: @@ -2524,7 +3204,8 @@ async def test_list_statistic_ids( { "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, - "has_mean": statistic_type == "mean", + "has_mean": mean_type is StatisticMeanType.ARITHMETIC, + "mean_type": mean_type, "has_sum": statistic_type == "sum", "name": None, "source": "recorder", @@ -2632,6 +3313,7 @@ async def test_compile_hourly_statistics_changing_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2667,6 +3349,7 @@ async def test_compile_hourly_statistics_changing_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2758,6 +3441,7 @@ async def test_compile_hourly_statistics_changing_units_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": "cats", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2840,6 +3524,7 @@ async def test_compile_hourly_statistics_changing_units_3( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2875,6 +3560,7 @@ async def test_compile_hourly_statistics_changing_units_3( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -2957,6 +3643,7 @@ async def test_compile_hourly_statistics_convert_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit_1, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, 
"name": None, "source": "recorder", @@ -3004,6 +3691,7 @@ async def test_compile_hourly_statistics_convert_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit_2, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3120,6 +3808,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3151,6 +3840,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit2, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3242,6 +3932,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3326,6 +4017,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3375,6 +4067,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3434,6 +4127,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( "statistic_id": "sensor.test1", "display_unit_of_measurement": state_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3538,6 +4232,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3587,6 +4282,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( "statistic_id": "sensor.test1", "display_unit_of_measurement": display_unit, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3626,15 +4322,13 @@ async def test_compile_hourly_statistics_changing_device_class_2( ( "device_class", "state_unit", - "display_unit", - "statistics_unit", "unit_class", "mean", "min", "max", ), [ - (None, None, None, None, "unitless", 13.050847, -10, 30), + (None, None, "unitless", 13.050847, -10, 30), ], ) async def test_compile_hourly_statistics_changing_state_class( @@ -3642,8 +4336,6 @@ async def test_compile_hourly_statistics_changing_state_class( caplog: pytest.LogCaptureFixture, device_class, state_unit, - display_unit, - statistics_unit, unit_class, mean, min, @@ -3679,6 +4371,7 @@ async def test_compile_hourly_statistics_changing_state_class( "statistic_id": "sensor.test1", "display_unit_of_measurement": None, "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3692,6 +4385,7 @@ async def test_compile_hourly_statistics_changing_state_class( 1, { "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, 
"has_sum": False, "name": None, "source": "recorder", @@ -3721,6 +4415,7 @@ async def test_compile_hourly_statistics_changing_state_class( "statistic_id": "sensor.test1", "display_unit_of_measurement": None, "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -3734,6 +4429,7 @@ async def test_compile_hourly_statistics_changing_state_class( 1, { "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", @@ -3799,10 +4495,11 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "unit_of_measurement": "EUR", } + durations = [50, 200, 45] + def _weighted_average(seq, i, last_state): total = 0 duration = 0 - durations = [50, 200, 45] if i > 0: total += last_state * 5 duration += 5 @@ -3811,6 +4508,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( duration += dur return total / duration + def _time_weighted_circular_mean(values: list[tuple[float, int]]): + sin_sum = 0 + cos_sum = 0 + for x, dur in values: + sin_sum += math.sin(x * DEG_TO_RAD) * dur + cos_sum += math.cos(x * DEG_TO_RAD) * dur + + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + + def _circular_mean(values: list[float]) -> float: + sin_sum = 0 + cos_sum = 0 + for x in values: + sin_sum += math.sin(x * DEG_TO_RAD) + cos_sum += math.cos(x * DEG_TO_RAD) + + return (RAD_TO_DEG * math.atan2(sin_sum, cos_sum)) % 360 + def _min(seq, last_state): if last_state is None: return min(seq) @@ -3832,17 +4547,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], + "sensor.test5": [], } expected_minima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} expected_maxima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} - expected_averages = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} + expected_means = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test5": [], + } expected_states = {"sensor.test4": []} expected_sums = {"sensor.test4": []} - last_states = { + last_states: dict[str, float | None] = { "sensor.test1": None, "sensor.test2": None, "sensor.test3": None, "sensor.test4": None, + "sensor.test5": None, } start = zero for i in range(24): @@ -3855,7 +4577,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( last_state = last_states["sensor.test1"] expected_minima["sensor.test1"].append(_min(seq, last_state)) expected_maxima["sensor.test1"].append(_max(seq, last_state)) - expected_averages["sensor.test1"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test1"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test1"] = seq[-1] # test2 values change: min/max at the last state seq = [-10 * (i + 1), 15 * (i + 1), 30 * (i + 1)] @@ -3866,7 +4588,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( last_state = last_states["sensor.test2"] expected_minima["sensor.test2"].append(_min(seq, last_state)) expected_maxima["sensor.test2"].append(_max(seq, last_state)) - expected_averages["sensor.test2"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test2"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test2"] = seq[-1] # test3 values change: min/max at the first state seq = [-10 * (23 - i + 1), 15 * (23 - i + 1), 30 * (23 - i + 1)] @@ -3877,7 +4599,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( last_state = 
last_states["sensor.test3"] expected_minima["sensor.test3"].append(_min(seq, last_state)) expected_maxima["sensor.test3"].append(_max(seq, last_state)) - expected_averages["sensor.test3"].append(_weighted_average(seq, i, last_state)) + expected_means["sensor.test3"].append(_weighted_average(seq, i, last_state)) last_states["sensor.test3"] = seq[-1] # test4 values grow seq = [i, i + 0.5, i + 0.75] @@ -3900,6 +4622,18 @@ async def test_compile_statistics_hourly_daily_monthly_summary( ) last_states["sensor.test4"] = seq[-1] + # test5 circular mean + seq = [350 - i, 0 + (i / 2.0), 15 + i] + four, _states = await async_record_states( + hass, freezer, start, "sensor.test5", WIND_DIRECTION_ATTRIBUTES, seq + ) + states["sensor.test5"] += _states["sensor.test5"] + values = [(seq, durations[j]) for j, seq in enumerate(seq)] + if (state := last_states["sensor.test5"]) is not None: + values.append((state, 5)) + expected_means["sensor.test5"].append(_time_weighted_circular_mean(values)) + last_states["sensor.test5"] = seq[-1] + start += timedelta(minutes=5) await async_wait_recording_done(hass) hist = history.get_significant_states( @@ -3925,6 +4659,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test1", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3935,6 +4670,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test2", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3945,6 +4681,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test3", "display_unit_of_measurement": "%", "has_mean": True, + "mean_type": StatisticMeanType.ARITHMETIC, "has_sum": False, "name": None, "source": "recorder", @@ -3955,12 +4692,24 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "statistic_id": "sensor.test4", "display_unit_of_measurement": "EUR", "has_mean": False, + "mean_type": StatisticMeanType.NONE, "has_sum": True, "name": None, "source": "recorder", "statistics_unit_of_measurement": "EUR", "unit_class": None, }, + { + "statistic_id": "sensor.test5", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + }, ] # Adjust the inserted statistics @@ -3979,6 +4728,7 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2": [], "sensor.test3": [], "sensor.test4": [], + "sensor.test5": [], } start = zero end = zero + timedelta(minutes=5) @@ -3988,11 +4738,10 @@ async def test_compile_statistics_hourly_daily_monthly_summary( "sensor.test2", "sensor.test3", "sensor.test4", + "sensor.test5", ): expected_average = ( - expected_averages[entity_id][i] - if entity_id in expected_averages - else None + expected_means[entity_id][i] if entity_id in expected_means else None ) expected_minimum = ( expected_minima[entity_id][i] if entity_id in expected_minima else None @@ -4022,176 +4771,78 @@ async def test_compile_statistics_hourly_daily_monthly_summary( end += timedelta(minutes=5) assert stats == expected_stats - stats = statistics_during_period(hass, zero, period="hour") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": 
[], - } - start = zero - end = zero + timedelta(hours=1) - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start += timedelta(hours=1) - end += timedelta(hours=1) - assert stats == expected_stats + def verify_stats( + period: Literal["5minute", "day", "hour", "week", "month"], + start: datetime, + next_datetime: Callable[[datetime], datetime], + ) -> None: + stats = statistics_during_period(hass, zero, period=period) + expected_stats = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test4": [], + "sensor.test5": [], + } + end = next_datetime(start) + for i in range(2): + for entity_id, mean_fn in ( + ("sensor.test1", mean), + ("sensor.test2", mean), + ("sensor.test3", mean), + ("sensor.test4", mean), + ("sensor.test5", _circular_mean), + ): + expected_average = ( + mean_fn(expected_means[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_means + else None + ) + expected_minimum = ( + min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_minima + else None + ) + expected_maximum = ( + max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_maxima + else None + ) + expected_state = ( + expected_states[entity_id][(i + 1) * 12 - 1] + if entity_id in expected_states + else None + ) + expected_sum = ( + expected_sums[entity_id][(i + 1) * 12 - 1] + if entity_id in expected_sums + else None + ) + expected_stats[entity_id].append( + { + "start": process_timestamp(start).timestamp(), + "end": process_timestamp(end).timestamp(), + "mean": pytest.approx(expected_average), + "min": pytest.approx(expected_minimum), + "max": pytest.approx(expected_maximum), + "last_reset": None, + "state": expected_state, + "sum": expected_sum, + } + ) + start = next_datetime(start) + end = next_datetime(end) + assert stats == expected_stats + + verify_stats("hour", zero, lambda v: v + timedelta(hours=1)) - stats = statistics_during_period(hass, zero, period="day") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": [], - } start = dt_util.parse_datetime("2021-08-31T06:00:00+00:00") - end = start + timedelta(days=1) - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id 
in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start += timedelta(days=1) - end += timedelta(days=1) - assert stats == expected_stats + assert start + verify_stats("day", start, lambda v: v + timedelta(days=1)) - stats = statistics_during_period(hass, zero, period="month") - expected_stats = { - "sensor.test1": [], - "sensor.test2": [], - "sensor.test3": [], - "sensor.test4": [], - } start = dt_util.parse_datetime("2021-08-01T06:00:00+00:00") - end = dt_util.parse_datetime("2021-09-01T06:00:00+00:00") - for i in range(2): - for entity_id in ( - "sensor.test1", - "sensor.test2", - "sensor.test3", - "sensor.test4", - ): - expected_average = ( - mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_averages - else None - ) - expected_minimum = ( - min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_minima - else None - ) - expected_maximum = ( - max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) - if entity_id in expected_maxima - else None - ) - expected_state = ( - expected_states[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_states - else None - ) - expected_sum = ( - expected_sums[entity_id][(i + 1) * 12 - 1] - if entity_id in expected_sums - else None - ) - expected_stats[entity_id].append( - { - "start": process_timestamp(start).timestamp(), - "end": process_timestamp(end).timestamp(), - "mean": pytest.approx(expected_average), - "min": pytest.approx(expected_minimum), - "max": pytest.approx(expected_maximum), - "last_reset": None, - "state": expected_state, - "sum": expected_sum, - } - ) - start = (start + timedelta(days=31)).replace(day=1) - end = (end + timedelta(days=31)).replace(day=1) - assert stats == expected_stats + assert start + verify_stats("month", start, lambda v: (v + timedelta(days=31)).replace(day=1)) assert "Error while processing event StatisticsTask" not in caplog.text @@ -4337,11 +4988,11 @@ async def test_validate_unit_change_convertible( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4562,11 +5213,11 @@ async def test_validate_statistics_unit_change_no_device_class( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4678,11 +5329,11 @@ async def test_validate_statistics_state_class_removed( "sensor.test": [ { "data": 
{"statistic_id": "sensor.test"}, - "type": "state_class_removed", + "type": STATE_CLASS_REMOVED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"state_class_removed"}) + await assert_validation_result(hass, client, expected, {STATE_CLASS_REMOVED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -4746,11 +5397,11 @@ async def test_validate_statistics_state_class_removed_issue_cleaned_up( "sensor.test": [ { "data": {"statistic_id": "sensor.test"}, - "type": "state_class_removed", + "type": STATE_CLASS_REMOVED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"state_class_removed"}) + await assert_validation_result(hass, client, expected, {STATE_CLASS_REMOVED_ISSUE}) # Remove the statistics - empty response get_instance(hass).async_clear_statistics(["sensor.test"]) @@ -4995,11 +5646,11 @@ async def test_validate_statistics_unit_change_no_conversion( "statistic_id": "sensor.test", "supported_unit": unit1, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Unavailable state - empty response hass.states.async_set( @@ -5176,11 +5827,11 @@ async def test_validate_statistics_unit_change_equivalent_units_2( "statistic_id": "sensor.test", "supported_unit": supported_unit, }, - "type": "units_changed", + "type": UNITS_CHANGED_ISSUE, } ], } - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) # Run statistics one hour later, metadata will not be updated await async_recorder_block_till_done(hass) @@ -5189,7 +5840,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( await assert_statistic_ids( hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) - await assert_validation_result(hass, client, expected, {"units_changed"}) + await assert_validation_result(hass, client, expected, {UNITS_CHANGED_ISSUE}) async def test_validate_statistics_other_domain( @@ -5278,7 +5929,7 @@ async def test_update_statistics_issues( now = await one_hour_stats(now) expected = { "state_class_removed_sensor.test": { - "issue_type": "state_class_removed", + "issue_type": STATE_CLASS_REMOVED_ISSUE, "statistic_id": "sensor.test", } } @@ -5482,8 +6133,9 @@ async def test_clean_up_repairs( create_issue("test", "test_issue", None) create_issue(DOMAIN, "test_issue_1", None) create_issue(DOMAIN, "test_issue_2", {"issue_type": "another_issue"}) - create_issue(DOMAIN, "test_issue_3", {"issue_type": "state_class_removed"}) - create_issue(DOMAIN, "test_issue_4", {"issue_type": "units_changed"}) + create_issue(DOMAIN, "test_issue_3", {"issue_type": STATE_CLASS_REMOVED_ISSUE}) + create_issue(DOMAIN, "test_issue_4", {"issue_type": UNITS_CHANGED_ISSUE}) + create_issue(DOMAIN, "test_issue_5", {"issue_type": MEAN_TYPE_CHANGED_ISSUE}) # Check the issues assert set(issue_registry.issues) == { @@ -5492,6 +6144,7 @@ async def test_clean_up_repairs( ("sensor", "test_issue_2"), ("sensor", "test_issue_3"), ("sensor", "test_issue_4"), + ("sensor", "test_issue_5"), } # Request update of issues @@ -5505,3 +6158,140 @@ async def test_clean_up_repairs( ("sensor", "test_issue_1"), ("sensor", "test_issue_2"), } + + +async def test_validate_statistics_mean_type_changed( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test 
validate_statistics. + + This tests a validation issue is created when a the mean type is changed. + """ + now = get_start_time(dt_util.utcnow()) + + await async_setup_component(hass, "sensor", {}) + await async_recorder_block_till_done(hass) + client = await hass_ws_client() + + # No statistics, no state - empty response + await assert_validation_result(hass, client, {}, {}) + + # No statistics, original unit - empty response + hass.states.async_set( + "sensor.wind_direction", + 10, + attributes=WIND_DIRECTION_ATTRIBUTES, + timestamp=now.timestamp(), + ) + await assert_validation_result(hass, client, {}, {}) + + # Run statistics + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + + expected_log_entry = ( + "homeassistant.components.sensor.recorder", + logging.WARNING, + ( + "The statistics mean algorithm for sensor.wind_direction have changed from" + " CIRCULAR to ARITHMETIC. Generation of long term statistics will be " + "suppressed unless it changes back or go to " + "https://my.home-assistant.io/redirect/developer_statistics " + "to delete the old statistics" + ), + ) + # Valid stats, no log entry + assert expected_log_entry not in caplog.record_tuples + + # State class changed + hass.states.async_set( + "sensor.wind_direction", + 5, + attributes={ + **WIND_DIRECTION_ATTRIBUTES, + "state_class": SensorStateClass.MEASUREMENT, + }, + timestamp=now.timestamp(), + ) + expected = { + "sensor.wind_direction": [ + { + "data": { + "statistic_id": "sensor.wind_direction", + "metadata_mean_type": StatisticMeanType.CIRCULAR, + "state_mean_type": StatisticMeanType.ARITHMETIC, + }, + "type": MEAN_TYPE_CHANGED_ISSUE, + } + ], + } + await assert_validation_result(hass, client, expected, {MEAN_TYPE_CHANGED_ISSUE}) + + # Run statistics one hour later, metadata will not be updated + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now + timedelta(hours=1)) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": "recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + await assert_validation_result(hass, client, expected, {MEAN_TYPE_CHANGED_ISSUE}) + assert expected_log_entry in caplog.record_tuples + + # State class changed back + hass.states.async_set( + "sensor.wind_direction", + 350, + attributes=WIND_DIRECTION_ATTRIBUTES, + timestamp=now.timestamp(), + ) + await assert_validation_result(hass, client, {}, {}) + + # Run statistics + await async_recorder_block_till_done(hass) + do_adhoc_statistics(hass, start=now) + await async_recorder_block_till_done(hass) + statistic_ids = await async_list_statistic_ids(hass) + assert statistic_ids == [ + { + "statistic_id": "sensor.wind_direction", + "display_unit_of_measurement": DEGREE, + "has_mean": False, + "mean_type": StatisticMeanType.CIRCULAR, + "has_sum": False, + "name": None, + "source": 
"recorder", + "statistics_unit_of_measurement": DEGREE, + "unit_class": None, + } + ] + + # Issue should be resolved + await assert_validation_result(hass, client, {}, {}) diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index 7a20560e25f..ec2d3d2c829 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -18,7 +18,7 @@ from homeassistant.components.shelly.const import ( RPC_SENSORS_POLLING_INTERVAL, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_HOST, CONF_MODEL from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import ( @@ -40,13 +40,15 @@ async def init_integration( sleep_period=0, options: dict[str, Any] | None = None, skip_setup: bool = False, + data: dict[str, Any] | None = None, ) -> MockConfigEntry: """Set up the Shelly integration in Home Assistant.""" - data = { - CONF_HOST: "192.168.1.37", - CONF_SLEEP_PERIOD: sleep_period, - "model": model, - } + if data is None: + data = { + CONF_HOST: "192.168.1.37", + CONF_SLEEP_PERIOD: sleep_period, + CONF_MODEL: model, + } if gen is not None: data[CONF_GEN] = gen @@ -141,20 +143,6 @@ def get_entity( ) -def get_entity_state(hass: HomeAssistant, entity_id: str) -> str: - """Return entity state.""" - entity = hass.states.get(entity_id) - assert entity - return entity.state - - -def get_entity_attribute(hass: HomeAssistant, entity_id: str, attribute: str) -> str: - """Return entity attribute.""" - entity = hass.states.get(entity_id) - assert entity - return entity.attributes[attribute] - - def register_device( device_registry: DeviceRegistry, config_entry: ConfigEntry ) -> DeviceEntry: diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index a332d16f95d..8f8255235be 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -101,12 +101,15 @@ MOCK_BLOCKS = [ "overpower": 0, "power": 53.4, "energy": 1234567.89, + "output": True, + "totalWorkTime": 3600, }, channel="0", type="relay", overpower=0, power=53.4, energy=1234567.89, + totalWorkTime=3600, description="relay_0", set_state=AsyncMock(side_effect=lambda turn: {"ison": turn == "on"}), ), @@ -133,11 +136,20 @@ MOCK_BLOCKS = [ set_state=AsyncMock(side_effect=mock_light_set_state), ), Mock( - sensor_ids={"motion": 0, "temp": 22.1, "gas": "mild", "motionActive": 1}, + sensor_ids={ + "motion": 0, + "temp": 22.1, + "gas": "mild", + "motionActive": 1, + "sensorOp": "normal", + "selfTest": "pending", + }, channel="0", motion=0, temp=22.1, gas="mild", + sensorOp="normal", + selfTest="pending", targetTemp=4, description="sensor_0", type="sensor", @@ -207,7 +219,7 @@ MOCK_CONFIG = { }, "sys": { "ui_data": {}, - "device": {"name": "Test name"}, + "device": {"name": "Test name", "mac": MOCK_MAC}, }, "wifi": {"sta": {"enable": True}, "sta1": {"enable": False}}, "ws": {"enable": False, "server": None}, @@ -312,7 +324,11 @@ MOCK_STATUS_COAP = { MOCK_STATUS_RPC = { - "switch:0": {"output": True}, + "switch:0": { + "id": 0, + "output": True, + "apower": 85.3, + }, "input:0": {"id": 0, "state": None}, "input:1": {"id": 1, "percent": 89, "xpercent": 8.9}, "input:2": { @@ -497,6 +513,10 @@ def _mock_blu_rtv_device(version: str | None = None): firmware_version="some fw string", initialized=True, connected=True, + script_getcode=AsyncMock( + side_effect=lambda script_id: {"data": 
MOCK_SCRIPTS[script_id - 1]} + ), + xmod_info={}, ) type(device).name = PropertyMock(return_value="Test name") return device diff --git a/tests/components/shelly/snapshots/test_button.ambr b/tests/components/shelly/snapshots/test_button.ambr new file mode 100644 index 00000000000..f5a38f1b847 --- /dev/null +++ b/tests/components/shelly/snapshots/test_button.ambr @@ -0,0 +1,96 @@ +# serializer version: 1 +# name: test_rpc_blu_trv_button[button.trv_name_calibrate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.trv_name_calibrate', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'TRV-Name Calibrate', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibrate', + 'unique_id': 'f8:44:77:25:f0:dd_calibrate', + 'unit_of_measurement': None, + }) +# --- +# name: test_rpc_blu_trv_button[button.trv_name_calibrate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'TRV-Name Calibrate', + }), + 'context': , + 'entity_id': 'button.trv_name_calibrate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_rpc_button[button.test_name_reboot-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.test_name_reboot', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Test name Reboot', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456789ABC_reboot', + 'unit_of_measurement': None, + }) +# --- +# name: test_rpc_button[button.test_name_reboot-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Test name Reboot', + }), + 'context': , + 'entity_id': 'button.test_name_reboot', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/shelly/snapshots/test_climate.ambr b/tests/components/shelly/snapshots/test_climate.ambr new file mode 100644 index 00000000000..991c570172e --- /dev/null +++ b/tests/components/shelly/snapshots/test_climate.ambr @@ -0,0 +1,276 @@ +# serializer version: 1 +# name: test_blu_trv_climate_set_temperature[climate.trv_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + ]), + 'max_temp': 30, + 'min_temp': 4, + 'target_temp_step': 0.1, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.trv_name', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'f8:44:77:25:f0:dd-blutrv:200', + 'unit_of_measurement': None, + }) +# --- +# name: test_blu_trv_climate_set_temperature[climate.trv_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 15.2, + 'friendly_name': 'TRV-Name', + 'hvac_action': , + 'hvac_modes': list([ + , + ]), + 'max_temp': 30, + 'min_temp': 4, + 'supported_features': , + 'target_temp_step': 0.1, + 'temperature': 17.1, + }), + 'context': , + 'entity_id': 'climate.trv_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_hvac_mode[climate.test_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 31, + 'min_temp': 4, + 'preset_modes': list([ + 'none', + 'Profile1', + 'Profile2', + ]), + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_name', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test name', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABC-sensor_0', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_hvac_mode[climate.test_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.1, + 'friendly_name': 'Test name', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 31, + 'min_temp': 4, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'Profile1', + 'Profile2', + ]), + 'supported_features': , + 'target_temp_step': 0.5, + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.test_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_rpc_climate_hvac_mode[climate.test_name_thermostat_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 5, + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_name_thermostat_0', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test name Thermostat 0', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABC-thermostat:0', + 'unit_of_measurement': None, + }) +# --- +# name: test_rpc_climate_hvac_mode[climate.test_name_thermostat_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 44.4, + 'current_temperature': 12.3, + 'friendly_name': 'Test name Thermostat 0', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 5, + 
'supported_features': , + 'target_temp_step': 0.5, + 'temperature': 23, + }), + 'context': , + 'entity_id': 'climate.test_name_thermostat_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_wall_display_thermostat_mode[climate.test_name_thermostat_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 5, + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_name_thermostat_0', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Test name Thermostat 0', + 'platform': 'shelly', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '123456789ABC-thermostat:0', + 'unit_of_measurement': None, + }) +# --- +# name: test_wall_display_thermostat_mode[climate.test_name_thermostat_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 44.4, + 'current_temperature': 12.3, + 'friendly_name': 'Test name Thermostat 0', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 5, + 'supported_features': , + 'target_temp_step': 0.5, + 'temperature': 23, + }), + 'context': , + 'entity_id': 'climate.test_name_thermostat_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 1e7c54320e8..ea3a7d5f3d2 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -39,15 +39,16 @@ async def test_block_binary_sensor( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_channel_1_overpowering" await init_integration(hass, 1) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "overpower", 1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-overpower" @@ -61,19 +62,18 @@ async def test_block_binary_sensor_extra_state_attr( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_gas" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes.get("detected") == "mild" monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "gas", "none") mock_block_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes.get("detected") == "none" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-gas" @@ -89,15 +89,16 @@ async def test_block_rest_binary_sensor( 
monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_block_device.status["cloud"], "connected", True) await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cloud" @@ -115,20 +116,22 @@ async def test_block_rest_binary_sensor_connected_battery_devices( monkeypatch.setitem(mock_block_device.settings["coiot"], "update_period", 3600) await init_integration(hass, 1, model=MODEL_MOTION) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_block_device.status["cloud"], "connected", True) # Verify no update on fast intervals await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # Verify update on slow intervals await mock_rest_update(hass, freezer, seconds=UPDATE_PERIOD_MULTIPLIER * 3600) - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cloud" @@ -149,15 +152,16 @@ async def test_block_sleeping_binary_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "motion", 1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-motion" @@ -183,14 +187,16 @@ async def test_block_restored_sleeping_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_block_restored_sleeping_binary_sensor_no_last_state( @@ -214,14 +220,16 @@ async def test_block_restored_sleeping_binary_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await 
hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_binary_sensor( @@ -234,17 +242,18 @@ async def test_rpc_binary_sensor( entity_id = f"{BINARY_SENSOR_DOMAIN}.test_cover_0_overpowering" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "errors", "overpower" ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0-overpower" @@ -290,20 +299,22 @@ async def test_rpc_sleeping_binary_sensor( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cloud", "connected", True) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON - - # test external power sensor - state = hass.states.get("binary_sensor.test_name_external_power") - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get("binary_sensor.test_name_external_power") - assert entry + # test external power sensor + assert (state := hass.states.get("binary_sensor.test_name_external_power")) + assert state.state == STATE_ON + + assert ( + entry := entity_registry.async_get("binary_sensor.test_name_external_power") + ) assert entry.unique_id == "123456789ABC-devicepower:0-external_power" @@ -331,14 +342,16 @@ async def test_rpc_restored_sleeping_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_restored_sleeping_binary_sensor_no_last_state( @@ -364,7 +377,8 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) @@ -375,7 +389,8 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF @pytest.mark.parametrize( @@ -407,17 +422,17 @@ async def test_rpc_device_virtual_binary_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := 
hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-boolean:203-boolean" monkeypatch.setitem(mock_rpc_device.status["boolean:203"], "value", False) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( @@ -450,8 +465,7 @@ async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_binary_sensor_when_orphaned( @@ -475,8 +489,7 @@ async def test_rpc_remove_virtual_binary_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_blu_trv_binary_sensor_entity( diff --git a/tests/components/shelly/test_button.py b/tests/components/shelly/test_button.py index 14349411670..2057076d18b 100644 --- a/tests/components/shelly/test_button.py +++ b/tests/components/shelly/test_button.py @@ -2,12 +2,17 @@ from unittest.mock import Mock +from aioshelly.const import MODEL_BLU_GATEWAY_G3 +from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError import pytest +from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.shelly.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_registry import EntityRegistry from . 
import init_integration @@ -22,10 +27,10 @@ async def test_block_button( entity_id = "button.test_name_reboot" # reboot button - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC_reboot" await hass.services.async_call( @@ -38,7 +43,10 @@ async def test_block_button( async def test_rpc_button( - hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test rpc device OTA button.""" await init_integration(hass, 2) @@ -46,11 +54,11 @@ async def test_rpc_button( entity_id = "button.test_name_reboot" # reboot button - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC_reboot" + assert (entry := entity_registry.async_get(entity_id)) + assert entry == snapshot(name=f"{entity_id}-entry") await hass.services.async_call( BUTTON_DOMAIN, @@ -61,6 +69,68 @@ async def test_rpc_button( assert mock_rpc_device.trigger_reboot.call_count == 1 +@pytest.mark.parametrize( + ("exception", "error"), + [ + ( + DeviceConnectionError, + "Device communication error occurred while calling action for button.test_name_reboot of Test name", + ), + ( + RpcCallError(999), + "RPC call error occurred while calling action for button.test_name_reboot of Test name", + ), + ], +) +async def test_rpc_button_exc( + hass: HomeAssistant, + mock_rpc_device: Mock, + exception: Exception, + error: str, +) -> None: + """Test RPC button with exception.""" + await init_integration(hass, 2) + + mock_rpc_device.trigger_reboot.side_effect = exception + + with pytest.raises(HomeAssistantError, match=error): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.test_name_reboot"}, + blocking=True, + ) + + +async def test_rpc_button_reauth_error( + hass: HomeAssistant, mock_rpc_device: Mock +) -> None: + """Test rpc device OTA button with authentication error.""" + entry = await init_integration(hass, 2) + + mock_rpc_device.trigger_reboot.side_effect = InvalidAuthError + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.test_name_reboot"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id + + @pytest.mark.parametrize( ("gen", "old_unique_id", "new_unique_id", "migration"), [ @@ -104,3 +174,107 @@ async def test_migrate_unique_id( bool("Migrating unique_id for button.test_name_reboot" in caplog.text) == migration ) + + +async def test_rpc_blu_trv_button( + hass: HomeAssistant, + mock_blu_trv: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, + snapshot: SnapshotAssertion, +) -> None: + """Test RPC BLU TRV button.""" + monkeypatch.delitem(mock_blu_trv.status, "script:1") + 
monkeypatch.delitem(mock_blu_trv.status, "script:2") + monkeypatch.delitem(mock_blu_trv.status, "script:3") + + await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) + + entity_id = "button.trv_name_calibrate" + + state = hass.states.get(entity_id) + assert state == snapshot(name=f"{entity_id}-state") + + entry = entity_registry.async_get(entity_id) + assert entry == snapshot(name=f"{entity_id}-entry") + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert mock_blu_trv.trigger_blu_trv_calibration.call_count == 1 + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + ( + DeviceConnectionError, + "Device communication error occurred while calling action for button.trv_name_calibrate of Test name", + ), + ( + RpcCallError(999), + "RPC call error occurred while calling action for button.trv_name_calibrate of Test name", + ), + ], +) +async def test_rpc_blu_trv_button_exc( + hass: HomeAssistant, + mock_blu_trv: Mock, + monkeypatch: pytest.MonkeyPatch, + exception: Exception, + error: str, +) -> None: + """Test RPC BLU TRV button with exception.""" + monkeypatch.delitem(mock_blu_trv.status, "script:1") + monkeypatch.delitem(mock_blu_trv.status, "script:2") + monkeypatch.delitem(mock_blu_trv.status, "script:3") + + await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) + + mock_blu_trv.trigger_blu_trv_calibration.side_effect = exception + + with pytest.raises(HomeAssistantError, match=error): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.trv_name_calibrate"}, + blocking=True, + ) + + +async def test_rpc_blu_trv_button_auth_error( + hass: HomeAssistant, + mock_blu_trv: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test RPC BLU TRV button with authentication error.""" + monkeypatch.delitem(mock_blu_trv.status, "script:1") + monkeypatch.delitem(mock_blu_trv.status, "script:2") + monkeypatch.delitem(mock_blu_trv.status, "script:3") + + entry = await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) + + mock_blu_trv.trigger_blu_trv_calibration.side_effect = InvalidAuthError + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.trv_name_calibrate"}, + blocking=True, + ) + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index 040d67cb9c4..b2135fb38af 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -5,12 +5,14 @@ from unittest.mock import AsyncMock, Mock, PropertyMock from aioshelly.const import ( BLU_TRV_IDENTIFIER, + BLU_TRV_TIMEOUT, MODEL_BLU_GATEWAY_G3, MODEL_VALVE, MODEL_WALL_DISPLAY, ) from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest +from syrupy import SnapshotAssertion from homeassistant.components.climate import ( ATTR_CURRENT_HUMIDITY, @@ -26,7 +28,7 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.components.shelly.const import BLU_TRV_TIMEOUT, DOMAIN +from homeassistant.components.shelly.const import DOMAIN from 
homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ( @@ -42,13 +44,7 @@ from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM -from . import ( - MOCK_MAC, - get_entity_attribute, - init_integration, - register_device, - register_entity, -) +from . import MOCK_MAC, init_integration, register_device, register_entity from .conftest import MOCK_STATUS_COAP from tests.common import mock_restore_cache, mock_restore_cache_with_extra_data @@ -65,6 +61,7 @@ async def test_climate_hvac_mode( mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, entity_registry: EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test climate hvac mode service.""" monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") @@ -83,12 +80,9 @@ async def test_climate_hvac_mode( await hass.async_block_till_done(wait_background_tasks=True) # Test initial hvac mode - off - state = hass.states.get(ENTITY_ID) - assert state.state == HVACMode.OFF + assert hass.states.get(ENTITY_ID) == snapshot(name=f"{ENTITY_ID}-state") - entry = entity_registry.async_get(ENTITY_ID) - assert entry - assert entry.unique_id == "123456789ABC-sensor_0" + assert entity_registry.async_get(ENTITY_ID) == snapshot(name=f"{ENTITY_ID}-entry") # Test set hvac mode heat await hass.services.async_call( @@ -103,7 +97,8 @@ async def test_climate_hvac_mode( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 20.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.HEAT # Test set hvac mode off @@ -120,13 +115,13 @@ async def test_climate_hvac_mode( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 4.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.OFF # Test unavailable on error monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 1) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == STATE_UNAVAILABLE @@ -143,7 +138,7 @@ async def test_climate_set_temperature( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.state == HVACMode.OFF assert state.attributes[ATTR_TEMPERATURE] == 4 @@ -197,7 +192,7 @@ async def test_climate_set_preset_mode( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE # Test set Profile2 @@ -215,7 +210,7 @@ async def test_climate_set_preset_mode( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "mode", 2) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == "Profile2" # Set preset to none @@ -234,7 +229,7 @@ async def test_climate_set_preset_mode( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "mode", 0) mock_block_device.mock_update() - state = 
hass.states.get(ENTITY_ID) + assert (state := hass.states.get(ENTITY_ID)) assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE @@ -269,23 +264,26 @@ async def test_block_restored_climate( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Partial update, should not change state mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 4.0 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 4.0 # Test set hvac mode heat, target temp should be set to last target temp (22) await hass.services.async_call( @@ -300,9 +298,10 @@ async def test_block_restored_climate( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 22.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT - assert hass.states.get(entity_id).attributes.get("temperature") == 22.0 + assert state.attributes.get(ATTR_TEMPERATURE) == 22.0 async def test_block_restored_climate_us_customary( @@ -337,17 +336,19 @@ async def test_block_restored_climate_us_customary( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert hass.states.get(entity_id).attributes.get("current_temperature") == 67 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 67 # Partial update, should not change state mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert hass.states.get(entity_id).attributes.get("current_temperature") == 67 + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 67 # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) @@ -356,9 +357,10 @@ async def test_block_restored_climate_us_customary( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == HVACMode.OFF - assert hass.states.get(entity_id).attributes.get("temperature") == 39 - assert hass.states.get(entity_id).attributes.get("current_temperature") == 65 + assert (state := 
hass.states.get(entity_id)) + assert state.state == HVACMode.OFF + assert state.attributes.get(ATTR_TEMPERATURE) == 39 + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) == 65 # Test set hvac mode heat, target temp should be set to last target temp (10.0/50) await hass.services.async_call( @@ -373,9 +375,10 @@ async def test_block_restored_climate_us_customary( monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "targetTemp", 10.0) mock_block_device.mock_update() - state = hass.states.get(ENTITY_ID) + + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT - assert hass.states.get(entity_id).attributes.get("temperature") == 50 + assert state.attributes.get(ATTR_TEMPERATURE) == 50 async def test_block_restored_climate_unavailable( @@ -403,7 +406,8 @@ async def test_block_restored_climate_unavailable( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.OFF + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.OFF async def test_block_restored_climate_set_preset_before_online( @@ -431,7 +435,8 @@ async def test_block_restored_climate_set_preset_before_online( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == HVACMode.HEAT + assert (state := hass.states.get(entity_id)) + assert state.state == HVACMode.HEAT with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -460,7 +465,10 @@ async def test_block_set_mode_connection_error( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for climate.test_name of Test name", + ): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, @@ -603,27 +611,21 @@ async def test_rpc_climate_hvac_mode( entity_registry: EntityRegistry, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, + snapshot: SnapshotAssertion, ) -> None: """Test climate hvac mode service.""" entity_id = "climate.test_name_thermostat_0" await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) - assert state.state == HVACMode.HEAT - assert state.attributes[ATTR_TEMPERATURE] == 23 - assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING - assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 + assert (state := hass.states.get(entity_id)) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.unique_id == "123456789ABC-thermostat:0" + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "output", False) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE assert state.attributes[ATTR_CURRENT_HUMIDITY] == 44.4 @@ -639,7 +641,7 @@ async def test_rpc_climate_hvac_mode( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "enable": False}} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.OFF @@ -657,15 +659,14 @@ async def 
test_rpc_climate_without_humidity( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 23 assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 12.3 assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING assert ATTR_CURRENT_HUMIDITY not in state.attributes - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-thermostat:0" @@ -677,7 +678,7 @@ async def test_rpc_climate_set_temperature( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_TEMPERATURE] == 23 monkeypatch.setitem(mock_rpc_device.status["thermostat:0"], "target_C", 28) @@ -692,7 +693,7 @@ async def test_rpc_climate_set_temperature( mock_rpc_device.call_rpc.assert_called_once_with( "Thermostat.SetConfig", {"config": {"id": 0, "target_C": 28}} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_TEMPERATURE] == 28 @@ -707,7 +708,7 @@ async def test_rpc_climate_hvac_mode_cool( await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == HVACMode.COOL assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING @@ -717,6 +718,7 @@ async def test_wall_display_thermostat_mode( mock_rpc_device: Mock, entity_registry: EntityRegistry, monkeypatch: pytest.MonkeyPatch, + snapshot: SnapshotAssertion, ) -> None: """Test Wall Display in thermostat mode.""" climate_entity_id = "climate.test_name_thermostat_0" @@ -730,13 +732,11 @@ async def test_wall_display_thermostat_mode( # the climate entity should be created state = hass.states.get(climate_entity_id) - assert state - assert state.state == HVACMode.HEAT + assert state == snapshot(name=f"{climate_entity_id}-state") assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 entry = entity_registry.async_get(climate_entity_id) - assert entry - assert entry.unique_id == "123456789ABC-thermostat:0" + assert entry == snapshot(name=f"{climate_entity_id}-entry") async def test_wall_display_thermostat_mode_external_actuator( @@ -751,31 +751,31 @@ async def test_wall_display_thermostat_mode_external_actuator( new_status = deepcopy(mock_rpc_device.status) new_status["sys"]["relay_in_thermostat"] = False + new_status.pop("cover:0") monkeypatch.setattr(mock_rpc_device, "status", new_status) await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) # the switch entity should be created - state = hass.states.get(switch_entity_id) - assert state + assert (state := hass.states.get(switch_entity_id)) assert state.state == STATE_ON assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 # the climate entity should be created - state = hass.states.get(climate_entity_id) - assert state + assert (state := hass.states.get(climate_entity_id)) assert state.state == HVACMode.HEAT assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 - entry = entity_registry.async_get(climate_entity_id) - assert entry + assert (entry := entity_registry.async_get(climate_entity_id)) assert entry.unique_id == "123456789ABC-thermostat:0" async def test_blu_trv_climate_set_temperature( hass: HomeAssistant, mock_blu_trv: Mock, + entity_registry: EntityRegistry, 
monkeypatch: pytest.MonkeyPatch, + snapshot: SnapshotAssertion, ) -> None: """Test BLU TRV set target temperature.""" @@ -784,7 +784,9 @@ async def test_blu_trv_climate_set_temperature( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 17.1 + assert (state := hass.states.get(entity_id)) == snapshot(name=f"{entity_id}-state") + + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") monkeypatch.setitem( mock_blu_trv.status[f"{BLU_TRV_IDENTIFIER}:200"], "target_C", 28 @@ -807,7 +809,8 @@ async def test_blu_trv_climate_set_temperature( BLU_TRV_TIMEOUT, ) - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 28 + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] == 28 async def test_blu_trv_climate_disabled( @@ -822,14 +825,16 @@ async def test_blu_trv_climate_disabled( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) == 17.1 + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] == 17.1 monkeypatch.setitem( mock_blu_trv.config[f"{BLU_TRV_IDENTIFIER}:200"], "enable", False ) mock_blu_trv.mock_update() - assert get_entity_attribute(hass, entity_id, ATTR_TEMPERATURE) is None + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_TEMPERATURE] is None async def test_blu_trv_climate_hvac_action( @@ -844,9 +849,11 @@ async def test_blu_trv_climate_hvac_action( await init_integration(hass, 3, model=MODEL_BLU_GATEWAY_G3) - assert get_entity_attribute(hass, entity_id, ATTR_HVAC_ACTION) == HVACAction.IDLE + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE monkeypatch.setitem(mock_blu_trv.status[f"{BLU_TRV_IDENTIFIER}:200"], "pos", 10) mock_blu_trv.mock_update() - assert get_entity_attribute(hass, entity_id, ATTR_HVAC_ACTION) == HVACAction.HEATING + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index 50b8b552268..fffffc21cae 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -18,10 +18,20 @@ from homeassistant import config_entries from homeassistant.components.shelly import MacAddressMismatchError, config_flow from homeassistant.components.shelly.const import ( CONF_BLE_SCANNER_MODE, + CONF_GEN, + CONF_SLEEP_PERIOD, DOMAIN, BLEScannerMode, ) from homeassistant.components.shelly.coordinator import ENTRY_RELOAD_COOLDOWN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + CONF_HOST, + CONF_MODEL, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.service_info.zeroconf import ( @@ -100,18 +110,18 @@ async def test_form( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1", "port": port}, + {CONF_HOST: "1.1.1.1", CONF_PORT: port}, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test name" assert result2["data"] == { - "host": "1.1.1.1", - "port": port, - "model": model, - "sleep_period": 0, - "gen": gen, + CONF_HOST: "1.1.1.1", + CONF_PORT: port, + 
CONF_MODEL: model, + CONF_SLEEP_PERIOD: 0, + CONF_GEN: gen, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -163,18 +173,18 @@ async def test_user_flow_overrides_existing_discovery( assert result["errors"] == {} result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1", "port": 80}, + {CONF_HOST: "1.1.1.1", CONF_PORT: 80}, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test name" assert result2["data"] == { - "host": "1.1.1.1", - "port": 80, - "model": MODEL_PLUS_2PM, - "sleep_period": 0, - "gen": 2, + CONF_HOST: "1.1.1.1", + CONF_PORT: 80, + CONF_MODEL: MODEL_PLUS_2PM, + CONF_SLEEP_PERIOD: 0, + CONF_GEN: 2, } assert result2["context"]["unique_id"] == "AABBCCDDEEFF" assert len(mock_setup.mock_calls) == 1 @@ -220,19 +230,19 @@ async def test_form_gen1_custom_port( ( 1, MODEL_1, - {"username": "test user", "password": "test1 password"}, + {CONF_USERNAME: "test user", CONF_PASSWORD: "test1 password"}, "test user", ), ( 2, MODEL_PLUS_2PM, - {"password": "test2 password"}, + {CONF_PASSWORD: "test2 password"}, "admin", ), ( 3, MODEL_PLUS_2PM, - {"password": "test2 password"}, + {CONF_PASSWORD: "test2 password"}, "admin", ), ], @@ -259,7 +269,7 @@ async def test_form_auth( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) assert result2["type"] is FlowResultType.FORM @@ -282,13 +292,13 @@ async def test_form_auth( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Test name" assert result3["data"] == { - "host": "1.1.1.1", - "port": DEFAULT_HTTP_PORT, - "model": model, - "sleep_period": 0, - "gen": gen, - "username": username, - "password": user_input["password"], + CONF_HOST: "1.1.1.1", + CONF_PORT: DEFAULT_HTTP_PORT, + CONF_MODEL: model, + CONF_SLEEP_PERIOD: 0, + CONF_GEN: gen, + CONF_USERNAME: username, + CONF_PASSWORD: user_input[CONF_PASSWORD], } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -312,7 +322,7 @@ async def test_form_errors_get_info( with patch("homeassistant.components.shelly.config_flow.get_info", side_effect=exc): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) assert result2["type"] is FlowResultType.FORM @@ -333,7 +343,7 @@ async def test_form_missing_model_key( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) assert result2["type"] is FlowResultType.FORM @@ -356,7 +366,7 @@ async def test_form_missing_model_key_auth_enabled( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) assert result2["type"] is FlowResultType.FORM @@ -364,7 +374,7 @@ async def test_form_missing_model_key_auth_enabled( monkeypatch.setattr(mock_rpc_device, "shelly", {"gen": 2}) result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], {"password": "1234"} + result2["flow_id"], {CONF_PASSWORD: "1234"} ) assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": "firmware_not_fully_provisioned"} @@ -424,7 +434,7 @@ async def test_form_errors_test_connection( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) assert result2["type"] is 
FlowResultType.FORM @@ -435,7 +445,7 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: """Test we get the form.""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0"} + domain="shelly", unique_id="test-mac", data={CONF_HOST: "0.0.0.0"} ) entry.add_to_hass(hass) @@ -449,14 +459,14 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" # Test config entry got updated with latest IP - assert entry.data["host"] == "1.1.1.1" + assert entry.data[CONF_HOST] == "1.1.1.1" async def test_user_setup_ignored_device( @@ -467,7 +477,7 @@ async def test_user_setup_ignored_device( entry = MockConfigEntry( domain="shelly", unique_id="test-mac", - data={"host": "0.0.0.0"}, + data={CONF_HOST: "0.0.0.0"}, source=config_entries.SOURCE_IGNORE, ) entry.add_to_hass(hass) @@ -491,13 +501,13 @@ async def test_user_setup_ignored_device( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) assert result2["type"] is FlowResultType.CREATE_ENTRY # Test config entry got updated with latest IP - assert entry.data["host"] == "1.1.1.1" + assert entry.data[CONF_HOST] == "1.1.1.1" assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -525,7 +535,7 @@ async def test_form_auth_errors_test_connection_gen1( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) with patch( @@ -534,7 +544,7 @@ async def test_form_auth_errors_test_connection_gen1( ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], - {"username": "test username", "password": "test password"}, + {CONF_USERNAME: "test username", CONF_PASSWORD: "test password"}, ) assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": base_error} @@ -563,7 +573,7 @@ async def test_form_auth_errors_test_connection_gen2( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) with patch( @@ -571,7 +581,7 @@ async def test_form_auth_errors_test_connection_gen2( new=AsyncMock(side_effect=exc), ): result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], {"password": "test password"} + result2["flow_id"], {CONF_PASSWORD: "test password"} ) assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": base_error} @@ -642,10 +652,10 @@ async def test_zeroconf( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test name" assert result2["data"] == { - "host": "1.1.1.1", - "model": model, - "sleep_period": 0, - "gen": gen, + CONF_HOST: "1.1.1.1", + CONF_MODEL: model, + CONF_SLEEP_PERIOD: 0, + CONF_GEN: gen, } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -700,10 +710,10 @@ async def test_zeroconf_sleeping_device( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test name" assert result2["data"] == { - "host": "1.1.1.1", - "model": MODEL_1, - "sleep_period": 600, - "gen": 1, + CONF_HOST: "1.1.1.1", + CONF_MODEL: MODEL_1, + CONF_SLEEP_PERIOD: 600, + CONF_GEN: 1, } assert len(mock_setup.mock_calls) == 1 assert 
len(mock_setup_entry.mock_calls) == 1 @@ -735,11 +745,43 @@ async def test_zeroconf_sleeping_device_error(hass: HomeAssistant) -> None: assert result["reason"] == "cannot_connect" +async def test_options_flow_abort_setup_retry( + hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test ble options abort if device is in setup retry.""" + monkeypatch.setattr( + mock_rpc_device, "initialize", AsyncMock(side_effect=DeviceConnectionError) + ) + entry = await init_integration(hass, 2) + + assert entry.state is ConfigEntryState.SETUP_RETRY + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_options_flow_abort_no_scripts_support( + hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch +) -> None: + """Test ble options abort if device does not support scripts.""" + monkeypatch.setattr( + mock_rpc_device, "supports_scripts", AsyncMock(return_value=False) + ) + entry = await init_integration(hass, 2) + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_scripts_support" + + async def test_zeroconf_already_configured(hass: HomeAssistant) -> None: """Test we get the form.""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0"} + domain="shelly", unique_id="test-mac", data={CONF_HOST: "0.0.0.0"} ) entry.add_to_hass(hass) @@ -756,7 +798,7 @@ async def test_zeroconf_already_configured(hass: HomeAssistant) -> None: assert result["reason"] == "already_configured" # Test config entry got updated with latest IP - assert entry.data["host"] == "1.1.1.1" + assert entry.data[CONF_HOST] == "1.1.1.1" async def test_zeroconf_ignored(hass: HomeAssistant) -> None: @@ -787,7 +829,7 @@ async def test_zeroconf_with_wifi_ap_ip(hass: HomeAssistant) -> None: """Test we ignore the Wi-FI AP IP.""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "2.2.2.2"} + domain="shelly", unique_id="test-mac", data={CONF_HOST: "2.2.2.2"} ) entry.add_to_hass(hass) @@ -806,7 +848,7 @@ async def test_zeroconf_with_wifi_ap_ip(hass: HomeAssistant) -> None: assert result["reason"] == "already_configured" # Test config entry was not updated with the wifi ap ip - assert entry.data["host"] == "2.2.2.2" + assert entry.data[CONF_HOST] == "2.2.2.2" async def test_zeroconf_cannot_connect(hass: HomeAssistant) -> None: @@ -852,20 +894,20 @@ async def test_zeroconf_require_auth( ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"username": "test username", "password": "test password"}, + {CONF_USERNAME: "test username", CONF_PASSWORD: "test password"}, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test name" assert result2["data"] == { - "host": "1.1.1.1", - "port": DEFAULT_HTTP_PORT, - "model": MODEL_1, - "sleep_period": 0, - "gen": 1, - "username": "test username", - "password": "test password", + CONF_HOST: "1.1.1.1", + CONF_PORT: DEFAULT_HTTP_PORT, + CONF_MODEL: MODEL_1, + CONF_SLEEP_PERIOD: 0, + CONF_GEN: 1, + CONF_USERNAME: "test username", + CONF_PASSWORD: "test password", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -874,9 +916,9 @@ async def test_zeroconf_require_auth( @pytest.mark.parametrize( ("gen", "user_input"), [ - (1, 
{"username": "test user", "password": "test1 password"}), - (2, {"password": "test2 password"}), - (3, {"password": "test2 password"}), + (1, {CONF_USERNAME: "test user", CONF_PASSWORD: "test1 password"}), + (2, {CONF_PASSWORD: "test2 password"}), + (3, {CONF_PASSWORD: "test2 password"}), ], ) async def test_reauth_successful( @@ -888,7 +930,9 @@ async def test_reauth_successful( ) -> None: """Test starting a reauthentication flow.""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} + domain="shelly", + unique_id="test-mac", + data={CONF_HOST: "0.0.0.0", CONF_GEN: gen}, ) entry.add_to_hass(hass) result = await entry.start_reauth_flow(hass) @@ -912,9 +956,9 @@ async def test_reauth_successful( @pytest.mark.parametrize( ("gen", "user_input"), [ - (1, {"username": "test user", "password": "test1 password"}), - (2, {"password": "test2 password"}), - (3, {"password": "test2 password"}), + (1, {CONF_USERNAME: "test user", CONF_PASSWORD: "test1 password"}), + (2, {CONF_PASSWORD: "test2 password"}), + (3, {CONF_PASSWORD: "test2 password"}), ], ) @pytest.mark.parametrize( @@ -933,7 +977,9 @@ async def test_reauth_unsuccessful( ) -> None: """Test reauthentication flow failed.""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} + domain="shelly", + unique_id="test-mac", + data={CONF_HOST: "0.0.0.0", CONF_GEN: gen}, ) entry.add_to_hass(hass) result = await entry.start_reauth_flow(hass) @@ -943,7 +989,12 @@ async def test_reauth_unsuccessful( with ( patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, + return_value={ + "mac": "test-mac", + "type": MODEL_1, + "auth": True, + "gen": gen, + }, ), patch( "aioshelly.block_device.BlockDevice.create", new=AsyncMock(side_effect=exc) @@ -962,7 +1013,7 @@ async def test_reauth_unsuccessful( async def test_reauth_get_info_error(hass: HomeAssistant) -> None: """Test reauthentication flow failed with error in get_info().""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": 2} + domain="shelly", unique_id="test-mac", data={CONF_HOST: "0.0.0.0", CONF_GEN: 2} ) entry.add_to_hass(hass) result = await entry.start_reauth_flow(hass) @@ -975,7 +1026,7 @@ async def test_reauth_get_info_error(hass: HomeAssistant) -> None: ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"password": "test2 password"}, + user_input={CONF_PASSWORD: "test2 password"}, ) assert result["type"] is FlowResultType.ABORT @@ -1062,7 +1113,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.DISABLED + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.DISABLED result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] is FlowResultType.FORM @@ -1078,7 +1129,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.ACTIVE + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.ACTIVE result = await hass.config_entries.options.async_init(entry.entry_id) assert result["type"] is FlowResultType.FORM @@ 
-1094,7 +1145,7 @@ async def test_options_flow_ble(hass: HomeAssistant, mock_rpc_device: Mock) -> N await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_BLE_SCANNER_MODE] == BLEScannerMode.PASSIVE + assert result["data"][CONF_BLE_SCANNER_MODE] is BLEScannerMode.PASSIVE await hass.config_entries.async_unload(entry.entry_id) @@ -1106,7 +1157,12 @@ async def test_zeroconf_already_configured_triggers_refresh_mac_in_name( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 0, "model": MODEL_1}, + data={ + CONF_HOST: "1.1.1.1", + CONF_GEN: 2, + CONF_SLEEP_PERIOD: 0, + CONF_MODEL: MODEL_1, + }, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1141,7 +1197,12 @@ async def test_zeroconf_already_configured_triggers_refresh( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 0, "model": MODEL_1}, + data={ + CONF_HOST: "1.1.1.1", + CONF_GEN: 2, + CONF_SLEEP_PERIOD: 0, + CONF_MODEL: MODEL_1, + }, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1181,7 +1242,12 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, + data={ + CONF_HOST: "1.1.1.1", + CONF_GEN: 2, + CONF_SLEEP_PERIOD: 1000, + CONF_MODEL: MODEL_1, + }, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1228,7 +1294,12 @@ async def test_zeroconf_sleeping_device_attempts_configure( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, + data={ + CONF_HOST: "1.1.1.1", + CONF_GEN: 2, + CONF_SLEEP_PERIOD: 1000, + CONF_MODEL: MODEL_1, + }, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1288,7 +1359,12 @@ async def test_zeroconf_sleeping_device_attempts_configure_ws_disabled( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, + data={ + CONF_HOST: "1.1.1.1", + CONF_GEN: 2, + CONF_SLEEP_PERIOD: 1000, + CONF_MODEL: MODEL_1, + }, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1348,7 +1424,12 @@ async def test_zeroconf_sleeping_device_attempts_configure_no_url_available( entry = MockConfigEntry( domain="shelly", unique_id="AABBCCDDEEFF", - data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, + data={ + CONF_HOST: "1.1.1.1", + CONF_GEN: 2, + CONF_SLEEP_PERIOD: 1000, + CONF_MODEL: MODEL_1, + }, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -1415,20 +1496,20 @@ async def test_sleeping_device_gen2_with_new_firmware( ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - {"host": "1.1.1.1"}, + {CONF_HOST: "1.1.1.1"}, ) await hass.async_block_till_done() assert result["data"] == { - "host": "1.1.1.1", - "port": DEFAULT_HTTP_PORT, - "model": MODEL_PLUS_2PM, - "sleep_period": 666, - "gen": 2, + CONF_HOST: "1.1.1.1", + CONF_PORT: DEFAULT_HTTP_PORT, + CONF_MODEL: MODEL_PLUS_2PM, + CONF_SLEEP_PERIOD: 666, + CONF_GEN: 2, } -@pytest.mark.parametrize("gen", [1, 2, 3]) +@pytest.mark.parametrize(CONF_GEN, [1, 2, 3]) async def test_reconfigure_successful( hass: 
HomeAssistant, gen: int, @@ -1437,7 +1518,9 @@ async def test_reconfigure_successful( ) -> None: """Test starting a reconfiguration flow.""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} + domain="shelly", + unique_id="test-mac", + data={CONF_HOST: "0.0.0.0", CONF_GEN: gen}, ) entry.add_to_hass(hass) @@ -1452,12 +1535,12 @@ async def test_reconfigure_successful( ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"host": "10.10.10.10", "port": 99}, + user_input={CONF_HOST: "10.10.10.10", CONF_PORT: 99}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reconfigure_successful" - assert entry.data == {"host": "10.10.10.10", "port": 99, "gen": gen} + assert entry.data == {CONF_HOST: "10.10.10.10", CONF_PORT: 99, CONF_GEN: gen} @pytest.mark.parametrize("gen", [1, 2, 3]) @@ -1469,7 +1552,9 @@ async def test_reconfigure_unsuccessful( ) -> None: """Test reconfiguration flow failed.""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} + domain="shelly", + unique_id="test-mac", + data={CONF_HOST: "0.0.0.0", CONF_GEN: gen}, ) entry.add_to_hass(hass) @@ -1480,11 +1565,16 @@ async def test_reconfigure_unsuccessful( with patch( "homeassistant.components.shelly.config_flow.get_info", - return_value={"mac": "another-mac", "type": MODEL_1, "auth": False, "gen": gen}, + return_value={ + "mac": "another-mac", + "type": MODEL_1, + "auth": False, + "gen": gen, + }, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"host": "10.10.10.10", "port": 99}, + user_input={CONF_HOST: "10.10.10.10", CONF_PORT: 99}, ) assert result["type"] is FlowResultType.ABORT @@ -1506,7 +1596,7 @@ async def test_reconfigure_with_exception( ) -> None: """Test reconfiguration flow when an exception is raised.""" entry = MockConfigEntry( - domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": 2} + domain="shelly", unique_id="test-mac", data={CONF_HOST: "0.0.0.0", CONF_GEN: 2} ) entry.add_to_hass(hass) @@ -1518,7 +1608,7 @@ async def test_reconfigure_with_exception( with patch("homeassistant.components.shelly.config_flow.get_info", side_effect=exc): result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"host": "10.10.10.10", "port": 99}, + user_input={CONF_HOST: "10.10.10.10", CONF_PORT: 99}, ) assert result["errors"] == {"base": base_error} diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 8c011e4ad0d..f89bec8853a 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -32,7 +32,6 @@ from homeassistant.helpers import device_registry as dr, issue_registry as ir from . 
import ( MOCK_MAC, - get_entity_state, init_integration, inject_rpc_device_event, mock_polling_rpc_update, @@ -72,7 +71,7 @@ async def test_block_reload_on_cfg_change( async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Generate config change from switch to light monkeypatch.setitem( @@ -82,7 +81,7 @@ async def test_block_reload_on_cfg_change( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) @@ -114,14 +113,14 @@ async def test_block_no_reload_on_bulb_changes( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Test no reload on effect change monkeypatch.setattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "effect", 1) @@ -129,14 +128,14 @@ async def test_block_no_reload_on_bulb_changes( mock_block_device.mock_update() await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.get("switch.test_name_channel_1") is not None + assert hass.states.get("switch.test_name_channel_1") async def test_block_polling_auth_error( @@ -245,14 +244,16 @@ async def test_block_polling_connection_error( ) await init_integration(hass, 1) - assert get_entity_state(hass, "switch.test_name_channel_1") == STATE_ON + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_ON # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 15)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, "switch.test_name_channel_1") == STATE_UNAVAILABLE + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_UNAVAILABLE @pytest.mark.parametrize("exc", [DeviceConnectionError, MacAddressMismatchError]) @@ -270,12 +271,14 @@ async def test_block_rest_update_connection_error( await init_integration(hass, 1) await mock_rest_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON monkeypatch.setattr(mock_block_device, "update_shelly", AsyncMock(side_effect=exc)) await mock_rest_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_block_sleeping_device_no_periodic_updates( @@ -297,14 +300,16 @@ async def test_block_sleeping_device_no_periodic_updates( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == "22.1" + assert (state := hass.states.get(entity_id)) + assert 
state.state == "22.1" # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 3600)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_block_device_push_updates_failure( @@ -386,6 +391,8 @@ async def test_rpc_reload_on_cfg_change( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC reload on config change.""" + monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) # Generate config change from switch to light @@ -414,7 +421,7 @@ async def test_rpc_reload_on_cfg_change( ) await hass.async_block_till_done() - assert hass.states.get("switch.test_switch_0") is not None + assert hass.states.get("switch.test_switch_0") # Wait for debouncer freezer.tick(timedelta(seconds=ENTRY_RELOAD_COOLDOWN)) @@ -560,7 +567,7 @@ async def test_rpc_update_entry_sleep_period( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert entry.data["sleep_period"] == 600 + assert entry.data[CONF_SLEEP_PERIOD] == 600 # Move time to generate sleep period update monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 3600) @@ -568,7 +575,7 @@ async def test_rpc_update_entry_sleep_period( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert entry.data["sleep_period"] == 3600 + assert entry.data[CONF_SLEEP_PERIOD] == 3600 async def test_rpc_sleeping_device_no_periodic_updates( @@ -594,14 +601,16 @@ async def test_rpc_sleeping_device_no_periodic_updates( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" # Move time to generate polling freezer.tick(timedelta(seconds=UPDATE_PERIOD_MULTIPLIER * 1000)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) is STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_firmware_unsupported( @@ -710,9 +719,12 @@ async def test_rpc_reconnect_error( exc: Exception, ) -> None: """Test RPC reconnect error.""" + monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - assert get_entity_state(hass, "switch.test_switch_0") == STATE_ON + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock(side_effect=exc)) @@ -722,16 +734,20 @@ async def test_rpc_reconnect_error( async_fire_time_changed(hass) await hass.async_block_till_done() - assert get_entity_state(hass, "switch.test_switch_0") == STATE_UNAVAILABLE + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_UNAVAILABLE async def test_rpc_error_running_connected_events( hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture, ) -> None: """Test RPC error while running connected events.""" + 
monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) with patch( "homeassistant.components.shelly.coordinator.async_ensure_ble_enabled", side_effect=DeviceConnectionError, @@ -741,14 +757,17 @@ async def test_rpc_error_running_connected_events( ) assert "Error running connected events for device" in caplog.text - assert get_entity_state(hass, "switch.test_switch_0") == STATE_UNAVAILABLE + + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_UNAVAILABLE # Move time to generate reconnect without error freezer.tick(timedelta(seconds=RPC_RECONNECT_INTERVAL)) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, "switch.test_switch_0") == STATE_ON + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON async def test_rpc_polling_connection_error( @@ -769,11 +788,13 @@ async def test_rpc_polling_connection_error( ), ) - assert get_entity_state(hass, entity_id) == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" await mock_polling_rpc_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_polling_disconnected( @@ -788,11 +809,13 @@ async def test_rpc_polling_disconnected( monkeypatch.setattr(mock_rpc_device, "connected", False) - assert get_entity_state(hass, entity_id) == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" await mock_polling_rpc_update(hass, freezer) - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_update_entry_fw_ver( @@ -830,12 +853,17 @@ async def test_rpc_update_entry_fw_ver( assert device.sw_version == "99.0.0" +@pytest.mark.parametrize(("supports_scripts"), [True, False]) async def test_rpc_runs_connected_events_when_initialized( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, + supports_scripts: bool, ) -> None: """Test RPC runs connected events when initialized.""" + monkeypatch.setattr( + mock_rpc_device, "supports_scripts", AsyncMock(return_value=supports_scripts) + ) monkeypatch.setattr(mock_rpc_device, "initialized", False) await init_integration(hass, 2) @@ -846,8 +874,9 @@ async def test_rpc_runs_connected_events_when_initialized( mock_rpc_device.mock_initialized() await hass.async_block_till_done() - # BLE script list is called during connected events - assert call.script_list() in mock_rpc_device.mock_calls + assert call.supports_scripts() in mock_rpc_device.mock_calls + # BLE script list is called during connected events if device supports scripts + assert bool(call.script_list() in mock_rpc_device.mock_calls) == supports_scripts async def test_rpc_sleeping_device_unload_ignore_ble_scanner( @@ -896,7 +925,8 @@ async def test_block_sleeping_device_connection_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online event with connection error monkeypatch.setattr( @@ -910,7 +940,8 @@ async def test_block_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) 
assert "Error connecting to Shelly device" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Move time to generate sleep period update freezer.tick(timedelta(seconds=sleep_period * UPDATE_PERIOD_MULTIPLIER)) @@ -918,7 +949,8 @@ async def test_block_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Sleeping device did not update" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_connection_error( @@ -947,7 +979,8 @@ async def test_rpc_sleeping_device_connection_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Make device online event with connection error monkeypatch.setattr( @@ -961,7 +994,8 @@ async def test_rpc_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Error connecting to Shelly device" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # Move time to generate sleep period update freezer.tick(timedelta(seconds=sleep_period * UPDATE_PERIOD_MULTIPLIER)) @@ -969,7 +1003,8 @@ async def test_rpc_sleeping_device_connection_error( await hass.async_block_till_done(wait_background_tasks=True) assert "Sleeping device did not update" in caplog.text - assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE async def test_rpc_sleeping_device_late_setup( @@ -994,7 +1029,8 @@ async def test_rpc_sleeping_device_late_setup( monkeypatch.setattr(mock_rpc_device, "connected", True) mock_rpc_device.mock_initialized() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.test_name_temperature") is not None + + assert hass.states.get("sensor.test_name_temperature") async def test_rpc_already_connected( diff --git a/tests/components/shelly/test_cover.py b/tests/components/shelly/test_cover.py index 40a364fd435..df3ab4f288d 100644 --- a/tests/components/shelly/test_cover.py +++ b/tests/components/shelly/test_cover.py @@ -47,7 +47,7 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 50}, blocking=True, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_POSITION] == 50 await hass.services.async_call( @@ -56,7 +56,8 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.OPENING + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING await hass.services.async_call( COVER_DOMAIN, @@ -64,7 +65,8 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == CoverState.CLOSING + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSING await hass.services.async_call( COVER_DOMAIN, @@ -72,10 +74,10 @@ async def test_block_device_services( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == 
CoverState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSED - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-roller_0" @@ -86,11 +88,15 @@ async def test_block_device_update( monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 0) await init_integration(hass, 1) - assert hass.states.get("cover.test_name").state == CoverState.CLOSED + state = hass.states.get("cover.test_name") + assert state + assert state.state == CoverState.CLOSED monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "rollerPos", 100) mock_block_device.mock_update() - assert hass.states.get("cover.test_name").state == CoverState.OPEN + state = hass.states.get("cover.test_name") + assert state + assert state.state == CoverState.OPEN async def test_block_device_no_roller_blocks( @@ -99,6 +105,7 @@ async def test_block_device_no_roller_blocks( """Test block device without roller blocks.""" monkeypatch.setattr(mock_block_device.blocks[ROLLER_BLOCK_ID], "type", None) await init_integration(hass, 1) + assert hass.states.get("cover.test_name") is None @@ -118,7 +125,7 @@ async def test_rpc_device_services( {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 50}, blocking=True, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_POSITION] == 50 mutate_rpc_device_status( @@ -131,7 +138,9 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.OPENING + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.OPENING mutate_rpc_device_status( monkeypatch, mock_rpc_device, "cover:0", "state", "closing" @@ -143,7 +152,9 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.CLOSING + + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSING mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await hass.services.async_call( @@ -153,10 +164,10 @@ async def test_rpc_device_services( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == CoverState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == CoverState.CLOSED - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0" @@ -166,6 +177,7 @@ async def test_rpc_device_no_cover_keys( """Test RPC device without cover keys.""" monkeypatch.delitem(mock_rpc_device.status, "cover:0") await init_integration(hass, 2) + assert hass.states.get("cover.test_cover_0") is None @@ -175,11 +187,16 @@ async def test_rpc_device_update( """Test RPC device update.""" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "closed") await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == CoverState.CLOSED + + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == CoverState.CLOSED mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "state", "open") mock_rpc_device.mock_update() - assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == 
CoverState.OPEN async def test_rpc_device_no_position_control( @@ -190,7 +207,10 @@ async def test_rpc_device_no_position_control( monkeypatch, mock_rpc_device, "cover:0", "pos_control", False ) await init_integration(hass, 2) - assert hass.states.get("cover.test_cover_0").state == CoverState.OPEN + + state = hass.states.get("cover.test_cover_0") + assert state + assert state.state == CoverState.OPEN async def test_rpc_cover_tilt( @@ -212,11 +232,10 @@ async def test_rpc_cover_tilt( await init_integration(hass, 3) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cover:0" await hass.services.async_call( @@ -228,7 +247,7 @@ async def test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 50) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( @@ -240,7 +259,7 @@ async def test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 100) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 await hass.services.async_call( @@ -258,5 +277,5 @@ async def test_rpc_cover_tilt( mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "slat_pos", 10) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 10 diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index fb68393304b..ca9edb19fa7 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -25,7 +25,7 @@ from homeassistant.setup import async_setup_component from . 
import init_integration -from tests.common import MockConfigEntry, async_get_device_automations +from tests.common import async_get_device_automations @pytest.mark.parametrize( @@ -162,14 +162,16 @@ async def test_get_triggers_for_invalid_device_id( ) -> None: """Test error raised for invalid shelly device_id.""" await init_integration(hass, 1) - config_entry = MockConfigEntry(domain=DOMAIN, data={}) - config_entry.add_to_hass(hass) + config_entry = await init_integration(hass, 1, data={}, skip_setup=True) invalid_device = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - with pytest.raises(InvalidDeviceAutomationConfig): + with pytest.raises( + InvalidDeviceAutomationConfig, + match="not found while configuring device automation triggers", + ): await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, invalid_device.id ) @@ -385,7 +387,10 @@ async def test_validate_trigger_invalid_triggers( }, ) - assert "Invalid (type,subtype): ('single', 'button3')" in caplog.text + assert ( + "Invalid device automation trigger (type, subtype): ('single', 'button3')" + in caplog.text + ) async def test_rpc_no_runtime_data( diff --git a/tests/components/shelly/test_diagnostics.py b/tests/components/shelly/test_diagnostics.py index 85bf1cc4b37..84ebd50c425 100644 --- a/tests/components/shelly/test_diagnostics.py +++ b/tests/components/shelly/test_diagnostics.py @@ -109,8 +109,14 @@ async def test_rpc_config_entry_diagnostics( "bluetooth": { "scanner": { "connectable": False, - "current_mode": None, - "requested_mode": None, + "current_mode": { + "__type": "", + "repr": "", + }, + "requested_mode": { + "__type": "", + "repr": "", + }, "discovered_device_timestamps": {"AA:BB:CC:DD:EE:FF": ANY}, "discovered_devices_and_advertisement_data": [ { @@ -134,17 +140,17 @@ async def test_rpc_config_entry_diagnostics( -62, [], ], - "details": {"source": "12:34:56:78:9A:BC"}, + "details": {"source": "12:34:56:78:9A:BE"}, "name": None, "rssi": -62, } ], "last_detection": ANY, "monotonic_time": ANY, - "name": "Mock Title (12:34:56:78:9A:BC)", + "name": "Mock Title (12:34:56:78:9A:BE)", "scanning": True, "start_time": ANY, - "source": "12:34:56:78:9A:BC", + "source": "12:34:56:78:9A:BE", "time_since_last_device_detection": {"AA:BB:CC:DD:EE:FF": ANY}, "type": "ShellyBLEScanner", } diff --git a/tests/components/shelly/test_event.py b/tests/components/shelly/test_event.py index e184c154697..a5367408955 100644 --- a/tests/components/shelly/test_event.py +++ b/tests/components/shelly/test_event.py @@ -33,8 +33,7 @@ async def test_rpc_button( await init_integration(hass, 2) entity_id = "event.test_name_input_0" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN assert state.attributes.get(ATTR_EVENT_TYPES) == unordered( ["btn_down", "btn_up", "double_push", "long_push", "single_push", "triple_push"] @@ -42,8 +41,7 @@ async def test_rpc_button( assert state.attributes.get(ATTR_EVENT_TYPE) is None assert state.attributes.get(ATTR_DEVICE_CLASS) == EventDeviceClass.BUTTON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:0" inject_rpc_device_event( @@ -62,7 +60,7 @@ async def test_rpc_button( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert 
state.attributes.get(ATTR_EVENT_TYPE) == "single_push" @@ -78,11 +76,9 @@ async def test_rpc_script_1_event( await init_integration(hass, 2) entity_id = "event.test_name_test_script_js" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") inject_rpc_device_event( monkeypatch, @@ -101,7 +97,7 @@ async def test_rpc_script_1_event( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) == "script_start" inject_rpc_device_event( @@ -121,7 +117,7 @@ async def test_rpc_script_1_event( ) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) != "unknown_event" @@ -135,11 +131,9 @@ async def test_rpc_script_2_event( await init_integration(hass, 2) entity_id = "event.test_name_test_script_2_js" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -152,11 +146,9 @@ async def test_rpc_script_ble_event( await init_integration(hass, 2) entity_id = f"event.test_name_{BLE_SCRIPT_NAME}" - state = hass.states.get(entity_id) - assert state == snapshot(name=f"{entity_id}-state") + assert hass.states.get(entity_id) == snapshot(name=f"{entity_id}-state") - entry = entity_registry.async_get(entity_id) - assert entry == snapshot(name=f"{entity_id}-entry") + assert entity_registry.async_get(entity_id) == snapshot(name=f"{entity_id}-entry") async def test_rpc_event_removal( @@ -186,15 +178,13 @@ async def test_block_event( await init_integration(hass, 1) entity_id = "event.test_name_channel_1" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN assert state.attributes.get(ATTR_EVENT_TYPES) == unordered(["single", "long"]) assert state.attributes.get(ATTR_EVENT_TYPE) is None assert state.attributes.get(ATTR_DEVICE_CLASS) == EventDeviceClass.BUTTON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-1" monkeypatch.setattr( @@ -206,7 +196,7 @@ async def test_block_event( mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPE) == "long" @@ -217,8 +207,7 @@ async def test_block_event_shix3_1( await init_integration(hass, 1, model=MODEL_I3) entity_id = "event.test_name_channel_1" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.attributes.get(ATTR_EVENT_TYPES) == unordered( ["double", "long", "long_single", "single", "single_long", "triple"] ) diff --git a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py index 
b05bce76728..129aa812580 100644 --- a/tests/components/shelly/test_init.py +++ b/tests/components/shelly/test_init.py @@ -10,7 +10,9 @@ from aioshelly.exceptions import ( DeviceConnectionError, InvalidAuthError, MacAddressMismatchError, + RpcCallError, ) +from aioshelly.rpc_device.utils import bluetooth_mac_from_primary_mac import pytest from homeassistant.components.shelly.const import ( @@ -24,19 +26,19 @@ from homeassistant.components.shelly.const import ( BLEScannerMode, ) from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import CONF_HOST, CONF_PORT, STATE_ON, STATE_UNAVAILABLE +from homeassistant.const import ( + CONF_HOST, + CONF_MODEL, + CONF_PORT, + STATE_ON, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.device_registry import ( - CONNECTION_NETWORK_MAC, - DeviceRegistry, - format_mac, -) +from homeassistant.helpers.device_registry import DeviceRegistry, format_mac from homeassistant.setup import async_setup_component -from . import MOCK_MAC, init_integration, mutate_rpc_device_status - -from tests.common import MockConfigEntry +from . import init_integration, mutate_rpc_device_status async def test_custom_coap_port( @@ -121,12 +123,6 @@ async def test_shared_device_mac( caplog: pytest.LogCaptureFixture, ) -> None: """Test first time shared device with another domain.""" - config_entry = MockConfigEntry(domain="test", data={}, unique_id="some_id") - config_entry.add_to_hass(hass) - device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - connections={(CONNECTION_NETWORK_MAC, format_mac(MOCK_MAC))}, - ) await init_integration(hass, gen, sleep_period=1000) assert "will resume when device is online" in caplog.text @@ -135,12 +131,7 @@ async def test_setup_entry_not_shelly( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test not Shelly entry.""" - entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN) - entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(entry.entry_id) is False - await hass.async_block_till_done() - + await init_integration(hass, 1, data={}) assert "probably comes from a custom integration" in caplog.text @@ -247,12 +238,7 @@ async def test_sleeping_block_device_online( caplog: pytest.LogCaptureFixture, ) -> None: """Test sleeping block device online.""" - config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id="shelly") - config_entry.add_to_hass(hass) - device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, - connections={(CONNECTION_NETWORK_MAC, format_mac(MOCK_MAC))}, - ) + await init_integration(hass, 1, data={}) monkeypatch.setitem( mock_block_device.settings, @@ -266,7 +252,7 @@ async def test_sleeping_block_device_online( await hass.async_block_till_done(wait_background_tasks=True) assert "online, resuming setup" in caplog.text - assert entry.data["sleep_period"] == device_sleep + assert entry.data[CONF_SLEEP_PERIOD] == device_sleep @pytest.mark.parametrize(("entry_sleep", "device_sleep"), [(None, 0), (1000, 1000)]) @@ -288,7 +274,7 @@ async def test_sleeping_rpc_device_online( await hass.async_block_till_done(wait_background_tasks=True) assert "online, resuming setup" in caplog.text - assert entry.data["sleep_period"] == device_sleep + assert entry.data[CONF_SLEEP_PERIOD] == device_sleep async def test_sleeping_rpc_device_online_new_firmware( @@ -307,7 +293,7 @@ async def 
test_sleeping_rpc_device_online_new_firmware( await hass.async_block_till_done(wait_background_tasks=True) assert "online, resuming setup" in caplog.text - assert entry.data["sleep_period"] == 1500 + assert entry.data[CONF_SLEEP_PERIOD] == 1500 async def test_sleeping_rpc_device_online_during_setup( @@ -321,7 +307,8 @@ async def test_sleeping_rpc_device_online_during_setup( assert "will resume when device is online" in caplog.text assert "is online (source: setup)" in caplog.text - assert hass.states.get("sensor.test_name_temperature") is not None + + assert hass.states.get("sensor.test_name_temperature") async def test_sleeping_rpc_device_offline_during_setup( @@ -350,7 +337,7 @@ async def test_sleeping_rpc_device_offline_during_setup( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.test_name_temperature") is not None + assert hass.states.get("sensor.test_name_temperature") @pytest.mark.parametrize( @@ -366,18 +353,23 @@ async def test_entry_unload( entity_id: str, mock_block_device: Mock, mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test entry unload.""" + monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) entry = await init_integration(hass, gen) assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.NOT_LOADED - assert hass.states.get(entity_id).state is STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE @pytest.mark.parametrize( @@ -395,9 +387,9 @@ async def test_entry_unload_device_not_ready( mock_rpc_device: Mock, ) -> None: """Test entry unload when device is not ready.""" - entry = await init_integration(hass, gen, sleep_period=1000) - + assert (entry := await init_integration(hass, gen, sleep_period=1000)) assert entry.state is ConfigEntryState.LOADED + assert hass.states.get(entity_id) is None await hass.config_entries.async_unload(entry.entry_id) @@ -410,16 +402,21 @@ async def test_entry_unload_not_connected( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: """Test entry unload when not connected.""" + monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) + with patch( "homeassistant.components.shelly.coordinator.async_stop_scanner" ) as mock_stop_scanner: - entry = await init_integration( - hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + assert ( + entry := await init_integration( + hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + ) ) - entity_id = "switch.test_switch_0" - assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON assert not mock_stop_scanner.call_count monkeypatch.setattr(mock_rpc_device, "connected", False) @@ -435,17 +432,22 @@ async def test_entry_unload_not_connected_but_we_think_we_are( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: """Test entry unload when not connected but we think we are still connected.""" + 
monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) + with patch( "homeassistant.components.shelly.coordinator.async_stop_scanner", side_effect=DeviceConnectionError, ) as mock_stop_scanner: - entry = await init_integration( - hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + assert ( + entry := await init_integration( + hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} + ) ) - entity_id = "switch.test_switch_0" - assert entry.state is ConfigEntryState.LOADED - assert hass.states.get(entity_id).state is STATE_ON + + assert (state := hass.states.get("switch.test_switch_0")) + assert state.state == STATE_ON assert not mock_stop_scanner.call_count monkeypatch.setattr(mock_rpc_device, "connected", False) @@ -478,7 +480,9 @@ async def test_entry_missing_gen(hass: HomeAssistant, mock_block_device: Mock) - entry = await init_integration(hass, None) assert entry.state is ConfigEntryState.LOADED - assert hass.states.get("switch.test_name_channel_1").state is STATE_ON + + assert (state := hass.states.get("switch.test_name_channel_1")) + assert state.state == STATE_ON async def test_entry_missing_port(hass: HomeAssistant) -> None: @@ -486,11 +490,10 @@ async def test_entry_missing_port(hass: HomeAssistant) -> None: data = { CONF_HOST: "192.168.1.37", CONF_SLEEP_PERIOD: 0, - "model": MODEL_PLUS_2PM, + CONF_MODEL: MODEL_PLUS_2PM, CONF_GEN: 2, } - entry = MockConfigEntry(domain=DOMAIN, data=data, unique_id=MOCK_MAC) - entry.add_to_hass(hass) + entry = await init_integration(hass, 2, data=data, skip_setup=True) with ( patch("homeassistant.components.shelly.RpcDevice.initialize"), patch( @@ -510,12 +513,11 @@ async def test_rpc_entry_custom_port(hass: HomeAssistant) -> None: data = { CONF_HOST: "192.168.1.37", CONF_SLEEP_PERIOD: 0, - "model": MODEL_PLUS_2PM, + CONF_MODEL: MODEL_PLUS_2PM, CONF_GEN: 2, CONF_PORT: 8001, } - entry = MockConfigEntry(domain=DOMAIN, data=data, unique_id=MOCK_MAC) - entry.add_to_hass(hass) + entry = await init_integration(hass, 2, data=data, skip_setup=True) with ( patch("homeassistant.components.shelly.RpcDevice.initialize"), patch( @@ -560,4 +562,20 @@ async def test_bluetooth_cleanup_on_remove_entry( await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() - remove_mock.assert_called_once_with(hass, entry.unique_id.upper()) + remove_mock.assert_called_once_with( + hass, format_mac(bluetooth_mac_from_primary_mac(entry.unique_id)).upper() + ) + + +async def test_device_script_getcode_error( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test device script get code error.""" + monkeypatch.setattr( + mock_rpc_device, "script_getcode", AsyncMock(side_effect=RpcCallError(0)) + ) + + entry = await init_integration(hass, 2) + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/shelly/test_light.py b/tests/components/shelly/test_light.py index 482821aa966..0dab06f53a9 100644 --- a/tests/components/shelly/test_light.py +++ b/tests/components/shelly/test_light.py @@ -65,18 +65,17 @@ async def test_block_device_rgbw_bulb( await init_integration(hass, 1, model=MODEL_BULB) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGBW_COLOR] == (45, 55, 65, 70) - assert attributes[ATTR_BRIGHTNESS] == 48 - assert 
attributes[ATTR_SUPPORTED_COLOR_MODES] == [ + assert state.attributes[ATTR_RGBW_COLOR] == (45, 55, 65, 70) + assert state.attributes[ATTR_BRIGHTNESS] == 48 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, ColorMode.RGBW, ] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT - assert len(attributes[ATTR_EFFECT_LIST]) == 7 - assert attributes[ATTR_EFFECT] == "Off" + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT + assert len(state.attributes[ATTR_EFFECT_LIST]) == 7 + assert state.attributes[ATTR_EFFECT] == "Off" # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -89,7 +88,7 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, RGBW = [70, 80, 90, 20], brightness = 33, effect = Flash @@ -108,13 +107,12 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13, red=70, green=80, blue=90, white=30, effect=3 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGBW - assert attributes[ATTR_RGBW_COLOR] == (70, 80, 90, 30) - assert attributes[ATTR_BRIGHTNESS] == 33 - assert attributes[ATTR_EFFECT] == "Flash" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGBW + assert state.attributes[ATTR_RGBW_COLOR] == (70, 80, 90, 30) + assert state.attributes[ATTR_BRIGHTNESS] == 33 + assert state.attributes[ATTR_EFFECT] == "Flash" # Turn on, COLOR_TEMP_KELVIN = 3500 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -127,14 +125,12 @@ async def test_block_device_rgbw_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", temp=3500, mode="white" ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP - assert attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_0" @@ -154,21 +150,20 @@ async def test_block_device_rgb_bulb( await init_integration(hass, 1, model=MODEL_BULB_RGBW) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGB_COLOR] == (45, 55, 65) - assert attributes[ATTR_BRIGHTNESS] == 48 - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ + assert state.attributes[ATTR_RGB_COLOR] == (45, 55, 65) + assert state.attributes[ATTR_BRIGHTNESS] == 48 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, ColorMode.RGB, ] assert ( - attributes[ATTR_SUPPORTED_FEATURES] + state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.EFFECT | LightEntityFeature.TRANSITION ) - assert len(attributes[ATTR_EFFECT_LIST]) == 4 - assert attributes[ATTR_EFFECT] == "Off" + assert len(state.attributes[ATTR_EFFECT_LIST]) == 4 + 
assert state.attributes[ATTR_EFFECT] == "Off" # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -181,7 +176,7 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, RGB = [70, 80, 90], brightness = 33, effect = Flash @@ -200,13 +195,12 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13, red=70, green=80, blue=90, effect=3 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGB - assert attributes[ATTR_RGB_COLOR] == (70, 80, 90) - assert attributes[ATTR_BRIGHTNESS] == 33 - assert attributes[ATTR_EFFECT] == "Flash" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGB + assert state.attributes[ATTR_RGB_COLOR] == (70, 80, 90) + assert state.attributes[ATTR_BRIGHTNESS] == 33 + assert state.attributes[ATTR_EFFECT] == "Flash" # Turn on, COLOR_TEMP_KELVIN = 3500 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -219,11 +213,10 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", temp=3500, mode="white" ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP - assert attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 3500 # Turn on with unsupported effect mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -236,14 +229,13 @@ async def test_block_device_rgb_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", mode="color" ) - state = hass.states.get(entity_id) - attributes = state.attributes + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_EFFECT] == "Off" + assert state.attributes[ATTR_EFFECT] == "Off" assert "Effect 'Breath' not supported" in caplog.text - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -272,12 +264,11 @@ async def test_block_device_white_bulb( await init_integration(hass, 1, model=MODEL_VINTAGE_V2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_BRIGHTNESS] == 128 - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_BRIGHTNESS] == 128 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn off mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -290,7 +281,7 @@ async def test_block_device_white_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert 
(state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, brightness = 33 @@ -304,13 +295,11 @@ async def test_block_device_white_bulb( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", gain=13, brightness=13 ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_BRIGHTNESS] == 33 + assert state.attributes[ATTR_BRIGHTNESS] == 33 - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -343,9 +332,8 @@ async def test_block_device_support_transition( await init_integration(hass, 1, model=model) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes - assert attributes[ATTR_SUPPORTED_FEATURES] & LightEntityFeature.TRANSITION + assert (state := hass.states.get(entity_id)) + assert state.attributes[ATTR_SUPPORTED_FEATURES] & LightEntityFeature.TRANSITION # Turn on, TRANSITION = 4 mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.reset_mock() @@ -358,7 +346,7 @@ async def test_block_device_support_transition( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="on", transition=4000 ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON # Turn off, TRANSITION = 6, limit to 5000ms @@ -372,11 +360,10 @@ async def test_block_device_support_transition( mock_block_device.blocks[LIGHT_BLOCK_ID].set_state.assert_called_once_with( turn="off", transition=5000 ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light_1" @@ -403,14 +390,14 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID], "description", "relay_1" ) await init_integration(hass, 1) + assert hass.states.get("switch.test_name_channel_1") is None # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] - assert attributes[ATTR_SUPPORTED_FEATURES] == 0 + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 # Turn off mock_block_device.blocks[RELAY_BLOCK_ID].set_state.reset_mock() @@ -423,7 +410,7 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID].set_state.assert_called_once_with( turn="off" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on @@ -437,11 +424,10 @@ async def test_block_device_relay_app_type_light( mock_block_device.blocks[RELAY_BLOCK_ID].set_state.assert_called_once_with( turn="on" ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_1" @@ -451,6 +437,7 @@ async def test_block_device_no_light_blocks( """Test block device without light blocks.""" 
monkeypatch.setattr(mock_block_device.blocks[LIGHT_BLOCK_ID], "type", "roller") await init_integration(hass, 1) + assert hass.states.get("light.test_name_channel_1") is None @@ -473,7 +460,9 @@ async def test_rpc_device_switch_type_lights_mode( {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get(entity_id).state == STATE_ON + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON mutate_rpc_device_status(monkeypatch, mock_rpc_device, "switch:0", "output", False) await hass.services.async_call( @@ -483,10 +472,11 @@ async def test_rpc_device_switch_type_lights_mode( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-switch:0" @@ -510,7 +500,8 @@ async def test_rpc_light( ) mock_rpc_device.call_rpc.assert_called_once_with("Light.Set", {"id": 0, "on": True}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 135 @@ -528,7 +519,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": False} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on, brightness = 33 @@ -547,7 +539,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": True, "brightness": 13} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 33 @@ -565,7 +558,8 @@ async def test_rpc_light( mock_rpc_device.call_rpc.assert_called_once_with( "Light.Set", {"id": 0, "on": True, "transition_duration": 10.1} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON # Turn off, transition = 0.4, should be limited to 0.5 @@ -584,11 +578,10 @@ async def test_rpc_light( "Light.Set", {"id": 0, "on": False, "transition_duration": 0.5} ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-light:0" @@ -606,12 +599,11 @@ async def test_rpc_device_rgb_profile( await init_integration(hass, 2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGB_COLOR] == (45, 55, 65) - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGB] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_RGB_COLOR] == (45, 55, 65) + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGB] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn on, RGB = [70, 80, 90] await hass.services.async_call( @@ -628,14 +620,12 @@ async def test_rpc_device_rgb_profile( "RGB.Set", {"id": 0, "on": True, "rgb": [70, 80, 90]} ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := 
hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGB - assert attributes[ATTR_RGB_COLOR] == (70, 80, 90) + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGB + assert state.attributes[ATTR_RGB_COLOR] == (70, 80, 90) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-rgb:0" @@ -653,12 +643,11 @@ async def test_rpc_device_rgbw_profile( await init_integration(hass, 2) # Test initial - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_RGBW_COLOR] == (21, 22, 23, 120) - assert attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBW] - assert attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION + assert state.attributes[ATTR_RGBW_COLOR] == (21, 22, 23, 120) + assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBW] + assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION # Turn on, RGBW = [72, 82, 92, 128] await hass.services.async_call( @@ -678,14 +667,12 @@ async def test_rpc_device_rgbw_profile( "RGBW.Set", {"id": 0, "on": True, "rgb": [72, 82, 92], "white": 128} ) - state = hass.states.get(entity_id) - attributes = state.attributes + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - assert attributes[ATTR_COLOR_MODE] == ColorMode.RGBW - assert attributes[ATTR_RGBW_COLOR] == (72, 82, 92, 128) + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.RGBW + assert state.attributes[ATTR_RGBW_COLOR] == (72, 82, 92, 128) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-rgbw:0" @@ -730,9 +717,11 @@ async def test_rpc_rgbw_device_light_mode_remove_others( # verify we have 4 lights for i in range(SHELLY_PLUS_RGBW_CHANNELS): entity_id = f"light.test_light_{i}" - assert hass.states.get(entity_id).state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-light:{i}" # verify RGB & RGBW entities removed @@ -793,9 +782,11 @@ async def test_rpc_rgbw_device_rgb_w_modes_remove_others( # verify we have RGB/w light entity_id = f"light.test_{active_mode}_0" - assert hass.states.get(entity_id).state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{active_mode}:0" # verify light & RGB/W entities removed @@ -823,8 +814,7 @@ async def test_rpc_cct_light( await init_integration(hass, 2) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-cct:0" # Turn off @@ -836,7 +826,8 @@ async def test_rpc_cct_light( ) mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": False}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF # Turn on @@ -851,7 +842,8 @@ async def test_rpc_cct_light( mock_rpc_device.mock_update() 
mock_rpc_device.call_rpc.assert_called_once_with("CCT.Set", {"id": 0, "on": True}) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert state.attributes[ATTR_BRIGHTNESS] == 196 # 77% of 255 @@ -874,7 +866,8 @@ async def test_rpc_cct_light( mock_rpc_device.call_rpc.assert_called_once_with( "CCT.Set", {"id": 0, "on": True, "brightness": 88} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 224 # 88% of 255 @@ -894,7 +887,8 @@ async def test_rpc_cct_light( mock_rpc_device.call_rpc.assert_called_once_with( "CCT.Set", {"id": 0, "on": True, "ct": 4444} ) - state = hass.states.get(entity_id) + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 4444 diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py index 6bddd1eeb23..41002917d86 100644 --- a/tests/components/shelly/test_number.py +++ b/tests/components/shelly/test_number.py @@ -3,7 +3,7 @@ from copy import deepcopy from unittest.mock import AsyncMock, Mock -from aioshelly.const import MODEL_BLU_GATEWAY_G3 +from aioshelly.const import BLU_TRV_TIMEOUT, MODEL_BLU_GATEWAY_G3 from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest from syrupy import SnapshotAssertion @@ -18,7 +18,7 @@ from homeassistant.components.number import ( SERVICE_SET_VALUE, NumberMode, ) -from homeassistant.components.shelly.const import BLU_TRV_TIMEOUT, DOMAIN +from homeassistant.components.shelly.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State @@ -54,15 +54,16 @@ async def test_block_number_update( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valvePos", 30) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "30" + assert (state := hass.states.get(entity_id)) + assert state.state == "30" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-device_0-valvePos" @@ -103,14 +104,16 @@ async def test_block_restored_number( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "40" + assert (state := hass.states.get(entity_id)) + assert state.state == "40" # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" async def test_block_restored_number_no_last_state( @@ -141,14 +144,16 @@ async def test_block_restored_number_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + 
assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "50" + assert (state := hass.states.get(entity_id)) + assert state.state == "50" async def test_block_number_set_value( @@ -200,7 +205,10 @@ async def test_block_set_value_connection_error( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for number.test_name_valve_position of Test name", + ): await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, @@ -302,8 +310,7 @@ async def test_rpc_device_virtual_number( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.3" assert state.attributes.get(ATTR_MIN) == 0 assert state.attributes.get(ATTR_MAX) == 100 @@ -311,13 +318,13 @@ async def test_rpc_device_virtual_number( assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit assert state.attributes.get(ATTR_MODE) is mode - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-number:203-number" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 78.9) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "78.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "78.9" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) await hass.services.async_call( @@ -327,7 +334,8 @@ async def test_rpc_device_virtual_number( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "56.7" + assert (state := hass.states.get(entity_id)) + assert state.state == "56.7" async def test_rpc_remove_virtual_number_when_mode_label( @@ -365,8 +373,7 @@ async def test_rpc_remove_virtual_number_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_number_when_orphaned( @@ -390,8 +397,7 @@ async def test_rpc_remove_virtual_number_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_blu_trv_number_entity( @@ -427,7 +433,8 @@ async def test_blu_trv_ext_temp_set_value( # After HA start the state should be unknown because there was no previous external # temperature report - assert hass.states.get(entity_id).state is STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN await hass.services.async_call( NUMBER_DOMAIN, @@ -449,7 +456,8 @@ async def test_blu_trv_ext_temp_set_value( BLU_TRV_TIMEOUT, ) - assert hass.states.get(entity_id).state == "22.2" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.2" async def test_blu_trv_valve_pos_set_value( @@ -465,7 +473,8 @@ async def test_blu_trv_valve_pos_set_value( entity_id = f"{NUMBER_DOMAIN}.trv_name_valve_position" - assert 
hass.states.get(entity_id).state == "0" + assert (state := hass.states.get(entity_id)) + assert state.state == "0" monkeypatch.setitem(mock_blu_trv.status["blutrv:200"], "pos", 20) await hass.services.async_call( @@ -490,4 +499,5 @@ async def test_blu_trv_valve_pos_set_value( # device only accepts int for 'pos' value assert isinstance(mock_blu_trv.call_rpc.call_args[0][1]["params"]["pos"], int) - assert hass.states.get(entity_id).state == "20" + assert (state := hass.states.get(entity_id)) + assert state.state == "20" diff --git a/tests/components/shelly/test_select.py b/tests/components/shelly/test_select.py index 0a6eb2a5843..39e426baa58 100644 --- a/tests/components/shelly/test_select.py +++ b/tests/components/shelly/test_select.py @@ -56,8 +56,7 @@ async def test_rpc_device_virtual_enum( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == expected_state assert state.attributes.get(ATTR_OPTIONS) == [ "Title 1", @@ -65,13 +64,14 @@ async def test_rpc_device_virtual_enum( "option 3", ] - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-enum:203-enum" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 2") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "option 2" + + assert (state := hass.states.get(entity_id)) + assert state.state == "option 2" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 1") await hass.services.async_call( @@ -83,7 +83,9 @@ async def test_rpc_device_virtual_enum( # 'Title 1' corresponds to 'option 1' assert mock_rpc_device.call_rpc.call_args[0][1] == {"id": 203, "value": "option 1"} mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "Title 1" + + assert (state := hass.states.get(entity_id)) + assert state.state == "Title 1" async def test_rpc_remove_virtual_enum_when_mode_label( @@ -122,8 +124,7 @@ async def test_rpc_remove_virtual_enum_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_enum_when_orphaned( @@ -147,5 +148,4 @@ async def test_rpc_remove_virtual_enum_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index d0fec65c7de..7edd38a4b31 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -40,7 +40,6 @@ from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component from . 
import ( - get_entity_state, init_integration, mock_polling_rpc_update, mock_rest_update, @@ -66,15 +65,16 @@ async def test_block_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_channel_1_power" await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "53.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "53.4" monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "power", 60.1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "60.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "60.1" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-power" @@ -85,14 +85,13 @@ async def test_energy_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_channel_1_energy" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) # 1234567.89 Wmin / 60 / 1000 = 20.5761315 kWh assert state.state == "20.5761315" # suggested unit is KWh assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-relay_0-energy" @@ -111,13 +110,12 @@ async def test_power_factory_unit_migration( entity_id = f"{SENSOR_DOMAIN}.test_name_power_factor" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) # Value of 0.98 is converted to 98.0% assert state.state == "98.0" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-emeter_0-powerFactor" @@ -128,12 +126,11 @@ async def test_power_factory_without_unit_migration( entity_id = f"{SENSOR_DOMAIN}.test_name_power_factor" await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "0.98" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-emeter_0-powerFactor" @@ -147,12 +144,14 @@ async def test_block_rest_sensor( entity_id = register_entity(hass, SENSOR_DOMAIN, "test_name_rssi", "rssi") await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "-64" + assert (state := hass.states.get(entity_id)) + assert state.state == "-64" monkeypatch.setitem(mock_block_device.status["wifi_sta"], "rssi", -71) await mock_rest_update(hass, freezer) - assert hass.states.get(entity_id).state == "-71" + assert (state := hass.states.get(entity_id)) + assert state.state == "-71" async def test_block_sleeping_sensor( @@ -175,15 +174,16 @@ async def test_block_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" monkeypatch.setattr(mock_block_device.blocks[SENSOR_BLOCK_ID], "temp", 23.4) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == "23.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "23.4" - entry = 
entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-temp" @@ -211,8 +211,7 @@ async def test_block_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "20.4" assert state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.TEMPERATURE @@ -222,7 +221,8 @@ async def test_block_restored_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" async def test_block_restored_sleeping_sensor_no_last_state( @@ -246,14 +246,16 @@ async def test_block_restored_sleeping_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" async def test_block_sensor_error( @@ -266,15 +268,16 @@ async def test_block_sensor_error( entity_id = f"{SENSOR_DOMAIN}.test_name_battery" await init_integration(hass, 1) - assert hass.states.get(entity_id).state == "98" + assert (state := hass.states.get(entity_id)) + assert state.state == "98" monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "battery", -1) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-device_0-battery" @@ -321,7 +324,8 @@ async def test_block_not_matched_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "20.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "20.4" # Make device online monkeypatch.setattr( @@ -331,7 +335,8 @@ async def test_block_not_matched_restored_sleeping_sensor( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "20.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "20.4" async def test_block_sensor_without_value( @@ -345,17 +350,94 @@ async def test_block_sensor_without_value( assert hass.states.get(entity_id) is None -async def test_block_sensor_unknown_value( - hass: HomeAssistant, mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch +@pytest.mark.parametrize( + ("entity", "initial_state", "block_id", "attribute", "value", "final_value"), + [ + ("test_name_battery", "98", DEVICE_BLOCK_ID, "battery", None, STATE_UNKNOWN), + ( + "test_name_operation", + "normal", + SENSOR_BLOCK_ID, + "sensorOp", + None, + STATE_UNKNOWN, + ), + ( + "test_name_operation", + 
"normal", + SENSOR_BLOCK_ID, + "sensorOp", + "normal", + "normal", + ), + ( + "test_name_self_test", + "pending", + SENSOR_BLOCK_ID, + "selfTest", + "completed", + "completed", + ), + ( + "test_name_gas_detected", + "mild", + SENSOR_BLOCK_ID, + "gas", + "heavy", + "heavy", + ), + ], +) +async def test_block_sensor_values( + hass: HomeAssistant, + mock_block_device: Mock, + monkeypatch: pytest.MonkeyPatch, + entity: str, + initial_state: str, + block_id: int, + attribute: str, + value: str | None, + final_value: str, ) -> None: """Test block sensor unknown value.""" - entity_id = f"{SENSOR_DOMAIN}.test_name_battery" + entity_id = f"{SENSOR_DOMAIN}.{entity}" await init_integration(hass, 1) - monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "battery", None) + assert hass.states.get(entity_id).state == initial_state + + monkeypatch.setattr(mock_block_device.blocks[block_id], attribute, value) mock_block_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == final_value + + +@pytest.mark.parametrize( + ("lamp_life_seconds", "percentage"), + [ + (0 * 3600, "100.0"), # 0 hours, 100% remaining + (16 * 3600, "99.8222222222222"), + (4500 * 3600, "50.0"), # 4500 hours, 50% remaining + (9000 * 3600, "0.0"), # 9000 hours, 0% remaining + (10000 * 3600, "0.0"), # > 9000 hours, 0% remaining + ], +) +async def test_block_shelly_air_lamp_life( + hass: HomeAssistant, + mock_block_device: Mock, + monkeypatch: pytest.MonkeyPatch, + lamp_life_seconds: int, + percentage: float, +) -> None: + """Test block Shelly Air lamp life percentage sensor.""" + entity_id = f"{SENSOR_DOMAIN}.{'test_name_channel_1_lamp_life'}" + monkeypatch.setattr( + mock_block_device.blocks[RELAY_BLOCK_ID], "totalWorkTime", lamp_life_seconds + ) + await init_integration(hass, 1) + + assert (state := hass.states.get(entity_id)) + assert state.state == percentage async def test_rpc_sensor( @@ -365,17 +447,20 @@ async def test_rpc_sensor( entity_id = f"{SENSOR_DOMAIN}.test_cover_0_power" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "85.3" + assert (state := hass.states.get(entity_id)) + assert state.state == "85.3" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "apower", "88.2") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "88.2" + assert (state := hass.states.get(entity_id)) + assert state.state == "88.2" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "cover:0", "apower", None) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -389,7 +474,8 @@ async def test_rpc_rssi_sensor_removal( entry = await init_integration(hass, 2) # WiFi1 enabled, do not remove sensor - assert get_entity_state(hass, entity_id) == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" # WiFi1 & WiFi2 disabled - remove sensor monkeypatch.setitem(mock_rpc_device.config["wifi"]["sta"], "enable", False) @@ -401,7 +487,9 @@ async def test_rpc_rssi_sensor_removal( monkeypatch.setitem(mock_rpc_device.config["wifi"]["sta1"], "enable", True) await hass.config_entries.async_reload(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == "-63" + + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" async def 
test_rpc_illuminance_sensor( @@ -411,10 +499,10 @@ async def test_rpc_illuminance_sensor( entity_id = f"{SENSOR_DOMAIN}.test_name_illuminance" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "345" + assert (state := hass.states.get(entity_id)) + assert state.state == "345" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-illuminance:0-illuminance" @@ -428,17 +516,18 @@ async def test_rpc_sensor_error( entity_id = f"{SENSOR_DOMAIN}.test_name_voltmeter" await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "4.321" + assert (state := hass.states.get(entity_id)) + assert state.state == "4.321" mutate_rpc_device_status( monkeypatch, mock_rpc_device, "voltmeter:100", "voltage", None ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNAVAILABLE - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-voltmeter:100-voltmeter" @@ -453,15 +542,16 @@ async def test_rpc_polling_sensor( entity_id = register_entity(hass, SENSOR_DOMAIN, "test_name_rssi", "wifi-rssi") await init_integration(hass, 2) - assert hass.states.get(entity_id).state == "-63" + assert (state := hass.states.get(entity_id)) + assert state.state == "-63" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "wifi", "rssi", "-70") await mock_polling_rpc_update(hass, freezer) - assert hass.states.get(entity_id).state == "-70" + assert (state := hass.states.get(entity_id)) + assert state.state == "-70" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-wifi-rssi" @@ -492,12 +582,14 @@ async def test_rpc_sleeping_sensor( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" mutate_rpc_device_status(monkeypatch, mock_rpc_device, "temperature:0", "tC", 23.4) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "23.4" + assert (state := hass.states.get(entity_id)) + assert state.state == "23.4" async def test_rpc_restored_sleeping_sensor( @@ -525,7 +617,8 @@ async def test_rpc_restored_sleeping_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "21.0" + assert (state := hass.states.get(entity_id)) + assert state.state == "21.0" # Make device online monkeypatch.setattr(mock_rpc_device, "initialized", True) @@ -536,7 +629,8 @@ async def test_rpc_restored_sleeping_sensor( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" async def test_rpc_restored_sleeping_sensor_no_last_state( @@ -562,7 +656,8 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_rpc_device, 
"initialized", True) @@ -573,7 +668,8 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - assert hass.states.get(entity_id).state == "22.9" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.9" @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -583,36 +679,32 @@ async def test_rpc_em1_sensors( """Test RPC sensors for EM1 component.""" await init_integration(hass, 2) - state = hass.states.get("sensor.test_name_em0_power") - assert state + assert (state := hass.states.get("sensor.test_name_em0_power")) assert state.state == "85.3" - entry = entity_registry.async_get("sensor.test_name_em0_power") - assert entry + assert (entry := entity_registry.async_get("sensor.test_name_em0_power")) assert entry.unique_id == "123456789ABC-em1:0-power_em1" - state = hass.states.get("sensor.test_name_em1_power") - assert state + assert (state := hass.states.get("sensor.test_name_em1_power")) assert state.state == "123.3" - entry = entity_registry.async_get("sensor.test_name_em1_power") - assert entry + assert (entry := entity_registry.async_get("sensor.test_name_em1_power")) assert entry.unique_id == "123456789ABC-em1:1-power_em1" - state = hass.states.get("sensor.test_name_em0_total_active_energy") - assert state + assert (state := hass.states.get("sensor.test_name_em0_total_active_energy")) assert state.state == "123.4564" - entry = entity_registry.async_get("sensor.test_name_em0_total_active_energy") - assert entry + assert ( + entry := entity_registry.async_get("sensor.test_name_em0_total_active_energy") + ) assert entry.unique_id == "123456789ABC-em1data:0-total_act_energy" - state = hass.states.get("sensor.test_name_em1_total_active_energy") - assert state + assert (state := hass.states.get("sensor.test_name_em1_total_active_energy")) assert state.state == "987.6543" - entry = entity_registry.async_get("sensor.test_name_em1_total_active_energy") - assert entry + assert ( + entry := entity_registry.async_get("sensor.test_name_em1_total_active_energy") + ) assert entry.unique_id == "123456789ABC-em1data:1-total_act_energy" @@ -638,7 +730,7 @@ async def test_rpc_sleeping_update_entity_service( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.9" await hass.services.async_call( @@ -649,11 +741,10 @@ async def test_rpc_sleeping_update_entity_service( ) # Entity should be available after update_entity service call - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.9" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-temperature:0-temperature_0" assert ( @@ -687,7 +778,8 @@ async def test_block_sleeping_update_entity_service( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get(entity_id).state == "22.1" + assert (state := hass.states.get(entity_id)) + assert state.state == "22.1" await hass.services.async_call( HA_DOMAIN, @@ -697,11 +789,10 @@ async def test_block_sleeping_update_entity_service( ) # Entity should be available after update_entity service call - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "22.1" - entry = entity_registry.async_get(entity_id) - 
assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sensor_0-temp" assert ( @@ -734,20 +825,18 @@ async def test_rpc_analog_input_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" - assert hass.states.get(entity_id).state == "89" + assert (state := hass.states.get(entity_id)) + assert state.state == "89" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:1-analoginput" entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "8.9" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:1-analoginput_xpercent" @@ -782,7 +871,8 @@ async def test_rpc_disabled_xpercent( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog" - assert hass.states.get(entity_id).state == "89" + assert (state := hass.states.get(entity_id)) + assert state.state == "89" entity_id = f"{SENSOR_DOMAIN}.test_name_input_1_analog_value" assert hass.states.get(entity_id) is None @@ -812,23 +902,20 @@ async def test_rpc_pulse_counter_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "56174" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "pulse" assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.TOTAL - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-pulse_counter" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "561.74" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_value" @@ -863,7 +950,8 @@ async def test_rpc_disabled_xtotal_counter( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" - assert hass.states.get(entity_id).state == "20635" + assert (state := hass.states.get(entity_id)) + assert state.state == "20635" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" assert hass.states.get(entity_id) is None @@ -893,23 +981,20 @@ async def test_rpc_pulse_counter_frequency_sensors( await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency" - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == "208.0" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfFrequency.HERTZ assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_frequency" entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - state = hass.states.get(entity_id) - assert state + assert (state := 
hass.states.get(entity_id)) assert state.state == "6.11" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-input:2-counter_frequency_value" @@ -932,11 +1017,9 @@ async def test_rpc_disabled_xfreq( entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - state = hass.states.get(entity_id) - assert not state + assert hass.states.get(entity_id) is None - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.parametrize( @@ -968,17 +1051,16 @@ async def test_rpc_device_virtual_text_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "lorem ipsum" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-text:203-text" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "dolor sit amet" + assert (state := hass.states.get(entity_id)) + assert state.state == "dolor sit amet" async def test_rpc_remove_text_virtual_sensor_when_mode_field( @@ -1011,8 +1093,7 @@ async def test_rpc_remove_text_virtual_sensor_when_mode_field( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_text_virtual_sensor_when_orphaned( @@ -1036,8 +1117,7 @@ async def test_rpc_remove_text_virtual_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.parametrize( @@ -1073,18 +1153,17 @@ async def test_rpc_device_virtual_number_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "34.5" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-number:203-number" monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "56.7" + assert (state := hass.states.get(entity_id)) + assert state.state == "56.7" async def test_rpc_remove_number_virtual_sensor_when_mode_field( @@ -1122,8 +1201,7 @@ async def test_rpc_remove_number_virtual_sensor_when_mode_field( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_number_virtual_sensor_when_orphaned( @@ -1147,8 +1225,7 @@ async def test_rpc_remove_number_virtual_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is 
None @pytest.mark.parametrize( @@ -1188,19 +1265,18 @@ async def test_rpc_device_virtual_enum_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == expected_state assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM assert state.attributes.get(ATTR_OPTIONS) == ["Title 1", "two", "three"] - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-enum:203-enum" monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "two") mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "two" + assert (state := hass.states.get(entity_id)) + assert state.state == "two" async def test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( @@ -1242,8 +1318,7 @@ async def test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_enum_virtual_sensor_when_orphaned( @@ -1267,8 +1342,7 @@ async def test_rpc_remove_enum_virtual_sensor_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -1299,61 +1373,51 @@ async def test_rpc_rgbw_sensors( entity_id = f"sensor.test_name_{light_type}_light_0_power" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.2" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-power_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_energy" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "0.045141" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-energy_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_current" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "0.23" assert ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricCurrent.AMPERE ) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-current_{light_type}" entity_id = f"sensor.test_name_{light_type}_light_0_voltage" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "12.4" assert ( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricPotential.VOLT ) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-voltage_{light_type}" 
entity_id = f"sensor.test_name_{light_type}_light_0_device_temperature" - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "54.3" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{light_type}:0-temperature_{light_type}" @@ -1366,15 +1430,17 @@ async def test_rpc_device_sensor_goes_unavailable_on_disconnect( ) -> None: """Test RPC device with sensor goes unavailable on disconnect.""" await init_integration(hass, 2) - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state is not None - assert temp_sensor_state.state != STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state != STATE_UNAVAILABLE + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "initialized", False) mock_rpc_device.mock_disconnected() await hass.async_block_till_done() - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state.state == STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state == STATE_UNAVAILABLE freezer.tick(60) async_fire_time_changed(hass) @@ -1385,8 +1451,9 @@ async def test_rpc_device_sensor_goes_unavailable_on_disconnect( monkeypatch.setattr(mock_rpc_device, "initialized", True) mock_rpc_device.mock_initialized() await hass.async_block_till_done() - temp_sensor_state = hass.states.get("sensor.test_name_temperature") - assert temp_sensor_state.state != STATE_UNAVAILABLE + + assert (state := hass.states.get("sensor.test_name_temperature")) + assert state.state != STATE_UNAVAILABLE async def test_rpc_voltmeter_value( @@ -1399,13 +1466,11 @@ async def test_rpc_voltmeter_value( await init_integration(hass, 2) - state = hass.states.get(entity_id) - + assert (state := hass.states.get(entity_id)) assert state.state == "12.34" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "ppm" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-voltmeter:100-voltmeter_value" @@ -1450,8 +1515,7 @@ async def test_rpc_device_virtual_number_sensor_with_device_class( await init_integration(hass, 3) - state = hass.states.get("sensor.test_name_current_humidity") - assert state + assert (state := hass.states.get("sensor.test_name_current_humidity")) assert state.state == "34" assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.HUMIDITY diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 5aae9dfffc9..824742d1798 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -3,7 +3,7 @@ from copy import deepcopy from unittest.mock import AsyncMock, Mock -from aioshelly.const import MODEL_GAS +from aioshelly.const import MODEL_1PM, MODEL_GAS, MODEL_MOTION from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError import pytest @@ -28,7 +28,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry -from . 
import get_entity_state, init_integration, register_device, register_entity +from . import init_integration, register_device, register_entity from tests.common import mock_restore_cache @@ -42,22 +42,25 @@ async def test_block_device_services( ) -> None: """Test block device turn on/off services.""" await init_integration(hass, 1) + entity_id = "switch.test_name_channel_1" await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_name_channel_1"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_name_channel_1").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_name_channel_1"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_name_channel_1").state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF @pytest.mark.parametrize("model", MOTION_MODELS) @@ -75,7 +78,8 @@ async def test_block_motion_switch( mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON # turn off await hass.services.async_call( @@ -88,7 +92,9 @@ async def test_block_motion_switch( mock_block_device.mock_update() mock_block_device.set_shelly_motion_detection.assert_called_once_with(False) - assert get_entity_state(hass, entity_id) == STATE_OFF + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # turn on mock_block_device.set_shelly_motion_detection.reset_mock() @@ -102,7 +108,9 @@ async def test_block_motion_switch( mock_block_device.mock_update() mock_block_device.set_shelly_motion_detection.assert_called_once_with(True) - assert get_entity_state(hass, entity_id) == STATE_ON + + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON @pytest.mark.parametrize("model", MOTION_MODELS) @@ -132,14 +140,16 @@ async def test_block_restored_motion_switch( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON @pytest.mark.parametrize("model", MOTION_MODELS) @@ -167,25 +177,48 @@ async def test_block_restored_motion_switch_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert get_entity_state(hass, entity_id) == STATE_UNKNOWN + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_UNKNOWN # Make device online monkeypatch.setattr(mock_block_device, "initialized", True) mock_block_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - assert get_entity_state(hass, entity_id) == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON +@pytest.mark.parametrize( + ("model", "sleep", "entity", "unique_id"), + [ + (MODEL_1PM, 0, "switch.test_name_channel_1", "123456789ABC-relay_0"), + ( + MODEL_MOTION, + 1000, 
+ "switch.test_name_motion_detection", + "123456789ABC-sensor_0-motionActive", + ), + ], +) async def test_block_device_unique_ids( - hass: HomeAssistant, entity_registry: EntityRegistry, mock_block_device: Mock + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_block_device: Mock, + model: str, + sleep: int, + entity: str, + unique_id: str, ) -> None: """Test block device unique_ids.""" - await init_integration(hass, 1) + await init_integration(hass, 1, model=model, sleep_period=sleep) - entry = entity_registry.async_get("switch.test_name_channel_1") - assert entry - assert entry.unique_id == "123456789ABC-relay_0" + if sleep: + mock_block_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert (entry := entity_registry.async_get(entity)) + assert entry.unique_id == unique_id async def test_block_set_state_connection_error( @@ -199,7 +232,10 @@ async def test_block_set_state_connection_error( ) await init_integration(hass, 1) - with pytest.raises(HomeAssistantError): + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while calling action for switch.test_name_channel_1 of Test name", + ): await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -248,11 +284,15 @@ async def test_block_device_update( """Test block device update.""" monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "output", False) await init_integration(hass, 1) - assert hass.states.get("switch.test_name_channel_1").state == STATE_OFF + + entity_id = "switch.test_name_channel_1" + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setattr(mock_block_device.blocks[RELAY_BLOCK_ID], "output", True) mock_block_device.mock_update() - assert hass.states.get("switch.test_name_channel_1").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON async def test_block_device_no_relay_blocks( @@ -288,35 +328,44 @@ async def test_rpc_device_services( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: """Test RPC device turn on/off services.""" + monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) + entity_id = "switch.test_switch_0" await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_switch_0"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) - assert hass.states.get("switch.test_switch_0").state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON monkeypatch.setitem(mock_rpc_device.status["switch:0"], "output", False) await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_switch_0"}, + {ATTR_ENTITY_ID: entity_id}, blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get("switch.test_switch_0").state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF async def test_rpc_device_unique_ids( - hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + entity_registry: EntityRegistry, ) -> None: """Test RPC device unique_ids.""" + monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - entry = 
entity_registry.async_get("switch.test_switch_0") - assert entry + assert (entry := entity_registry.async_get("switch.test_switch_0")) assert entry.unique_id == "123456789ABC-switch:0" @@ -328,21 +377,37 @@ async def test_rpc_device_switch_type_lights_mode( mock_rpc_device.config["sys"]["ui_data"], "consumption_types", ["lights"] ) await init_integration(hass, 2) + assert hass.states.get("switch.test_switch_0") is None -@pytest.mark.parametrize("exc", [DeviceConnectionError, RpcCallError(-1, "error")]) +@pytest.mark.parametrize( + ("exc", "error"), + [ + ( + DeviceConnectionError, + "Device communication error occurred while calling action for switch.test_switch_0 of Test name", + ), + ( + RpcCallError(-1, "error"), + "RPC call error occurred while calling action for switch.test_switch_0 of Test name", + ), + ], +) async def test_rpc_set_state_errors( hass: HomeAssistant, exc: Exception, + error: str, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC device set state connection/call errors.""" monkeypatch.setattr(mock_rpc_device, "call_rpc", AsyncMock(side_effect=exc)) + monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) await init_integration(hass, 2) - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError, match=error): await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, @@ -360,6 +425,8 @@ async def test_rpc_auth_error( "call_rpc", AsyncMock(side_effect=InvalidAuthError), ) + monkeypatch.delitem(mock_rpc_device.status, "cover:0") + monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False) entry = await init_integration(hass, 2) assert entry.state is ConfigEntryState.LOADED @@ -409,28 +476,34 @@ async def test_wall_display_relay_mode( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test Wall Display in relay mode.""" - climate_entity_id = "climate.test_name" + climate_entity_id = "climate.test_name_thermostat_0" switch_entity_id = "switch.test_switch_0" + config_entry = await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + + assert (state := hass.states.get(climate_entity_id)) + assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 1 + new_status = deepcopy(mock_rpc_device.status) new_status["sys"]["relay_in_thermostat"] = False new_status.pop("thermostat:0") + new_status.pop("cover:0") monkeypatch.setattr(mock_rpc_device, "status", new_status) - await init_integration(hass, 2, model=MODEL_WALL_DISPLAY) + await hass.config_entries.async_reload(config_entry.entry_id) + await hass.async_block_till_done() # the climate entity should be removed + assert hass.states.get(climate_entity_id) is None assert len(hass.states.async_entity_ids(CLIMATE_DOMAIN)) == 0 # the switch entity should be created - state = hass.states.get(switch_entity_id) - assert state + assert (state := hass.states.get(switch_entity_id)) assert state.state == STATE_ON assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - entry = entity_registry.async_get(switch_entity_id) - assert entry + assert (entry := entity_registry.async_get(switch_entity_id)) assert entry.unique_id == "123456789ABC-switch:0" @@ -463,12 +536,10 @@ async def test_rpc_device_virtual_switch( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := 
entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-boolean:200-boolean" monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", False) @@ -479,7 +550,8 @@ async def test_rpc_device_virtual_switch( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_OFF + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_OFF monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", True) await hass.services.async_call( @@ -489,7 +561,8 @@ async def test_rpc_device_virtual_switch( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == STATE_ON + assert (state := hass.states.get(entity_id)) + assert state.state == STATE_ON async def test_rpc_device_virtual_binary_sensor( @@ -510,8 +583,7 @@ async def test_rpc_device_virtual_binary_sensor( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert not state + assert hass.states.get(entity_id) is None async def test_rpc_remove_virtual_switch_when_mode_label( @@ -544,8 +616,7 @@ async def test_rpc_remove_virtual_switch_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_switch_when_orphaned( @@ -569,8 +640,7 @@ async def test_rpc_remove_virtual_switch_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -600,11 +670,10 @@ async def test_rpc_device_script_switch( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON - entry = entity_registry.async_get(entity_id) - assert entry + + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == f"123456789ABC-{key}-script" monkeypatch.setitem(mock_rpc_device.status[key], "running", False) @@ -615,8 +684,8 @@ async def test_rpc_device_script_switch( blocking=True, ) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) - assert state + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF monkeypatch.setitem(mock_rpc_device.status[key], "running", True) @@ -627,6 +696,6 @@ async def test_rpc_device_script_switch( blocking=True, ) mock_rpc_device.mock_update() - state = hass.states.get(entity_id) - assert state + + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON diff --git a/tests/components/shelly/test_text.py b/tests/components/shelly/test_text.py index 19acb856f35..a4812cc4160 100644 --- a/tests/components/shelly/test_text.py +++ b/tests/components/shelly/test_text.py @@ -47,17 +47,17 @@ async def test_rpc_device_virtual_text( await init_integration(hass, 3) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == "lorem ipsum" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-text:203-text" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") mock_rpc_device.mock_update() - assert 
hass.states.get(entity_id).state == "dolor sit amet" + + assert (state := hass.states.get(entity_id)) + assert state.state == "dolor sit amet" monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "sed do eiusmod") await hass.services.async_call( @@ -67,7 +67,9 @@ async def test_rpc_device_virtual_text( blocking=True, ) mock_rpc_device.mock_update() - assert hass.states.get(entity_id).state == "sed do eiusmod" + + assert (state := hass.states.get(entity_id)) + assert state.state == "sed do eiusmod" async def test_rpc_remove_virtual_text_when_mode_label( @@ -100,8 +102,7 @@ async def test_rpc_remove_virtual_text_when_mode_label( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None async def test_rpc_remove_virtual_text_when_orphaned( @@ -125,5 +126,4 @@ async def test_rpc_remove_virtual_text_when_orphaned( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - entry = entity_registry.async_get(entity_id) - assert not entry + assert entity_registry.async_get(entity_id) is None diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 9ea66c1acb7..51016f0cdaa 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -61,14 +61,16 @@ async def test_block_update( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - supported_feat = state.attributes[ATTR_SUPPORTED_FEATURES] - assert supported_feat == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS + ) await hass.services.async_call( UPDATE_DOMAIN, @@ -78,7 +80,7 @@ async def test_block_update( ) assert mock_block_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" @@ -89,15 +91,14 @@ async def test_block_update( monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-fwupdate" @@ -117,7 +118,7 @@ async def test_block_beta_update( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert 
state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0" @@ -129,7 +130,7 @@ async def test_block_beta_update( ) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" @@ -145,7 +146,7 @@ async def test_block_beta_update( ) assert mock_block_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" @@ -155,15 +156,14 @@ async def test_block_beta_update( monkeypatch.setitem(mock_block_device.status["update"], "old_version", "2.0.0-beta") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_LATEST_VERSION] == "2.0.0-beta" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-fwupdate_beta" @@ -184,14 +184,16 @@ async def test_block_update_connection_error( ) await init_integration(hass, 1) - with pytest.raises(HomeAssistantError) as excinfo: + with pytest.raises( + HomeAssistantError, + match="Device communication error occurred while triggering OTA update for Test name", + ): await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) - assert "Error starting OTA update" in str(excinfo.value) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -254,11 +256,12 @@ async def test_block_version_compare( monkeypatch.setitem(mock_block_device.status, "cloud", {"connected": False}) await init_integration(hass, 1) - state = hass.states.get(entity_id_latest) + assert (state := hass.states.get(entity_id_latest)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE assert state.attributes[ATTR_LATEST_VERSION] == STABLE - state = hass.states.get(entity_id_beta) + + assert (state := hass.states.get(entity_id_beta)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == STABLE assert state.attributes[ATTR_LATEST_VERSION] == BETA @@ -268,11 +271,12 @@ async def test_block_version_compare( monkeypatch.setitem(mock_block_device.status["update"], "beta_version", BETA) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id_latest) + assert (state := hass.states.get(entity_id_latest)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == BETA assert state.attributes[ATTR_LATEST_VERSION] == STABLE - state = hass.states.get(entity_id_beta) + + assert (state := hass.states.get(entity_id_beta)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == BETA assert state.attributes[ATTR_LATEST_VERSION] == BETA @@ -296,7 +300,7 @@ async def test_rpc_update( ) await init_integration(hass, 2) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert 
state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -314,7 +318,7 @@ async def test_rpc_update( assert mock_rpc_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -337,7 +341,7 @@ async def test_rpc_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 0 @@ -357,7 +361,7 @@ async def test_rpc_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 50 @@ -378,15 +382,14 @@ async def test_rpc_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2") mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate" @@ -417,7 +420,7 @@ async def test_rpc_sleeping_update( mock_rpc_device.mock_online() await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -429,7 +432,7 @@ async def test_rpc_sleeping_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2") mock_rpc_device.mock_update() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -437,8 +440,7 @@ async def test_rpc_sleeping_update( assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None assert state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature(0) - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate" @@ -469,7 +471,7 @@ async def test_rpc_restored_sleeping_update( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -486,7 +488,7 @@ async def test_rpc_restored_sleeping_update( mock_rpc_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -525,7 +527,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = 
hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_UNKNOWN # Make device online @@ -537,7 +539,7 @@ async def test_rpc_restored_sleeping_update_no_last_state( mock_rpc_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2" @@ -567,7 +569,7 @@ async def test_rpc_beta_update( ) await init_integration(hass, 2) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "1" @@ -584,7 +586,7 @@ async def test_rpc_beta_update( ) await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" @@ -614,7 +616,7 @@ async def test_rpc_beta_update( assert mock_rpc_device.trigger_ota_update.call_count == 1 - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_ON assert state.attributes[ATTR_INSTALLED_VERSION] == "1" assert state.attributes[ATTR_LATEST_VERSION] == "2b" @@ -637,7 +639,7 @@ async def test_rpc_beta_update( }, ) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.attributes[ATTR_IN_PROGRESS] is True assert state.attributes[ATTR_UPDATE_PERCENTAGE] == 40 @@ -658,23 +660,28 @@ async def test_rpc_beta_update( monkeypatch.setitem(mock_rpc_device.shelly, "ver", "2b") await mock_rest_update(hass, freezer) - state = hass.states.get(entity_id) + assert (state := hass.states.get(entity_id)) assert state.state == STATE_OFF assert state.attributes[ATTR_INSTALLED_VERSION] == "2b" assert state.attributes[ATTR_LATEST_VERSION] == "2b" assert state.attributes[ATTR_IN_PROGRESS] is False assert state.attributes[ATTR_UPDATE_PERCENTAGE] is None - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-sys-fwupdate_beta" @pytest.mark.parametrize( ("exc", "error"), [ - (DeviceConnectionError, "OTA update connection error: DeviceConnectionError()"), - (RpcCallError(-1, "error"), "OTA update request error"), + ( + DeviceConnectionError, + "Device communication error occurred while triggering OTA update for Test name", + ), + ( + RpcCallError(-1, "error"), + "RPC call error occurred while triggering OTA update for Test name", + ), ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -701,14 +708,13 @@ async def test_rpc_update_errors( ) await init_integration(hass, 2) - with pytest.raises(HomeAssistantError) as excinfo: + with pytest.raises(HomeAssistantError, match=error): await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: "update.test_name_firmware"}, blocking=True, ) - assert error in str(excinfo.value) @pytest.mark.usefixtures("entity_registry_enabled_by_default") diff --git a/tests/components/shelly/test_valve.py b/tests/components/shelly/test_valve.py index 9dc8597120a..7bf9e3b5f1a 100644 --- a/tests/components/shelly/test_valve.py +++ b/tests/components/shelly/test_valve.py @@ -25,11 +25,11 @@ async def test_block_device_gas_valve( await 
init_integration(hass, 1, MODEL_GAS) entity_id = "valve.test_name_valve" - entry = entity_registry.async_get(entity_id) - assert entry + assert (entry := entity_registry.async_get(entity_id)) assert entry.unique_id == "123456789ABC-valve_0-valve" - assert hass.states.get(entity_id).state == ValveState.CLOSED + assert (state := hass.states.get(entity_id)) + assert state.state == ValveState.CLOSED await hass.services.async_call( VALVE_DOMAIN, @@ -38,16 +38,14 @@ async def test_block_device_gas_valve( blocking=True, ) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.OPENING monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "opened") mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.OPEN await hass.services.async_call( @@ -57,14 +55,12 @@ async def test_block_device_gas_valve( blocking=True, ) - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.CLOSING monkeypatch.setattr(mock_block_device.blocks[GAS_VALVE_BLOCK_ID], "valve", "closed") mock_block_device.mock_update() await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state + assert (state := hass.states.get(entity_id)) assert state.state == ValveState.CLOSED diff --git a/tests/components/smartthings/__init__.py b/tests/components/smartthings/__init__.py index e87d1a8bcdf..fce344b57a7 100644 --- a/tests/components/smartthings/__init__.py +++ b/tests/components/smartthings/__init__.py @@ -3,7 +3,7 @@ from typing import Any from unittest.mock import AsyncMock -from pysmartthings.models import Attribute, Capability, DeviceEvent +from pysmartthings import Attribute, Capability, DeviceEvent from syrupy import SnapshotAssertion from homeassistant.components.smartthings.const import MAIN @@ -55,6 +55,7 @@ async def trigger_update( attribute: Attribute, value: str | float | dict[str, Any] | list[Any] | None, data: dict[str, Any] | None = None, + component: str = MAIN, ) -> None: """Trigger an update.""" event = DeviceEvent( @@ -62,7 +63,7 @@ async def trigger_update( "abc", "abc", device_id, - MAIN, + component, capability, attribute, value, diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 74bb7a84cba..277c327744f 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -4,10 +4,11 @@ from collections.abc import Generator import time from unittest.mock import AsyncMock, patch -from pysmartthings.models import ( +from pysmartthings import ( DeviceResponse, DeviceStatus, LocationResponse, + RoomResponse, SceneResponse, Subscription, ) @@ -79,6 +80,9 @@ def mock_smartthings() -> Generator[AsyncMock]: client.get_locations.return_value = LocationResponse.from_json( load_fixture("locations.json", DOMAIN) ).items + client.get_rooms.return_value = RoomResponse.from_json( + load_fixture("rooms.json", DOMAIN) + ).items client.create_subscription.return_value = Subscription.from_json( load_fixture("subscription.json", DOMAIN) ) @@ -87,7 +91,9 @@ def mock_smartthings() -> Generator[AsyncMock]: @pytest.fixture( params=[ + "da_ac_airsensor_01001", "da_ac_rac_000001", + "da_ac_rac_000003", "da_ac_rac_100001", "da_ac_rac_01001", "multipurpose_sensor", @@ -100,15 +106,20 @@ def mock_smartthings() -> 
Generator[AsyncMock]: "ge_in_wall_smart_dimmer", "centralite", "da_ref_normal_000001", + "da_ref_normal_01011", "vd_network_audio_002s", + "vd_sensor_light_2023", "iphone", + "da_sac_ehs_000001_sub", "da_wm_dw_000001", "da_wm_wd_000001", "da_wm_wd_000001_1", "da_wm_wm_000001", "da_wm_wm_000001_1", + "da_wm_sc_000001", "da_rvc_normal_000001", "da_ks_microwave_0101x", + "da_ks_cooktop_31001", "da_ks_range_0101x", "da_ks_oven_01061", "hue_color_temperature_bulb", @@ -126,6 +137,7 @@ def mock_smartthings() -> Generator[AsyncMock]: "fake_fan", "generic_fan_3_speed", "heatit_ztrm3_thermostat", + "heatit_zpushwall", "generic_ef00_v1", "bosch_radiator_thermostat_ii", "im_speaker_ai_0001", @@ -133,6 +145,7 @@ def mock_smartthings() -> Generator[AsyncMock]: "tplink_p110", "ikea_kadrilj", "aux_ac", + "hw_q80r_soundbar", ] ) def device_fixture( @@ -175,6 +188,7 @@ def mock_config_entry(expires_at: int) -> MockConfigEntry: CONF_INSTALLED_APP_ID: "123", }, version=3, + minor_version=2, ) diff --git a/tests/components/smartthings/fixtures/device_status/contact_sensor.json b/tests/components/smartthings/fixtures/device_status/contact_sensor.json index fa158d41b39..ca8c2628c99 100644 --- a/tests/components/smartthings/fixtures/device_status/contact_sensor.json +++ b/tests/components/smartthings/fixtures/device_status/contact_sensor.json @@ -36,7 +36,7 @@ "value": null }, "availableVersion": { - "value": "00000103", + "value": "00000104", "timestamp": "2025-02-09T13:59:19.101Z" }, "lastUpdateStatus": { diff --git a/tests/components/smartthings/fixtures/device_status/da_ac_airsensor_01001.json b/tests/components/smartthings/fixtures/device_status/da_ac_airsensor_01001.json new file mode 100644 index 00000000000..903b5163335 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ac_airsensor_01001.json @@ -0,0 +1,362 @@ +{ + "components": { + "main": { + "samsungce.rechargeableBattery": { + "chargingStatus": { + "value": "charging", + "timestamp": "2025-02-18T05:20:27.966Z" + }, + "battery": { + "value": 100, + "unit": "%", + "timestamp": "2025-02-22T04:50:19.633Z" + }, + "resolution": { + "value": 1, + "timestamp": "2024-12-20T14:38:31.662Z" + } + }, + "relativeHumidityMeasurement": { + "humidity": { + "value": 54, + "unit": "%", + "timestamp": "2025-03-21T07:26:16.872Z" + } + }, + "refresh": {}, + "carbonDioxideHealthConcern": { + "carbonDioxideHealthConcern": { + "value": "moderate", + "timestamp": "2025-03-21T13:40:56.560Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.sensors"], + "if": ["oic.if.baseline", "oic.if.s"], + "x.com.samsung.da.cleanLevel": "2", + "x.com.samsung.da.refresh": "Off", + "x.com.samsung.da.lastSensingTime": "1740829045", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "Sensor for CleanLevel", + "x.com.samsung.da.type": "CleanLevel", + "x.com.samsung.da.value": ["2"] + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Sensor for Odor", + "x.com.samsung.da.type": "Odor", + "x.com.samsung.da.value": ["2"] + }, + { + "x.com.samsung.da.id": "2", + "x.com.samsung.da.description": "Sensor for Dust", + "x.com.samsung.da.type": "Dust", + "x.com.samsung.da.value": ["29", "1"] + }, + { + "x.com.samsung.da.id": "3", + "x.com.samsung.da.description": "Sensor for FineDust", + "x.com.samsung.da.type": "FineDust", + "x.com.samsung.da.value": ["7", "1"] + }, + { + "x.com.samsung.da.id": "4", + "x.com.samsung.da.description": "Sensor for SuperFineDust", + 
"x.com.samsung.da.type": "SuperFineDust", + "x.com.samsung.da.value": ["6", "1"] + }, + { + "x.com.samsung.da.id": "5", + "x.com.samsung.da.description": "Sensor for CO2", + "x.com.samsung.da.type": "CO2", + "x.com.samsung.da.value": ["2527", "3"] + } + ] + } + }, + "data": { + "href": "/sensors/vs/0" + }, + "timestamp": "2025-03-01T11:37:26.334Z" + } + }, + "carbonDioxideMeasurement": { + "carbonDioxide": { + "value": 1045, + "unit": "ppm", + "timestamp": "2025-03-21T15:05:44.312Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": null + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": null + }, + "description": { + "value": null + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "ASM-KR-TP1-22-ACMB1M", + "timestamp": "2025-03-20T23:08:07.388Z" + } + }, + "airQualitySensor": { + "airQuality": { + "value": 2, + "unit": "CAQI", + "timestamp": "2025-03-21T15:06:39.609Z" + } + }, + "fineDustHealthConcern": { + "fineDustHealthConcern": { + "value": "good", + "timestamp": "2025-03-21T10:25:04.548Z" + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "ASM-KR-TP1-22-ACMB1M_16240426", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnhw": { + "value": "Realtek", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "di": { + "value": "a3a970ea-e09c-9c04-161b-94c934e21666", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "dmv": { + "value": "1.2.1", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "n": { + "value": "Samsung AirMonitor", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnmo": { + "value": "ASM-KR-TP1-22-ACMB1M|10243041|75000000001611C40800020000080000", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "vid": { + "value": "DA-AC-AIRSENSOR-01001", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnpv": { + "value": "DAWIT 2.0", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "mnos": { + "value": "TizenRT 4.0", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "pi": { + "value": "a3a970ea-e09c-9c04-161b-94c934e21666", + "timestamp": "2024-08-19T07:28:01.277Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2024-08-19T07:28:01.277Z" + } + }, + "odorSensor": { + "odorLevel": { + "value": 1, + "timestamp": "2025-03-21T13:29:15.650Z" + } + }, + "veryFineDustHealthConcern": { + "veryFineDustHealthConcern": { + "value": "good", + "timestamp": "2025-03-21T02:56:21.007Z" + } + }, + "samsungce.doNotDisturb": { + "settable": { + "value": true, + "timestamp": "2024-12-20T14:38:31.895Z" + }, + "dayOfWeek": { + "value": null + }, + "repeatMode": { + "value": null + }, + "startTime": { + "value": "14:00:00Z", + "timestamp": "2024-12-20T14:38:31.895Z" + }, + "endTime": { + "value": "22:00:00Z", + "timestamp": "2024-12-20T14:38:31.895Z" + }, + "activated": { + "value": false, + "timestamp": "2024-12-20T14:38:31.895Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [], + "timestamp": "2025-03-01T11:37:26.334Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 22100101, + "timestamp": "2023-12-09T04:05:59.505Z" + } + }, + 
"samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": "EXCHUODPSCTZY", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "sec.diagnosticsInformation": { + "logType": { + "value": ["errCode", "dump"], + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "endpoint": { + "value": "SSM", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "minVersion": { + "value": "1.0", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "signinPermission": { + "value": null + }, + "setupId": { + "value": "AM0", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "protocolType": { + "value": "wifi_https", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "tsId": { + "value": null + }, + "mnId": { + "value": "0AJT", + "timestamp": "2024-12-20T14:38:31.716Z" + }, + "dumpType": { + "value": "file", + "timestamp": "2024-12-20T14:38:31.716Z" + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 23.0, + "unit": "C", + "timestamp": "2025-03-21T04:40:33.951Z" + } + }, + "dustSensor": { + "dustLevel": { + "value": 31, + "unit": "\u03bcg/m^3", + "timestamp": "2025-03-21T15:06:39.609Z" + }, + "fineDustLevel": { + "value": 7, + "unit": "\u03bcg/m^3", + "timestamp": "2025-03-21T15:06:28.515Z" + } + }, + "veryFineDustSensor": { + "veryFineDustLevel": { + "value": 6, + "unit": "\u03bcg/m^3", + "timestamp": "2025-03-21T15:06:28.515Z" + } + }, + "custom.deviceReportStateConfiguration": { + "reportStateRealtimePeriod": { + "value": "enabled", + "timestamp": "2024-12-20T14:38:31.769Z" + }, + "reportStateRealtime": { + "value": { + "state": "disabled" + }, + "timestamp": "2025-03-20T22:02:48.215Z" + }, + "reportStatePeriod": { + "value": "enabled", + "timestamp": "2024-12-20T14:38:31.769Z" + } + }, + "dustHealthConcern": { + "dustHealthConcern": { + "value": "moderate", + "timestamp": "2025-03-21T15:06:39.609Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_ac_rac_000003.json b/tests/components/smartthings/fixtures/device_status/da_ac_rac_000003.json new file mode 100644 index 00000000000..98434aa2c5a --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ac_rac_000003.json @@ -0,0 +1,585 @@ +{ + "components": { + "main": { + "relativeHumidityMeasurement": { + "humidity": { + "value": 48, + "unit": "%", + "timestamp": "2025-03-27T05:12:16.158Z" + } + }, + "custom.airConditionerOdorController": { + "airConditionerOdorControllerProgress": { + "value": null + }, + "airConditionerOdorControllerState": { + "value": null + } + }, + "custom.thermostatSetpointControl": { + "minimumSetpoint": { + "value": 16, + "unit": "C", + "timestamp": "2025-03-13T09:29:37.008Z" + }, + "maximumSetpoint": { + "value": 30, + "unit": "C", + "timestamp": "2024-06-21T13:45:16.785Z" + } + }, + "airConditionerMode": { + "availableAcModes": { + "value": null + }, + "supportedAcModes": { + "value": ["cool", "dry", "wind", "auto"], + "timestamp": "2024-06-21T13:45:16.785Z" + }, + "airConditionerMode": { + "value": "cool", + "timestamp": "2025-03-13T09:29:36.789Z" + } + }, + "custom.spiMode": { + "spiMode": { + "value": "off", + "timestamp": "2025-02-08T08:54:15.661Z" + } + }, + "samsungce.deviceIdentification": { + 
"micomAssayCode": { + "value": null + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": null + }, + "description": { + "value": null + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "ARTIK051_PRAC_20K", + "timestamp": "2025-03-27T05:12:15.284Z" + } + }, + "airQualitySensor": { + "airQuality": { + "value": null + } + }, + "custom.airConditionerOptionalMode": { + "supportedAcOptionalMode": { + "value": [ + "off", + "sleep", + "quiet", + "smart", + "speed", + "windFree", + "windFreeSleep" + ], + "timestamp": "2024-06-21T13:45:16.785Z" + }, + "acOptionalMode": { + "value": "off", + "timestamp": "2025-03-26T12:20:41.095Z" + } + }, + "switch": { + "switch": { + "value": "on", + "timestamp": "2025-03-27T05:41:42.291Z" + } + }, + "custom.airConditionerTropicalNightMode": { + "acTropicalNightModeLevel": { + "value": 0, + "timestamp": "2025-02-08T08:54:15.789Z" + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "ARTIK051_PRAC_20K_11230313", + "timestamp": "2024-06-21T13:58:04.085Z" + }, + "mnhw": { + "value": "ARTIK051", + "timestamp": "2024-06-21T13:51:35.294Z" + }, + "di": { + "value": "c76d6f38-1b7f-13dd-37b5-db18d5272783", + "timestamp": "2024-06-21T13:45:16.329Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2024-06-21T13:51:35.980Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2024-06-21T13:58:04.698Z" + }, + "n": { + "value": "Samsung Room A/C", + "timestamp": "2024-06-21T13:58:04.085Z" + }, + "mnmo": { + "value": "ARTIK051_PRAC_20K|10256941|60010534001411014600003200800000", + "timestamp": "2024-06-21T13:45:16.329Z" + }, + "vid": { + "value": "DA-AC-RAC-000003", + "timestamp": "2024-06-21T13:45:16.329Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2024-06-21T13:45:16.329Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2024-06-21T13:45:16.329Z" + }, + "mnpv": { + "value": "DAWIT 2.0", + "timestamp": "2024-06-21T13:51:35.294Z" + }, + "mnos": { + "value": "TizenRT 1.0 + IPv6", + "timestamp": "2024-06-21T13:51:35.294Z" + }, + "pi": { + "value": "c76d6f38-1b7f-13dd-37b5-db18d5272783", + "timestamp": "2024-06-21T13:45:16.329Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2024-06-21T13:45:16.329Z" + } + }, + "airConditionerFanMode": { + "fanMode": { + "value": "low", + "timestamp": "2025-03-26T12:20:41.393Z" + }, + "supportedAcFanModes": { + "value": ["auto", "low", "medium", "high", "turbo"], + "timestamp": "2024-06-21T13:45:16.785Z" + }, + "availableAcFanModes": { + "value": null + } + }, + "samsungce.dustFilterAlarm": { + "alarmThreshold": { + "value": 500, + "unit": "Hour", + "timestamp": "2025-02-08T08:54:15.473Z" + }, + "supportedAlarmThresholds": { + "value": [180, 300, 500, 700], + "unit": "Hour", + "timestamp": "2025-02-08T08:54:15.473Z" + } + }, + "custom.electricHepaFilter": { + "electricHepaFilterCapacity": { + "value": null + }, + "electricHepaFilterUsageStep": { + "value": null + }, + "electricHepaFilterLastResetDate": { + "value": null + }, + "electricHepaFilterStatus": { + "value": null + }, + "electricHepaFilterUsage": { + "value": null + }, + "electricHepaFilterResetType": { + "value": null + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [ + "remoteControlStatus", + "airQualitySensor", + "dustSensor", + "odorSensor", + "veryFineDustSensor", + 
"custom.spiMode", + "custom.deodorFilter", + "custom.electricHepaFilter", + "custom.periodicSensing", + "custom.doNotDisturbMode", + "custom.airConditionerOdorController", + "samsungce.individualControlLock" + ], + "timestamp": "2025-02-08T08:54:15.355Z" + } + }, + "custom.ocfResourceVersion": { + "ocfResourceUpdatedTime": { + "value": null + }, + "ocfResourceVersion": { + "value": null + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 24040101, + "timestamp": "2024-06-21T13:45:16.348Z" + } + }, + "fanOscillationMode": { + "supportedFanOscillationModes": { + "value": ["fixed", "all", "vertical", "horizontal"], + "timestamp": "2025-02-08T08:54:15.797Z" + }, + "availableFanOscillationModes": { + "value": null + }, + "fanOscillationMode": { + "value": "fixed", + "timestamp": "2025-02-25T15:40:11.773Z" + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 26, + "unit": "C", + "timestamp": "2025-03-26T14:19:08.047Z" + } + }, + "dustSensor": { + "dustLevel": { + "value": null + }, + "fineDustLevel": { + "value": null + } + }, + "custom.deviceReportStateConfiguration": { + "reportStateRealtimePeriod": { + "value": "disabled", + "timestamp": "2025-02-08T08:54:15.726Z" + }, + "reportStateRealtime": { + "value": { + "state": "enabled", + "duration": 10, + "unit": "minute" + }, + "timestamp": "2025-03-24T08:28:07.030Z" + }, + "reportStatePeriod": { + "value": "enabled", + "timestamp": "2025-02-08T08:54:15.726Z" + } + }, + "custom.periodicSensing": { + "automaticExecutionSetting": { + "value": null + }, + "automaticExecutionMode": { + "value": null + }, + "supportedAutomaticExecutionSetting": { + "value": null + }, + "supportedAutomaticExecutionMode": { + "value": null + }, + "periodicSensing": { + "value": null + }, + "periodicSensingInterval": { + "value": null + }, + "lastSensingTime": { + "value": null + }, + "lastSensingLevel": { + "value": null + }, + "periodicSensingStatus": { + "value": null + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": 24, + "unit": "C", + "timestamp": "2025-03-26T12:20:41.346Z" + } + }, + "demandResponseLoadControl": { + "drlcStatus": { + "value": { + "drlcType": 1, + "duration": 0, + "override": false + }, + "timestamp": "2025-03-24T04:56:36.855Z" + } + }, + "audioVolume": { + "volume": { + "value": 100, + "unit": "%", + "timestamp": "2025-02-08T08:54:15.789Z" + } + }, + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "energy": 602171, + "deltaEnergy": 0, + "power": 0, + "powerEnergy": 0.0, + "persistedEnergy": 602171, + "energySaved": 0, + "persistedSavedEnergy": 0, + "start": "2025-03-27T05:29:22Z", + "end": "2025-03-27T05:40:02Z" + }, + "timestamp": "2025-03-27T05:40:02.686Z" + } + }, + "custom.autoCleaningMode": { + "supportedAutoCleaningModes": { + "value": null + }, + "timedCleanDuration": { + "value": null + }, + "operatingState": { + "value": null + }, + "timedCleanDurationRange": { + "value": null + }, + "supportedOperatingStates": { + "value": null + }, + "progress": { + "value": null + }, + "autoCleaningMode": { + "value": "off", + "timestamp": "2025-03-15T05:30:11.075Z" + } + }, + "samsungce.individualControlLock": { + "lockState": { + "value": null + } + }, + "refresh": {}, + "execute": { + "data": { + "value": null + } + }, + "samsungce.selfCheck": { + "result": { + "value": null + }, + "supportedActions": { + "value": ["start"], + "timestamp": "2024-06-21T13:45:16.348Z" + }, + 
"progress": { + "value": null + }, + "errors": { + "value": [], + "timestamp": "2025-02-08T08:54:15.048Z" + }, + "status": { + "value": null + } + }, + "custom.dustFilter": { + "dustFilterUsageStep": { + "value": 1, + "timestamp": "2025-02-08T08:54:15.473Z" + }, + "dustFilterUsage": { + "value": 69, + "timestamp": "2025-03-26T10:57:41.097Z" + }, + "dustFilterLastResetDate": { + "value": null + }, + "dustFilterStatus": { + "value": "normal", + "timestamp": "2025-02-08T08:54:15.473Z" + }, + "dustFilterCapacity": { + "value": 500, + "unit": "Hour", + "timestamp": "2025-02-08T08:54:15.473Z" + }, + "dustFilterResetType": { + "value": ["replaceable", "washable"], + "timestamp": "2025-02-08T08:54:15.473Z" + } + }, + "odorSensor": { + "odorLevel": { + "value": null + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": null + } + }, + "custom.deodorFilter": { + "deodorFilterCapacity": { + "value": null + }, + "deodorFilterLastResetDate": { + "value": null + }, + "deodorFilterStatus": { + "value": null + }, + "deodorFilterResetType": { + "value": null + }, + "deodorFilterUsage": { + "value": null + }, + "deodorFilterUsageStep": { + "value": null + } + }, + "custom.energyType": { + "energyType": { + "value": "1.0", + "timestamp": "2024-06-21T13:45:16.785Z" + }, + "energySavingSupport": { + "value": true, + "timestamp": "2024-06-21T13:58:08.419Z" + }, + "drMaxDuration": { + "value": 99999999, + "unit": "min", + "timestamp": "2024-06-21T13:51:39.304Z" + }, + "energySavingLevel": { + "value": null + }, + "energySavingInfo": { + "value": null + }, + "supportedEnergySavingLevels": { + "value": null + }, + "energySavingOperation": { + "value": false, + "timestamp": "2025-02-08T08:54:16.767Z" + }, + "notificationTemplateID": { + "value": null + }, + "energySavingOperationSupport": { + "value": false, + "timestamp": "2025-03-24T04:56:36.855Z" + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": {}, + "timestamp": "2025-02-08T08:54:16.685Z" + }, + "otnDUID": { + "value": "MTCPH4AI4MTYO", + "timestamp": "2025-02-08T08:54:15.626Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2025-02-08T08:54:15.626Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-02-08T08:54:15.626Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "veryFineDustSensor": { + "veryFineDustLevel": { + "value": null + } + }, + "custom.veryFineDustFilter": { + "veryFineDustFilterStatus": { + "value": null + }, + "veryFineDustFilterResetType": { + "value": null + }, + "veryFineDustFilterUsage": { + "value": null + }, + "veryFineDustFilterLastResetDate": { + "value": null + }, + "veryFineDustFilterUsageStep": { + "value": null + }, + "veryFineDustFilterCapacity": { + "value": null + } + }, + "custom.doNotDisturbMode": { + "doNotDisturb": { + "value": null + }, + "startTime": { + "value": null + }, + "endTime": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json b/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json new file mode 100644 index 00000000000..5ca8f56fbbf --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ks_cooktop_31001.json @@ -0,0 +1,508 @@ +{ + "components": { + "burner-02": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.cooktopHeatingPower": { + 
"manualLevel": { + "value": 0, + "timestamp": "2025-03-26T05:57:23.203Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.550Z" + } + } + }, + "burner-01": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.518Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-26T05:57:23.203Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.518Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.518Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.518Z" + } + } + }, + "main": { + "custom.disabledComponents": { + "disabledComponents": { + "value": ["burner-6"], + "timestamp": "2025-03-25T18:18:28.464Z" + } + }, + "custom.userNotification": { + "message": { + "value": null + } + }, + "samsungce.remoteManagementData": { + "reportRawData": { + "value": "AgUBASCgAwAACaEDAAAM4AQAAAAA4QHwAw==", + "timestamp": "2025-03-26T07:27:58.282Z" + }, + "version": { + "value": "CT-31.0001", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": "5828", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "modelName": { + "value": "NZ64B5046GK", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "serialNumber": { + "value": "B8C878DX900290H", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "serialNumberExtra": { + "value": "N/A", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "modelClassificationCode": { + "value": "50000204001611000E00000000000000", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "description": { + "value": "N/A", + "timestamp": "2025-03-25T18:18:28.476Z" + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "TP2X_DA-KS-COOKTOP-31001", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-03-26T07:27:58.478Z" + } + }, + "samsungce.errorAndAlarmState": { + "events": { + "value": [], + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "samsungce.cooktopFlexZone": { + "flexZones": { + "value": [], + "timestamp": "2025-03-26T05:57:23.671Z" + } + }, + "samsungce.softwareVersion": { + "versions": { + "value": [ + { + "id": "Wifi", + "swType": "Wifi-Application", + "versionNumber": "80001A220811", + "description": "Aug 11 2022 08:38:36, Wifi:ws029_030, STDK : 1.7.4)" + }, + { + "id": "Micom", + "swType": "Micom 
Software", + "versionNumber": "240617", + "description": "Description for this micom version" + } + ], + "timestamp": "2025-03-25T18:18:28.482Z" + } + }, + "healthCheck": { + "checkInterval": { + "value": null + }, + "healthStatus": { + "value": null + }, + "DeviceWatch-Enroll": { + "value": null + }, + "DeviceWatch-DeviceStatus": { + "value": null + } + }, + "custom.cooktopOperatingState": { + "supportedCooktopOperatingState": { + "value": ["ready", "run", "paused"], + "timestamp": "2025-03-26T07:26:39.690Z" + }, + "cooktopOperatingState": { + "value": "ready", + "timestamp": "2025-03-26T07:27:58.652Z" + } + }, + "samsungce.kitchenDeviceIdentification": { + "regionCode": { + "value": "EU", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "modelCode": { + "value": "OZ8500B/EU2", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "fuel": { + "value": null + }, + "type": { + "value": "cooktop", + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "representativeComponent": { + "value": null + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": "JHCB2ZD4E2KRY", + "timestamp": "2025-03-25T18:18:28.482Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-03-25T18:18:28.501Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "samsungce.kidsLockControl": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-03-25T18:18:28.476Z" + } + }, + "audioMute": { + "mute": { + "value": "unmuted", + "timestamp": "2025-03-25T18:18:28.464Z" + } + } + }, + "burner-06": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.591Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.591Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "status": { + "value": null + } + } + }, + "hood": { + "samsungce.connectionState": { + "connectionState": { + "value": "disconnected", + "timestamp": "2025-03-25T18:18:28.650Z" + } + }, + "samsungce.hoodFanSpeed": { + "settableMaxFanSpeed": { + "value": 5, + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "hoodFanSpeed": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "supportedHoodFanSpeed": { + "value": [1, 2, 3, 4, 5], + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "settableMinFanSpeed": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.650Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.650Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.646Z" + }, + "status": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + }, + "samsungce.lamp": { + "brightnessLevel": { + "value": null + }, + 
"supportedBrightnessLevel": { + "value": ["off", "mid"], + "timestamp": "2025-03-25T18:18:28.650Z" + } + } + }, + "burner-05": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.586Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.586Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.591Z" + }, + "status": { + "value": null + } + } + }, + "burner-04": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.578Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-25T18:49:25.153Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.578Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.578Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.586Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.578Z" + } + } + }, + "burner-03": { + "samsungce.surfaceResidualHeat": { + "surfaceResidualHeat": { + "value": "normal", + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.cooktopHeatingPower": { + "manualLevel": { + "value": 0, + "timestamp": "2025-03-26T07:27:58.652Z" + }, + "heatingMode": { + "value": "manual", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMin": { + "value": 0, + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "supportedHeatingModes": { + "value": ["manual", "boost", "keepWarm"], + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "manualLevelMax": { + "value": 15, + "timestamp": "2025-03-25T18:18:28.550Z" + } + }, + "samsungce.countDownTimer": { + "startValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "currentValue": { + "value": 0, + "unit": "min", + "timestamp": "2025-03-25T18:18:28.550Z" + }, + "status": { + "value": "idle", + "timestamp": "2025-03-25T18:18:28.550Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_ref_normal_01011.json b/tests/components/smartthings/fixtures/device_status/da_ref_normal_01011.json new file mode 100644 index 00000000000..350a0ee14bb --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_ref_normal_01011.json @@ -0,0 +1,933 @@ +{ + "components": { + "pantry-01": { + "samsungce.foodDefrost": { + "supportedOptions": { + "value": null + }, + "foodType": { + "value": null + }, + "weight": { + "value": null + }, + "operationTime": { + "value": null + }, + "remainingTime": { + "value": 
null + } + }, + "samsungce.fridgePantryInfo": { + "name": { + "value": null + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": null + } + }, + "samsungce.meatAging": { + "zoneInfo": { + "value": null + }, + "supportedMeatTypes": { + "value": null + }, + "supportedAgingMethods": { + "value": null + }, + "status": { + "value": null + } + }, + "samsungce.fridgePantryMode": { + "mode": { + "value": null + }, + "supportedModes": { + "value": null + } + } + }, + "pantry-02": { + "samsungce.foodDefrost": { + "supportedOptions": { + "value": null + }, + "foodType": { + "value": null + }, + "weight": { + "value": null + }, + "operationTime": { + "value": null + }, + "remainingTime": { + "value": null + } + }, + "samsungce.fridgePantryInfo": { + "name": { + "value": null + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": null + } + }, + "samsungce.meatAging": { + "zoneInfo": { + "value": null + }, + "supportedMeatTypes": { + "value": null + }, + "supportedAgingMethods": { + "value": null + }, + "status": { + "value": null + } + }, + "samsungce.fridgePantryMode": { + "mode": { + "value": null + }, + "supportedModes": { + "value": null + } + } + }, + "icemaker": { + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + } + }, + "onedoor": { + "custom.fridgeMode": { + "fridgeModeValue": { + "value": null + }, + "fridgeMode": { + "value": null + }, + "supportedFridgeModes": { + "value": null + } + }, + "contactSensor": { + "contact": { + "value": null + } + }, + "samsungce.unavailableCapabilities": { + "unavailableCommands": { + "value": [], + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": ["samsungce.freezerConvertMode", "custom.fridgeMode"], + "timestamp": "2024-12-01T18:22:20.155Z" + } + }, + "samsungce.temperatureSetting": { + "supportedDesiredTemperatures": { + "value": null + }, + "desiredTemperature": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": null + } + }, + "custom.thermostatSetpointControl": { + "minimumSetpoint": { + "value": null + }, + "maximumSetpoint": { + "value": null + } + }, + "samsungce.freezerConvertMode": { + "supportedFreezerConvertModes": { + "value": null + }, + "freezerConvertMode": { + "value": null + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": null + } + } + }, + "scale-10": { + "samsungce.connectionState": { + "connectionState": { + "value": null + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": null + } + }, + "samsungce.weightMeasurement": { + "weight": { + "value": null + } + }, + "samsungce.weightMeasurementCalibration": {} + }, + "scale-11": { + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": null + } + }, + "samsungce.weightMeasurement": { + "weight": { + "value": null + } + } + }, + "cooler": { + "custom.fridgeMode": { + "fridgeModeValue": { + "value": null + }, + "fridgeMode": { + "value": null + }, + "supportedFridgeModes": { + "value": null + } + }, + "contactSensor": { + "contact": { + "value": "closed", + "timestamp": "2025-03-30T18:36:45.151Z" + } + }, + "samsungce.unavailableCapabilities": { + "unavailableCommands": { + "value": [], + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "custom.disabledCapabilities": { + 
"disabledCapabilities": { + "value": ["custom.fridgeMode", "samsungce.temperatureSetting"], + "timestamp": "2024-12-01T18:22:22.081Z" + } + }, + "samsungce.temperatureSetting": { + "supportedDesiredTemperatures": { + "value": null + }, + "desiredTemperature": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 6, + "unit": "C", + "timestamp": "2025-03-30T17:41:42.863Z" + } + }, + "custom.thermostatSetpointControl": { + "minimumSetpoint": { + "value": 1, + "unit": "C", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "maximumSetpoint": { + "value": 7, + "unit": "C", + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": { + "minimum": 1, + "maximum": 7, + "step": 1 + }, + "unit": "C", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "coolingSetpoint": { + "value": 6, + "unit": "C", + "timestamp": "2025-03-30T17:33:48.530Z" + } + } + }, + "freezer": { + "custom.fridgeMode": { + "fridgeModeValue": { + "value": null + }, + "fridgeMode": { + "value": null + }, + "supportedFridgeModes": { + "value": null + } + }, + "contactSensor": { + "contact": { + "value": "closed", + "timestamp": "2024-12-01T18:22:19.331Z" + } + }, + "samsungce.unavailableCapabilities": { + "unavailableCommands": { + "value": [], + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [ + "custom.fridgeMode", + "samsungce.temperatureSetting", + "samsungce.freezerConvertMode" + ], + "timestamp": "2024-12-01T18:22:22.081Z" + } + }, + "samsungce.temperatureSetting": { + "supportedDesiredTemperatures": { + "value": null + }, + "desiredTemperature": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": -17, + "unit": "C", + "timestamp": "2025-03-30T17:35:48.599Z" + } + }, + "custom.thermostatSetpointControl": { + "minimumSetpoint": { + "value": -23, + "unit": "C", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "maximumSetpoint": { + "value": -15, + "unit": "C", + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "samsungce.freezerConvertMode": { + "supportedFreezerConvertModes": { + "value": null + }, + "freezerConvertMode": { + "value": null + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": { + "minimum": -23, + "maximum": -15, + "step": 1 + }, + "unit": "C", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "coolingSetpoint": { + "value": -17, + "unit": "C", + "timestamp": "2025-03-30T17:32:34.710Z" + } + } + }, + "main": { + "contactSensor": { + "contact": { + "value": "closed", + "timestamp": "2025-03-30T18:36:45.151Z" + } + }, + "samsungce.fridgeWelcomeLighting": { + "detectionProximity": { + "value": null + }, + "supportedDetectionProximities": { + "value": null + }, + "status": { + "value": null + } + }, + "samsungce.viewInside": { + "supportedFocusAreas": { + "value": null + }, + "contents": { + "value": null + }, + "lastUpdatedTime": { + "value": null + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": null + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": null + }, + "description": { + "value": null + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "TP1X_REF_21K", + "timestamp": "2025-03-23T21:53:15.900Z" + } + }, + 
"samsungce.quickControl": { + "version": { + "value": "1.0", + "timestamp": "2025-02-12T21:52:01.494Z" + } + }, + "custom.fridgeMode": { + "fridgeModeValue": { + "value": null + }, + "fridgeMode": { + "value": null + }, + "supportedFridgeModes": { + "value": null + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "A-RFWW-TP1-22-REV1_20241030", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "mnhw": { + "value": "Realtek", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "di": { + "value": "5758b2ec-563e-f39b-ec39-208e54aabf60", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "dmv": { + "value": "1.2.1", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "n": { + "value": "Samsung-Refrigerator", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "mnmo": { + "value": "TP1X_REF_21K|00156941|00050126001611304100000030010000", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "vid": { + "value": "DA-REF-NORMAL-01011", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "mnpv": { + "value": "DAWIT 2.0", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "mnos": { + "value": "TizenRT 3.1", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "pi": { + "value": "5758b2ec-563e-f39b-ec39-208e54aabf60", + "timestamp": "2025-02-12T21:51:58.927Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-02-12T21:51:58.927Z" + } + }, + "samsungce.fridgeVacationMode": { + "vacationMode": { + "value": "off", + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [ + "temperatureMeasurement", + "thermostatCoolingSetpoint", + "custom.fridgeMode", + "custom.deodorFilter", + "custom.waterFilter", + "custom.dustFilter", + "samsungce.viewInside", + "samsungce.fridgeWelcomeLighting", + "samsungce.sabbathMode" + ], + "timestamp": "2025-02-12T21:52:01.494Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 24090102, + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "sec.diagnosticsInformation": { + "logType": { + "value": ["errCode", "dump"], + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "endpoint": { + "value": "SSM", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "minVersion": { + "value": "3.0", + "timestamp": "2025-02-12T21:52:00.460Z" + }, + "signinPermission": { + "value": null + }, + "setupId": { + "value": "RB0", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "protocolType": { + "value": "ble_ocf", + "timestamp": "2025-02-12T21:52:00.460Z" + }, + "tsId": { + "value": "DA01", + "timestamp": "2025-02-12T21:52:00.460Z" + }, + "mnId": { + "value": "0AJT", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "dumpType": { + "value": "file", + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": null + } + }, + "custom.deviceReportStateConfiguration": { + "reportStateRealtimePeriod": { + "value": null + }, + "reportStateRealtime": { + "value": { + "state": "disabled" + }, + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "reportStatePeriod": { + "value": "enabled", + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null 
+ }, + "coolingSetpoint": { + "value": null + } + }, + "custom.disabledComponents": { + "disabledComponents": { + "value": [ + "icemaker", + "icemaker-02", + "icemaker-03", + "pantry-01", + "pantry-02", + "scale-10", + "scale-11", + "cvroom", + "onedoor" + ], + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "demandResponseLoadControl": { + "drlcStatus": { + "value": { + "drlcType": 1, + "drlcLevel": 0, + "duration": 0, + "override": false + }, + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "samsungce.sabbathMode": { + "supportedActions": { + "value": null + }, + "status": { + "value": null + } + }, + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "energy": 66571, + "deltaEnergy": 19, + "power": 61, + "powerEnergy": 18.91178222020467, + "persistedEnergy": 0, + "energySaved": 0, + "persistedSavedEnergy": 0, + "start": "2025-03-30T18:21:37Z", + "end": "2025-03-30T18:38:18Z" + }, + "timestamp": "2025-03-30T18:38:18.219Z" + } + }, + "refresh": {}, + "execute": { + "data": { + "value": null + } + }, + "sec.wifiConfiguration": { + "autoReconnection": { + "value": true, + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "minVersion": { + "value": "1.0", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "supportedWiFiFreq": { + "value": ["2.4G"], + "timestamp": "2024-12-01T18:22:19.331Z" + }, + "supportedAuthType": { + "value": ["OPEN", "WEP", "WPA-PSK", "WPA2-PSK", "SAE"], + "timestamp": "2024-12-01T18:22:19.331Z" + }, + "protocolType": { + "value": ["helper_hotspot"], + "timestamp": "2024-12-01T18:22:19.331Z" + } + }, + "samsungce.selfCheck": { + "result": { + "value": "passed", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "supportedActions": { + "value": ["start"], + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "progress": { + "value": null + }, + "errors": { + "value": [], + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "status": { + "value": "ready", + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "custom.dustFilter": { + "dustFilterUsageStep": { + "value": null + }, + "dustFilterUsage": { + "value": null + }, + "dustFilterLastResetDate": { + "value": null + }, + "dustFilterStatus": { + "value": null + }, + "dustFilterCapacity": { + "value": null + }, + "dustFilterResetType": { + "value": null + } + }, + "refrigeration": { + "defrost": { + "value": null + }, + "rapidCooling": { + "value": "off", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "rapidFreezing": { + "value": "off", + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "custom.deodorFilter": { + "deodorFilterCapacity": { + "value": null + }, + "deodorFilterLastResetDate": { + "value": null + }, + "deodorFilterStatus": { + "value": null + }, + "deodorFilterResetType": { + "value": null + }, + "deodorFilterUsage": { + "value": null + }, + "deodorFilterUsageStep": { + "value": null + } + }, + "samsungce.powerCool": { + "activated": { + "value": false, + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "custom.energyType": { + "energyType": { + "value": "2.0", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "energySavingSupport": { + "value": true, + "timestamp": "2025-03-06T23:10:37.429Z" + }, + "drMaxDuration": { + "value": 99999999, + "unit": "min", + "timestamp": "2024-12-01T18:22:20.756Z" + }, + "energySavingLevel": { + "value": 1, + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "energySavingInfo": { + "value": null + }, + "supportedEnergySavingLevels": { + "value": [1, 2], + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "energySavingOperation": { + "value": false, + "timestamp": 
"2024-12-01T18:22:19.337Z" + }, + "notificationTemplateID": { + "value": null + }, + "energySavingOperationSupport": { + "value": true, + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": {}, + "timestamp": "2024-12-01T18:55:10.062Z" + }, + "otnDUID": { + "value": "MTCB2ZD4B6BT4", + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2024-12-01T18:22:19.337Z" + }, + "operatingState": { + "value": "none", + "timestamp": "2024-12-01T18:28:40.492Z" + }, + "progress": { + "value": 0, + "unit": "%", + "timestamp": "2024-12-01T18:43:42.645Z" + } + }, + "samsungce.powerFreeze": { + "activated": { + "value": false, + "timestamp": "2024-12-01T18:22:19.337Z" + } + }, + "custom.waterFilter": { + "waterFilterUsageStep": { + "value": null + }, + "waterFilterResetType": { + "value": null + }, + "waterFilterCapacity": { + "value": null + }, + "waterFilterLastResetDate": { + "value": null + }, + "waterFilterUsage": { + "value": null + }, + "waterFilterStatus": { + "value": null + } + } + }, + "cvroom": { + "custom.fridgeMode": { + "fridgeModeValue": { + "value": null + }, + "fridgeMode": { + "value": null + }, + "supportedFridgeModes": { + "value": null + } + }, + "contactSensor": { + "contact": { + "value": null + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": null + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": null + } + } + }, + "icemaker-02": { + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + } + }, + "icemaker-03": { + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_sac_ehs_000001_sub.json b/tests/components/smartthings/fixtures/device_status/da_sac_ehs_000001_sub.json new file mode 100644 index 00000000000..e27c6c3de21 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_sac_ehs_000001_sub.json @@ -0,0 +1,680 @@ +{ + "components": { + "main": { + "demandResponseLoadControl": { + "drlcStatus": { + "value": { + "drlcType": 1, + "drlcLevel": -1, + "start": "1970-01-01T00:00:00Z", + "duration": 0, + "override": false + }, + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "energy": 8193810.0, + "deltaEnergy": 0, + "power": 2.539, + "powerEnergy": 0.009404173966911105, + "persistedEnergy": 8193810.0, + "energySaved": 0, + "start": "2025-03-09T11:14:44Z", + "end": "2025-03-09T11:14:57Z" + }, + "timestamp": "2025-03-09T11:14:57.338Z" + } + }, + "samsungce.ehsCycleData": { + "outdoor": { + "value": [ + { + "timestamp": "2025-03-09T02:00:29Z", + "data": "0038003870FF3C3B46020218019A00050000" + }, + { + "timestamp": "2025-03-09T02:05:29Z", + "data": "0034003471FF3C3C46020218019A00050000" + }, + { + "timestamp": "2025-03-09T02:10:29Z", + "data": "002D002D71FF3D3D460201C9019A00050000" + } + ], + "unit": "C", + "timestamp": "2025-03-09T11:11:30.786Z" + }, + "indoor": { + "value": [ + { + "timestamp": 
"2025-03-09T02:00:29Z", + "data": "5F055C050505002564000000000000000001FFFF00079440" + }, + { + "timestamp": "2025-03-09T02:05:29Z", + "data": "60055E050505002563000000000000000001FFFF00079445" + }, + { + "timestamp": "2025-03-09T02:10:29Z", + "data": "61055F050505002560000000000000000001FFFF0007944B" + } + ], + "unit": "C", + "timestamp": "2025-03-09T11:11:30.786Z" + } + }, + "custom.outingMode": { + "outingMode": { + "value": "off", + "timestamp": "2025-03-09T08:00:05.571Z" + } + }, + "samsungce.ehsThermostat": { + "connectionState": { + "value": "disconnected", + "timestamp": "2025-03-09T08:00:05.562Z" + } + }, + "refresh": {}, + "custom.thermostatSetpointControl": { + "minimumSetpoint": { + "value": 40, + "unit": "C", + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "maximumSetpoint": { + "value": 55, + "unit": "C", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "airConditionerMode": { + "availableAcModes": { + "value": null + }, + "supportedAcModes": { + "value": ["eco", "std", "force"], + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "airConditionerMode": { + "value": "std", + "timestamp": "2025-03-09T08:00:05.562Z" + } + }, + "samsungce.ehsFsvSettings": { + "fsvSettings": { + "value": [ + { + "id": "1031", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 37, + "maxValue": 70, + "value": 70, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "1032", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 15, + "maxValue": 37, + "value": 25, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "1051", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 50, + "maxValue": 70, + "value": 55, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "1052", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 30, + "maxValue": 40, + "value": 40, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2011", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": -20, + "maxValue": 5, + "value": -3, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2012", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 10, + "maxValue": 20, + "value": 15, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2021", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 17, + "maxValue": 70, + "value": 50, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2022", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 17, + "maxValue": 70, + "value": 32, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2031", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 17, + "maxValue": 70, + "value": 50, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2032", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 17, + "maxValue": 70, + "value": 38, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "2091", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 4, + "value": 0, + "isValid": true + }, + { + "id": "2092", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 4, + "value": 0, + "isValid": true + }, + { + "id": "2093", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 1, + "maxValue": 4, + "value": 4, + "isValid": true + }, + { + "id": "3011", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + 
"maxValue": 2, + "value": 2, + "isValid": true + }, + { + "id": "3071", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 1, + "value": 0, + "isValid": true + }, + { + "id": "4011", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 1, + "value": 0, + "isValid": true + }, + { + "id": "4012", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": -15, + "maxValue": 20, + "value": 0, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "4021", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 2, + "value": 0, + "isValid": true + }, + { + "id": "4042", + "inUse": true, + "resolution": 1, + "type": "temperature", + "minValue": 5, + "maxValue": 15, + "value": 10, + "isValid": true, + "temperatureUnit": "C" + }, + { + "id": "4061", + "inUse": true, + "resolution": 1, + "type": "etc", + "minValue": 0, + "maxValue": 1, + "value": 0, + "isValid": true + } + ], + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.information"], + "if": ["oic.if.a"], + "x.com.samsung.da.modelNum": "SAC_EHS_MONO|220614|61007400001600000400000000000000", + "x.com.samsung.da.description": "EHS", + "x.com.samsung.da.serialNum": "", + "x.com.samsung.da.versionId": "Samsung Electronics", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.number": "DB91-02102A 2023-01-11", + "x.com.samsung.da.type": "Software", + "x.com.samsung.da.newVersionAvailable": "false", + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "Version" + }, + { + "x.com.samsung.da.number": "DB91-02100A 2020-07-10", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.newVersionAvailable": "false", + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": "Version" + }, + { + "x.com.samsung.da.number": "DB91-02103B 2022-06-14", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.newVersionAvailable": "false", + "x.com.samsung.da.id": "2", + "x.com.samsung.da.description": "" + }, + { + "x.com.samsung.da.number": "DB91-02450A 2022-07-06", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.newVersionAvailable": "false", + "x.com.samsung.da.id": "3", + "x.com.samsung.da.description": "EHS MONO LOWTEMP" + } + ] + } + }, + "data": { + "href": "/information/vs/0" + }, + "timestamp": "2023-08-02T14:32:28.435Z" + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": null + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": null + }, + "description": { + "value": null + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "SAC_EHS_MONO", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "samsungce.sacDisplayCondition": { + "switch": { + "value": "enabled", + "timestamp": "2025-03-09T08:00:05.514Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-03-09T11:00:27.522Z" + } + }, + "ocf": { + "st": { + "value": "2025-03-06T08:37:35Z", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mndt": { + "value": "", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnfv": { + "value": "20240611.1", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnhw": { + "value": "", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "di": { + "value": "1f98ebd0-ac48-d802-7f62-000001200100", + "timestamp": "2025-03-09T08:18:05.955Z" + }, + "mnsl": { + "value": 
"", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-03-09T08:18:05.955Z" + }, + "n": { + "value": "Eco Heating System", + "timestamp": "2025-03-09T08:18:05.955Z" + }, + "mnmo": { + "value": "SAC_EHS_MONO|220614|61007400001600000400000000000000", + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "vid": { + "value": "DA-SAC-EHS-000001-SUB", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnml": { + "value": "", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnpv": { + "value": "4.0", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "mnos": { + "value": "Tizen", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "pi": { + "value": "1f98ebd0-ac48-d802-7f62-000001200100", + "timestamp": "2025-03-09T08:18:05.953Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-03-09T08:18:05.955Z" + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": "true", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "custom.energyType": { + "energyType": { + "value": "2.0", + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "energySavingSupport": { + "value": false, + "timestamp": "2023-08-02T14:36:25.480Z" + }, + "drMaxDuration": { + "value": null + }, + "energySavingLevel": { + "value": null + }, + "energySavingInfo": { + "value": null + }, + "supportedEnergySavingLevels": { + "value": null + }, + "energySavingOperation": { + "value": null + }, + "notificationTemplateID": { + "value": null + }, + "energySavingOperationSupport": { + "value": null + } + }, + "samsungce.toggleSwitch": { + "switch": { + "value": "off", + "timestamp": "2025-03-09T11:00:22.880Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": ["remoteControlStatus", "demandResponseLoadControl"], + "timestamp": "2025-03-09T08:31:30.641Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 23070101, + "timestamp": "2023-08-02T14:32:26.195Z" + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": null + }, + "otnDUID": { + "value": null + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 54.3, + "unit": "C", + "timestamp": "2025-03-09T10:43:24.134Z" + } + }, + "custom.deviceReportStateConfiguration": { + "reportStateRealtimePeriod": { + "value": "enabled", + "timestamp": "2024-11-08T01:41:37.280Z" + }, + "reportStateRealtime": { + "value": { + "state": "disabled" + }, + "timestamp": "2025-03-08T12:06:55.069Z" + }, + "reportStatePeriod": { + "value": "enabled", + "timestamp": "2024-11-08T01:41:37.280Z" + } + }, + "samsungce.ehsTemperatureReference": { + "temperatureReference": { + "value": "water", + "timestamp": "2025-03-09T07:15:48.438Z" + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": 48, + "unit": "C", + "timestamp": "2025-03-09T10:58:50.857Z" + } + } + }, + "INDOOR": { + "samsungce.ehsThermostat": { + "connectionState": { + "value": "disconnected", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "samsungce.toggleSwitch": { + "switch": { + "value": "off", + 
"timestamp": "2025-03-09T11:14:44.775Z" + } + }, + "temperatureMeasurement": { + "temperatureRange": { + "value": null + }, + "temperature": { + "value": 39.2, + "unit": "C", + "timestamp": "2025-03-09T11:15:49.852Z" + } + }, + "custom.thermostatSetpointControl": { + "minimumSetpoint": { + "value": 25, + "unit": "C", + "timestamp": "2025-03-09T07:06:20.699Z" + }, + "maximumSetpoint": { + "value": 65, + "unit": "C", + "timestamp": "2025-03-09T07:06:20.699Z" + } + }, + "airConditionerMode": { + "availableAcModes": { + "value": null + }, + "supportedAcModes": { + "value": ["auto", "cool", "heat"], + "timestamp": "2025-03-09T08:18:06.394Z" + }, + "airConditionerMode": { + "value": "heat", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "samsungce.ehsTemperatureReference": { + "temperatureReference": { + "value": "water", + "timestamp": "2025-03-09T07:06:20.699Z" + } + }, + "thermostatCoolingSetpoint": { + "coolingSetpointRange": { + "value": null + }, + "coolingSetpoint": { + "value": 25, + "unit": "C", + "timestamp": "2025-03-09T11:14:44.734Z" + } + }, + "samsungce.sacDisplayCondition": { + "switch": { + "value": "enabled", + "timestamp": "2025-03-09T08:18:06.394Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-03-09T11:14:57.238Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json b/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json new file mode 100644 index 00000000000..d52b5186db3 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/da_wm_sc_000001.json @@ -0,0 +1,929 @@ +{ + "components": { + "main": { + "samsungce.welcomeMessage": { + "welcomeMessage": { + "value": null + } + }, + "samsungce.deviceIdentification": { + "micomAssayCode": { + "value": "20299141", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "modelName": { + "value": null + }, + "serialNumber": { + "value": null + }, + "serialNumberExtra": { + "value": null + }, + "modelClassificationCode": { + "value": "3801010200151107020100FF00000000", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "description": { + "value": "DA_DF_TP2_20_COMMON_DF8500A/DC92-02995A_0010", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "releaseYear": { + "value": null + }, + "binaryId": { + "value": "DA_DF_TP2_20_COMMON", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetCycle": { + "supportedCycles": { + "value": [ + { + "cycle": "22", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "23", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "32", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "09", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "12", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + 
"cycle": "0C", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "31", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "0B", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "10", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "0A", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "14", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "13", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6106", + "default": "off", + "options": ["off", "on"] + } + } + }, + { + "cycle": "16", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "24", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "25", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "2F", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6206", + "default": "on", + "options": ["off", "on"] + } + } + }, + { + "cycle": "20", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6204", + "default": "on", + "options": ["on"] + } + } + }, + { + "cycle": "0F", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6204", + "default": "on", + "options": ["on"] + } + } + }, + { + "cycle": "27", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "30", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "15", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1A", + "supportedOptions": { + "keepFresh": { + 
"raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1B", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "1C", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "2D", + "supportedOptions": { + "keepFresh": { + "raw": "660F", + "default": "on", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "07", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + }, + { + "cycle": "08", + "supportedOptions": { + "keepFresh": { + "raw": "66F0", + "default": "off", + "options": ["on", "off"] + }, + "sanitize": { + "raw": "6102", + "default": "off", + "options": ["off"] + } + } + } + ], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "steamClosetCycle": { + "value": "Table_00_Course_22", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "referenceTable": { + "value": { + "id": "Table_00" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "switch": { + "switch": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.quickControl": { + "version": { + "value": null + } + }, + "ocf": { + "st": { + "value": null + }, + "mndt": { + "value": null + }, + "mnfv": { + "value": "DA_DF_TP2_20_COMMON_30230807", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnhw": { + "value": "MediaTek", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "di": { + "value": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnsl": { + "value": "http://www.samsung.com", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "n": { + "value": "[airdresser] Samsung", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnmo": { + "value": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "vid": { + "value": "DA-WM-SC-000001", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnpv": { + "value": "DAWIT 2.0", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "mnos": { + "value": "TizenRT 2.0 + IPv6", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "pi": { + "value": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "timestamp": "2025-01-14T01:42:53.834Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-01-14T01:42:53.834Z" + } + }, + "samsungce.steamClosetCyclePreset": { + "maxNumberOfPresets": { + "value": 10, + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "presets": { + "value": { + "F1": {}, + "F2": {}, + "F3": {}, + "F4": {}, + "F5": {}, + "F6": {}, + "F7": {}, + "F8": {}, + "F9": {}, + "FA": {} + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.disabledCapabilities": { + "disabledCapabilities": { + "value": [ + "custom.steamClosetWrinklePrevent", + 
"custom.veryFineDustFilter", + "demandResponseLoadControl", + "sec.wifiConfiguration", + "samsungce.quickControl", + "samsungce.deviceInfoPrivate" + ], + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.driverVersion": { + "versionNumber": { + "value": 24110101, + "timestamp": "2024-12-02T07:55:47.237Z" + } + }, + "sec.diagnosticsInformation": { + "logType": { + "value": ["errCode", "dump"], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "endpoint": { + "value": "SSM", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "minVersion": { + "value": "1.0", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "signinPermission": { + "value": null + }, + "setupId": { + "value": "A00", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "protocolType": { + "value": "wifi_https", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "tsId": { + "value": null + }, + "mnId": { + "value": "0AJT", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "dumpType": { + "value": "file", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetKeepFreshMode": { + "operatingState": { + "value": "ready", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "status": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.kidsLock": { + "lockState": { + "value": "unlocked", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "demandResponseLoadControl": { + "drlcStatus": { + "value": null + } + }, + "powerConsumptionReport": { + "powerConsumption": { + "value": { + "energy": 207500, + "deltaEnergy": 0, + "power": 0, + "powerEnergy": 0.0, + "persistedEnergy": 0, + "energySaved": 0, + "start": "2025-02-10T22:51:59Z", + "end": "2025-02-11T08:21:17Z" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "dryerOperatingState": { + "completionTime": { + "value": "2025-02-11T09:00:17Z", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "machineState": { + "value": "stop", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "supportedMachineStates": { + "value": ["stop", "run", "pause"], + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "dryerJobState": { + "value": "none", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "refresh": {}, + "samsungce.steamClosetSanitizeMode": { + "status": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.jobBeginningStatus": { + "jobBeginningStatus": { + "value": null + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.da.information"], + "if": ["oic.if.baseline", "oic.if.a"], + "x.com.samsung.da.modelNum": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "x.com.samsung.da.description": "DA_DF_TP2_20_COMMON_DF8500A/DC92-02995A_0010", + "x.com.samsung.da.serialNum": "1EG158TW400002M", + "x.com.samsung.da.otnDUID": "MTCHUODP5V4FA", + "x.com.samsung.da.diagProtocolType": "WIFI_HTTPS", + "x.com.samsung.da.diagLogType": ["errCode", "dump"], + "x.com.samsung.da.diagDumpType": "file", + "x.com.samsung.da.diagEndPoint": "SSM", + "x.com.samsung.da.diagMnid": "0AJT", + "x.com.samsung.da.diagSetupid": "A00", + "x.com.samsung.da.diagMinVersion": "1.0", + "x.com.samsung.da.items": [ + { + "x.com.samsung.da.id": "0", + "x.com.samsung.da.description": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "x.com.samsung.da.type": "Software", + "x.com.samsung.da.number": "02673A230807(F821)", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "1", + "x.com.samsung.da.description": 
"Firmware_1_DB_20299141210618090FFFFF202995412203111604FFFF(015E2029914120299541_30000000)(FileDown:0)(Type:0)", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "21061809,22031116", + "x.com.samsung.da.newVersionAvailable": "0" + }, + { + "x.com.samsung.da.id": "2", + "x.com.samsung.da.description": "Firmware_2_DB_2023564319111852041FFFFFFFFFFFFFFFFFFFFFFFFE(015E20235643FFFFFFFF_30000000)(FileDown:0)(Type:0)", + "x.com.samsung.da.type": "Firmware", + "x.com.samsung.da.number": "19111852,FFFFFFFF" + } + ] + } + }, + "data": { + "href": "/information/vs/0" + }, + "timestamp": "2024-03-06T11:24:05.312Z" + } + }, + "samsungce.steamClosetDelayEnd": { + "remainingTime": { + "value": 0, + "unit": "min", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "samsungce.steamClosetAutoCycleLink": { + "steamClosetAutoCycleLink": { + "value": "on", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "sec.wifiConfiguration": { + "autoReconnection": { + "value": null + }, + "minVersion": { + "value": null + }, + "supportedWiFiFreq": { + "value": null + }, + "supportedAuthType": { + "value": null + }, + "protocolType": { + "value": null + } + }, + "custom.steamClosetWrinklePrevent": { + "steamClosetWrinklePrevent": { + "value": "off", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "remoteControlStatus": { + "remoteControlEnabled": { + "value": "false", + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.supportedOptions": { + "course": { + "value": null + }, + "referenceTable": { + "value": { + "id": "Table_00" + }, + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "supportedCourses": { + "value": [ + "22", + "23", + "32", + "09", + "12", + "0C", + "31", + "0B", + "10", + "0A", + "14", + "13", + "16", + "24", + "25", + "2F", + "20", + "0F", + "27", + "30", + "15", + "1A", + "1B", + "1C", + "2D", + "07", + "08" + ], + "timestamp": "2025-02-11T08:21:17.534Z" + } + }, + "custom.steamClosetOperatingState": { + "supportedSteamClosetJobState": { + "value": ["none", "steaming", "airwashing", "drying", "finish"], + "timestamp": "2025-02-09T22:16:19.221Z" + }, + "completionTime": { + "value": "2025-02-11T09:00:17Z", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "steamClosetMachineState": { + "value": "stop", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "supportedSteamClosetMachineState": { + "value": ["stop", "run", "pause"], + "timestamp": "2023-06-23T16:00:41.238Z" + }, + "steamClosetJobState": { + "value": "none", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "progress": { + "value": 1, + "unit": "%", + "timestamp": "2025-02-10T22:53:25.928Z" + }, + "remainingTimeStr": { + "value": "00:39", + "timestamp": "2025-02-10T22:53:25.928Z" + }, + "steamClosetDelayEndTime": { + "value": null + }, + "remainingTime": { + "value": 39, + "unit": "min", + "timestamp": "2025-02-10T22:53:25.928Z" + } + }, + "custom.energyType": { + "energyType": { + "value": "2.0", + "timestamp": "2024-03-06T11:24:06.106Z" + }, + "energySavingSupport": { + "value": false, + "timestamp": "2024-03-06T11:24:06.106Z" + }, + "drMaxDuration": { + "value": null + }, + "energySavingLevel": { + "value": null + }, + "energySavingInfo": { + "value": null + }, + "supportedEnergySavingLevels": { + "value": null + }, + "energySavingOperation": { + "value": null + }, + "notificationTemplateID": { + "value": null + }, + "energySavingOperationSupport": { + "value": null + } + }, + "samsungce.softwareUpdate": { + "targetModule": { + "value": {}, + "timestamp": "2025-02-09T17:33:28.019Z" + }, + "otnDUID": { + "value": 
"MTCHUODP5V4FA", + "timestamp": "2025-02-11T08:21:17.534Z" + }, + "lastUpdatedDate": { + "value": null + }, + "availableModules": { + "value": [], + "timestamp": "2023-06-23T16:00:41.636Z" + }, + "newVersionAvailable": { + "value": false, + "timestamp": "2025-02-09T17:33:28.019Z" + }, + "operatingState": { + "value": null + }, + "progress": { + "value": null + } + }, + "custom.veryFineDustFilter": { + "veryFineDustFilterStatus": { + "value": null + }, + "veryFineDustFilterResetType": { + "value": null + }, + "veryFineDustFilterUsage": { + "value": null + }, + "veryFineDustFilterLastResetDate": { + "value": null + }, + "veryFineDustFilterUsageStep": { + "value": null + }, + "veryFineDustFilterCapacity": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/heatit_zpushwall.json b/tests/components/smartthings/fixtures/device_status/heatit_zpushwall.json new file mode 100644 index 00000000000..591d1128ea0 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/heatit_zpushwall.json @@ -0,0 +1,116 @@ +{ + "components": { + "button4": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-02-10T08:01:11.326Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.695Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.717Z" + } + } + }, + "button5": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-03-09T16:37:40.792Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.762Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.813Z" + } + } + }, + "button2": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-02-10T08:00:57.171Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.861Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.906Z" + } + } + }, + "button3": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-01-30T05:53:00.663Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.852Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.848Z" + } + } + }, + "button6": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2024-10-02T13:11:07.346Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.816Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.848Z" + } + } + }, + "main": { + "refresh": {}, + "battery": { + "quantity": { + "value": null + }, + "battery": { + "value": 100, + "unit": "%", + "timestamp": "2025-03-10T10:32:19.528Z" + }, + "type": { + "value": null + } + } + }, + "button1": { + "button": { + "button": { + "value": "pushed", + "timestamp": "2025-01-30T05:52:46.718Z" + }, + "numberOfButtons": { + "value": 1, + "timestamp": "2023-12-04T16:51:16.717Z" + }, + "supportedButtonValues": { + "value": ["pushed", "held", "down_hold"], + "timestamp": "2023-12-04T16:51:16.767Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/hub.json b/tests/components/smartthings/fixtures/device_status/hub.json new file mode 100644 index 00000000000..98ff4c3a8b4 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/hub.json @@ -0,0 +1,3 @@ +{ + 
"components": {} +} diff --git a/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json b/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json new file mode 100644 index 00000000000..8cd0d3e35a9 --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/hw_q80r_soundbar.json @@ -0,0 +1,173 @@ +{ + "components": { + "main": { + "mediaPlayback": { + "supportedPlaybackCommands": { + "value": ["play", "pause", "stop"], + "timestamp": "2025-03-23T01:10:02.207Z" + }, + "playbackStatus": { + "value": "playing", + "timestamp": "2025-03-23T01:19:44.622Z" + } + }, + "samsungvd.groupInfo": { + "role": { + "value": "none", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "channel": { + "value": "all", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "masterName": { + "value": "", + "timestamp": "2025-03-23T01:17:10.965Z" + }, + "status": { + "value": "single", + "timestamp": "2025-03-23T01:17:10.965Z" + } + }, + "audioVolume": { + "volume": { + "value": 1, + "unit": "%", + "timestamp": "2025-03-23T01:17:13.754Z" + } + }, + "ocf": { + "st": { + "value": "NONE", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mndt": { + "value": "2018-01-01", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnfv": { + "value": "HW-Q80RWWB-1012.6", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnhw": { + "value": "0-0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "di": { + "value": "afcf3b91-48fe-4c3b-ab44-ddff2a0a6577", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnsl": { + "value": "http://www.samsung.com/sec/audio-video/", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "n": { + "value": "[AV] Samsung Soundbar Q80R", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnmo": { + "value": "Q80R", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "vid": { + "value": "VD-NetworkAudio-001S", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnml": { + "value": "http://www.samsung.com", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnpv": { + "value": "Tizen 4.0", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "mnos": { + "value": "4.1.10", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "pi": { + "value": "afcf3b91-48fe-4c3b-ab44-ddff2a0a6577", + "timestamp": "2024-12-18T21:07:25.406Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2024-12-18T21:07:25.406Z" + } + }, + "mediaInputSource": { + "supportedInputSources": { + "value": ["wifi", "bluetooth", "HDMI1", "HDMI2", "digital"], + "timestamp": "2025-03-23T01:18:01.663Z" + }, + "inputSource": { + "value": "wifi", + "timestamp": "2025-03-23T01:18:01.663Z" + } + }, + "refresh": {}, + "audioNotification": {}, + "audioMute": { + "mute": { + "value": "unmuted", + "timestamp": "2025-03-23T01:17:11.024Z" + } + }, + "execute": { + "data": { + "value": { + "payload": { + "rt": ["x.com.samsung.networkaudio.soundmode"], + "if": ["oic.if.a", "oic.if.baseline"], + "x.com.samsung.networkaudio.soundmode": "standard" + } + }, + "data": { + "href": "/sec/networkaudio/soundmode" + }, + "timestamp": "2023-07-16T23:16:55.582Z" + } + }, + "samsungvd.audioInputSource": { + "supportedInputSources": { + "value": ["wifi", "bluetooth", "HDMI1", "HDMI2", "digital"], + "timestamp": "2025-03-23T01:18:01.663Z" + }, + "inputSource": { + "value": "wificp", + "timestamp": "2025-03-23T01:18:01.663Z" + } + }, + "switch": { + "switch": { + 
"value": "on", + "timestamp": "2025-03-23T01:19:44.837Z" + } + }, + "audioTrackData": { + "totalTime": { + "value": null, + "timestamp": "2020-07-30T16:09:09.109Z" + }, + "audioTrackData": { + "value": { + "title": "Never Gonna Give You Up", + "artist": "Rick Astley" + }, + "timestamp": "2025-03-23T01:19:15.067Z" + }, + "elapsedTime": { + "value": null, + "timestamp": "2020-07-30T16:09:09.109Z" + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json b/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json new file mode 100644 index 00000000000..cffefa20c4a --- /dev/null +++ b/tests/components/smartthings/fixtures/device_status/vd_sensor_light_2023.json @@ -0,0 +1,95 @@ +{ + "components": { + "main": { + "ocf": { + "st": { + "value": "2025-01-14T08:07:36Z", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mndt": { + "value": "2023-01-01", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnfv": { + "value": "latest", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnhw": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "di": { + "value": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnsl": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "dmv": { + "value": "res.1.1.0,sh.1.1.0", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "n": { + "value": "Light Sensor - 55 The Frame", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnmo": { + "value": "QE55LS03DAUXXN", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "vid": { + "value": "VD-Sensor.Light-2023", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnmn": { + "value": "Samsung Electronics", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnml": { + "value": "", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnpv": { + "value": "8.0", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "mnos": { + "value": "Tizen", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "pi": { + "value": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "timestamp": "2025-01-14T08:07:40.220Z" + }, + "icv": { + "value": "core.1.1.0", + "timestamp": "2025-01-14T08:07:40.220Z" + } + }, + "samsungvd.deviceCategory": { + "category": { + "value": null + } + }, + "relativeBrightness": { + "brightnessIntensity": { + "value": 2, + "unit": "level", + "timestamp": "2025-02-11T19:08:25.539Z" + } + }, + "refresh": {}, + "execute": { + "data": { + "value": null + } + }, + "switch": { + "switch": { + "value": null + } + } + } + } +} diff --git a/tests/components/smartthings/fixtures/devices/aeotec_home_energy_meter_gen5.json b/tests/components/smartthings/fixtures/devices/aeotec_home_energy_meter_gen5.json index 5ef0e2fd9eb..ab2fe41c678 100644 --- a/tests/components/smartthings/fixtures/devices/aeotec_home_energy_meter_gen5.json +++ b/tests/components/smartthings/fixtures/devices/aeotec_home_energy_meter_gen5.json @@ -45,7 +45,6 @@ } ], "createTime": "2023-01-12T23:02:44.917Z", - "parentDeviceId": "6a2d07a4-dd77-48bc-9acf-017029aaf099", "profile": { "id": "6372c227-93c7-32ef-9be5-aef2221adff1" }, diff --git a/tests/components/smartthings/fixtures/devices/base_electric_meter.json b/tests/components/smartthings/fixtures/devices/base_electric_meter.json index 9e0c130978c..a81ca788b29 100644 --- a/tests/components/smartthings/fixtures/devices/base_electric_meter.json +++ b/tests/components/smartthings/fixtures/devices/base_electric_meter.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "0086-0002-0009", "locationId": 
"c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", - "roomId": "94be4a1e-382a-4b7f-a5ef-fdb1a7d9f9e6", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -37,7 +37,6 @@ } ], "createTime": "2023-06-03T16:23:57.284Z", - "parentDeviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", "profile": { "id": "d382796f-8ed5-3088-8735-eb03e962203b" }, diff --git a/tests/components/smartthings/fixtures/devices/c2c_arlo_pro_3_switch.json b/tests/components/smartthings/fixtures/devices/c2c_arlo_pro_3_switch.json index a9e3bddb2ca..21d4d475e7a 100644 --- a/tests/components/smartthings/fixtures/devices/c2c_arlo_pro_3_switch.json +++ b/tests/components/smartthings/fixtures/devices/c2c_arlo_pro_3_switch.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Arlo", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", - "roomId": "68b45114-9af8-4906-8636-b973a6faa271", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", diff --git a/tests/components/smartthings/fixtures/devices/centralite.json b/tests/components/smartthings/fixtures/devices/centralite.json index 68cdbdf4499..d94043efbc8 100644 --- a/tests/components/smartthings/fixtures/devices/centralite.json +++ b/tests/components/smartthings/fixtures/devices/centralite.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "CentraLite", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", - "roomId": "94be4a1e-382a-4b7f-a5ef-fdb1a7d9f9e6", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -45,7 +45,6 @@ } ], "createTime": "2024-08-15T22:16:37.926Z", - "parentDeviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", "profile": { "id": "24195ea4-635c-3450-a235-71bc78ab3d1c" }, diff --git a/tests/components/smartthings/fixtures/devices/contact_sensor.json b/tests/components/smartthings/fixtures/devices/contact_sensor.json index a5de2e2cbfe..9823a70cb61 100644 --- a/tests/components/smartthings/fixtures/devices/contact_sensor.json +++ b/tests/components/smartthings/fixtures/devices/contact_sensor.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Visonic", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", - "roomId": "68b45114-9af8-4906-8636-b973a6faa271", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -42,14 +42,13 @@ "categoryType": "manufacturer" }, { - "name": "ContactSensor", + "name": "GarageDoor", "categoryType": "user" } ] } ], "createTime": "2023-09-28T17:38:59.179Z", - "parentDeviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", "profile": { "id": "22aa5a07-ac33-365f-b2f1-5ecef8cdb0eb" }, diff --git a/tests/components/smartthings/fixtures/devices/da_ac_airsensor_01001.json b/tests/components/smartthings/fixtures/devices/da_ac_airsensor_01001.json new file mode 100644 index 00000000000..c8304e9c6d8 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_ac_airsensor_01001.json @@ -0,0 +1,145 @@ +{ + "items": [ + { + "deviceId": "a3a970ea-e09c-9c04-161b-94c934e21666", + "name": "Samsung AirMonitor", + "label": "\uc5d0\uc5b4\ubaa8\ub2c8\ud130 \ud50c\ub7ec\uc2a4", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-AC-AIRSENSOR-01001", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "33db9e71-abe9-43a0-acd3-3f0927bbe5b7", + "ownerId": "9a1ee192-04ba-46ca-9c3d-196d8dbcf807", + "roomId": 
"445c006d-1796-4dd6-8308-1c3cd045e8ff", + "deviceTypeName": "x.com.st.d.airqualitysensor", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "relativeHumidityMeasurement", + "version": 1 + }, + { + "id": "dustSensor", + "version": 1 + }, + { + "id": "dustHealthConcern", + "version": 1 + }, + { + "id": "fineDustHealthConcern", + "version": 1 + }, + { + "id": "veryFineDustSensor", + "version": 1 + }, + { + "id": "veryFineDustHealthConcern", + "version": 1 + }, + { + "id": "airQualitySensor", + "version": 1 + }, + { + "id": "odorSensor", + "version": 1 + }, + { + "id": "carbonDioxideMeasurement", + "version": 1 + }, + { + "id": "carbonDioxideHealthConcern", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.rechargeableBattery", + "version": 1 + }, + { + "id": "samsungce.doNotDisturb", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "custom.deviceReportStateConfiguration", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "sec.diagnosticsInformation", + "version": 1 + } + ], + "categories": [ + { + "name": "AirQualityDetector", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-12-09T04:05:59.040Z", + "profile": { + "id": "1d34dd9d-6840-3df6-a6d0-5d9f4a4af2e1" + }, + "ocf": { + "ocfDeviceType": "x.com.st.d.airqualitysensor", + "name": "Samsung AirMonitor", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "1.2.1", + "manufacturerName": "Samsung Electronics", + "modelNumber": "ASM-KR-TP1-22-ACMB1M|10243041|75000000001611C40800020000080000", + "platformVersion": "DAWIT 2.0", + "platformOS": "TizenRT 4.0", + "hwVersion": "Realtek", + "firmwareVersion": "ASM-KR-TP1-22-ACMB1M_16240426", + "vendorId": "DA-AC-AIRSENSOR-01001", + "vendorResourceClientServerVersion": "MediaTek Release 240426", + "lastSignupTime": "2023-12-09T04:05:54.816486Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_ac_rac_000001.json b/tests/components/smartthings/fixtures/devices/da_ac_rac_000001.json index 4f6faeddb09..cc4e13784bf 100644 --- a/tests/components/smartthings/fixtures/devices/da_ac_rac_000001.json +++ b/tests/components/smartthings/fixtures/devices/da_ac_rac_000001.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "58d3fd7c-c512-4da3-b500-ef269382756c", "ownerId": "f9a28d7c-1ed5-d9e9-a81c-18971ec081db", - "roomId": "85a79db4-9cf2-4f09-a5b2-cd70a5c0cef0", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF Air Conditioner", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/da_ac_rac_000003.json b/tests/components/smartthings/fixtures/devices/da_ac_rac_000003.json new file mode 100644 index 00000000000..44dafc213f0 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_ac_rac_000003.json @@ -0,0 +1,217 @@ +{ + "items": [ + { + "deviceId": "c76d6f38-1b7f-13dd-37b5-db18d5272783", + "name": "Samsung Room A/C", + "label": "Office 
AirFree", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-AC-RAC-000003", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "403cd42e-f692-416c-91fd-1883c00e3262", + "ownerId": "dd474e5c-59c0-4bea-a319-ff5287fd3373", + "roomId": "dffe353e-b3c5-4a97-8a8a-797ccc649fab", + "deviceTypeName": "Samsung OCF Air Conditioner", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "airConditionerMode", + "version": 1 + }, + { + "id": "airConditionerFanMode", + "version": 1 + }, + { + "id": "fanOscillationMode", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "relativeHumidityMeasurement", + "version": 1 + }, + { + "id": "airQualitySensor", + "version": 1 + }, + { + "id": "odorSensor", + "version": 1 + }, + { + "id": "dustSensor", + "version": 1 + }, + { + "id": "veryFineDustSensor", + "version": 1 + }, + { + "id": "audioVolume", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + }, + { + "id": "demandResponseLoadControl", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "custom.spiMode", + "version": 1 + }, + { + "id": "custom.thermostatSetpointControl", + "version": 1 + }, + { + "id": "custom.airConditionerOptionalMode", + "version": 1 + }, + { + "id": "custom.airConditionerTropicalNightMode", + "version": 1 + }, + { + "id": "custom.autoCleaningMode", + "version": 1 + }, + { + "id": "custom.deviceReportStateConfiguration", + "version": 1 + }, + { + "id": "custom.energyType", + "version": 1 + }, + { + "id": "custom.dustFilter", + "version": 1 + }, + { + "id": "custom.veryFineDustFilter", + "version": 1 + }, + { + "id": "custom.deodorFilter", + "version": 1 + }, + { + "id": "custom.electricHepaFilter", + "version": 1 + }, + { + "id": "custom.doNotDisturbMode", + "version": 1 + }, + { + "id": "custom.periodicSensing", + "version": 1 + }, + { + "id": "custom.airConditionerOdorController", + "version": 1 + }, + { + "id": "custom.ocfResourceVersion", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.dustFilterAlarm", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.selfCheck", + "version": 1 + }, + { + "id": "samsungce.individualControlLock", + "version": 1 + } + ], + "categories": [ + { + "name": "AirConditioner", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2024-06-21T13:45:16.238Z", + "profile": { + "id": "cedae6e3-1ec9-37e3-9aba-f717518156b8" + }, + "ocf": { + "ocfDeviceType": "oic.d.airconditioner", + "name": "Samsung Room A/C", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "1.2.1", + "manufacturerName": "Samsung Electronics", + "modelNumber": "ARTIK051_PRAC_20K|10256941|60010534001411014600003200800000", + "platformVersion": "DAWIT 2.0", + "platformOS": "TizenRT 1.0 + IPv6", + "hwVersion": "ARTIK051", + "firmwareVersion": "ARTIK051_PRAC_20K_11230313", + "vendorId": "DA-AC-RAC-000003", + "vendorResourceClientServerVersion": "ARTIK051 Release 2.211222.1", + "lastSignupTime": "2024-06-21T13:45:08.592221Z", + "transferCandidate": false, 
+ "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_ac_rac_01001.json b/tests/components/smartthings/fixtures/devices/da_ac_rac_01001.json index 8d9ebde5bcd..db6f8d09673 100644 --- a/tests/components/smartthings/fixtures/devices/da_ac_rac_01001.json +++ b/tests/components/smartthings/fixtures/devices/da_ac_rac_01001.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "c4189ac1-208f-461a-8ab6-ea67937b3743", "ownerId": "85ea07e1-7063-f673-3ba5-125293f297c8", - "roomId": "1f66199a-1773-4d8f-97b7-44c312a62cf7", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF Air Conditioner", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json b/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json new file mode 100644 index 00000000000..433e45dae7a --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_ks_cooktop_31001.json @@ -0,0 +1,277 @@ +{ + "items": [ + { + "deviceId": "808dbd84-f357-47e2-a0cd-3b66fa22d584", + "name": "Builtin Cooktop", + "label": "Induction Hob", + "manufacturerName": "0A4H", + "presentationId": "DA-KS-COOKTOP-31001", + "deviceManufacturerCode": "0A4H", + "locationId": "7d27161a-0ef6-4294-91a0-80054ea5bc59", + "ownerId": "d52fb883-0f76-f4d9-0f6a-7ec2c0987b11", + "roomId": "afe14ff1-d444-420d-a766-4dd52f3e1c71", + "deviceTypeId": "Cooktop", + "deviceTypeName": "Samsung Cooktop", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "healthCheck", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "audioMute", + "version": 1 + }, + { + "id": "custom.disabledComponents", + "version": 1 + }, + { + "id": "custom.userNotification", + "version": 1 + }, + { + "id": "custom.cooktopOperatingState", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.kitchenDeviceIdentification", + "version": 1 + }, + { + "id": "samsungce.softwareVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.errorAndAlarmState", + "version": 1 + }, + { + "id": "samsungce.remoteManagementData", + "version": 1 + }, + { + "id": "samsungce.kidsLockControl", + "version": 1 + }, + { + "id": "samsungce.cooktopFlexZone", + "version": 1 + } + ], + "categories": [ + { + "name": "Cooktop", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-01", + "label": "burner-01", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-02", + "label": "burner-02", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-03", + "label": "burner-03", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": 
"samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-04", + "label": "burner-04", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-05", + "label": "burner-05", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "burner-06", + "label": "burner-06", + "capabilities": [ + { + "id": "samsungce.surfaceResidualHeat", + "version": 1 + }, + { + "id": "samsungce.cooktopHeatingPower", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "hood", + "label": "hood", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "samsungce.connectionState", + "version": 1 + }, + { + "id": "samsungce.hoodFanSpeed", + "version": 1 + }, + { + "id": "samsungce.lamp", + "version": 1 + }, + { + "id": "samsungce.countDownTimer", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2025-03-25T18:18:23.576Z", + "profile": { + "id": "a99bbcb8-51c9-468d-b9d5-0ce6dca09d5a" + }, + "mqtt": { + "executingLocally": false, + "transferCandidate": false + }, + "type": "MQTT", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_ks_microwave_0101x.json b/tests/components/smartthings/fixtures/devices/da_ks_microwave_0101x.json index f6599fee461..f636b069e38 100644 --- a/tests/components/smartthings/fixtures/devices/da_ks_microwave_0101x.json +++ b/tests/components/smartthings/fixtures/devices/da_ks_microwave_0101x.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "586e4602-34ab-4a22-993e-5f616b04604f", "ownerId": "b603d7e8-6066-4e10-8102-afa752a63816", - "roomId": "f4d03391-ab13-4c1d-b4dc-d6ddf86014a2", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "oic.d.microwave", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/da_ref_normal_000001.json b/tests/components/smartthings/fixtures/devices/da_ref_normal_000001.json index 67afc0ad32c..29372cac23c 100644 --- a/tests/components/smartthings/fixtures/devices/da_ref_normal_000001.json +++ b/tests/components/smartthings/fixtures/devices/da_ref_normal_000001.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", - "roomId": "3a1f7e7c-4e59-4c29-adb0-0813be691efd", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF Refrigerator", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/da_ref_normal_01011.json b/tests/components/smartthings/fixtures/devices/da_ref_normal_01011.json new file mode 100644 index 00000000000..9be5db0bda9 --- /dev/null +++ 
b/tests/components/smartthings/fixtures/devices/da_ref_normal_01011.json @@ -0,0 +1,521 @@ +{ + "items": [ + { + "deviceId": "5758b2ec-563e-f39b-ec39-208e54aabf60", + "name": "Samsung-Refrigerator", + "label": "Frigo", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-REF-NORMAL-01011", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "d91ee683-be36-4124-9200-c0030253fbc2", + "ownerId": "60b5179d-607f-f754-a648-6e1e21aeeb31", + "roomId": "c4f98377-534d-422f-b061-a4f3e281ddf5", + "deviceTypeName": "Samsung OCF Refrigerator", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "contactSensor", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "ocf", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + }, + { + "id": "demandResponseLoadControl", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "refrigeration", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "custom.deodorFilter", + "version": 1 + }, + { + "id": "custom.dustFilter", + "version": 1 + }, + { + "id": "custom.deviceReportStateConfiguration", + "version": 1 + }, + { + "id": "custom.energyType", + "version": 1 + }, + { + "id": "custom.fridgeMode", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.disabledComponents", + "version": 1 + }, + { + "id": "custom.waterFilter", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.fridgeVacationMode", + "version": 1 + }, + { + "id": "samsungce.powerCool", + "version": 1 + }, + { + "id": "samsungce.powerFreeze", + "version": 1 + }, + { + "id": "samsungce.sabbathMode", + "version": 1 + }, + { + "id": "samsungce.selfCheck", + "version": 1 + }, + { + "id": "samsungce.viewInside", + "version": 1 + }, + { + "id": "samsungce.fridgeWelcomeLighting", + "version": 1 + }, + { + "id": "samsungce.quickControl", + "version": 1 + }, + { + "id": "sec.diagnosticsInformation", + "version": 1 + }, + { + "id": "sec.wifiConfiguration", + "version": 1 + } + ], + "categories": [ + { + "name": "Refrigerator", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "freezer", + "label": "freezer", + "capabilities": [ + { + "id": "contactSensor", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.fridgeMode", + "version": 1 + }, + { + "id": "custom.thermostatSetpointControl", + "version": 1 + }, + { + "id": "samsungce.temperatureSetting", + "version": 1 + }, + { + "id": "samsungce.freezerConvertMode", + "version": 1 + }, + { + "id": "samsungce.unavailableCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "cooler", + "label": "cooler", + "capabilities": [ + { + "id": "contactSensor", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.fridgeMode", + "version": 1 + }, + { + "id": "custom.thermostatSetpointControl", + "version": 
1 + }, + { + "id": "samsungce.temperatureSetting", + "version": 1 + }, + { + "id": "samsungce.unavailableCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "cvroom", + "label": "cvroom", + "capabilities": [ + { + "id": "contactSensor", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.fridgeMode", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "onedoor", + "label": "onedoor", + "capabilities": [ + { + "id": "contactSensor", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.fridgeMode", + "version": 1 + }, + { + "id": "custom.thermostatSetpointControl", + "version": 1 + }, + { + "id": "samsungce.temperatureSetting", + "version": 1 + }, + { + "id": "samsungce.freezerConvertMode", + "version": 1 + }, + { + "id": "samsungce.unavailableCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "icemaker", + "label": "icemaker", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "icemaker-02", + "label": "icemaker-02", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "icemaker-03", + "label": "icemaker-03", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "scale-10", + "label": "scale-10", + "capabilities": [ + { + "id": "samsungce.weightMeasurement", + "version": 1 + }, + { + "id": "samsungce.weightMeasurementCalibration", + "version": 1 + }, + { + "id": "samsungce.connectionState", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "scale-11", + "label": "scale-11", + "capabilities": [ + { + "id": "samsungce.weightMeasurement", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "pantry-01", + "label": "pantry-01", + "capabilities": [ + { + "id": "samsungce.fridgePantryInfo", + "version": 1 + }, + { + "id": "samsungce.fridgePantryMode", + "version": 1 + }, + { + "id": "samsungce.meatAging", + "version": 1 + }, + { + "id": "samsungce.foodDefrost", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "pantry-02", + "label": "pantry-02", + "capabilities": [ + { + "id": "samsungce.fridgePantryInfo", + "version": 1 + }, + { + "id": "samsungce.fridgePantryMode", + "version": 1 + }, + { + "id": 
"samsungce.meatAging", + "version": 1 + }, + { + "id": "samsungce.foodDefrost", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2024-12-01T18:22:14.880Z", + "profile": { + "id": "37c7b355-bdaa-371b-b246-dbdf2a7f9c84" + }, + "ocf": { + "ocfDeviceType": "oic.d.refrigerator", + "name": "Samsung-Refrigerator", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "1.2.1", + "manufacturerName": "Samsung Electronics", + "modelNumber": "TP1X_REF_21K|00156941|00050126001611304100000030010000", + "platformVersion": "DAWIT 2.0", + "platformOS": "TizenRT 3.1", + "hwVersion": "Realtek", + "firmwareVersion": "A-RFWW-TP1-22-REV1_20241030", + "vendorId": "DA-REF-NORMAL-01011", + "vendorResourceClientServerVersion": "Realtek Release 3.1.240221", + "lastSignupTime": "2024-12-01T18:22:14.807976528Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_rvc_normal_000001.json b/tests/components/smartthings/fixtures/devices/da_rvc_normal_000001.json index b355eedb17a..b7f8ab2a42c 100644 --- a/tests/components/smartthings/fixtures/devices/da_rvc_normal_000001.json +++ b/tests/components/smartthings/fixtures/devices/da_rvc_normal_000001.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "586e4602-34ab-4a22-993e-5f616b04604f", "ownerId": "b603d7e8-6066-4e10-8102-afa752a63816", - "roomId": "5d425f41-042a-4d9a-92c4-e43150a61bae", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF Robot Vacuum", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/da_sac_ehs_000001_sub.json b/tests/components/smartthings/fixtures/devices/da_sac_ehs_000001_sub.json new file mode 100644 index 00000000000..dffe57b3280 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_sac_ehs_000001_sub.json @@ -0,0 +1,202 @@ +{ + "items": [ + { + "deviceId": "1f98ebd0-ac48-d802-7f62-000001200100", + "name": "Eco Heating System", + "label": "Eco Heating System", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-SAC-EHS-000001-SUB", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "d22d6401-6070-4928-8e7b-b724e2dbf425", + "ownerId": "35445a41-3ae2-4bc0-6f51-31705de6b96f", + "roomId": "169ef666-a51d-4d74-9b45-e660ecd4a8d7", + "deviceTypeName": "Samsung OCF Air Conditioner", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "airConditionerMode", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + }, + { + "id": "demandResponseLoadControl", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "custom.energyType", + "version": 1 + }, + { + "id": "custom.outingMode", + "version": 1 + }, + { + "id": "custom.thermostatSetpointControl", + "version": 1 + }, + { + "id": "custom.deviceReportStateConfiguration", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + 
"version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.sacDisplayCondition", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.ehsFsvSettings", + "version": 1 + }, + { + "id": "samsungce.ehsCycleData", + "version": 1 + }, + { + "id": "samsungce.ehsTemperatureReference", + "version": 1 + }, + { + "id": "samsungce.ehsThermostat", + "version": 1 + }, + { + "id": "samsungce.toggleSwitch", + "version": 1 + } + ], + "categories": [ + { + "name": "AirConditioner", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "INDOOR", + "label": "INDOOR", + "capabilities": [ + { + "id": "switch", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "thermostatCoolingSetpoint", + "version": 1 + }, + { + "id": "airConditionerMode", + "version": 1 + }, + { + "id": "custom.thermostatSetpointControl", + "version": 1 + }, + { + "id": "samsungce.ehsTemperatureReference", + "version": 1 + }, + { + "id": "samsungce.sacDisplayCondition", + "version": 1 + }, + { + "id": "samsungce.ehsThermostat", + "version": 1 + }, + { + "id": "samsungce.toggleSwitch", + "version": 1 + } + ], + "categories": [ + { + "name": "Other", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-08-02T14:32:26.006Z", + "parentDeviceId": "1f98ebd0-ac48-d802-7f62-12592d8286b7", + "profile": { + "id": "54b9789f-2c8c-310d-9e14-9a84903c792b" + }, + "ocf": { + "ocfDeviceType": "oic.d.airconditioner", + "name": "Eco Heating System", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "1.2.1", + "manufacturerName": "Samsung Electronics", + "modelNumber": "SAC_EHS_MONO|220614|61007400001600000400000000000000", + "platformVersion": "4.0", + "platformOS": "Tizen", + "hwVersion": "", + "firmwareVersion": "20240611.1", + "vendorId": "DA-SAC-EHS-000001-SUB", + "vendorResourceClientServerVersion": "3.2.20", + "lastSignupTime": "2023-08-02T14:32:25.282882Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_wm_dw_000001.json b/tests/components/smartthings/fixtures/devices/da_wm_dw_000001.json index 1c7024e153f..33392081bf5 100644 --- a/tests/components/smartthings/fixtures/devices/da_wm_dw_000001.json +++ b/tests/components/smartthings/fixtures/devices/da_wm_dw_000001.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "586e4602-34ab-4a22-993e-5f616b04604f", "ownerId": "b603d7e8-6066-4e10-8102-afa752a63816", - "roomId": "f4d03391-ab13-4c1d-b4dc-d6ddf86014a2", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF Dishwasher", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json b/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json new file mode 100644 index 00000000000..8b501cba9b7 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/da_wm_sc_000001.json @@ -0,0 +1,172 @@ +{ + "items": [ + { + "deviceId": "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "name": "[airdresser] Samsung", + "label": "AirDresser", + "manufacturerName": "Samsung Electronics", + "presentationId": "DA-WM-SC-000001", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": 
"df59873c-4e2c-43ba-bcd4-ade4efb0504a", + "ownerId": "71254e90-c144-45b6-aabe-709f78f48376", + "roomId": "4c9052ba-4430-4cb1-a788-f1e4449c43c9", + "deviceTypeName": "Samsung OCF Steam Closet", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "remoteControlStatus", + "version": 1 + }, + { + "id": "dryerOperatingState", + "version": 1 + }, + { + "id": "demandResponseLoadControl", + "version": 1 + }, + { + "id": "powerConsumptionReport", + "version": 1 + }, + { + "id": "custom.steamClosetOperatingState", + "version": 1 + }, + { + "id": "custom.disabledCapabilities", + "version": 1 + }, + { + "id": "custom.energyType", + "version": 1 + }, + { + "id": "custom.steamClosetWrinklePrevent", + "version": 1 + }, + { + "id": "custom.jobBeginningStatus", + "version": 1 + }, + { + "id": "custom.supportedOptions", + "version": 1 + }, + { + "id": "custom.veryFineDustFilter", + "version": 1 + }, + { + "id": "samsungce.deviceIdentification", + "version": 1 + }, + { + "id": "samsungce.driverVersion", + "version": 1 + }, + { + "id": "samsungce.softwareUpdate", + "version": 1 + }, + { + "id": "samsungce.steamClosetDelayEnd", + "version": 1 + }, + { + "id": "samsungce.steamClosetKeepFreshMode", + "version": 1 + }, + { + "id": "samsungce.steamClosetSanitizeMode", + "version": 1 + }, + { + "id": "samsungce.steamClosetAutoCycleLink", + "version": 1 + }, + { + "id": "samsungce.steamClosetCycle", + "version": 1 + }, + { + "id": "samsungce.steamClosetCyclePreset", + "version": 1 + }, + { + "id": "samsungce.kidsLock", + "version": 1 + }, + { + "id": "samsungce.welcomeMessage", + "version": 1 + }, + { + "id": "samsungce.quickControl", + "version": 1 + }, + { + "id": "sec.diagnosticsInformation", + "version": 1 + }, + { + "id": "sec.wifiConfiguration", + "version": 1 + } + ], + "categories": [ + { + "name": "ClothingCareMachine", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-06-23T16:00:40.545Z", + "profile": { + "id": "a3623498-4747-3761-bac1-ba13f437d8ea" + }, + "ocf": { + "ocfDeviceType": "x.com.st.d.steamcloset", + "name": "[airdresser] Samsung", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "DA_DF_TP2_20_COMMON|20299141|3801010200151107020100FF00000000", + "platformVersion": "DAWIT 2.0", + "platformOS": "TizenRT 2.0 + IPv6", + "hwVersion": "MediaTek", + "firmwareVersion": "DA_DF_TP2_20_COMMON_30230807", + "vendorId": "DA-WM-SC-000001", + "vendorResourceClientServerVersion": "MediaTek Release 2.211214.1", + "lastSignupTime": "2023-06-23T16:00:36.793123Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": [], + "executionContext": "CLOUD" + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/da_wm_wd_000001.json b/tests/components/smartthings/fixtures/devices/da_wm_wd_000001.json index b9a650718e2..ef47260a989 100644 --- a/tests/components/smartthings/fixtures/devices/da_wm_wd_000001.json +++ b/tests/components/smartthings/fixtures/devices/da_wm_wd_000001.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "781d5f1e-c87e-455e-87f7-8e954879e91d", "ownerId": "b603d7e8-6066-4e10-8102-afa752a63816", - "roomId": "2a8637b2-77ad-475e-b537-7b6f7f97fff6", + 
"roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF Dryer", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/da_wm_wm_000001.json b/tests/components/smartthings/fixtures/devices/da_wm_wm_000001.json index 852a2afa932..4996eebab96 100644 --- a/tests/components/smartthings/fixtures/devices/da_wm_wm_000001.json +++ b/tests/components/smartthings/fixtures/devices/da_wm_wm_000001.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "781d5f1e-c87e-455e-87f7-8e954879e91d", "ownerId": "b603d7e8-6066-4e10-8102-afa752a63816", - "roomId": "2a8637b2-77ad-475e-b537-7b6f7f97fff6", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF Washer", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/fake_fan.json b/tests/components/smartthings/fixtures/devices/fake_fan.json index 7b8e174d420..6a447ae7aff 100644 --- a/tests/components/smartthings/fixtures/devices/fake_fan.json +++ b/tests/components/smartthings/fixtures/devices/fake_fan.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "0086-0002-005F", "locationId": "6f11ddf5-f0cb-4516-a06a-3a2a6ec22bca", "ownerId": "9f257fc4-6471-2566-b06e-2fe72dd979fa", - "roomId": "cdf080f0-0542-41d7-a606-aff69683e04c", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -36,7 +36,6 @@ } ], "createTime": "2023-01-12T23:02:44.917Z", - "parentDeviceId": "6a2dd7a4-dd77-48bc-9acf-017029aaf099", "profile": { "id": "6372cd27-93c7-32ef-9be5-aef2221adff1" }, diff --git a/tests/components/smartthings/fixtures/devices/ge_in_wall_smart_dimmer.json b/tests/components/smartthings/fixtures/devices/ge_in_wall_smart_dimmer.json index 910eacec2cc..646196fa980 100644 --- a/tests/components/smartthings/fixtures/devices/ge_in_wall_smart_dimmer.json +++ b/tests/components/smartthings/fixtures/devices/ge_in_wall_smart_dimmer.json @@ -8,7 +8,7 @@ "presentationId": "31cf01ee-cb49-3d95-ac2d-2afab47f25c7", "deviceManufacturerCode": "0063-4944-3130", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", - "roomId": "e73dcd00-6953-431d-ae79-73fd2f2c528e", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -40,7 +40,6 @@ } ], "createTime": "2020-05-25T18:18:01Z", - "parentDeviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", "profile": { "id": "ec5458c2-c011-3479-a59b-82b42820c2f7" }, diff --git a/tests/components/smartthings/fixtures/devices/heatit_zpushwall.json b/tests/components/smartthings/fixtures/devices/heatit_zpushwall.json new file mode 100644 index 00000000000..0cd42e0e2ce --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/heatit_zpushwall.json @@ -0,0 +1,155 @@ +{ + "items": [ + { + "deviceId": "5e5b97f3-3094-44e6-abc0-f61283412d6a", + "name": "heatit-zpushwall", + "label": "Livingroom smart switch", + "manufacturerName": "SmartThingsCommunity", + "presentationId": "52933933-7123-3315-a441-92d65df5f031", + "deviceManufacturerCode": "019B-0004-2403", + "locationId": "c85a9f8a-5d2e-4cdd-8bdb-bc49ba4a3544", + "ownerId": "7b68139b-d068-45d8-bf27-961320350024", + "roomId": "56e43461-2f7d-4c43-ba7c-29465f991289", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "battery", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button1", + "label": "button1", + "capabilities": [ + { + "id": "button", + "version": 1 + } 
+ ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button2", + "label": "button2", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button3", + "label": "button3", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button4", + "label": "button4", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button5", + "label": "button5", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + }, + { + "id": "button6", + "label": "button6", + "capabilities": [ + { + "id": "button", + "version": 1 + } + ], + "categories": [ + { + "name": "RemoteController", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2023-12-04T16:51:15.774Z", + "parentDeviceId": "4869d882-e898-40c3-a198-7611b72187a5", + "profile": { + "id": "2d6e59af-63df-3102-8515-66f3d75c9323" + }, + "zwave": { + "networkId": "12", + "driverId": "1d39c140-ce10-490d-bf52-4de7b72caab6", + "executingLocally": true, + "hubId": "4869d882-e898-40c3-a198-7611b72187a5", + "networkSecurityLevel": "ZWAVE_S2_AUTHENTICATED", + "provisioningState": "NONFUNCTIONAL", + "manufacturerId": 411, + "productType": 4, + "productId": 9219 + }, + "type": "ZWAVE", + "restrictionTier": 0, + "allowed": null, + "executionContext": "LOCAL", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/hub.json b/tests/components/smartthings/fixtures/devices/hub.json new file mode 100644 index 00000000000..81046859db6 --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/hub.json @@ -0,0 +1,718 @@ +{ + "items": [ + { + "deviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", + "name": "SmartThings v2 Hub", + "label": "Home Hub", + "manufacturerName": "SmartThingsCommunity", + "presentationId": "63f1469e-dc4a-3689-8cc5-69e293c1eb21", + "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", + "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "bridge", + "version": 1 + } + ], + "categories": [ + { + "name": "Hub", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2016-11-13T18:18:07Z", + "childDevices": [ + { + "deviceId": "0781c9d0-92cb-4c7b-bb5b-2f2dbe0c41f3", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "08ee0358-9f40-4afa-b5a0-3a6aba18c267", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "09076422-62cc-4b2d-8beb-b53bc451c704", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "0b5577db-5074-4b70-a2c5-efec286d264d", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "115236ea-59e5-4cd4-bade-d67c409967bc", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "1691801c-ae59-438b-89dc-f2c761fe937d", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": 
"1a987293-0962-4447-99d4-aa82655ffb55", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "2533fdd0-e064-4fa2-b77b-1e17260b58d7", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "265e653b-3c0b-4fa6-8e2a-f6a69c7040f0", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "277e0a96-c8ec-41aa-b4cf-0bac57dc1cee", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "2d9a892b-1c93-45a5-84cb-0e81889498c6", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "374ba6fa-5a08-4ea2-969c-1fa43d86e21f", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "37c0cdda-9158-41ad-9635-4ca32df9fe5b", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "3f82e13c-bd39-4043-bb54-7432a4e47113", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "4339f999-1ad2-46fb-9103-cb628b30a022", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "4a59f635-9f0a-4a6c-a2f0-ffb7ef182a7c", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "4c3469c9-3556-4f19-a2e1-1c0a598341dc", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "4fddedf0-2662-476e-b1fd-aceaec17ad3a", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "550a1c72-65a0-4d55-b97b-75168e055398", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "630cf009-eb3b-409e-a77a-9b298540532f", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "6356b240-c7d8-403c-883e-ae438d432abe", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "68e786a6-7f61-4c3a-9e13-70b803cf782b", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "6a2e5058-36f3-4668-aa43-49a66f8df93d", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "6b5535c7-c039-42ee-9970-8af86c6b0775", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "6c1b7cfa-7429-4f35-9d02-ab1dfd2f1297", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "6ca56087-481f-4e93-9727-fb91049fe396", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "6e3e44b3-d84a-4efc-a97b-b5e0dae28ddc", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "6f4d2e72-7af4-4c96-97ab-d6b6a0d6bc4b", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "7111243f-39d6-4ed0-a277-f040e40a806d", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "7b9d924a-de0c-44f9-ac5c-f15869c59411", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "7bedac4c-5681-4897-a2ef-e9153cb19ba0", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "7d246592-93db-4d72-a10d-5a51793ece8c", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "803cb0d9-addd-4c2d-aaef-d4e20bf88228", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "829da938-6e92-4a93-8923-7c67f9663c03", + "profile": {}, + "allowed": 
null, + "executionContext": "CLOUD" + }, + { + "deviceId": "84f1eaf0-592e-459a-a2b3-4fc43e004dae", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "8eacf25f-aa33-4d9e-ba90-0e4ac3ceb8e0", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "8f873071-a9aa-4580-a736-8f5f696e044a", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "91172212-e9ff-4ca6-9626-e7af0361c9ad", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "92138ee5-d3bf-4348-98e8-445dedc319cb", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "971b05df-6ed3-446e-b54f-5092eac01921", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "9a9cb299-5279-4dea-9249-b5c153d22ba1", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "9b479ba0-81e1-4877-87c5-c301a87cbdab", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "9dd17f8f-cf5e-4647-a11c-d8f24cdf9b2a", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "a1e6525c-1e24-403c-b18c-eecb65e22ccf", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "a9d42ef0-f972-44b0-86bc-efd6569a1aef", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "a9f587c5-5d8b-4273-8907-e7f609af5158", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "aaedaf28-2ae0-4c1d-b57e-87f6a420c298", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "b3a84295-ac3c-4fb1-95e4-4a4bbb1b0bce", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "b90c085d-7d1f-4abc-a66d-d5ce3f96be02", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "bafc5147-2e48-498b-97ff-34c93fae7814", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "c1107a0c-fa71-43c5-8ff9-a128ea6c4f20", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "c5209cd2-fcb5-46be-b685-5b05f22dcb2c", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "c5699ff6-af09-4922-901d-bb81b8345bc3", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "cfcd9a21-a943-4519-9972-3c7890cd25b1", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "d0268a69-abfb-4c92-a646-61cec2e510ad", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "d20891e5-59b4-46ce-9184-b7fdf0c7ae4c", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "d48848b9-25b0-4423-8fcf-96a022ac571e", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "ea2aa187-40fd-4140-9742-453e691c4469", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "f27d0b27-24fd-4d8c-b003-d3d7aaba1e70", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "f3c18803-cbec-48e3-8f15-3c31f302d68b", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": "f3e184b2-631a-47b2-b583-32ac2fec9e3c", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + }, + { + "deviceId": 
"f4e0517a-d94f-4bd6-a464-222c8c413a66", + "profile": {}, + "allowed": null, + "executionContext": "CLOUD" + } + ], + "profile": { + "id": "d77ba2f6-c377-36f5-bb68-15db9d1aa0e1" + }, + "hub": { + "hubEui": "D052A872947A0001", + "firmwareVersion": "000.055.00005", + "hubDrivers": [ + { + "driverVersion": "2025-01-19T15:05:25.835006968", + "driverId": "00425c55-0932-416f-a1ba-78fae98ab614", + "channelId": "c8bb99e1-04a3-426b-9d94-2d260134d624" + }, + { + "driverVersion": "2024-12-17T18:00:36.611958104", + "driverId": "01976eca-e7ff-4d1b-91db-9c980ce668d7", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-12-17T18:00:48.572636846", + "driverId": "0f206d13-508e-4342-9cbb-937e02489141", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-12-17T18:00:07.735400483", + "driverId": "2cbf55e3-dbc2-48a2-8be5-4c3ce756b692", + "channelId": "b1373fea-da9b-434b-b674-6694ce5d08cc" + }, + { + "driverVersion": "2024-11-04T22:39:17.976631549", + "driverId": "3fb97b6c-f481-441b-a14e-f270d738764e", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-10-10T18:17:51.437710641", + "driverId": "408981c2-91d4-4dfc-bbfb-84ca0205d993", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-10-10T18:17:35.032104982", + "driverId": "4eb5b19a-7bbc-452f-859b-c6d7d857b2da", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2023-08-08T18:58:32.479650566", + "driverId": "4fb7ec02-2697-4d73-977d-2b1c65c4484f", + "channelId": "b1373fea-da9b-434b-b674-6694ce5d08cc" + }, + { + "driverVersion": "2024-12-17T18:00:47.743217473", + "driverId": "572a2641-2af8-47e4-bfe5-ad83748fd7a1", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2023-07-12T03:33:26.23424277", + "driverId": "5ad2cc83-5503-4040-a98b-b0fc9931b9fe", + "channelId": "479886db-f6f5-41dd-979c-9c5f9366f070" + }, + { + "driverVersion": "2024-09-17T20:08:25.82515546", + "driverId": "5db3363a-d954-412f-93e0-2ee40572658b", + "channelId": "2423da55-101c-4b21-af58-0903656b85ca" + }, + { + "driverVersion": "2024-12-08T10:10:03.832334965", + "driverId": "6342be70-6da0-4535-afc1-ff6378d6c650", + "channelId": "c8bb99e1-04a3-426b-9d94-2d260134d624" + }, + { + "driverVersion": "2022-02-01T21:35:33.624882", + "driverId": "6a90f7a0-e275-4366-bbf2-2e8a502efc5d", + "channelId": "479886db-f6f5-41dd-979c-9c5f9366f070" + }, + { + "driverVersion": "2024-09-28T21:56:32.002090649", + "driverId": "7333473f-722c-465d-9e5d-f3a6ca760489", + "channelId": "f8900c5e-d591-4979-9826-75a867e9e0bd" + }, + { + "driverVersion": "2025-02-03T22:38:47.582952919", + "driverId": "7beb8bc2-8dfa-45c2-8fdb-7373d4597b12", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-11-15T16:18:24.739596514", + "driverId": "7ca45ba9-7cfe-4547-b752-fe41a0efb848", + "channelId": "c8bb99e1-04a3-426b-9d94-2d260134d624" + }, + { + "driverVersion": "2024-02-06T21:13:39.427465986", + "driverId": "8bf71a5d-677b-4391-93c2-e76471f3d7eb", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-10-21T19:06:49.949052991", + "driverId": "9050ac53-358c-47a1-a927-9a70f5f28cee", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-10-10T19:30:29.754256377", + "driverId": "92f39ab3-7b2f-47ee-94a7-ba47c4ee8a47", + "channelId": "b1373fea-da9b-434b-b674-6694ce5d08cc" + }, + { + "driverVersion": "2024-12-17T18:00:21.846431345", + 
"driverId": "9870bccd-2b3d-4edf-8940-532fcb11e946", + "channelId": "b1373fea-da9b-434b-b674-6694ce5d08cc" + }, + { + "driverVersion": "2024-12-09T21:10:00.424854506", + "driverId": "a6994e70-93de-4a76-8b5d-42971a3427c9", + "channelId": "c8bb99e1-04a3-426b-9d94-2d260134d624" + }, + { + "driverVersion": "2022-01-03T08:19:45.80869", + "driverId": "a89371c4-8765-404b-9de9-e9882cc48bd8", + "channelId": "14bcc056-f80d-416b-9445-467b0db325e3" + }, + { + "driverVersion": "2025-01-11T20:03:43.842469565", + "driverId": "b1504ded-efa4-4ef0-acd5-ae24e7a92e6e", + "channelId": "c8bb99e1-04a3-426b-9d94-2d260134d624" + }, + { + "driverVersion": "2024-12-08T09:45:01.460678797", + "driverId": "bb1b3fd4-dcba-4d55-8d85-58ed7f1979fb", + "channelId": "c8bb99e1-04a3-426b-9d94-2d260134d624" + }, + { + "driverVersion": "2024-11-04T22:39:18.253781754", + "driverId": "c21a6c77-872c-474e-be5b-5f6f11a240ef", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-01-30T21:36:15.547412569", + "driverId": "c856a3fd-69ee-4478-a224-d7279b6d978f", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2025-01-13T18:55:57.509807915", + "driverId": "cd898d81-6c27-4d27-a529-dfadc8caae5a", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-12-17T18:00:48.892833142", + "driverId": "ce930ffd-8155-4dca-aaa9-6c4158fc4278", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-10-10T19:30:41.208767469", + "driverId": "d620900d-f7bc-4ab5-a171-6dd159872f7d", + "channelId": "b1373fea-da9b-434b-b674-6694ce5d08cc" + }, + { + "driverVersion": "2024-10-10T19:30:33.46670456", + "driverId": "d6b43c85-1561-446b-9e3e-15e2ad81a952", + "channelId": "b1373fea-da9b-434b-b674-6694ce5d08cc" + }, + { + "driverVersion": "2023-07-11T18:43:49.169154271", + "driverId": "d9c3f8b8-c3c3-4b77-9ddd-01d08102c84b", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2024-10-10T18:17:54.195543653", + "driverId": "dbe192cb-f6a1-4369-a843-d1c42e5c91ba", + "channelId": "15ea8adc-8be7-4ea6-8b51-4155f56dc6cf" + }, + { + "driverVersion": "2022-10-02T20:15:49.147522379", + "driverId": "e120daf2-8000-4a9d-93fa-653214ce70d1", + "channelId": "479886db-f6f5-41dd-979c-9c5f9366f070" + }, + { + "driverVersion": "2023-08-15T20:08:28.115440571", + "driverId": "e7947a05-947d-4bb5-92c4-2aafaff6d69c", + "channelId": "b1373fea-da9b-434b-b674-6694ce5d08cc" + }, + { + "driverVersion": "2025-02-05T18:49:13.3338494", + "driverId": "f2e891c6-00cc-446c-9192-8ebda63d9898", + "channelId": "b1373fea-da9b-434b-b674-6694ce5d08cc" + } + ], + "hubData": { + "zwaveStaticDsk": "13740-14339-50623-49310-29679-58685-46457-16097", + "zwaveS2": true, + "hardwareType": "V2_HUB", + "hardwareId": "000D", + "zigbeeFirmware": "5.7.10", + "zigbee3": true, + "zigbeeOta": "ENABLED_WITH_LIGHTS", + "otaEnable": "true", + "zigbeeUnsecureRejoin": true, + "zigbeeRequiresExternalHardware": false, + "threadRequiresExternalHardware": false, + "failoverAvailability": "Unsupported", + "primarySupportAvailability": "Unsupported", + "secondarySupportAvailability": "Unsupported", + "zigbeeAvailability": "Available", + "zwaveAvailability": "Available", + "lanAvailability": "Available", + "matterAvailability": "Available", + "localVirtualDeviceAvailability": "Available", + "childDeviceAvailability": "Unsupported", + "edgeDriversAvailability": "Available", + "hubReplaceAvailability": "Available", + "hubLocalApiAvailability": "Available", + 
"zigbeeManualFirmwareUpdateSupported": true, + "matterRendezvousHedgeSupported": true, + "matterSoftwareComponentVersion": "1.3-0", + "matterDeviceDiagnosticsAvailability": "Available", + "zigbeeDeviceDiagnosticsAvailability": "Available", + "hedgeTlsCertificate": "", + "zigbeeChannel": "14", + "zigbeePanId": "0EE7", + "zigbeeEui": "D052A872947A0001", + "zigbeeNodeID": "0000", + "zwaveNodeID": "01", + "zwaveHomeID": "CF0F089E", + "zwaveSucID": "01", + "zwaveVersion": "6.10", + "zwaveRegion": "US", + "macAddress": "D0:52:A8:72:91:02", + "localIP": "192.168.168.189", + "zigbeeRadioFunctional": true, + "zwaveRadioFunctional": true, + "zigbeeRadioEnabled": true, + "zwaveRadioEnabled": true, + "zigbeeRadioDetected": true, + "zwaveRadioDetected": true + } + }, + "type": "HUB", + "restrictionTier": 0, + "allowed": [], + "executionContext": "CLOUD" + }, + { + "deviceId": "374ba6fa-5a08-4ea2-969c-1fa43d86e21f", + "name": "Multipurpose Sensor", + "label": "Mail Box", + "manufacturerName": "SmartThingsCommunity", + "presentationId": "c385e2bc-acb8-317b-be2a-6efd1f879720", + "deviceManufacturerCode": "SmartThings", + "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", + "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", + "roomId": "f7f39cf6-ff3a-4bcb-8d1b-00a3324c016d", + "components": [ + { + "id": "main", + "label": "Mail Box", + "capabilities": [ + { + "id": "contactSensor", + "version": 1 + }, + { + "id": "temperatureMeasurement", + "version": 1 + }, + { + "id": "threeAxis", + "version": 1 + }, + { + "id": "accelerationSensor", + "version": 1 + }, + { + "id": "battery", + "version": 1 + }, + { + "id": "firmwareUpdate", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + } + ], + "categories": [ + { + "name": "MultiFunctionalSensor", + "categoryType": "manufacturer" + }, + { + "name": "MultiFunctionalSensor", + "categoryType": "user" + } + ] + } + ], + "createTime": "2022-08-16T21:08:09.983Z", + "parentDeviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", + "profile": { + "id": "4471213f-121b-38fd-b022-51df37ac1d4c" + }, + "zigbee": { + "eui": "24FD5B00010A3A95", + "networkId": "E71B", + "driverId": "408981c2-91d4-4dfc-bbfb-84ca0205d993", + "executingLocally": true, + "hubId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", + "provisioningState": "PROVISIONED" + }, + "type": "ZIGBEE", + "restrictionTier": 0, + "allowed": [], + "executionContext": "LOCAL" + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json b/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json new file mode 100644 index 00000000000..5f99cefddcb --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/hw_q80r_soundbar.json @@ -0,0 +1,106 @@ +{ + "items": [ + { + "deviceId": "afcf3b91-0000-1111-2222-ddff2a0a6577", + "name": "[AV] Samsung Soundbar Q80R", + "label": "Soundbar", + "manufacturerName": "Samsung Electronics", + "presentationId": "VD-NetworkAudio-001S", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "c7f8e400-0000-1111-2222-76463f4eb484", + "ownerId": "bd0d9288-0000-1111-2222-68310a42a709", + "roomId": "be09ff51-0000-1111-2222-e48e2dab37fd", + "deviceTypeName": "Samsung OCF Network Audio Player", + "components": [ + { + "id": "main", + "label": "Soundbar", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "audioVolume", + "version": 1 + }, + { + "id": "audioMute", + "version": 1 
+ }, + { + "id": "audioTrackData", + "version": 1 + }, + { + "id": "mediaInputSource", + "version": 1 + }, + { + "id": "samsungvd.audioInputSource", + "version": 1 + }, + { + "id": "mediaPlayback", + "version": 1 + }, + { + "id": "audioNotification", + "version": 1 + }, + { + "id": "samsungvd.groupInfo", + "version": 1 + } + ], + "categories": [ + { + "name": "NetworkAudio", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2020-10-19T01:35:08Z", + "profile": { + "id": "c1036d88-000-1111-2222-a361463fd53f" + }, + "ocf": { + "ocfDeviceType": "oic.d.networkaudio", + "name": "[AV] Samsung Soundbar Q80R", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "Q80R", + "platformVersion": "Tizen 4.0", + "platformOS": "4.1.10", + "hwVersion": "0-0", + "firmwareVersion": "HW-Q80RWWB-1012.6", + "vendorId": "VD-NetworkAudio-001S", + "vendorResourceClientServerVersion": "1.2", + "locale": "KO", + "lastSignupTime": "2021-01-16T07:05:02.184545Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": null, + "executionContext": "CLOUD", + "relationships": [] + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/iphone.json b/tests/components/smartthings/fixtures/devices/iphone.json index 3fc26307c90..1ae79aa06ef 100644 --- a/tests/components/smartthings/fixtures/devices/iphone.json +++ b/tests/components/smartthings/fixtures/devices/iphone.json @@ -27,7 +27,6 @@ } ], "createTime": "2021-12-02T16:14:24.394Z", - "parentDeviceId": "b8e11599-5297-4574-8e62-885995fcaa20", "profile": { "id": "21d0f660-98b4-3f7b-8114-fe62e555628e" }, diff --git a/tests/components/smartthings/fixtures/devices/multipurpose_sensor.json b/tests/components/smartthings/fixtures/devices/multipurpose_sensor.json index 3770614a366..c8088d6473d 100644 --- a/tests/components/smartthings/fixtures/devices/multipurpose_sensor.json +++ b/tests/components/smartthings/fixtures/devices/multipurpose_sensor.json @@ -8,7 +8,7 @@ "presentationId": "c385e2bc-acb8-317b-be2a-6efd1f879720", "deviceManufacturerCode": "SmartThings", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", - "roomId": "b277a3c0-b8fe-44de-9133-c1108747810c", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -56,7 +56,6 @@ } ], "createTime": "2019-02-23T16:53:57Z", - "parentDeviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", "profile": { "id": "4471213f-121b-38fd-b022-51df37ac1d4c" }, diff --git a/tests/components/smartthings/fixtures/devices/smart_plug.json b/tests/components/smartthings/fixtures/devices/smart_plug.json index 24d0fbc6e84..e5ec6c38dad 100644 --- a/tests/components/smartthings/fixtures/devices/smart_plug.json +++ b/tests/components/smartthings/fixtures/devices/smart_plug.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "LEDVANCE", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", - "roomId": "94be4a1e-382a-4b7f-a5ef-fdb1a7d9f9e6", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -37,7 +37,6 @@ } ], "createTime": "2018-10-05T12:23:14Z", - "parentDeviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", "profile": { "id": "daeff874-075a-32e3-8b11-bdb99d8e67c7" }, diff --git a/tests/components/smartthings/fixtures/devices/sonos_player.json b/tests/components/smartthings/fixtures/devices/sonos_player.json index 67d1ef24cf9..c84caf57475 
100644 --- a/tests/components/smartthings/fixtures/devices/sonos_player.json +++ b/tests/components/smartthings/fixtures/devices/sonos_player.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Sonos", "locationId": "eed0e167-e793-459b-80cb-a0b02e2b86c2", "ownerId": "2c69cc36-85ae-c41a-9981-a4ee96cd9137", - "roomId": "105e6d1a-52a4-4797-a235-5a48d7d433c8", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -61,7 +61,6 @@ } ], "createTime": "2025-02-02T13:18:28.570Z", - "parentDeviceId": "2f7f7d2b-e683-48ae-86f7-e57df6a0bce2", "profile": { "id": "0443d359-3f76-383f-82a4-6fc4a879ef1d" }, diff --git a/tests/components/smartthings/fixtures/devices/vd_network_audio_002s.json b/tests/components/smartthings/fixtures/devices/vd_network_audio_002s.json index 7fb07533810..20f4aa71fec 100644 --- a/tests/components/smartthings/fixtures/devices/vd_network_audio_002s.json +++ b/tests/components/smartthings/fixtures/devices/vd_network_audio_002s.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "c4189ac1-208f-461a-8ab6-ea67937b3743", "ownerId": "85ea07e1-7063-f673-3ba5-125293f297c8", - "roomId": "db506ec3-83b1-4125-9c4c-eb597da5db6a", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF Network Audio Player", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json b/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json new file mode 100644 index 00000000000..ef1dd2e96bc --- /dev/null +++ b/tests/components/smartthings/fixtures/devices/vd_sensor_light_2023.json @@ -0,0 +1,81 @@ +{ + "items": [ + { + "deviceId": "5cc1c096-98b9-460c-8f1c-1045509ec605", + "name": "VD-Sensor.Light-2023", + "label": "Light Sensor - 55\" The Frame", + "manufacturerName": "Samsung Electronics", + "presentationId": "VD-Sensor.Light-2023", + "deviceManufacturerCode": "Samsung Electronics", + "locationId": "df59873c-4e2c-43ba-bcd4-ade4efb0504a", + "ownerId": "71254e90-c144-45b6-aabe-709f78f48376", + "roomId": "8a4fac38-48d1-4a8c-922b-92620442363b", + "deviceTypeName": "x.com.st.d.sensor.light", + "components": [ + { + "id": "main", + "label": "main", + "capabilities": [ + { + "id": "ocf", + "version": 1 + }, + { + "id": "switch", + "version": 1 + }, + { + "id": "refresh", + "version": 1 + }, + { + "id": "execute", + "version": 1 + }, + { + "id": "relativeBrightness", + "version": 1 + }, + { + "id": "samsungvd.deviceCategory", + "version": 1 + } + ], + "categories": [ + { + "name": "LightSensor", + "categoryType": "manufacturer" + } + ] + } + ], + "createTime": "2024-11-15T22:21:27.908Z", + "parentDeviceId": "425ac77a-f7c9-a62d-ff12-cdad144952e3", + "profile": { + "id": "5f1633fb-0c63-34d3-9d04-a314d393d225" + }, + "ocf": { + "ocfDeviceType": "x.com.st.d.sensor.light", + "name": "Light Sensor - 55 The Frame", + "specVersion": "core.1.1.0", + "verticalDomainSpecVersion": "res.1.1.0,sh.1.1.0", + "manufacturerName": "Samsung Electronics", + "modelNumber": "QE55LS03DAUXXN", + "platformVersion": "8.0", + "platformOS": "Tizen", + "hwVersion": "", + "firmwareVersion": "latest", + "vendorId": "VD-Sensor.Light-2023", + "vendorResourceClientServerVersion": "4.0.26", + "lastSignupTime": "2024-11-15T22:21:27.933740026Z", + "transferCandidate": false, + "additionalAuthCodeRequired": false + }, + "type": "OCF", + "restrictionTier": 0, + "allowed": [], + "executionContext": "CLOUD" + } + ], + "_links": {} +} diff --git a/tests/components/smartthings/fixtures/devices/vd_stv_2017_k.json 
b/tests/components/smartthings/fixtures/devices/vd_stv_2017_k.json index 3c22a214495..42630f452d5 100644 --- a/tests/components/smartthings/fixtures/devices/vd_stv_2017_k.json +++ b/tests/components/smartthings/fixtures/devices/vd_stv_2017_k.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Samsung Electronics", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", - "roomId": "94be4a1e-382a-4b7f-a5ef-fdb1a7d9f9e6", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "deviceTypeName": "Samsung OCF TV", "components": [ { diff --git a/tests/components/smartthings/fixtures/devices/virtual_thermostat.json b/tests/components/smartthings/fixtures/devices/virtual_thermostat.json index d5bf3b32a0c..1b7a55d779d 100644 --- a/tests/components/smartthings/fixtures/devices/virtual_thermostat.json +++ b/tests/components/smartthings/fixtures/devices/virtual_thermostat.json @@ -8,7 +8,7 @@ "presentationId": "78906115-bf23-3c43-9cd6-f42ca3d5517a", "locationId": "88a3a314-f0c8-40b4-bb44-44ba06c9c42f", "ownerId": "12d4af93-cb68-b108-87f5-625437d7371f", - "roomId": "58826afc-9f38-426a-b868-dc94776286e3", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", diff --git a/tests/components/smartthings/fixtures/devices/virtual_valve.json b/tests/components/smartthings/fixtures/devices/virtual_valve.json index 1988617afad..e46b7846631 100644 --- a/tests/components/smartthings/fixtures/devices/virtual_valve.json +++ b/tests/components/smartthings/fixtures/devices/virtual_valve.json @@ -8,7 +8,7 @@ "presentationId": "916408b6-c94e-38b8-9fbf-03c8a48af5c3", "locationId": "88a3a314-f0c8-40b4-bb44-44ba06c9c42f", "ownerId": "12d4af93-cb68-b108-87f5-625437d7371f", - "roomId": "58826afc-9f38-426a-b868-dc94776286e3", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", diff --git a/tests/components/smartthings/fixtures/devices/virtual_water_sensor.json b/tests/components/smartthings/fixtures/devices/virtual_water_sensor.json index ad3a45a0481..ffea2664c88 100644 --- a/tests/components/smartthings/fixtures/devices/virtual_water_sensor.json +++ b/tests/components/smartthings/fixtures/devices/virtual_water_sensor.json @@ -8,7 +8,7 @@ "presentationId": "838ae989-b832-3610-968c-2940491600f6", "locationId": "88a3a314-f0c8-40b4-bb44-44ba06c9c42f", "ownerId": "12d4af93-cb68-b108-87f5-625437d7371f", - "roomId": "58826afc-9f38-426a-b868-dc94776286e3", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", diff --git a/tests/components/smartthings/fixtures/devices/yale_push_button_deadbolt_lock.json b/tests/components/smartthings/fixtures/devices/yale_push_button_deadbolt_lock.json index e83a1be7644..20f0dd5ca26 100644 --- a/tests/components/smartthings/fixtures/devices/yale_push_button_deadbolt_lock.json +++ b/tests/components/smartthings/fixtures/devices/yale_push_button_deadbolt_lock.json @@ -9,7 +9,7 @@ "deviceManufacturerCode": "Yale", "locationId": "c4d3b2a1-09f8-765e-4d3c-2b1a09f8e7d6 ", "ownerId": "d47f2b19-3a6e-4c8d-bf21-9e8a7c5d134e", - "roomId": "94be4a1e-382a-4b7f-a5ef-fdb1a7d9f9e6", + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", "components": [ { "id": "main", @@ -45,7 +45,6 @@ } ], "createTime": "2016-11-18T23:01:19Z", - "parentDeviceId": "074fa784-8be8-4c70-8e22-6f5ed6f81b7e", "profile": { "id": "51b76691-3c3a-3fce-8c7c-4f9d50e5885a" }, diff --git a/tests/components/smartthings/fixtures/rooms.json b/tests/components/smartthings/fixtures/rooms.json new file mode 100644 index 
00000000000..355db9a3423 --- /dev/null +++ b/tests/components/smartthings/fixtures/rooms.json @@ -0,0 +1,17 @@ +{ + "items": [ + { + "roomId": "7715151d-0314-457a-a82c-5ce48900e065", + "locationId": "397678e5-9995-4a39-9d9f-ae6ba310236b", + "name": "Theater", + "backgroundImage": null + }, + { + "roomId": "cdf080f0-0542-41d7-a606-aff69683e04c", + "locationId": "397678e5-9995-4a39-9d9f-ae6ba310236b", + "name": "Toilet", + "backgroundImage": null + } + ], + "_links": null +} diff --git a/tests/components/smartthings/snapshots/test_binary_sensor.ambr b/tests/components/smartthings/snapshots/test_binary_sensor.ambr index 27a5e38a123..2419a154e05 100644 --- a/tests/components/smartthings/snapshots/test_binary_sensor.ambr +++ b/tests/components/smartthings/snapshots/test_binary_sensor.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.motion', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_motionSensor_motion_motion', 'unit_of_measurement': None, }) # --- @@ -77,7 +77,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.sound', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_soundSensor_sound_sound', 'unit_of_measurement': None, }) # --- @@ -95,7 +95,7 @@ 'state': 'off', }) # --- -# name: test_all_entities[contact_sensor][binary_sensor.front_door_open_closed_sensor_door-entry] +# name: test_all_entities[contact_sensor][binary_sensor.front_door_open_closed_sensor-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -108,7 +108,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.front_door_open_closed_sensor_door', + 'entity_id': 'binary_sensor.front_door_open_closed_sensor', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -118,32 +118,32 @@ 'name': None, 'options': dict({ }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, - 'original_name': 'Door', + 'original_name': None, 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.contact', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[contact_sensor][binary_sensor.front_door_open_closed_sensor_door-state] +# name: test_all_entities[contact_sensor][binary_sensor.front_door_open_closed_sensor-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': '.Front Door Open/Closed Sensor Door', + 'device_class': 'garage_door', + 'friendly_name': '.Front Door Open/Closed Sensor', }), 'context': , - 'entity_id': 'binary_sensor.front_door_open_closed_sensor_door', + 'entity_id': 'binary_sensor.front_door_open_closed_sensor', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_door-entry] +# name: test_all_entities[da_ks_cooktop_31001][binary_sensor.induction_hob_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -156,7 +156,529 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': None, - 'entity_id': 'binary_sensor.refrigerator_door', + 'entity_id': 'binary_sensor.induction_hob_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '808dbd84-f357-47e2-a0cd-3b66fa22d584_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_cooktop_31001][binary_sensor.induction_hob_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Induction Hob Power', + }), + 'context': , + 'entity_id': 'binary_sensor.induction_hob_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'door', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_samsungce.doorState_doorState_doorState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Microwave Door', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Microwave Power', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.microwave_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][binary_sensor.microwave_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.microwave_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.oven_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Oven Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.oven_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_door-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.oven_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'door', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_samsungce.doorState_doorState_doorState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Oven Door', + }), + 'context': , + 'entity_id': 'binary_sensor.oven_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.oven_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][binary_sensor.oven_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Oven Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.oven_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.vulcan_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Vulcan Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.vulcan_child_lock', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.vulcan_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'door', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_samsungce.doorState_doorState_doorState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Vulcan Door', + }), + 'context': , + 'entity_id': 'binary_sensor.vulcan_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.vulcan_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][binary_sensor.vulcan_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Vulcan Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.vulcan_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_cooler_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.refrigerator_cooler_door', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -168,29 +690,1119 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'Door', + 'original_name': 'Cooler door', 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.contact', + 'translation_key': 'cooler_door', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_cooler_contactSensor_contact_contact', 'unit_of_measurement': None, }) # --- -# name: 
test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_door-state] +# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_cooler_door-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'door', - 'friendly_name': 'Refrigerator Door', + 'friendly_name': 'Refrigerator Cooler door', }), 'context': , - 'entity_id': 'binary_sensor.refrigerator_door', + 'entity_id': 'binary_sensor.refrigerator_cooler_door', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- +# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_freezer_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.refrigerator_freezer_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Freezer door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'freezer_door', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_freezer_contactSensor_contact_contact', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_000001][binary_sensor.refrigerator_freezer_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Refrigerator Freezer door', + }), + 'context': , + 'entity_id': 'binary_sensor.refrigerator_freezer_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ref_normal_01011][binary_sensor.frigo_cooler_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.frigo_cooler_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cooler door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cooler_door', + 'unique_id': '5758b2ec-563e-f39b-ec39-208e54aabf60_cooler_contactSensor_contact_contact', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_01011][binary_sensor.frigo_cooler_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Frigo Cooler door', + }), + 'context': , + 'entity_id': 'binary_sensor.frigo_cooler_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_ref_normal_01011][binary_sensor.frigo_freezer_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.frigo_freezer_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Freezer door', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'freezer_door', + 'unique_id': '5758b2ec-563e-f39b-ec39-208e54aabf60_freezer_contactSensor_contact_contact', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_01011][binary_sensor.frigo_freezer_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Frigo Freezer door', + }), + 'context': , + 'entity_id': 'binary_sensor.frigo_freezer_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dishwasher_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.dishwasher_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dishwasher_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dishwasher Power', + }), + 'context': , + 'entity_id': 'binary_sensor.dishwasher_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dishwasher_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][binary_sensor.dishwasher_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.dishwasher_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.airdresser_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'AirDresser Power', + }), + 'context': , + 'entity_id': 'binary_sensor.airdresser_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.airdresser_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][binary_sensor.airdresser_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.airdresser_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dryer_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.dryer_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dryer_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dryer Power', + 
}), + 'context': , + 'entity_id': 'binary_sensor.dryer_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.dryer_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.dryer_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_wrinkle_prevent_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.dryer_wrinkle_prevent_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wrinkle prevent active', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_wrinkle_prevent_active', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_custom.dryerWrinklePrevent_operatingState_operatingState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][binary_sensor.dryer_wrinkle_prevent_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer Wrinkle prevent active', + }), + 'context': , + 'entity_id': 'binary_sensor.dryer_wrinkle_prevent_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.seca_roupa_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': 
'3a6c4e05-811d-5041-e956-3d04c424cbcd_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.seca_roupa_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Seca-Roupa Power', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.seca_roupa_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_wrinkle_prevent_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.seca_roupa_wrinkle_prevent_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Wrinkle prevent active', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_wrinkle_prevent_active', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_custom.dryerWrinklePrevent_operatingState_operatingState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][binary_sensor.seca_roupa_wrinkle_prevent_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa Wrinkle prevent active', + }), + 'context': , + 'entity_id': 'binary_sensor.seca_roupa_wrinkle_prevent_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washer_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washer Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.washer_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washer_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Washer Power', + }), + 'context': , + 'entity_id': 'binary_sensor.washer_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washer_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001][binary_sensor.washer_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washer Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.washer_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washing_machine_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_samsungce.kidsLock_lockState_lockState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine Child lock', + }), + 'context': , + 'entity_id': 'binary_sensor.washing_machine_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washing_machine_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_switch_switch_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Washing Machine Power', + }), + 'context': , + 'entity_id': 'binary_sensor.washing_machine_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_remote_control-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.washing_machine_remote_control', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Remote control', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'remote_control', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_remoteControlStatus_remoteControlEnabled_remoteControlEnabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][binary_sensor.washing_machine_remote_control-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine Remote control', + }), + 'context': , + 'entity_id': 'binary_sensor.washing_machine_remote_control', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_all_entities[ecobee_sensor][binary_sensor.child_bedroom_motion-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -221,7 +1833,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.motion', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_motionSensor_motion_motion', 'unit_of_measurement': None, }) # --- @@ -269,7 +1881,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.presence', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_presenceSensor_presence_presence', 'unit_of_measurement': None, }) # --- @@ -317,7 +1929,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '184c67cc-69e2-44b6-8f73-55c963068ad9.presence', + 'unique_id': '184c67cc-69e2-44b6-8f73-55c963068ad9_main_presenceSensor_presence_presence', 'unit_of_measurement': None, }) # --- @@ -335,6 +1947,54 @@ 'state': 'on', }) # --- +# name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.deck_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_contactSensor_contact_contact', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Deck Door', + }), + 'context': , + 'entity_id': 'binary_sensor.deck_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + 
}) +# --- # name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door_acceleration-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -365,7 +2025,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'acceleration', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.acceleration', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_accelerationSensor_acceleration_acceleration', 'unit_of_measurement': None, }) # --- @@ -383,102 +2043,6 @@ 'state': 'off', }) # --- -# name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.deck_door_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Door', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.contact', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[multipurpose_sensor][binary_sensor.deck_door_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Deck Door Door', - }), - 'context': , - 'entity_id': 'binary_sensor.deck_door_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[virtual_valve][binary_sensor.volvo_valve-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.volvo_valve', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Valve', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valve', - 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3.valve', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[virtual_valve][binary_sensor.volvo_valve-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'opening', - 'friendly_name': 'volvo Valve', - }), - 'context': , - 'entity_id': 'binary_sensor.volvo_valve', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_all_entities[virtual_water_sensor][binary_sensor.asd_moisture-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -509,7 +2073,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116.water', + 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116_main_waterSensor_water_water', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_button.ambr b/tests/components/smartthings/snapshots/test_button.ambr new file mode 100644 index 00000000000..2c9dbd008af --- /dev/null +++ b/tests/components/smartthings/snapshots/test_button.ambr @@ 
-0,0 +1,189 @@ +# serializer version: 1 +# name: test_all_entities[da_ks_microwave_0101x][button.microwave_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.microwave_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_stop', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_microwave_0101x][button.microwave_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Microwave Stop', + }), + 'context': , + 'entity_id': 'button.microwave_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[da_ks_oven_01061][button.oven_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.oven_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_stop', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_oven_01061][button.oven_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Oven Stop', + }), + 'context': , + 'entity_id': 'button.oven_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[da_ks_range_0101x][button.vulcan_stop-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.vulcan_stop', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Stop', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'stop', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_stop', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ks_range_0101x][button.vulcan_stop-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Vulcan Stop', + }), + 'context': , + 'entity_id': 'button.vulcan_stop', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_all_entities[da_ref_normal_000001][button.refrigerator_reset_water_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.refrigerator_reset_water_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset water filter', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_water_filter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_custom.waterFilter_resetWaterFilter', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ref_normal_000001][button.refrigerator_reset_water_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Refrigerator Reset water filter', + }), + 'context': , + 'entity_id': 'button.refrigerator_reset_water_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_climate.ambr b/tests/components/smartthings/snapshots/test_climate.ambr index 893093ee2aa..19cfe971d7f 100644 --- a/tests/components/smartthings/snapshots/test_climate.ambr +++ b/tests/components/smartthings/snapshots/test_climate.ambr @@ -36,7 +36,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551', + 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551_main', 'unit_of_measurement': None, }) # --- @@ -70,6 +70,7 @@ 'area_id': None, 'capabilities': dict({ 'hvac_modes': list([ + , , ]), 'max_temp': 35, @@ -99,7 +100,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main', 'unit_of_measurement': None, }) # --- @@ -109,6 +110,7 @@ 'current_temperature': 23.9, 'friendly_name': 'Radiator Thermostat II [+M] Wohnzimmer', 'hvac_modes': list([ + , , ]), 'max_temp': 35, @@ -178,7 +180,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main', 'unit_of_measurement': None, }) # --- @@ -226,6 +228,112 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_ac_rac_000003][climate.office_airfree-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'auto', + 'low', + 'medium', + 'high', + 'turbo', + ]), + 'hvac_modes': list([ + , + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'preset_modes': list([ + 'windFree', + ]), + 'swing_modes': list([ + 'off', + 'both', + 'vertical', + 'horizontal', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.office_airfree', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 
'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ac_rac_000003][climate.office_airfree-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 26, + 'drlc_status_duration': 0, + 'drlc_status_override': False, + 'fan_mode': 'low', + 'fan_modes': list([ + 'auto', + 'low', + 'medium', + 'high', + 'turbo', + ]), + 'friendly_name': 'Office AirFree', + 'hvac_modes': list([ + , + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': None, + 'preset_modes': list([ + 'windFree', + ]), + 'supported_features': , + 'swing_mode': 'off', + 'swing_modes': list([ + 'off', + 'both', + 'vertical', + 'horizontal', + ]), + 'temperature': 24, + }), + 'context': , + 'entity_id': 'climate.office_airfree', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- # name: test_all_entities[da_ac_rac_01001][climate.aire_dormitorio_principal-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -283,7 +391,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main', 'unit_of_measurement': None, }) # --- @@ -383,7 +491,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main', 'unit_of_measurement': None, }) # --- @@ -431,6 +539,7 @@ 'auto', ]), 'hvac_modes': list([ + , , , ]), @@ -461,7 +570,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main', 'unit_of_measurement': None, }) # --- @@ -478,6 +587,7 @@ 'friendly_name': 'Main Floor', 'hvac_action': , 'hvac_modes': list([ + , , , ]), @@ -532,7 +642,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db', + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main', 'unit_of_measurement': None, }) # --- @@ -595,7 +705,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main', 'unit_of_measurement': None, }) # --- @@ -628,6 +738,7 @@ 'area_id': None, 'capabilities': dict({ 'hvac_modes': list([ + , , ]), 'max_temp': 35, @@ -657,7 +768,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main', 'unit_of_measurement': None, }) # --- @@ -668,6 +779,7 @@ 'friendly_name': 'Hall thermostat', 'hvac_action': , 'hvac_modes': list([ + , , ]), 'max_temp': 35, @@ -695,6 +807,7 @@ 'on', ]), 'hvac_modes': list([ + , ]), 'max_temp': 35.0, 'min_temp': 7.0, @@ -723,7 +836,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6', + 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6_main', 'unit_of_measurement': None, }) # --- @@ -738,6 +851,7 @@ 'friendly_name': 'asd', 'hvac_action': , 'hvac_modes': list([ + , ]), 'max_temp': 35.0, 'min_temp': 7.0, diff --git 
a/tests/components/smartthings/snapshots/test_cover.ambr b/tests/components/smartthings/snapshots/test_cover.ambr index 6877a8ccc01..4b5cf705665 100644 --- a/tests/components/smartthings/snapshots/test_cover.ambr +++ b/tests/components/smartthings/snapshots/test_cover.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '571af102-15db-4030-b76b-245a691f74a5', + 'unique_id': '571af102-15db-4030-b76b-245a691f74a5_main', 'unit_of_measurement': None, }) # --- @@ -79,7 +79,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638', + 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_diagnostics.ambr b/tests/components/smartthings/snapshots/test_diagnostics.ambr index 268cddd5b28..b9847bf9746 100644 --- a/tests/components/smartthings/snapshots/test_diagnostics.ambr +++ b/tests/components/smartthings/snapshots/test_diagnostics.ambr @@ -302,7 +302,7 @@ 'id': '60fbc713-8da5-315d-b31a-6d6dcde4be7b', }), 'restrictionTier': 0, - 'roomId': '85a79db4-9cf2-4f09-a5b2-cd70a5c0cef0', + 'roomId': '7715151d-0314-457a-a82c-5ce48900e065', 'type': 'OCF', }), ]), @@ -610,7 +610,7 @@ 'id': '60fbc713-8da5-315d-b31a-6d6dcde4be7b', }), 'restrictionTier': 0, - 'roomId': '85a79db4-9cf2-4f09-a5b2-cd70a5c0cef0', + 'roomId': '7715151d-0314-457a-a82c-5ce48900e065', 'type': 'OCF', }), 'status': dict({ diff --git a/tests/components/smartthings/snapshots/test_event.ambr b/tests/components/smartthings/snapshots/test_event.ambr new file mode 100644 index 00000000000..79c57df5fd7 --- /dev/null +++ b/tests/components/smartthings/snapshots/test_event.ambr @@ -0,0 +1,361 @@ +# serializer version: 1 +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button1', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button1_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button1', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , 
+ 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button2', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button2_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button2', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button3', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button3_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button3', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button4', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button4_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button4-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button4', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button5', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button5_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button5', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.livingroom_smart_switch_button6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'button6', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'button', + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_button6_button', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[heatit_zpushwall][event.livingroom_smart_switch_button6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'button', + 'event_type': None, + 'event_types': list([ + 'pushed', + 'held', + 'down_hold', + ]), + 'friendly_name': 'Livingroom smart switch button6', + }), + 'context': , + 'entity_id': 'event.livingroom_smart_switch_button6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_fan.ambr b/tests/components/smartthings/snapshots/test_fan.ambr index 40ab7b12267..1196118b3b5 100644 --- a/tests/components/smartthings/snapshots/test_fan.ambr +++ b/tests/components/smartthings/snapshots/test_fan.ambr @@ -37,7 +37,7 @@ 'previous_unique_id': None, 
'supported_features': , 'translation_key': None, - 'unique_id': 'f1af21a2-d5a1-437c-b10a-b34a87394b71', + 'unique_id': 'f1af21a2-d5a1-437c-b10a-b34a87394b71_main', 'unit_of_measurement': None, }) # --- @@ -97,7 +97,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '6d95a8b7-4ee3-429a-a13a-00ec9354170c', + 'unique_id': '6d95a8b7-4ee3-429a-a13a-00ec9354170c_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_init.ambr b/tests/components/smartthings/snapshots/test_init.ambr index 301897134e5..8ec97af7d84 100644 --- a/tests/components/smartthings/snapshots/test_init.ambr +++ b/tests/components/smartthings/snapshots/test_init.ambr @@ -37,7 +37,7 @@ # --- # name: test_devices[aeotec_home_energy_meter_gen5] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'toilet', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -63,7 +63,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Toilet', 'sw_version': None, 'via_device_id': None, }) @@ -103,7 +103,7 @@ # --- # name: test_devices[base_electric_meter] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -129,7 +129,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) @@ -169,7 +169,7 @@ # --- # name: test_devices[c2c_arlo_pro_3_switch] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -195,7 +195,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) @@ -235,7 +235,7 @@ # --- # name: test_devices[centralite] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -261,14 +261,14 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- # name: test_devices[contact_sensor] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -294,14 +294,47 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- +# name: test_devices[da_ac_airsensor_01001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'Realtek', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'a3a970ea-e09c-9c04-161b-94c934e21666', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'ASM-KR-TP1-22-ACMB1M', + 'model_id': None, + 'name': '에어모니터 플러스', + 'name_by_user': None, + 
'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'ASM-KR-TP1-22-ACMB1M_16240426', + 'via_device_id': None, + }) +# --- # name: test_devices[da_ac_rac_000001] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -327,14 +360,47 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- +# name: test_devices[da_ac_rac_000003] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'ARTIK051', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'c76d6f38-1b7f-13dd-37b5-db18d5272783', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'ARTIK051_PRAC_20K', + 'model_id': None, + 'name': 'Office AirFree', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'ARTIK051_PRAC_20K_11230313', + 'via_device_id': None, + }) +# --- # name: test_devices[da_ac_rac_01001] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -360,14 +426,14 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': 'ARA-WW-TP1-22-COMMON_11240702', 'via_device_id': None, }) # --- # name: test_devices[da_ac_rac_100001] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -393,6 +459,39 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, + 'suggested_area': 'Theater', + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices[da_ks_cooktop_31001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '808dbd84-f357-47e2-a0cd-3b66fa22d584', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': None, + 'model_id': None, + 'name': 'Induction Hob', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, 'suggested_area': None, 'sw_version': None, 'via_device_id': None, @@ -400,7 +499,7 @@ # --- # name: test_devices[da_ks_microwave_0101x] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -426,7 +525,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': 'AKS-WW-TP2-20-MICROWAVE-OTR_40230125', 'via_device_id': None, }) @@ -499,7 +598,7 @@ # --- # name: test_devices[da_ref_normal_000001] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 
'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -525,14 +624,47 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': 'A-RFWW-TP2-21-COMMON_20220110', 'via_device_id': None, }) # --- +# name: test_devices[da_ref_normal_01011] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'Realtek', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '5758b2ec-563e-f39b-ec39-208e54aabf60', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'TP1X_REF_21K', + 'model_id': None, + 'name': 'Frigo', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'A-RFWW-TP1-22-REV1_20241030', + 'via_device_id': None, + }) +# --- # name: test_devices[da_rvc_normal_000001] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -558,14 +690,47 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': '1.0', 'via_device_id': None, }) # --- +# name: test_devices[da_sac_ehs_000001_sub] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '1f98ebd0-ac48-d802-7f62-000001200100', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'SAC_EHS_MONO', + 'model_id': None, + 'name': 'Eco Heating System', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '20240611.1', + 'via_device_id': None, + }) +# --- # name: test_devices[da_wm_dw_000001] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -591,14 +756,47 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': 'DA_DW_A51_20_COMMON_30230714', 'via_device_id': None, }) # --- +# name: test_devices[da_wm_sc_000001] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'MediaTek', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'DA_DF_TP2_20_COMMON', + 'model_id': None, + 'name': 'AirDresser', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'DA_DF_TP2_20_COMMON_30230807', + 'via_device_id': None, + }) +# --- # name: test_devices[da_wm_wd_000001] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 
'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -624,7 +822,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': 'DA_WM_A51_20_COMMON_30230708', 'via_device_id': None, }) @@ -664,7 +862,7 @@ # --- # name: test_devices[da_wm_wm_000001] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -690,7 +888,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': 'DA_WM_TP2_20_COMMON_30230804', 'via_device_id': None, }) @@ -829,7 +1027,7 @@ # --- # name: test_devices[fake_fan] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -855,14 +1053,14 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- # name: test_devices[ge_in_wall_smart_dimmer] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -888,7 +1086,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) @@ -959,6 +1157,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[heatit_zpushwall] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '5e5b97f3-3094-44e6-abc0-f61283412d6a', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': None, + 'model_id': None, + 'name': 'Livingroom smart switch', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- # name: test_devices[heatit_ztrm3_thermostat] DeviceRegistryEntrySnapshot({ 'area_id': None, @@ -1058,6 +1289,39 @@ 'via_device_id': None, }) # --- +# name: test_devices[hw_q80r_soundbar] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '0-0', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + 'afcf3b91-0000-1111-2222-ddff2a0a6577', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'Q80R', + 'model_id': None, + 'name': 'Soundbar', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'HW-Q80RWWB-1012.6', + 'via_device_id': None, + }) +# --- # name: test_devices[ikea_kadrilj] DeviceRegistryEntrySnapshot({ 'area_id': None, @@ -1159,7 +1423,7 @@ # --- # name: test_devices[multipurpose_sensor] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 
'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1185,7 +1449,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) @@ -1225,7 +1489,7 @@ # --- # name: test_devices[smart_plug] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1251,14 +1515,14 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- # name: test_devices[sonos_player] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1284,7 +1548,7 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) @@ -1324,7 +1588,7 @@ # --- # name: test_devices[vd_network_audio_002s] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1350,14 +1614,47 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': 'SAT-iMX8M23WWC-1010.5', 'via_device_id': None, }) # --- +# name: test_devices[vd_sensor_light_2023] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '5cc1c096-98b9-460c-8f1c-1045509ec605', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Samsung Electronics', + 'model': 'QE55LS03DAUXXN', + 'model_id': None, + 'name': 'Light Sensor - 55" The Frame', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'latest', + 'via_device_id': None, + }) +# --- # name: test_devices[vd_stv_2017_k] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1383,14 +1680,14 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': 'T-KTMAKUC-1290.3', 'via_device_id': None, }) # --- # name: test_devices[virtual_thermostat] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1416,14 +1713,14 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- # name: test_devices[virtual_valve] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1449,14 +1746,14 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': 
None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- # name: test_devices[virtual_water_sensor] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1482,14 +1779,14 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- # name: test_devices[yale_push_button_deadbolt_lock] DeviceRegistryEntrySnapshot({ - 'area_id': None, + 'area_id': 'theater', 'config_entries': , 'config_entries_subentries': , 'configuration_url': 'https://account.smartthings.com', @@ -1515,8 +1812,45 @@ 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, - 'suggested_area': None, + 'suggested_area': 'Theater', 'sw_version': None, 'via_device_id': None, }) # --- +# name: test_hub_via_device + DeviceRegistryEntrySnapshot({ + 'area_id': 'theater', + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 'https://account.smartthings.com', + 'connections': set({ + tuple( + 'mac', + 'd0:52:a8:72:91:02', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'smartthings', + '074fa784-8be8-4c70-8e22-6f5ed6f81b7e', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': None, + 'model': 'V2_HUB', + 'model_id': None, + 'name': 'Home Hub', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'Theater', + 'sw_version': '000.055.00005', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_light.ambr b/tests/components/smartthings/snapshots/test_light.ambr index f1f2b92de77..6826a555f6a 100644 --- a/tests/components/smartthings/snapshots/test_light.ambr +++ b/tests/components/smartthings/snapshots/test_light.ambr @@ -37,7 +37,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '7c16163e-c94e-482f-95f6-139ae0cd9d5e', + 'unique_id': '7c16163e-c94e-482f-95f6-139ae0cd9d5e_main', 'unit_of_measurement': None, }) # --- @@ -103,7 +103,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad', + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main', 'unit_of_measurement': None, }) # --- @@ -160,7 +160,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'aaedaf28-2ae0-4c1d-b57e-87f6a420c298', + 'unique_id': 'aaedaf28-2ae0-4c1d-b57e-87f6a420c298_main', 'unit_of_measurement': None, }) # --- @@ -221,7 +221,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '440063de-a200-40b5-8a6b-f3399eaa0370', + 'unique_id': '440063de-a200-40b5-8a6b-f3399eaa0370_main', 'unit_of_measurement': None, }) # --- @@ -302,7 +302,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': 'cb958955-b015-498c-9e62-fc0c51abd054', + 'unique_id': 'cb958955-b015-498c-9e62-fc0c51abd054_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_lock.ambr b/tests/components/smartthings/snapshots/test_lock.ambr index 2cf9688c3dd..325ce0cc677 100644 --- a/tests/components/smartthings/snapshots/test_lock.ambr +++ b/tests/components/smartthings/snapshots/test_lock.ambr @@ 
-29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158', + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/smartthings/snapshots/test_media_player.ambr b/tests/components/smartthings/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..83f9d19b9fa --- /dev/null +++ b/tests/components/smartthings/snapshots/test_media_player.ambr @@ -0,0 +1,298 @@ +# serializer version: 1 +# name: test_all_entities[hw_q80r_soundbar][media_player.soundbar-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'wifi', + 'bluetooth', + 'HDMI1', + 'HDMI2', + 'digital', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.soundbar', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'afcf3b91-0000-1111-2222-ddff2a0a6577_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[hw_q80r_soundbar][media_player.soundbar-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Soundbar', + 'is_volume_muted': False, + 'media_artist': 'Rick Astley', + 'media_title': 'Never Gonna Give You Up', + 'source': 'wifi', + 'source_list': list([ + 'wifi', + 'bluetooth', + 'HDMI1', + 'HDMI2', + 'digital', + ]), + 'supported_features': , + 'volume_level': 0.01, + }), + 'context': , + 'entity_id': 'media_player.soundbar', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_all_entities[im_speaker_ai_0001][media_player.galaxy_home_mini-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.galaxy_home_mini', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[im_speaker_ai_0001][media_player.galaxy_home_mini-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Galaxy Home Mini', + 'is_volume_muted': False, + 'repeat': , + 'shuffle': False, + 'supported_features': , + 'volume_level': 0.52, + }), + 'context': , + 'entity_id': 'media_player.galaxy_home_mini', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_all_entities[sonos_player][media_player.elliots_rum-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.elliots_rum', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sonos_player][media_player.elliots_rum-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Elliots Rum', + 'is_volume_muted': False, + 'media_artist': 'David Guetta', + 'media_title': 'Forever Young', + 'supported_features': , + 'volume_level': 0.15, + }), + 'context': , + 'entity_id': 'media_player.elliots_rum', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_all_entities[vd_network_audio_002s][media_player.soundbar_living-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.soundbar_living', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[vd_network_audio_002s][media_player.soundbar_living-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Soundbar Living', + 'is_volume_muted': False, + 'media_artist': '', + 'media_title': '', + 'source': 'HDMI1', + 'supported_features': , + 'volume_level': 0.17, + }), + 'context': , + 'entity_id': 'media_player.soundbar_living', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[vd_stv_2017_k][media_player.tv_samsung_8_series_49-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'source_list': list([ + 'digitalTv', + 'HDMI1', + 'HDMI4', + 'HDMI4', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.tv_samsung_8_series_49', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[vd_stv_2017_k][media_player.tv_samsung_8_series_49-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'tv', + 'friendly_name': '[TV] Samsung 8 Series (49)', + 'is_volume_muted': True, + 'source': 'HDMI1', + 'source_list': list([ + 'digitalTv', + 'HDMI1', + 'HDMI4', + 'HDMI4', + ]), + 'supported_features': , + 'volume_level': 0.13, + }), + 'context': , + 'entity_id': 'media_player.tv_samsung_8_series_49', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_number.ambr b/tests/components/smartthings/snapshots/test_number.ambr new file mode 100644 index 00000000000..66aade5b958 --- /dev/null +++ b/tests/components/smartthings/snapshots/test_number.ambr @@ -0,0 +1,115 @@ +# serializer version: 1 +# name: test_all_entities[da_wm_wm_000001][number.washer_rinse_cycles-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.washer_rinse_cycles', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Rinse cycles', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'washer_rinse_cycles', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_custom.washerRinseCycles_washerRinseCycles_washerRinseCycles', + 'unit_of_measurement': 'cycles', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][number.washer_rinse_cycles-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washer Rinse cycles', + 'max': 5, + 'min': 0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'cycles', + }), + 'context': , + 'entity_id': 'number.washer_rinse_cycles', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][number.washing_machine_rinse_cycles-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 5, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.washing_machine_rinse_cycles', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Rinse cycles', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'washer_rinse_cycles', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_custom.washerRinseCycles_washerRinseCycles_washerRinseCycles', + 'unit_of_measurement': 'cycles', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][number.washing_machine_rinse_cycles-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine Rinse cycles', + 'max': 5, + 'min': 0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': 'cycles', + }), + 'context': , + 'entity_id': 'number.washing_machine_rinse_cycles', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- diff --git 
a/tests/components/smartthings/snapshots/test_select.ambr b/tests/components/smartthings/snapshots/test_select.ambr new file mode 100644 index 00000000000..06185e09547 --- /dev/null +++ b/tests/components/smartthings/snapshots/test_select.ambr @@ -0,0 +1,349 @@ +# serializer version: 1 +# name: test_all_entities[da_wm_dw_000001][select.dishwasher-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.dishwasher', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_dw_000001][select.dishwasher-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dishwasher', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.dishwasher', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][select.airdresser-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.airdresser', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][select.airdresser-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirDresser', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.airdresser', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wd_000001][select.dryer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.dryer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001][select.dryer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dryer', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.dryer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][select.seca_roupa-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.seca_roupa', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wd_000001_1][select.seca_roupa-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Seca-Roupa', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.seca_roupa', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wm_000001][select.washer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.washer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001][select.washer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washer', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.washer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][select.washing_machine-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 
'select.washing_machine', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'operating_state', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_wm_000001_1][select.washing_machine-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Washing Machine', + 'options': list([ + 'stop', + 'run', + 'pause', + ]), + }), + 'context': , + 'entity_id': 'select.washing_machine', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'run', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_sensor.ambr b/tests/components/smartthings/snapshots/test_sensor.ambr index e345923c414..8ace345be18 100644 --- a/tests/components/smartthings/snapshots/test_sensor.ambr +++ b/tests/components/smartthings/snapshots/test_sensor.ambr @@ -31,7 +31,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.energy', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -83,7 +83,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.power', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -135,7 +135,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71.voltage', + 'unique_id': 'f0af21a2-d5a1-437c-b10a-b34a87394b71_main_voltageMeasurement_voltage_voltage', 'unit_of_measurement': None, }) # --- @@ -186,7 +186,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551.temperature', + 'unique_id': 'bf53a150-f8a4-45d1-aac4-86252475d551_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -238,7 +238,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b.energy', + 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -290,7 +290,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b.power', + 'unique_id': '68e786a6-7f61-4c3a-9e13-70b803cf782b_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -340,7 +340,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5.battery', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -391,7 +391,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5.temperature', + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -448,7 +448,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'alarm', - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.alarm', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_alarm_alarm_alarm', 'unit_of_measurement': None, }) # --- @@ -502,7 +502,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd.battery', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -553,7 +553,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad.power', + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -603,7 +603,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.battery', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -654,7 +654,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6.temperature', + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -674,6 +674,416 @@ 'state': '15.0', }) # --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Air quality', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_quality', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_airQualitySensor_airQuality_airQuality', + 'unit_of_measurement': 'CAQI', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '에어모니터 플러스 Air quality', + 'state_class': , + 'unit_of_measurement': 'CAQI', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_carbon_dioxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_carbon_dioxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon dioxide', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_carbonDioxideMeasurement_carbonDioxide_carbonDioxide', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_carbon_dioxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': '에어모니터 플러스 Carbon dioxide', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_carbon_dioxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1045', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_relativeHumidityMeasurement_humidity_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': '에어모니터 플러스 Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_odor_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_odor_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Odor sensor', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'odor_sensor', + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_odorSensor_odorLevel_odorLevel', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_odor_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': '에어모니터 플러스 Odor sensor', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_odor_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM1', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_veryFineDustSensor_veryFineDustLevel_veryFineDustLevel', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm1', + 'friendly_name': '에어모니터 플러스 PM1', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm10', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM10', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_dustSensor_dustLevel_dustLevel', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm10', + 'friendly_name': '에어모니터 플러스 PM10', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '31', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm2_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm2_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PM2.5', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_dustSensor_fineDustLevel_fineDustLevel', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_pm2_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': '에어모니터 플러스 PM2.5', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 
'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_pm2_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7', + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eeomoniteo_peulreoseu_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a3a970ea-e09c-9c04-161b-94c934e21666_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ac_airsensor_01001][sensor.eeomoniteo_peulreoseu_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': '에어모니터 플러스 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eeomoniteo_peulreoseu_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.0', + }) +# --- # name: test_all_entities[da_ac_rac_000001][sensor.ac_office_granit_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -709,7 +1119,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.energy_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -764,7 +1174,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.deltaEnergy_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -819,7 +1229,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.energySaved_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -871,7 +1281,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.humidity', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -926,7 +1336,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.power_meter', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -983,7 +1393,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.powerEnergy_meter', + 'unique_id': 
'96a5ef74-5832-a84b-f1f7-ca799957065d_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1035,7 +1445,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.temperature', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1085,7 +1495,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d.volume', + 'unique_id': '96a5ef74-5832-a84b-f1f7-ca799957065d_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -1103,6 +1513,435 @@ 'state': '100', }) # --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.office_airfree_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Office AirFree Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.office_airfree_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '602.171', + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.office_airfree_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Office AirFree Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.office_airfree_energy_difference', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_energy_saved-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.office_airfree_energy_saved', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy saved', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_saved', + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main_powerConsumptionReport_powerConsumption_energySaved_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_energy_saved-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Office AirFree Energy saved', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.office_airfree_energy_saved', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.office_airfree_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main_relativeHumidityMeasurement_humidity_humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Office AirFree Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.office_airfree_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '48', + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.office_airfree_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main_powerConsumptionReport_powerConsumption_power_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Office AirFree Power', + 'power_consumption_end': '2025-03-27T05:40:02Z', + 'power_consumption_start': '2025-03-27T05:29:22Z', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.office_airfree_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_power_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.office_airfree_power_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_energy', + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_power_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Office AirFree Power energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.office_airfree_power_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.office_airfree_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Office AirFree Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.office_airfree_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26', + }) +# --- +# name: 
test_all_entities[da_ac_rac_000003][sensor.office_airfree_volume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.office_airfree_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'audio_volume', + 'unique_id': 'c76d6f38-1b7f-13dd-37b5-db18d5272783_main_audioVolume_volume_volume', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[da_ac_rac_000003][sensor.office_airfree_volume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Office AirFree Volume', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.office_airfree_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- # name: test_all_entities[da_ac_rac_01001][sensor.aire_dormitorio_principal_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1138,7 +1977,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.energy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -1193,7 +2032,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.deltaEnergy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1248,7 +2087,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.energySaved_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -1300,7 +2139,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.humidity', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -1355,7 +2194,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.power_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -1412,7 +2251,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.powerEnergy_meter', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -1464,7 +2303,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.temperature', + 'unique_id': 
'4ece486b-89db-f06a-d54d-748b676b4d8e_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1514,7 +2353,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'audio_volume', - 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e.volume', + 'unique_id': '4ece486b-89db-f06a-d54d-748b676b4d8e_main_audioVolume_volume_volume', 'unit_of_measurement': '%', }) # --- @@ -1564,7 +2403,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'air_quality', - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.airQuality', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_airQualitySensor_airQuality_airQuality', 'unit_of_measurement': 'CAQI', }) # --- @@ -1615,7 +2454,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.dustLevel', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_dustSensor_dustLevel_dustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -1667,7 +2506,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.fineDustLevel', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_dustSensor_fineDustLevel_fineDustLevel', 'unit_of_measurement': 'µg/m³', }) # --- @@ -1719,7 +2558,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'F8042E25-0E53-0000-0000-000000000000.temperature', + 'unique_id': 'F8042E25-0E53-0000-0000-000000000000_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -1769,7 +2608,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.completionTime', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -1837,7 +2676,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_job_state', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenJobState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_ovenJobState_ovenJobState', 'unit_of_measurement': None, }) # --- @@ -1910,7 +2749,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_machine_state', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -1991,7 +2830,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_mode', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenMode', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenMode_ovenMode_ovenMode', 'unit_of_measurement': None, }) # --- @@ -2066,7 +2905,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_setpoint', - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenSetpoint', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', 'unit_of_measurement': , }) # --- @@ -2117,7 +2956,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a.temperature', + 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ 
-2167,7 +3006,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.completionTime', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -2235,7 +3074,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_job_state', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.ovenJobState', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_ovenJobState_ovenJobState', 'unit_of_measurement': None, }) # --- @@ -2308,7 +3147,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_machine_state', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.machineState', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -2389,7 +3228,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_mode', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.ovenMode', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenMode_ovenMode_ovenMode', 'unit_of_measurement': None, }) # --- @@ -2464,7 +3303,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_setpoint', - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.ovenSetpoint', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', 'unit_of_measurement': , }) # --- @@ -2515,7 +3354,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f.temperature', + 'unique_id': '9447959a-0dfa-6b27-d40d-650da525c53f_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -2565,7 +3404,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.completionTime', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -2633,7 +3472,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_job_state', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.ovenJobState', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_ovenJobState_ovenJobState', 'unit_of_measurement': None, }) # --- @@ -2706,7 +3545,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_machine_state', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.machineState', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -2787,7 +3626,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_mode', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.ovenMode', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenMode_ovenMode_ovenMode', 'unit_of_measurement': None, }) # --- @@ -2862,7 +3701,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'oven_setpoint', - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.ovenSetpoint', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_ovenSetpoint_ovenSetpoint_ovenSetpoint', 'unit_of_measurement': , }) # --- @@ -2913,7 +3752,7 @@ 'previous_unique_id': None, 'supported_features': 
0, 'translation_key': None, - 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18.temperature', + 'unique_id': '2c3cbaa0-1899-5ddc-7b58-9d657bd48f18_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -2968,7 +3807,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.energy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -3023,7 +3862,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.deltaEnergy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3078,7 +3917,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.energySaved_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -3133,7 +3972,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.power_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -3190,7 +4029,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09.powerEnergy_meter', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3210,6 +4049,283 @@ 'state': '0.0135559777781698', }) # --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.frigo_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5758b2ec-563e-f39b-ec39-208e54aabf60_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Frigo Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.frigo_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '66.571', + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.frigo_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': '5758b2ec-563e-f39b-ec39-208e54aabf60_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Frigo Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.frigo_energy_difference', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.019', + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_energy_saved-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.frigo_energy_saved', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy saved', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_saved', + 'unique_id': '5758b2ec-563e-f39b-ec39-208e54aabf60_main_powerConsumptionReport_powerConsumption_energySaved_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_energy_saved-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Frigo Energy saved', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.frigo_energy_saved', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.frigo_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5758b2ec-563e-f39b-ec39-208e54aabf60_main_powerConsumptionReport_powerConsumption_power_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_power-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Frigo Power', + 'power_consumption_end': '2025-03-30T18:38:18Z', + 'power_consumption_start': '2025-03-30T18:21:37Z', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.frigo_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61', + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_power_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.frigo_power_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_energy', + 'unique_id': '5758b2ec-563e-f39b-ec39-208e54aabf60_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_ref_normal_01011][sensor.frigo_power_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Frigo Power energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.frigo_power_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0189117822202047', + }) +# --- # name: test_all_entities[da_rvc_normal_000001][sensor.robot_vacuum_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3240,7 +4356,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.battery', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -3298,7 +4414,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_cleaning_mode', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerCleaningMode', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerCleaningMode_robotCleanerCleaningMode_robotCleanerCleaningMode', 'unit_of_measurement': None, }) # --- @@ -3367,7 +4483,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_movement', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerMovement', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerMovement_robotCleanerMovement_robotCleanerMovement', 'unit_of_measurement': None, }) # --- @@ -3434,7 +4550,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'robot_cleaner_turbo_mode', - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44.robotCleanerTurboMode', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_robotCleanerTurboMode_robotCleanerTurboMode_robotCleanerTurboMode', 'unit_of_measurement': None, }) # --- @@ -3458,6 +4574,384 @@ 'state': 'off', }) # --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_cooling_set_point-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_cooling_set_point', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cooling set point', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_cooling_setpoint', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_thermostatCoolingSetpoint_coolingSetpoint_coolingSetpoint', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_cooling_set_point-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Eco Heating System Cooling set point', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_cooling_set_point', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '48', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eco Heating System Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8193.81', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': 
'1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eco Heating System Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_energy_difference', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy_saved-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_energy_saved', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy saved', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_saved', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_energySaved_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_energy_saved-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eco Heating System Energy saved', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_energy_saved', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_power_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Eco Heating System Power', + 'power_consumption_end': '2025-03-09T11:14:57Z', + 'power_consumption_start': '2025-03-09T11:14:44Z', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.539', + }) +# --- +# 
name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_power_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_power_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_energy', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_power_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Eco Heating System Power energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_power_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.4041739669111e-06', + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.eco_heating_system_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_temperatureMeasurement_temperature_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_sac_ehs_000001_sub][sensor.eco_heating_system_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Eco Heating System Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.eco_heating_system_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '54.3', + }) +# --- # name: test_all_entities[da_wm_dw_000001][sensor.dishwasher_completion_time-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3488,7 +4982,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.completionTime', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -3541,7 +5035,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.energy_meter', + 'unique_id': 
'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -3596,7 +5090,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.deltaEnergy_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3651,7 +5145,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.energySaved_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -3714,7 +5208,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dishwasher_job_state', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.dishwasherJobState', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_dishwasherJobState_dishwasherJobState', 'unit_of_measurement': None, }) # --- @@ -3780,7 +5274,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dishwasher_machine_state', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -3838,7 +5332,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.power_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -3895,7 +5389,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676.powerEnergy_meter', + 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -3915,6 +5409,473 @@ 'state': '0.0', }) # --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_completion_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_completion_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Completion time', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'completion_time', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_completionTime_completionTime', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_completion_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'AirDresser Completion time', + }), + 'context': , + 'entity_id': 'sensor.airdresser_completion_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2025-02-11T09:00:17+00:00', + }) +# --- +# name: 
test_all_entities[da_wm_sc_000001][sensor.airdresser_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_energy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '207.5', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_difference-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy_difference', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy difference', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_difference', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_difference-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy difference', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy_difference', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_saved-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_energy_saved', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy saved', + 'platform': 'smartthings', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'energy_saved', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_energySaved_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_energy_saved-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Energy saved', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_energy_saved', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_job_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'cooling', + 'delay_wash', + 'drying', + 'finished', + 'none', + 'refreshing', + 'weight_sensing', + 'wrinkle_prevent', + 'dehumidifying', + 'ai_drying', + 'sanitizing', + 'internal_care', + 'freeze_protection', + 'continuous_dehumidifying', + 'thawing_frozen_inside', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_job_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Job state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_job_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_dryerJobState_dryerJobState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_job_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'AirDresser Job state', + 'options': list([ + 'cooling', + 'delay_wash', + 'drying', + 'finished', + 'none', + 'refreshing', + 'weight_sensing', + 'wrinkle_prevent', + 'dehumidifying', + 'ai_drying', + 'sanitizing', + 'internal_care', + 'freeze_protection', + 'continuous_dehumidifying', + 'thawing_frozen_inside', + ]), + }), + 'context': , + 'entity_id': 'sensor.airdresser_job_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_machine_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'pause', + 'run', + 'stop', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_machine_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Machine state', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dryer_machine_state', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_dryerOperatingState_machineState_machineState', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_machine_state-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'AirDresser Machine state', + 'options': list([ + 'pause', + 'run', + 'stop', + ]), + }), + 'context': , + 'entity_id': 'sensor.airdresser_machine_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stop', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_power_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'AirDresser Power', + 'power_consumption_end': '2025-02-11T08:21:17Z', + 'power_consumption_start': '2025-02-10T22:51:59Z', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airdresser_power_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power energy', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_energy', + 'unique_id': 'b93211bf-9d96-bd21-3b2f-964fcc87f5cc_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[da_wm_sc_000001][sensor.airdresser_power_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'AirDresser Power energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.airdresser_power_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- # name: test_all_entities[da_wm_wd_000001][sensor.dryer_completion_time-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3945,7 +5906,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.completionTime', + 'unique_id': 
'02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -3998,7 +5959,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.energy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -4053,7 +6014,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.deltaEnergy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4108,7 +6069,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.energySaved_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -4176,7 +6137,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_job_state', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.dryerJobState', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_dryerJobState_dryerJobState', 'unit_of_measurement': None, }) # --- @@ -4247,7 +6208,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_machine_state', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -4305,7 +6266,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.power_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -4362,7 +6323,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b.powerEnergy_meter', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4412,7 +6373,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.completionTime', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -4465,7 +6426,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.energy_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -4520,7 +6481,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.deltaEnergy_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4575,7 +6536,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': 
'3a6c4e05-811d-5041-e956-3d04c424cbcd.energySaved_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -4643,7 +6604,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_job_state', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.dryerJobState', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_dryerJobState_dryerJobState', 'unit_of_measurement': None, }) # --- @@ -4714,7 +6675,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dryer_machine_state', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.machineState', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_dryerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -4772,7 +6733,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.power_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -4829,7 +6790,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd.powerEnergy_meter', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -4879,7 +6840,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.completionTime', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -4932,7 +6893,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.energy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -4987,7 +6948,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.deltaEnergy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -5042,7 +7003,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.energySaved_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -5111,7 +7072,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_job_state', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.washerJobState', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_washerJobState_washerJobState', 'unit_of_measurement': None, }) # --- @@ -5183,7 +7144,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_machine_state', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.machineState', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -5241,7 +7202,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.power_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -5298,7 +7259,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47.powerEnergy_meter', + 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -5348,7 +7309,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'completion_time', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.completionTime', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_completionTime_completionTime', 'unit_of_measurement': None, }) # --- @@ -5401,7 +7362,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.energy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -5456,7 +7417,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.deltaEnergy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -5511,7 +7472,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_saved', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.energySaved_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_energySaved_meter', 'unit_of_measurement': , }) # --- @@ -5580,7 +7541,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_job_state', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.washerJobState', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_washerJobState_washerJobState', 'unit_of_measurement': None, }) # --- @@ -5652,7 +7613,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'washer_machine_state', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.machineState', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_washerOperatingState_machineState_machineState', 'unit_of_measurement': None, }) # --- @@ -5710,7 +7671,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.power_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_power_meter', 'unit_of_measurement': , }) # --- @@ -5767,7 +7728,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_energy', - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7.powerEnergy_meter', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_powerConsumptionReport_powerConsumption_powerEnergy_meter', 'unit_of_measurement': , }) # --- @@ -5819,7 +7780,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89.temperature', + 'unique_id': 'd5dc3299-c266-41c7-bd08-f540aea54b89_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -5871,7 
+7832,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc.humidity', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -5923,7 +7884,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc.temperature', + 'unique_id': '028469cb-6e89-4f14-8d9a-bfbca5e0fbfc_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -5975,7 +7936,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db.humidity', + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main_relativeHumidityMeasurement_humidity_humidity', 'unit_of_measurement': '%', }) # --- @@ -6027,7 +7988,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db.temperature', + 'unique_id': '1888b38f-6246-4f1e-911b-bfcfb66999db_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': None, }) # --- @@ -6078,7 +8039,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'link_quality', - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a.lqi', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_signalStrength_lqi_lqi', 'unit_of_measurement': None, }) # --- @@ -6128,7 +8089,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a.rssi', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_signalStrength_rssi_rssi', 'unit_of_measurement': 'dBm', }) # --- @@ -6180,7 +8141,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a.temperature', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -6200,6 +8161,55 @@ 'state': '21.0', }) # --- +# name: test_all_entities[heatit_zpushwall][sensor.livingroom_smart_switch_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.livingroom_smart_switch_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5e5b97f3-3094-44e6-abc0-f61283412d6a_main_battery_battery_battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[heatit_zpushwall][sensor.livingroom_smart_switch_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Livingroom smart switch Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.livingroom_smart_switch_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- # name: test_all_entities[heatit_ztrm3_thermostat][sensor.hall_thermostat_energy-entry] 
EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6232,7 +8242,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.energy', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main_energyMeter_energy_energy', 'unit_of_measurement': 'kWh', }) # --- @@ -6284,7 +8294,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.power', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main_powerMeter_power_power', 'unit_of_measurement': 'W', }) # --- @@ -6336,7 +8346,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '69a271f6-6537-4982-8cd9-979866872692.temperature', + 'unique_id': '69a271f6-6537-4982-8cd9-979866872692_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -6386,7 +8396,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638.battery', + 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -6405,261 +8415,6 @@ 'state': '37', }) # --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_input_source-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.galaxy_home_mini_media_input_source', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Media input source', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media_input_source', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.inputSource', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_input_source-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Galaxy Home Mini Media input source', - }), - 'context': , - 'entity_id': 'sensor.galaxy_home_mini_media_input_source', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_playback_repeat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.galaxy_home_mini_media_playback_repeat', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Media playback repeat', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media_playback_repeat', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackRepeatMode', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_playback_repeat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Galaxy Home Mini Media playback repeat', - }), - 'context': , - 'entity_id': 'sensor.galaxy_home_mini_media_playback_repeat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_playback_shuffle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.galaxy_home_mini_media_playback_shuffle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Media playback shuffle', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media_playback_shuffle', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackShuffle', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_playback_shuffle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Galaxy Home Mini Media playback shuffle', - }), - 'context': , - 'entity_id': 'sensor.galaxy_home_mini_media_playback_shuffle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'disabled', - }) -# --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_playback_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'paused', - 'playing', - 'stopped', - 'fast_forwarding', - 'rewinding', - 'buffering', - ]), - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.galaxy_home_mini_media_playback_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Media playback status', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media_playback_status', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.playbackStatus', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_media_playback_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Galaxy Home Mini Media playback status', - 'options': list([ - 'paused', - 'playing', - 'stopped', - 'fast_forwarding', - 'rewinding', - 'buffering', - ]), - }), - 'context': , - 'entity_id': 'sensor.galaxy_home_mini_media_playback_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'stopped', - }) -# --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.galaxy_home_mini_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'audio_volume', - 'unique_id': 'c9276e43-fe3c-88c3-1dcc-2eb79e292b8c.volume', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[im_speaker_ai_0001][sensor.galaxy_home_mini_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Galaxy Home Mini Volume', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.galaxy_home_mini_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '52', - }) -# --- # name: test_all_entities[multipurpose_sensor][sensor.deck_door_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6690,7 +8445,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.battery', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -6741,7 +8496,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c.temperature', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -6791,7 +8546,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'x_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c X Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_x_coordinate', 'unit_of_measurement': None, }) # --- @@ -6838,7 +8593,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'y_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c Y Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_y_coordinate', 'unit_of_measurement': None, }) # --- @@ -6885,7 +8640,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'z_coordinate', - 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c Z Coordinate', + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_z_coordinate', 'unit_of_measurement': None, }) # --- @@ -6932,7 +8687,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'air_conditioner_mode', - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5.airConditionerMode', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_airConditionerMode_airConditionerMode_airConditionerMode', 'unit_of_measurement': None, }) # --- @@ -6979,7 +8734,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'thermostat_cooling_setpoint', - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5.coolingSetpoint', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_thermostatCoolingSetpoint_coolingSetpoint_coolingSetpoint', 'unit_of_measurement': , }) # --- @@ -6998,119 +8753,6 @@ 'state': '20', }) # --- -# name: test_all_entities[sonos_player][sensor.elliots_rum_media_playback_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'paused', - 'playing', - 
'stopped', - 'fast_forwarding', - 'rewinding', - 'buffering', - ]), - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.elliots_rum_media_playback_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Media playback status', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media_playback_status', - 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536.playbackStatus', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sonos_player][sensor.elliots_rum_media_playback_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Elliots Rum Media playback status', - 'options': list([ - 'paused', - 'playing', - 'stopped', - 'fast_forwarding', - 'rewinding', - 'buffering', - ]), - }), - 'context': , - 'entity_id': 'sensor.elliots_rum_media_playback_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'playing', - }) -# --- -# name: test_all_entities[sonos_player][sensor.elliots_rum_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.elliots_rum_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'audio_volume', - 'unique_id': 'c85fced9-c474-4a47-93c2-037cc7829536.volume', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sonos_player][sensor.elliots_rum_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Elliots Rum Volume', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.elliots_rum_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- # name: test_all_entities[tplink_p110][sensor.spulmaschine_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -7146,7 +8788,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1.energy_meter', + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_powerConsumptionReport_powerConsumption_energy_meter', 'unit_of_measurement': , }) # --- @@ -7201,7 +8843,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'energy_difference', - 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1.deltaEnergy_meter', + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_powerConsumptionReport_powerConsumption_deltaEnergy_meter', 'unit_of_measurement': , }) # --- @@ -7221,20 +8863,13 @@ 'state': '0.0', }) # --- -# name: test_all_entities[vd_network_audio_002s][sensor.soundbar_living_media_playback_status-entry] +# name: test_all_entities[vd_sensor_light_2023][sensor.light_sensor_55_the_frame_brightness_intensity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ 
}), 'area_id': None, 'capabilities': dict({ - 'options': list([ - 'paused', - 'playing', - 'stopped', - 'fast_forwarding', - 'rewinding', - 'buffering', - ]), + 'state_class': , }), 'config_entry_id': , 'config_subentry_id': , @@ -7243,63 +8878,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.soundbar_living_media_playback_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Media playback status', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media_playback_status', - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac.playbackStatus', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[vd_network_audio_002s][sensor.soundbar_living_media_playback_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Soundbar Living Media playback status', - 'options': list([ - 'paused', - 'playing', - 'stopped', - 'fast_forwarding', - 'rewinding', - 'buffering', - ]), - }), - 'context': , - 'entity_id': 'sensor.soundbar_living_media_playback_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'stopped', - }) -# --- -# name: test_all_entities[vd_network_audio_002s][sensor.soundbar_living_volume-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.soundbar_living_volume', + 'entity_id': 'sensor.light_sensor_55_the_frame_brightness_intensity', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -7311,153 +8890,28 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Volume', + 'original_name': 'Brightness intensity', 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': 'audio_volume', - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac.volume', - 'unit_of_measurement': '%', + 'translation_key': 'brightness_intensity', + 'unique_id': '5cc1c096-98b9-460c-8f1c-1045509ec605_main_relativeBrightness_brightnessIntensity_brightnessIntensity', + 'unit_of_measurement': 'level', }) # --- -# name: test_all_entities[vd_network_audio_002s][sensor.soundbar_living_volume-state] +# name: test_all_entities[vd_sensor_light_2023][sensor.light_sensor_55_the_frame_brightness_intensity-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Soundbar Living Volume', - 'unit_of_measurement': '%', + 'friendly_name': 'Light Sensor - 55" The Frame Brightness intensity', + 'state_class': , + 'unit_of_measurement': 'level', }), 'context': , - 'entity_id': 'sensor.soundbar_living_volume', + 'entity_id': 'sensor.light_sensor_55_the_frame_brightness_intensity', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '17', - }) -# --- -# name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_media_input_source-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'digitaltv', - 'hdmi1', - 'hdmi4', - 'hdmi4', - ]), - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 
'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tv_samsung_8_series_49_media_input_source', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Media input source', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media_input_source', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.inputSource', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_media_input_source-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': '[TV] Samsung 8 Series (49) Media input source', - 'options': list([ - 'digitaltv', - 'hdmi1', - 'hdmi4', - 'hdmi4', - ]), - }), - 'context': , - 'entity_id': 'sensor.tv_samsung_8_series_49_media_input_source', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'hdmi1', - }) -# --- -# name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_media_playback_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'paused', - 'playing', - 'stopped', - 'fast_forwarding', - 'rewinding', - 'buffering', - ]), - }), - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tv_samsung_8_series_49_media_playback_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Media playback status', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'media_playback_status', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.playbackStatus', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_media_playback_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': '[TV] Samsung 8 Series (49) Media playback status', - 'options': list([ - 'paused', - 'playing', - 'stopped', - 'fast_forwarding', - 'rewinding', - 'buffering', - ]), - }), - 'context': , - 'entity_id': 'sensor.tv_samsung_8_series_49_media_playback_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', + 'state': '2', }) # --- # name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_tv_channel-entry] @@ -7490,7 +8944,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tv_channel', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.tvChannel', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_tvChannel_tvChannel_tvChannel', 'unit_of_measurement': None, }) # --- @@ -7537,7 +8991,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tv_channel_name', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.tvChannelName', + 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_main_tvChannel_tvChannelName_tvChannelName', 'unit_of_measurement': None, }) # --- @@ -7554,54 +9008,6 @@ 'state': '', }) # --- -# name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_volume-entry] - EntityRegistryEntrySnapshot({ 
- 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.tv_samsung_8_series_49_volume', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Volume', - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'audio_volume', - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1.volume', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[vd_stv_2017_k][sensor.tv_samsung_8_series_49_volume-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '[TV] Samsung 8 Series (49) Volume', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.tv_samsung_8_series_49_volume', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '13', - }) -# --- # name: test_all_entities[virtual_thermostat][sensor.asd_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -7632,7 +9038,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6.battery', + 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -7683,7 +9089,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6.temperature', + 'unique_id': '2894dc93-0f11-49cc-8a81-3a684cebebf6_main_temperatureMeasurement_temperature_temperature', 'unit_of_measurement': , }) # --- @@ -7733,7 +9139,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116.battery', + 'unique_id': 'a2a6018b-2663-4727-9d1d-8f56953b5116_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- @@ -7782,7 +9188,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158.battery', + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main_battery_battery_battery', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/smartthings/snapshots/test_switch.ambr b/tests/components/smartthings/snapshots/test_switch.ambr index f1b5ce8412e..d14d4d02aa4 100644 --- a/tests/components/smartthings/snapshots/test_switch.ambr +++ b/tests/components/smartthings/snapshots/test_switch.ambr @@ -29,7 +29,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd', + 'unique_id': '10e06a70-ee7d-4832-85e9-a0a06a7a05bd_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -46,7 +46,7 @@ 'state': 'on', }) # --- -# name: test_all_entities[da_ks_microwave_0101x][switch.microwave-entry] +# name: test_all_entities[da_ref_normal_000001][switch.refrigerator_ice_maker-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -59,7 +59,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.microwave', + 'entity_id': 'switch.refrigerator_ice_maker', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -71,22 +71,22 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 
'original_name': 'Ice maker', 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '2bad3237-4886-e699-1b90-4a51a3d55c8a', + 'translation_key': 'ice_maker', + 'unique_id': '7db87911-7dce-1cf2-7119-b953432a2f09_icemaker_switch_switch_switch', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[da_ks_microwave_0101x][switch.microwave-state] +# name: test_all_entities[da_ref_normal_000001][switch.refrigerator_ice_maker-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Microwave', + 'friendly_name': 'Refrigerator Ice maker', }), 'context': , - 'entity_id': 'switch.microwave', + 'entity_id': 'switch.refrigerator_ice_maker', 'last_changed': , 'last_reported': , 'last_updated': , @@ -123,7 +123,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44', + 'unique_id': '3442dfc6-17c0-a65f-dae0-4c6e01786f44_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -140,7 +140,7 @@ 'state': 'off', }) # --- -# name: test_all_entities[da_wm_dw_000001][switch.dishwasher-entry] +# name: test_all_entities[da_sac_ehs_000001_sub][switch.eco_heating_system-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -153,7 +153,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.dishwasher', + 'entity_id': 'switch.eco_heating_system', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -170,24 +170,24 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'f36dc7ce-cac0-0667-dc14-a3704eb5e676', + 'unique_id': '1f98ebd0-ac48-d802-7f62-000001200100_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[da_wm_dw_000001][switch.dishwasher-state] +# name: test_all_entities[da_sac_ehs_000001_sub][switch.eco_heating_system-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dishwasher', + 'friendly_name': 'Eco Heating System', }), 'context': , - 'entity_id': 'switch.dishwasher', + 'entity_id': 'switch.eco_heating_system', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_all_entities[da_wm_wd_000001][switch.dryer-entry] +# name: test_all_entities[da_wm_wd_000001][switch.dryer_wrinkle_prevent-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -200,7 +200,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.dryer', + 'entity_id': 'switch.dryer_wrinkle_prevent', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -212,29 +212,29 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Wrinkle prevent', 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b', + 'translation_key': 'wrinkle_prevent', + 'unique_id': '02f7256e-8353-5bdd-547f-bd5b1647e01b_main_custom.dryerWrinklePrevent_dryerWrinklePrevent_dryerWrinklePrevent', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[da_wm_wd_000001][switch.dryer-state] +# name: test_all_entities[da_wm_wd_000001][switch.dryer_wrinkle_prevent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Dryer', + 'friendly_name': 'Dryer Wrinkle prevent', }), 'context': , - 'entity_id': 'switch.dryer', + 'entity_id': 'switch.dryer_wrinkle_prevent', 'last_changed': , 
'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_all_entities[da_wm_wd_000001_1][switch.seca_roupa-entry] +# name: test_all_entities[da_wm_wd_000001_1][switch.seca_roupa_wrinkle_prevent-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -247,7 +247,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.seca_roupa', + 'entity_id': 'switch.seca_roupa_wrinkle_prevent', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -259,29 +259,29 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Wrinkle prevent', 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd', + 'translation_key': 'wrinkle_prevent', + 'unique_id': '3a6c4e05-811d-5041-e956-3d04c424cbcd_main_custom.dryerWrinklePrevent_dryerWrinklePrevent_dryerWrinklePrevent', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[da_wm_wd_000001_1][switch.seca_roupa-state] +# name: test_all_entities[da_wm_wd_000001_1][switch.seca_roupa_wrinkle_prevent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Seca-Roupa', + 'friendly_name': 'Seca-Roupa Wrinkle prevent', }), 'context': , - 'entity_id': 'switch.seca_roupa', + 'entity_id': 'switch.seca_roupa_wrinkle_prevent', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_all_entities[da_wm_wm_000001][switch.washer-entry] +# name: test_all_entities[da_wm_wm_000001_1][switch.washing_machine_bubble_soak-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -294,7 +294,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.washer', + 'entity_id': 'switch.washing_machine_bubble_soak', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -306,75 +306,28 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Bubble Soak', 'platform': 'smartthings', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'f984b91d-f250-9d42-3436-33f09a422a47', + 'translation_key': 'bubble_soak', + 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7_main_samsungce.washerBubbleSoak_status_status', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[da_wm_wm_000001][switch.washer-state] +# name: test_all_entities[da_wm_wm_000001_1][switch.washing_machine_bubble_soak-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Washer', + 'friendly_name': 'Washing Machine Bubble Soak', }), 'context': , - 'entity_id': 'switch.washer', + 'entity_id': 'switch.washing_machine_bubble_soak', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_all_entities[da_wm_wm_000001_1][switch.washing_machine-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.washing_machine', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': None, - 'unique_id': '63803fae-cbed-f356-a063-2cf148ae3ca7', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[da_wm_wm_000001_1][switch.washing_machine-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Washing Machine', - }), - 'context': , - 'entity_id': 'switch.washing_machine', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_all_entities[generic_ef00_v1][switch.thermostat_kuche-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -405,7 +358,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a', + 'unique_id': '656569c2-7976-4232-a789-34b4d1176c3a_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -452,7 +405,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5', + 'unique_id': 'bf4b1167-48a3-4af7-9186-0900a678ffa5_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -499,7 +452,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398', + 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -546,7 +499,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1', + 'unique_id': '6602696a-1e48-49e4-919f-69406f5b5da1_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- @@ -563,7 +516,7 @@ 'state': 'on', }) # --- -# name: test_all_entities[vd_network_audio_002s][switch.soundbar_living-entry] +# name: test_all_entities[vd_sensor_light_2023][switch.light_sensor_55_the_frame-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -576,7 +529,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.soundbar_living', + 'entity_id': 'switch.light_sensor_55_the_frame', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -593,67 +546,20 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': '0d94e5db-8501-2355-eb4f-214163702cac', + 'unique_id': '5cc1c096-98b9-460c-8f1c-1045509ec605_main_switch_switch_switch', 'unit_of_measurement': None, }) # --- -# name: test_all_entities[vd_network_audio_002s][switch.soundbar_living-state] +# name: test_all_entities[vd_sensor_light_2023][switch.light_sensor_55_the_frame-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Soundbar Living', + 'friendly_name': 'Light Sensor - 55" The Frame', }), 'context': , - 'entity_id': 'switch.soundbar_living', + 'entity_id': 'switch.light_sensor_55_the_frame', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_entities[vd_stv_2017_k][switch.tv_samsung_8_series_49-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'config_subentry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.tv_samsung_8_series_49', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 
'smartthings', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[vd_stv_2017_k][switch.tv_samsung_8_series_49-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': '[TV] Samsung 8 Series (49)', - }), - 'context': , - 'entity_id': 'switch.tv_samsung_8_series_49', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', + 'state': 'off', }) # --- diff --git a/tests/components/smartthings/snapshots/test_update.ambr b/tests/components/smartthings/snapshots/test_update.ambr new file mode 100644 index 00000000000..c27a0b9f5fc --- /dev/null +++ b/tests/components/smartthings/snapshots/test_update.ambr @@ -0,0 +1,421 @@ +# serializer version: 1 +# name: test_all_entities[bosch_radiator_thermostat_ii][update.radiator_thermostat_ii_m_wohnzimmer_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.radiator_thermostat_ii_m_wohnzimmer_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '286ba274-4093-4bcb-849c-a1a3efe7b1e5_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[bosch_radiator_thermostat_ii][update.radiator_thermostat_ii_m_wohnzimmer_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Radiator Thermostat II [+M] Wohnzimmer Firmware', + 'in_progress': False, + 'installed_version': '2.00.09 (20009)', + 'latest_version': '2.00.09 (20009)', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.radiator_thermostat_ii_m_wohnzimmer_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[centralite][update.dimmer_debian_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.dimmer_debian_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'd0268a69-abfb-4c92-a646-61cec2e510ad_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[centralite][update.dimmer_debian_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 
'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Dimmer Debian Firmware', + 'in_progress': False, + 'installed_version': '16015010', + 'latest_version': '16015010', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.dimmer_debian_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[contact_sensor][update.front_door_open_closed_sensor_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.front_door_open_closed_sensor_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '2d9a892b-1c93-45a5-84cb-0e81889498c6_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[contact_sensor][update.front_door_open_closed_sensor_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': '.Front Door Open/Closed Sensor Firmware', + 'in_progress': False, + 'installed_version': '00000103', + 'latest_version': '00000104', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.front_door_open_closed_sensor_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[ikea_kadrilj][update.kitchen_ikea_kadrilj_window_blind_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.kitchen_ikea_kadrilj_window_blind_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '71afed1c-006d-4e48-b16e-e7f88f9fd638_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[ikea_kadrilj][update.kitchen_ikea_kadrilj_window_blind_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Kitchen IKEA KADRILJ Window blind Firmware', + 'in_progress': False, + 'installed_version': '22007631', + 'latest_version': '22007631', + 'release_summary': None, 
+ 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.kitchen_ikea_kadrilj_window_blind_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[multipurpose_sensor][update.deck_door_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.deck_door_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '7d246592-93db-4d72-a10d-5a51793ece8c_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[multipurpose_sensor][update.deck_door_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Deck Door Firmware', + 'in_progress': False, + 'installed_version': '0000001B', + 'latest_version': '0000001B', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.deck_door_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[smart_plug][update.arlo_beta_basestation_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.arlo_beta_basestation_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '550a1c72-65a0-4d55-b97b-75168e055398_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[smart_plug][update.arlo_beta_basestation_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Arlo Beta Basestation Firmware', + 'in_progress': False, + 'installed_version': '00102101', + 'latest_version': '00102101', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.arlo_beta_basestation_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[yale_push_button_deadbolt_lock][update.basement_door_lock_firmware-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.basement_door_lock_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'a9f587c5-5d8b-4273-8907-e7f609af5158_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[yale_push_button_deadbolt_lock][update.basement_door_lock_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/smartthings/icon.png', + 'friendly_name': 'Basement Door Lock Firmware', + 'in_progress': False, + 'installed_version': '00840847', + 'latest_version': '00840847', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.basement_door_lock_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smartthings/snapshots/test_valve.ambr b/tests/components/smartthings/snapshots/test_valve.ambr new file mode 100644 index 00000000000..f82155c8499 --- /dev/null +++ b/tests/components/smartthings/snapshots/test_valve.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_all_entities[virtual_valve][valve.volvo-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'valve', + 'entity_category': None, + 'entity_id': 'valve.volvo', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'smartthings', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3_main', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[virtual_valve][valve.volvo-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'water', + 'friendly_name': 'volvo', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.volvo', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/smartthings/test_binary_sensor.py b/tests/components/smartthings/test_binary_sensor.py index f46be2edc89..9f9d8d66317 100644 --- a/tests/components/smartthings/test_binary_sensor.py +++ b/tests/components/smartthings/test_binary_sensor.py @@ -6,9 +6,15 @@ from pysmartthings import Attribute, Capability import pytest from syrupy import SnapshotAssertion +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from 
homeassistant.components.script import scripts_with_entity +from homeassistant.components.smartthings import DOMAIN, MAIN from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component from . import setup_integration, snapshot_smartthings_entities, trigger_update @@ -39,7 +45,7 @@ async def test_state_update( """Test state update.""" await setup_integration(hass, mock_config_entry) - assert hass.states.get("binary_sensor.refrigerator_door").state == STATE_OFF + assert hass.states.get("binary_sensor.refrigerator_cooler_door").state == STATE_OFF await trigger_update( hass, @@ -48,6 +54,180 @@ async def test_state_update( Capability.CONTACT_SENSOR, Attribute.CONTACT, "open", + component="cooler", ) - assert hass.states.get("binary_sensor.refrigerator_door").state == STATE_ON + assert hass.states.get("binary_sensor.refrigerator_cooler_door").state == STATE_ON + + +@pytest.mark.parametrize( + ("device_fixture", "unique_id", "suggested_object_id", "issue_string", "entity_id"), + [ + ( + "virtual_valve", + f"612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3_{MAIN}_{Capability.VALVE}_{Attribute.VALVE}_{Attribute.VALVE}", + "volvo_valve", + "valve", + "binary_sensor.volvo_valve", + ), + ( + "da_ref_normal_000001", + f"7db87911-7dce-1cf2-7119-b953432a2f09_{MAIN}_{Capability.CONTACT_SENSOR}_{Attribute.CONTACT}_{Attribute.CONTACT}", + "refrigerator_door", + "fridge_door", + "binary_sensor.refrigerator_door", + ), + ], +) +async def test_create_issue_with_items( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + unique_id: str, + suggested_object_id: str, + issue_string: str, + entity_id: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_binary_{issue_string}_{entity_id}" + + entity_entry = entity_registry.async_get_or_create( + BINARY_SENSOR_DOMAIN, + DOMAIN, + unique_id, + suggested_object_id=suggested_object_id, + original_name=suggested_object_id, + ) + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "id": "test", + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get(entity_id).state == STATE_OFF + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == f"deprecated_binary_{issue_string}_scripts" + assert issue.translation_placeholders == { + "entity_id": entity_id, + "entity_name": suggested_object_id, + "items": "- [test](/config/automation/edit/test)\n- [test](/config/script/edit/test)", + } + + entity_registry.async_update_entity( + entity_entry.entity_id, + 
disabled_by=er.RegistryEntryDisabler.USER, + ) + + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.parametrize( + ("device_fixture", "unique_id", "suggested_object_id", "issue_string", "entity_id"), + [ + ( + "virtual_valve", + f"612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3_{MAIN}_{Capability.VALVE}_{Attribute.VALVE}_{Attribute.VALVE}", + "volvo_valve", + "valve", + "binary_sensor.volvo_valve", + ), + ( + "da_ref_normal_000001", + f"7db87911-7dce-1cf2-7119-b953432a2f09_{MAIN}_{Capability.CONTACT_SENSOR}_{Attribute.CONTACT}_{Attribute.CONTACT}", + "refrigerator_door", + "fridge_door", + "binary_sensor.refrigerator_door", + ), + ], +) +async def test_create_issue( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + unique_id: str, + suggested_object_id: str, + issue_string: str, + entity_id: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_binary_{issue_string}_{entity_id}" + + entity_entry = entity_registry.async_get_or_create( + BINARY_SENSOR_DOMAIN, + DOMAIN, + unique_id, + suggested_object_id=suggested_object_id, + original_name=suggested_object_id, + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get(entity_id).state == STATE_OFF + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == f"deprecated_binary_{issue_string}" + assert issue.translation_placeholders == { + "entity_id": entity_id, + "entity_name": suggested_object_id, + } + + entity_registry.async_update_entity( + entity_entry.entity_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/smartthings/test_button.py b/tests/components/smartthings/test_button.py new file mode 100644 index 00000000000..4a348d079ca --- /dev/null +++ b/tests/components/smartthings/test_button.py @@ -0,0 +1,56 @@ +"""Test for the SmartThings button platform.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from pysmartthings import Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.smartthings import MAIN +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration, snapshot_smartthings_entities + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.BUTTON) + + +@pytest.mark.parametrize("device_fixture", ["da_ks_microwave_0101x"]) +async def test_press( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test pressing the stop button.""" + await setup_integration(hass, mock_config_entry) + freezer.move_to("2023-10-21") + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.microwave_stop"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "2bad3237-4886-e699-1b90-4a51a3d55c8a", + Capability.OVEN_OPERATING_STATE, + Command.STOP, + MAIN, + ) diff --git a/tests/components/smartthings/test_climate.py b/tests/components/smartthings/test_climate.py index 380c4072860..75b864598bd 100644 --- a/tests/components/smartthings/test_climate.py +++ b/tests/components/smartthings/test_climate.py @@ -817,10 +817,10 @@ async def test_updating_humidity( ( Capability.THERMOSTAT_MODE, Attribute.SUPPORTED_THERMOSTAT_MODES, - ["coolClean", "dryClean"], + ["rush hour", "heat"], ATTR_HVAC_MODES, - [], - [HVACMode.COOL, HVACMode.DRY], + [HVACMode.AUTO], + [HVACMode.AUTO, HVACMode.HEAT], ), ], ids=[ diff --git a/tests/components/smartthings/test_config_flow.py b/tests/components/smartthings/test_config_flow.py index 4069c201225..d6e8ef03290 100644 --- a/tests/components/smartthings/test_config_flow.py +++ b/tests/components/smartthings/test_config_flow.py @@ -513,7 +513,7 @@ async def test_migration( } assert mock_old_config_entry.unique_id == "397678e5-9995-4a39-9d9f-ae6ba310236c" assert mock_old_config_entry.version == 3 - assert mock_old_config_entry.minor_version == 1 + assert mock_old_config_entry.minor_version == 2 @pytest.mark.usefixtures("current_request_with_host", "use_cloud") @@ -586,7 -586,7 @@ async def test_migration_wrong_location( == "appid123-2be1-4e40-b257-e4ef59083324_397678e5-9995-4a39-9d9f-ae6ba310236c" ) assert mock_old_config_entry.version == 3 - assert mock_old_config_entry.minor_version == 1 + assert mock_old_config_entry.minor_version == 2 @pytest.mark.usefixtures("current_request_with_host") diff --git a/tests/components/smartthings/test_event.py b/tests/components/smartthings/test_event.py new file mode 100644 index 00000000000..34a96e9c6b4 --- /dev/null +++ b/tests/components/smartthings/test_event.py @@ -0,0 +1,99 @@ +"""Test for the SmartThings event platform.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from pysmartthings import Attribute, Capability +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.event import ATTR_EVENT_TYPES +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.EVENT) + + +@pytest.mark.parametrize("device_fixture", ["heatit_zpushwall"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + freezer.move_to("2023-10-21") + + assert ( + hass.states.get("event.livingroom_smart_switch_button1").state == STATE_UNKNOWN + ) + + await trigger_update( + hass, + devices, + "5e5b97f3-3094-44e6-abc0-f61283412d6a", + Capability.BUTTON, + Attribute.BUTTON, + "pushed", + component="button1", + ) + + assert ( + hass.states.get("event.livingroom_smart_switch_button1").state + == "2023-10-21T00:00:00.000+00:00" + ) + + +@pytest.mark.parametrize("device_fixture", ["heatit_zpushwall"]) +async def test_supported_button_values_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test supported button values update.""" + await setup_integration(hass, mock_config_entry) + + freezer.move_to("2023-10-21") + + assert ( + hass.states.get("event.livingroom_smart_switch_button1").state == STATE_UNKNOWN + ) + assert hass.states.get("event.livingroom_smart_switch_button1").attributes[ + ATTR_EVENT_TYPES + ] == ["pushed", "held", "down_hold"] + + await trigger_update( + hass, + devices, + "5e5b97f3-3094-44e6-abc0-f61283412d6a", + Capability.BUTTON, + Attribute.SUPPORTED_BUTTON_VALUES, + ["pushed", "held", "down_hold", "pushed_2x"], + component="button1", + ) + + assert ( + hass.states.get("event.livingroom_smart_switch_button1").state == STATE_UNKNOWN + ) + assert hass.states.get("event.livingroom_smart_switch_button1").attributes[ + ATTR_EVENT_TYPES + ] == ["pushed", "held", "down_hold", "pushed_2x"] diff --git a/tests/components/smartthings/test_init.py b/tests/components/smartthings/test_init.py index cea2b6bb396..1d4b124c60d 100644 --- a/tests/components/smartthings/test_init.py +++ b/tests/components/smartthings/test_init.py @@ -1,18 +1,40 @@ """Tests for the SmartThings component init module.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch -from pysmartthings import Attribute, Capability, SmartThingsSinkError -from pysmartthings.models import Subscription +from aiohttp import ClientResponseError, RequestInfo +from pysmartthings import ( + Attribute, + Capability, + DeviceResponse, + DeviceStatus, + Lifecycle, + SmartThingsSinkError, + Subscription, +) import pytest from syrupy import SnapshotAssertion +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN, HVACMode +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.components.fan import DOMAIN as FAN_DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from 
homeassistant.components.smartthings import EVENT_BUTTON -from homeassistant.components.smartthings.const import CONF_SUBSCRIPTION_ID, DOMAIN +from homeassistant.components.smartthings.const import ( + CONF_INSTALLED_APP_ID, + CONF_LOCATION_ID, + CONF_SUBSCRIPTION_ID, + DOMAIN, + SCOPES, +) +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from . import setup_integration, trigger_update @@ -256,3 +278,422 @@ async def test_removing_stale_devices( await hass.async_block_till_done() assert not device_registry.async_get_device({(DOMAIN, "aaa-bbb-ccc")}) + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_refreshing_expired_token( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that a failed token refresh starts a reauthentication flow.""" + with patch( + "homeassistant.components.smartthings.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + request_info=RequestInfo( + url="http://example.com", + method="GET", + headers={}, + real_url="http://example.com", + ), + status=400, + history=(), + ), + ): + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + assert len(hass.config_entries.flow.async_progress()) == 1 + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_error_refreshing_token( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that a server error while refreshing the token retries setup.""" + with patch( + "homeassistant.components.smartthings.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + request_info=RequestInfo( + url="http://example.com", + method="GET", + headers={}, + real_url="http://example.com", + ), + status=500, + history=(), + ), + ): + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_hub_via_device( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + mock_smartthings: AsyncMock, +) -> None: + """Test hub with child devices.""" + mock_smartthings.get_devices.return_value = DeviceResponse.from_json( + load_fixture("devices/hub.json", DOMAIN) + ).items + mock_smartthings.get_device_status.side_effect = [ + DeviceStatus.from_json( + load_fixture(f"device_status/{fixture}.json", DOMAIN) + ).components + for fixture in ("hub", "multipurpose_sensor") + ] + await setup_integration(hass, mock_config_entry) + + hub_device = device_registry.async_get_device( + {(DOMAIN, "074fa784-8be8-4c70-8e22-6f5ed6f81b7e")} + ) + assert hub_device == snapshot + assert ( + device_registry.async_get_device( + {(DOMAIN, "374ba6fa-5a08-4ea2-969c-1fa43d86e21f")} + ).via_device_id + == hub_device.id + ) + + +@pytest.mark.parametrize("device_fixture", ["da_ac_rac_000001"]) +async def test_deleted_device_runtime( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test devices that are deleted in runtime.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("climate.ac_office_granit").state == HVACMode.OFF + + for call in
devices.add_device_lifecycle_event_listener.call_args_list: + if call[0][0] == Lifecycle.DELETE: + call[0][1]("96a5ef74-5832-a84b-f1f7-ca799957065d") + await hass.async_block_till_done() + + assert hass.states.get("climate.ac_office_granit") is None + + +@pytest.mark.parametrize( + ( + "device_fixture", + "domain", + "old_unique_id", + "suggested_object_id", + "new_unique_id", + ), + [ + ( + "multipurpose_sensor", + BINARY_SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-5a51793ece8c.contact", + "deck_door", + "7d246592-93db-4d72-a10d-5a51793ece8c_main_contactSensor_contact_contact", + ), + ( + "multipurpose_sensor", + SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-5a51793ece8c Y Coordinate", + "deck_door_y_coordinate", + "7d246592-93db-4d72-a10d-5a51793ece8c_main_threeAxis_threeAxis_y_coordinate", + ), + ( + "da_ac_rac_000001", + SENSOR_DOMAIN, + "7d246592-93db-4d72-a10d-ca799957065d.energy_meter", + "ac_office_granit_energy", + "7d246592-93db-4d72-a10d-ca799957065d_main_powerConsumptionReport_powerConsumption_energy_meter", + ), + ( + "da_ac_rac_000001", + CLIMATE_DOMAIN, + "7d246592-93db-4d72-a10d-ca799957065d", + "ac_office_granit", + "7d246592-93db-4d72-a10d-ca799957065d_main", + ), + ( + "c2c_shade", + COVER_DOMAIN, + "571af102-15db-4030-b76b-245a691f74a5", + "curtain_1a", + "571af102-15db-4030-b76b-245a691f74a5_main", + ), + ( + "generic_fan_3_speed", + FAN_DOMAIN, + "6d95a8b7-4ee3-429a-a13a-00ec9354170c", + "bedroom_fan", + "6d95a8b7-4ee3-429a-a13a-00ec9354170c_main", + ), + ( + "hue_rgbw_color_bulb", + LIGHT_DOMAIN, + "cb958955-b015-498c-9e62-fc0c51abd054", + "standing_light", + "cb958955-b015-498c-9e62-fc0c51abd054_main", + ), + ( + "yale_push_button_deadbolt_lock", + LOCK_DOMAIN, + "a9f587c5-5d8b-4273-8907-e7f609af5158", + "basement_door_lock", + "a9f587c5-5d8b-4273-8907-e7f609af5158_main", + ), + ( + "smart_plug", + SWITCH_DOMAIN, + "550a1c72-65a0-4d55-b97b-75168e055398", + "arlo_beta_basestation", + "550a1c72-65a0-4d55-b97b-75168e055398_main_switch_switch_switch", + ), + ], +) +async def test_entity_unique_id_migration( + hass: HomeAssistant, + devices: AsyncMock, + expires_at: int, + entity_registry: er.EntityRegistry, + domain: str, + old_unique_id: str, + suggested_object_id: str, + new_unique_id: str, +) -> None: + """Test entity unique ID migration.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + title="My home", + unique_id="397678e5-9995-4a39-9d9f-ae6ba310236c", + data={ + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": " ".join(SCOPES), + "access_tier": 0, + "installed_app_id": "5aaaa925-2be1-4e40-b257-e4ef59083324", + }, + CONF_LOCATION_ID: "397678e5-9995-4a39-9d9f-ae6ba310236c", + CONF_INSTALLED_APP_ID: "123", + }, + version=3, + minor_version=1, + ) + mock_config_entry.add_to_hass(hass) + entry = entity_registry.async_get_or_create( + domain, + DOMAIN, + old_unique_id, + config_entry=mock_config_entry, + suggested_object_id=suggested_object_id, + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entry.entity_id) + + assert entry.unique_id == new_unique_id + + +@pytest.mark.parametrize( + ( + "device_fixture", + "domain", + "other_unique_id", + "old_unique_id", + "suggested_object_id", + "new_unique_id", + ), + [ + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenJobState", + 
"2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState", + "microwave_machine_state", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState", + ), + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_ovenJobState_ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.machineState", + "microwave_machine_state", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_machineState_machineState", + ), + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a.ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.completionTime", + "microwave_completion_time", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_completionTime_completionTime", + ), + ( + "da_ks_microwave_0101x", + SENSOR_DOMAIN, + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_ovenJobState_ovenJobState", + "2bad3237-4886-e699-1b90-4a51a3d55c8a.completionTime", + "microwave_completion_time", + "2bad3237-4886-e699-1b90-4a51a3d55c8a_main_ovenOperatingState_completionTime_completionTime", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState", + "dishwasher_machine_state", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_dishwasherJobState_dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.machineState", + "dishwasher_machine_state", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_machineState_machineState", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.completionTime", + "dishwasher_completion_time", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_completionTime_completionTime", + ), + ( + "da_wm_dw_000001", + SENSOR_DOMAIN, + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_dishwasherJobState_dishwasherJobState", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676.completionTime", + "dishwasher_completion_time", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676_main_dishwasherOperatingState_completionTime_completionTime", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b.dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState", + "dryer_machine_state", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_dryerJobState_dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.machineState", + "dryer_machine_state", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_machineState_machineState", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b.dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.completionTime", + "dryer_completion_time", + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_completionTime_completionTime", + ), + ( + "da_wm_wd_000001", + SENSOR_DOMAIN, + "02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_dryerJobState_dryerJobState", + "02f7256e-8353-5bdd-547f-bd5b1647e01b.completionTime", + "dryer_completion_time", + 
"02f7256e-8353-5bdd-547f-bd5b1647e01b_main_dryerOperatingState_completionTime_completionTime", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47.washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.machineState", + "washer_machine_state", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_washerJobState_washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.machineState", + "washer_machine_state", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_machineState_machineState", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47.washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.completionTime", + "washer_completion_time", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_completionTime_completionTime", + ), + ( + "da_wm_wm_000001", + SENSOR_DOMAIN, + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_washerJobState_washerJobState", + "f984b91d-f250-9d42-3436-33f09a422a47.completionTime", + "washer_completion_time", + "f984b91d-f250-9d42-3436-33f09a422a47_main_washerOperatingState_completionTime_completionTime", + ), + ], +) +async def test_entity_unique_id_migration_machine_state( + hass: HomeAssistant, + devices: AsyncMock, + expires_at: int, + entity_registry: er.EntityRegistry, + domain: str, + other_unique_id: str, + old_unique_id: str, + suggested_object_id: str, + new_unique_id: str, +) -> None: + """Test entity unique ID migration.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + title="My home", + unique_id="397678e5-9995-4a39-9d9f-ae6ba310236c", + data={ + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "refresh_token": "mock-refresh-token", + "expires_at": expires_at, + "scope": " ".join(SCOPES), + "access_tier": 0, + "installed_app_id": "5aaaa925-2be1-4e40-b257-e4ef59083324", + }, + CONF_LOCATION_ID: "397678e5-9995-4a39-9d9f-ae6ba310236c", + CONF_INSTALLED_APP_ID: "123", + }, + version=3, + minor_version=1, + ) + mock_config_entry.add_to_hass(hass) + entity_registry.async_get_or_create( + domain, + DOMAIN, + other_unique_id, + config_entry=mock_config_entry, + suggested_object_id="job_state", + ) + entry = entity_registry.async_get_or_create( + domain, + DOMAIN, + old_unique_id, + config_entry=mock_config_entry, + suggested_object_id=suggested_object_id, + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entry.entity_id) + + assert entry.unique_id == new_unique_id diff --git a/tests/components/smartthings/test_media_player.py b/tests/components/smartthings/test_media_player.py new file mode 100644 index 00000000000..b7cecfe8408 --- /dev/null +++ b/tests/components/smartthings/test_media_player.py @@ -0,0 +1,432 @@ +"""Test for the SmartThings media player platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command, Status +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + ATTR_MEDIA_VOLUME_MUTED, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + RepeatMode, +) +from homeassistant.components.smartthings.const import MAIN +from homeassistant.const 
import ( + ATTR_ENTITY_ID, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + STATE_OFF, + STATE_PLAYING, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities( + hass, entity_registry, snapshot, Platform.MEDIA_PLAYER + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("action", "command"), + [ + (SERVICE_TURN_ON, Command.ON), + (SERVICE_TURN_OFF, Command.OFF), + ], +) +async def test_turn_on_off( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + command: Command, +) -> None: + """Test media player turn on and off command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + action, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", Capability.SWITCH, command, MAIN + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("muted", "argument"), + [ + (True, "muted"), + (False, "unmuted"), + ], +) +async def test_mute_unmute( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + muted: bool, + argument: str, +) -> None: + """Test media player mute and unmute command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_VOLUME_MUTED: muted}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_MUTE, + Command.SET_MUTE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_set_volume_level( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player set volume level command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_VOLUME_LEVEL: 0.31}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.SET_VOLUME, + MAIN, + argument=31, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_volume_up( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player increase volume level command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_UP, + {ATTR_ENTITY_ID: 
"media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.VOLUME_UP, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_volume_down( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player decrease volume level command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_DOWN, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.AUDIO_VOLUME, + Command.VOLUME_DOWN, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_play( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player play command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PLAY, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.PLAY, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_pause( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player pause command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PAUSE, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.PAUSE, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_stop( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player stop command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_STOP, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.STOP, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_previous_track( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player previous track command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK] = { + Attribute.SUPPORTED_PLAYBACK_COMMANDS: Status(["rewind"]) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PREVIOUS_TRACK, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.REWIND, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_media_next_track( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + 
"""Test media player next track command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK] = { + Attribute.SUPPORTED_PLAYBACK_COMMANDS: Status(["fastForward"]) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, + {ATTR_ENTITY_ID: "media_player.soundbar"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK, + Command.FAST_FORWARD, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_select_source( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test media player stop command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_INPUT_SOURCE: "digital"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_INPUT_SOURCE, + Command.SET_INPUT_SOURCE, + MAIN, + "digital", + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("shuffle", "argument"), + [ + (True, "enabled"), + (False, "disabled"), + ], +) +async def test_media_shuffle_on_off( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + shuffle: bool, + argument: bool, +) -> None: + """Test media player media shuffle command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK_SHUFFLE] = { + Attribute.PLAYBACK_SHUFFLE: Status(True) + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_SHUFFLE: shuffle}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK_SHUFFLE, + Command.SET_PLAYBACK_SHUFFLE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +@pytest.mark.parametrize( + ("repeat", "argument"), + [ + (RepeatMode.OFF, "off"), + (RepeatMode.ONE, "one"), + (RepeatMode.ALL, "all"), + ], +) +async def test_media_repeat_mode( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + repeat: RepeatMode, + argument: bool, +) -> None: + """Test media player repeat mode command.""" + devices.get_device_status.return_value[MAIN][Capability.MEDIA_PLAYBACK_REPEAT] = { + Attribute.REPEAT_MODE: Status("one") + } + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + {ATTR_ENTITY_ID: "media_player.soundbar", ATTR_MEDIA_REPEAT: repeat}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.MEDIA_PLAYBACK_REPEAT, + Command.SET_PLAYBACK_REPEAT_MODE, + MAIN, + argument=argument, + ) + + +@pytest.mark.parametrize("device_fixture", ["hw_q80r_soundbar"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("media_player.soundbar").state == STATE_PLAYING + + await trigger_update( + hass, + 
devices, + "afcf3b91-0000-1111-2222-ddff2a0a6577", + Capability.SWITCH, + Attribute.SWITCH, + "off", + ) + + assert hass.states.get("media_player.soundbar").state == STATE_OFF diff --git a/tests/components/smartthings/test_number.py b/tests/components/smartthings/test_number.py new file mode 100644 index 00000000000..578b94e050f --- /dev/null +++ b/tests/components/smartthings/test_number.py @@ -0,0 +1,81 @@ +"""Test for the SmartThings number platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.components.smartthings import MAIN +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.NUMBER) + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wm_000001"]) +async def test_set_value( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting a value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: "number.washer_rinse_cycles", ATTR_VALUE: 3}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "f984b91d-f250-9d42-3436-33f09a422a47", + Capability.CUSTOM_WASHER_RINSE_CYCLES, + Command.SET_WASHER_RINSE_CYCLES, + MAIN, + argument="3", + ) + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wm_000001"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("number.washer_rinse_cycles").state == "2" + + await trigger_update( + hass, + devices, + "f984b91d-f250-9d42-3436-33f09a422a47", + Capability.CUSTOM_WASHER_RINSE_CYCLES, + Attribute.WASHER_RINSE_CYCLES, + "3", + ) + + assert hass.states.get("number.washer_rinse_cycles").state == "3" diff --git a/tests/components/smartthings/test_select.py b/tests/components/smartthings/test_select.py new file mode 100644 index 00000000000..2c5c55239f2 --- /dev/null +++ b/tests/components/smartthings/test_select.py @@ -0,0 +1,121 @@ +"""Test for the SmartThings select platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.components.smartthings import MAIN +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . 
import ( + set_attribute_value, + setup_integration, + snapshot_smartthings_entities, + trigger_update, +) + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.SELECT) + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wd_000001"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("select.dryer").state == "stop" + + await trigger_update( + hass, + devices, + "02f7256e-8353-5bdd-547f-bd5b1647e01b", + Capability.DRYER_OPERATING_STATE, + Attribute.MACHINE_STATE, + "run", + ) + + assert hass.states.get("select.dryer").state == "run" + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wd_000001"]) +async def test_select_option( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test selecting an option.""" + set_attribute_value( + devices, + Capability.REMOTE_CONTROL_STATUS, + Attribute.REMOTE_CONTROL_ENABLED, + "true", + ) + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: "select.dryer", ATTR_OPTION: "run"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "02f7256e-8353-5bdd-547f-bd5b1647e01b", + Capability.DRYER_OPERATING_STATE, + Command.SET_MACHINE_STATE, + MAIN, + argument="run", + ) + + +@pytest.mark.parametrize("device_fixture", ["da_wm_wd_000001"]) +async def test_select_option_without_remote_control( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test selecting an option fails when remote control is disabled.""" + set_attribute_value( + devices, + Capability.REMOTE_CONTROL_STATUS, + Attribute.REMOTE_CONTROL_ENABLED, + "false", + ) + await setup_integration(hass, mock_config_entry) + + with pytest.raises( + ServiceValidationError, + match="Can only be updated when remote control is enabled", + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: "select.dryer", ATTR_OPTION: "run"}, + blocking=True, + ) + devices.execute_device_command.assert_not_called() diff --git a/tests/components/smartthings/test_sensor.py b/tests/components/smartthings/test_sensor.py index c83950de9e9..e90c177bd6d 100644 --- a/tests/components/smartthings/test_sensor.py +++ b/tests/components/smartthings/test_sensor.py @@ -6,9 +6,15 @@ from pysmartthings import Attribute, Capability import pytest from syrupy import SnapshotAssertion -from homeassistant.const import Platform +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.smartthings.const import DOMAIN, MAIN +from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import 
async_setup_component from . import setup_integration, snapshot_smartthings_entities, trigger_update @@ -49,3 +55,244 @@ async def test_state_update( ) assert hass.states.get("sensor.ac_office_granit_temperature").state == "20" + + +@pytest.mark.parametrize( + ( + "device_fixture", + "unique_id", + "suggested_object_id", + "issue_string", + "entity_id", + "expected_state", + ), + [ + ( + "vd_stv_2017_k", + f"4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_{MAIN}_{Capability.MEDIA_PLAYBACK}_{Attribute.PLAYBACK_STATUS}_{Attribute.PLAYBACK_STATUS}", + "tv_samsung_8_series_49_media_playback_status", + "media_player", + "sensor.tv_samsung_8_series_49_media_playback_status", + STATE_UNKNOWN, + ), + ( + "vd_stv_2017_k", + f"4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_{MAIN}_{Capability.AUDIO_VOLUME}_{Attribute.VOLUME}_{Attribute.VOLUME}", + "tv_samsung_8_series_49_volume", + "media_player", + "sensor.tv_samsung_8_series_49_volume", + "13", + ), + ( + "vd_stv_2017_k", + f"4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_{MAIN}_{Capability.MEDIA_INPUT_SOURCE}_{Attribute.INPUT_SOURCE}_{Attribute.INPUT_SOURCE}", + "tv_samsung_8_series_49_media_input_source", + "media_player", + "sensor.tv_samsung_8_series_49_media_input_source", + "hdmi1", + ), + ( + "im_speaker_ai_0001", + f"c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_{MAIN}_{Capability.MEDIA_PLAYBACK_REPEAT}_{Attribute.PLAYBACK_REPEAT_MODE}_{Attribute.PLAYBACK_REPEAT_MODE}", + "galaxy_home_mini_media_playback_repeat", + "media_player", + "sensor.galaxy_home_mini_media_playback_repeat", + "off", + ), + ( + "im_speaker_ai_0001", + f"c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_{MAIN}_{Capability.MEDIA_PLAYBACK_SHUFFLE}_{Attribute.PLAYBACK_SHUFFLE}_{Attribute.PLAYBACK_SHUFFLE}", + "galaxy_home_mini_media_playback_shuffle", + "media_player", + "sensor.galaxy_home_mini_media_playback_shuffle", + "disabled", + ), + ], +) +async def test_create_issue_with_items( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + unique_id: str, + suggested_object_id: str, + issue_string: str, + entity_id: str, + expected_state: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_{issue_string}_{entity_id}" + + entity_entry = entity_registry.async_get_or_create( + SENSOR_DOMAIN, + DOMAIN, + unique_id, + suggested_object_id=suggested_object_id, + original_name=suggested_object_id, + ) + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "id": "test", + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get(entity_id).state == expected_state + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == f"deprecated_{issue_string}_scripts" + assert issue.translation_placeholders == { + "entity_id": entity_id, + "entity_name": 
suggested_object_id, + "items": "- [test](/config/automation/edit/test)\n- [test](/config/script/edit/test)", + } + + entity_registry.async_update_entity( + entity_entry.entity_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.parametrize( + ( + "device_fixture", + "unique_id", + "suggested_object_id", + "issue_string", + "entity_id", + "expected_state", + ), + [ + ( + "vd_stv_2017_k", + f"4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_{MAIN}_{Capability.MEDIA_PLAYBACK}_{Attribute.PLAYBACK_STATUS}_{Attribute.PLAYBACK_STATUS}", + "tv_samsung_8_series_49_media_playback_status", + "media_player", + "sensor.tv_samsung_8_series_49_media_playback_status", + STATE_UNKNOWN, + ), + ( + "vd_stv_2017_k", + f"4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_{MAIN}_{Capability.AUDIO_VOLUME}_{Attribute.VOLUME}_{Attribute.VOLUME}", + "tv_samsung_8_series_49_volume", + "media_player", + "sensor.tv_samsung_8_series_49_volume", + "13", + ), + ( + "vd_stv_2017_k", + f"4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1_{MAIN}_{Capability.MEDIA_INPUT_SOURCE}_{Attribute.INPUT_SOURCE}_{Attribute.INPUT_SOURCE}", + "tv_samsung_8_series_49_media_input_source", + "media_player", + "sensor.tv_samsung_8_series_49_media_input_source", + "hdmi1", + ), + ( + "im_speaker_ai_0001", + f"c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_{MAIN}_{Capability.MEDIA_PLAYBACK_REPEAT}_{Attribute.PLAYBACK_REPEAT_MODE}_{Attribute.PLAYBACK_REPEAT_MODE}", + "galaxy_home_mini_media_playback_repeat", + "media_player", + "sensor.galaxy_home_mini_media_playback_repeat", + "off", + ), + ( + "im_speaker_ai_0001", + f"c9276e43-fe3c-88c3-1dcc-2eb79e292b8c_{MAIN}_{Capability.MEDIA_PLAYBACK_SHUFFLE}_{Attribute.PLAYBACK_SHUFFLE}_{Attribute.PLAYBACK_SHUFFLE}", + "galaxy_home_mini_media_playback_shuffle", + "media_player", + "sensor.galaxy_home_mini_media_playback_shuffle", + "disabled", + ), + ], +) +async def test_create_issue( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + unique_id: str, + suggested_object_id: str, + issue_string: str, + entity_id: str, + expected_state: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + issue_id = f"deprecated_{issue_string}_{entity_id}" + + entity_entry = entity_registry.async_get_or_create( + SENSOR_DOMAIN, + DOMAIN, + unique_id, + suggested_object_id=suggested_object_id, + original_name=suggested_object_id, + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get(entity_id).state == expected_state + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == f"deprecated_{issue_string}" + assert issue.translation_placeholders == { + "entity_id": entity_id, + "entity_name": suggested_object_id, + } + + entity_registry.async_update_entity( + entity_entry.entity_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git 
a/tests/components/smartthings/test_switch.py b/tests/components/smartthings/test_switch.py index a1e420a8edb..a47ecde7e0d 100644 --- a/tests/components/smartthings/test_switch.py +++ b/tests/components/smartthings/test_switch.py @@ -6,7 +6,10 @@ from pysmartthings import Attribute, Capability, Command import pytest from syrupy import SnapshotAssertion -from homeassistant.components.smartthings.const import MAIN +from homeassistant.components import automation, script +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.components.smartthings.const import DOMAIN, MAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -17,7 +20,8 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component from . import setup_integration, snapshot_smartthings_entities, trigger_update @@ -66,6 +70,39 @@ async def test_switch_turn_on_off( ) +@pytest.mark.parametrize("device_fixture", ["da_wm_wd_000001"]) +@pytest.mark.parametrize( + ("action", "argument"), + [ + (SERVICE_TURN_ON, "on"), + (SERVICE_TURN_OFF, "off"), + ], +) +async def test_command_switch_turn_on_off( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + argument: str, +) -> None: + """Test switch turn on and off command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + action, + {ATTR_ENTITY_ID: "switch.dryer_wrinkle_prevent"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "02f7256e-8353-5bdd-547f-bd5b1647e01b", + Capability.CUSTOM_DRYER_WRINKLE_PREVENT, + Command.SET_DRYER_WRINKLE_PREVENT, + MAIN, + argument, + ) + + @pytest.mark.parametrize("device_fixture", ["c2c_arlo_pro_3_switch"]) async def test_state_update( hass: HomeAssistant, @@ -87,3 +124,256 @@ async def test_state_update( ) assert hass.states.get("switch.2nd_floor_hallway").state == STATE_OFF + + +@pytest.mark.parametrize( + ("device_fixture", "device_id", "suggested_object_id", "issue_string"), + [ + ( + "da_ks_cooktop_31001", + "808dbd84-f357-47e2-a0cd-3b66fa22d584", + "induction_hob", + "appliance", + ), + ( + "da_ks_microwave_0101x", + "2bad3237-4886-e699-1b90-4a51a3d55c8a", + "microwave", + "appliance", + ), + ( + "da_wm_dw_000001", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676", + "dishwasher", + "appliance", + ), + ( + "da_wm_sc_000001", + "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "airdresser", + "appliance", + ), + ( + "da_wm_wd_000001", + "02f7256e-8353-5bdd-547f-bd5b1647e01b", + "dryer", + "appliance", + ), + ( + "da_wm_wm_000001", + "f984b91d-f250-9d42-3436-33f09a422a47", + "washer", + "appliance", + ), + ( + "hw_q80r_soundbar", + "afcf3b91-0000-1111-2222-ddff2a0a6577", + "soundbar", + "media_player", + ), + ( + "vd_network_audio_002s", + "0d94e5db-8501-2355-eb4f-214163702cac", + "soundbar_living", + "media_player", + ), + ( + "vd_stv_2017_k", + "4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1", + "tv_samsung_8_series_49", + "media_player", + ), + ], +) +async def test_create_issue_with_items( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + 
device_id: str, + suggested_object_id: str, + issue_string: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + entity_id = f"switch.{suggested_object_id}" + issue_id = f"deprecated_switch_{issue_string}_{entity_id}" + + entity_entry = entity_registry.async_get_or_create( + SWITCH_DOMAIN, + DOMAIN, + f"{device_id}_{MAIN}_{Capability.SWITCH}_{Attribute.SWITCH}_{Attribute.SWITCH}", + suggested_object_id=suggested_object_id, + original_name=suggested_object_id, + ) + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "id": "test", + "alias": "test", + "trigger": {"platform": "state", "entity_id": entity_id}, + "action": { + "action": "automation.turn_on", + "target": { + "entity_id": "automation.test", + }, + }, + } + }, + ) + assert await async_setup_component( + hass, + script.DOMAIN, + { + script.DOMAIN: { + "test": { + "sequence": [ + { + "condition": "state", + "entity_id": entity_id, + "state": "on", + }, + ], + } + } + }, + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get(entity_id).state in [STATE_OFF, STATE_ON] + + assert automations_with_entity(hass, entity_id)[0] == "automation.test" + assert scripts_with_entity(hass, entity_id)[0] == "script.test" + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == f"deprecated_switch_{issue_string}_scripts" + assert issue.translation_placeholders == { + "entity_id": entity_id, + "entity_name": suggested_object_id, + "items": "- [test](/config/automation/edit/test)\n- [test](/config/script/edit/test)", + } + + entity_registry.async_update_entity( + entity_entry.entity_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 + + +@pytest.mark.parametrize( + ("device_fixture", "device_id", "suggested_object_id", "issue_string"), + [ + ( + "da_ks_cooktop_31001", + "808dbd84-f357-47e2-a0cd-3b66fa22d584", + "induction_hob", + "appliance", + ), + ( + "da_ks_microwave_0101x", + "2bad3237-4886-e699-1b90-4a51a3d55c8a", + "microwave", + "appliance", + ), + ( + "da_wm_dw_000001", + "f36dc7ce-cac0-0667-dc14-a3704eb5e676", + "dishwasher", + "appliance", + ), + ( + "da_wm_sc_000001", + "b93211bf-9d96-bd21-3b2f-964fcc87f5cc", + "airdresser", + "appliance", + ), + ( + "da_wm_wd_000001", + "02f7256e-8353-5bdd-547f-bd5b1647e01b", + "dryer", + "appliance", + ), + ( + "da_wm_wm_000001", + "f984b91d-f250-9d42-3436-33f09a422a47", + "washer", + "appliance", + ), + ( + "hw_q80r_soundbar", + "afcf3b91-0000-1111-2222-ddff2a0a6577", + "soundbar", + "media_player", + ), + ( + "vd_network_audio_002s", + "0d94e5db-8501-2355-eb4f-214163702cac", + "soundbar_living", + "media_player", + ), + ( + "vd_stv_2017_k", + "4588d2d9-a8cf-40f4-9a0b-ed5dfbaccda1", + "tv_samsung_8_series_49", + "media_player", + ), + ], +) +async def test_create_issue( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + device_id: str, + suggested_object_id: str, + issue_string: str, +) -> None: + """Test we create an issue when an automation or script is using a deprecated entity.""" + entity_id = 
f"switch.{suggested_object_id}" + issue_id = f"deprecated_switch_{issue_string}_{entity_id}" + + entity_entry = entity_registry.async_get_or_create( + SWITCH_DOMAIN, + DOMAIN, + f"{device_id}_{MAIN}_{Capability.SWITCH}_{Attribute.SWITCH}_{Attribute.SWITCH}", + suggested_object_id=suggested_object_id, + original_name=suggested_object_id, + ) + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get(entity_id).state in [STATE_OFF, STATE_ON] + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, issue_id) + assert issue is not None + assert issue.translation_key == f"deprecated_switch_{issue_string}" + assert issue.translation_placeholders == { + "entity_id": entity_id, + "entity_name": suggested_object_id, + } + + entity_registry.async_update_entity( + entity_entry.entity_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + # Assert the issue is no longer present + assert not issue_registry.async_get_issue(DOMAIN, issue_id) + assert len(issue_registry.issues) == 0 diff --git a/tests/components/smartthings/test_update.py b/tests/components/smartthings/test_update.py new file mode 100644 index 00000000000..8c3d9e1a968 --- /dev/null +++ b/tests/components/smartthings/test_update.py @@ -0,0 +1,142 @@ +"""Test for the SmartThings update platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.smartthings.const import MAIN +from homeassistant.components.update import ( + ATTR_IN_PROGRESS, + DOMAIN as UPDATE_DOMAIN, + SERVICE_INSTALL, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.UPDATE) + + +@pytest.mark.parametrize("device_fixture", ["contact_sensor"]) +async def test_installing_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test installing an update.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: "update.front_door_open_closed_sensor_firmware"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "2d9a892b-1c93-45a5-84cb-0e81889498c6", + Capability.FIRMWARE_UPDATE, + Command.UPDATE_FIRMWARE, + MAIN, + ) + + +@pytest.mark.parametrize("device_fixture", ["contact_sensor"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("update.front_door_open_closed_sensor_firmware").state + == STATE_ON + ) + + await trigger_update( + hass, + devices, + "2d9a892b-1c93-45a5-84cb-0e81889498c6", + Capability.FIRMWARE_UPDATE, + Attribute.CURRENT_VERSION, + "00000104", + ) + + assert ( + hass.states.get("update.front_door_open_closed_sensor_firmware").state + == STATE_OFF + ) + + +@pytest.mark.parametrize("device_fixture", ["contact_sensor"]) +async def test_state_progress_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state progress update.""" + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("update.front_door_open_closed_sensor_firmware").attributes[ + ATTR_IN_PROGRESS + ] + is False + ) + + await trigger_update( + hass, + devices, + "2d9a892b-1c93-45a5-84cb-0e81889498c6", + Capability.FIRMWARE_UPDATE, + Attribute.STATE, + "updateInProgress", + ) + + assert ( + hass.states.get("update.front_door_open_closed_sensor_firmware").attributes[ + ATTR_IN_PROGRESS + ] + is True + ) + + +@pytest.mark.parametrize("device_fixture", ["centralite"]) +async def test_state_update_available( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update available.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("update.dimmer_debian_firmware").state == STATE_OFF + + await trigger_update( + hass, + devices, + "d0268a69-abfb-4c92-a646-61cec2e510ad", + Capability.FIRMWARE_UPDATE, + Attribute.AVAILABLE_VERSION, + "16015011", + ) + + assert hass.states.get("update.dimmer_debian_firmware").state == STATE_ON diff --git a/tests/components/smartthings/test_valve.py b/tests/components/smartthings/test_valve.py new file mode 100644 index 00000000000..f0ba34c8264 --- /dev/null +++ b/tests/components/smartthings/test_valve.py @@ -0,0 +1,87 @@ +"""Test for the SmartThings valve platform.""" + +from unittest.mock import AsyncMock + +from pysmartthings import Attribute, Capability, Command +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.smartthings import MAIN +from 
homeassistant.components.valve import DOMAIN as VALVE_DOMAIN, ValveState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_VALVE, + SERVICE_OPEN_VALVE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration, snapshot_smartthings_entities, trigger_update + +from tests.common import MockConfigEntry + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + await setup_integration(hass, mock_config_entry) + + snapshot_smartthings_entities(hass, entity_registry, snapshot, Platform.VALVE) + + +@pytest.mark.parametrize("device_fixture", ["virtual_valve"]) +@pytest.mark.parametrize( + ("action", "command"), + [ + (SERVICE_OPEN_VALVE, Command.OPEN), + (SERVICE_CLOSE_VALVE, Command.CLOSE), + ], +) +async def test_valve_open_close( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, + action: str, + command: Command, +) -> None: + """Test valve open and close command.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + VALVE_DOMAIN, + action, + {ATTR_ENTITY_ID: "valve.volvo"}, + blocking=True, + ) + devices.execute_device_command.assert_called_once_with( + "612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3", Capability.VALVE, command, MAIN + ) + + +@pytest.mark.parametrize("device_fixture", ["virtual_valve"]) +async def test_state_update( + hass: HomeAssistant, + devices: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test state update.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("valve.volvo").state == ValveState.CLOSED + + await trigger_update( + hass, + devices, + "612ab3c2-3bb0-48f7-b2c0-15b169cb2fc3", + Capability.VALVE, + Attribute.VALVE, + "open", + ) + + assert hass.states.get("valve.volvo").state == ValveState.OPEN diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py index c8933029ce6..4ecfe9366e3 100644 --- a/tests/components/smlight/test_config_flow.py +++ b/tests/components/smlight/test_config_flow.py @@ -193,7 +193,7 @@ async def test_zeroconf_flow_auth( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_zeroconf_unsupported_abort( @@ -406,7 +406,7 @@ async def test_user_invalid_auth( } assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_user_cannot_connect( @@ -443,7 +443,7 @@ async def test_user_cannot_connect( assert result2["title"] == "SLZB-06p7" assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_smlight_client.get_info.mock_calls) == 3 + assert len(mock_smlight_client.get_info.mock_calls) == 2 async def test_auth_cannot_connect( diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py index f130d7ccf30..bec73bc514a 100644 --- a/tests/components/smlight/test_sensor.py +++ b/tests/components/smlight/test_sensor.py @@ -2,17 +2,18 @@ from unittest.mock import MagicMock -from pysmlight import Sensors +from pysmlight import Info, Sensors import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.components.smlight.const 
import DOMAIN from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from .conftest import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform pytestmark = [ pytest.mark.usefixtures( @@ -73,3 +74,38 @@ async def test_zigbee_uptime_disconnected( state = hass.states.get("sensor.mock_title_zigbee_uptime") assert state.state == STATE_UNKNOWN + + +async def test_zigbee2_temp_sensor( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test for zb_temp2 if device has second radio.""" + mock_smlight_client.get_sensors.return_value = Sensors(zb_temp2=20.45) + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.mock_title_zigbee_chip_temp_2") + assert state + assert state.state == "20.45" + + +async def test_zigbee_type_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test for zigbee type sensor with second radio.""" + mock_smlight_client.get_info.side_effect = None + mock_smlight_client.get_info.return_value = Info.from_dict( + load_json_object_fixture("info-MR1.json", DOMAIN) + ) + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.mock_title_zigbee_type") + assert state + assert state.state == "coordinator" + + state = hass.states.get("sensor.mock_title_zigbee_type_2") + assert state + assert state.state == "router" diff --git a/tests/components/smlight/test_update.py b/tests/components/smlight/test_update.py index 86d19968910..d120a08d519 100644 --- a/tests/components/smlight/test_update.py +++ b/tests/components/smlight/test_update.py @@ -154,10 +154,9 @@ async def test_update_zigbee2_firmware( mock_smlight_client: MagicMock, ) -> None: """Test update of zigbee2 firmware where available.""" + mock_info = Info.from_dict(load_json_object_fixture("info-MR1.json", DOMAIN)) mock_smlight_client.get_info.side_effect = None - mock_smlight_client.get_info.return_value = Info.from_dict( - load_json_object_fixture("info-MR1.json", DOMAIN) - ) + mock_smlight_client.get_info.return_value = mock_info await setup_integration(hass, mock_config_entry) entity_id = "update.mock_title_zigbee_firmware_2" state = hass.states.get(entity_id) @@ -177,17 +176,17 @@ async def test_update_zigbee2_firmware( event_function = get_mock_event_function(mock_smlight_client, SmEvents.FW_UPD_done) event_function(MOCK_FIRMWARE_DONE) - with patch( - "homeassistant.components.smlight.update.get_radio", return_value=MOCK_RADIO - ): - freezer.tick(timedelta(seconds=5)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - assert state.attributes[ATTR_INSTALLED_VERSION] == "20240716" - assert state.attributes[ATTR_LATEST_VERSION] == "20240716" + mock_info.radios[1] = MOCK_RADIO + + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + assert state.attributes[ATTR_INSTALLED_VERSION] == "20240716" + assert state.attributes[ATTR_LATEST_VERSION] == "20240716" async def test_update_legacy_firmware_v2( diff --git a/tests/components/snoo/__init__.py b/tests/components/snoo/__init__.py index 
f8529251720..b4692e6f08b 100644 --- a/tests/components/snoo/__init__.py +++ b/tests/components/snoo/__init__.py @@ -1,5 +1,11 @@ """Tests for the Happiest Baby Snoo integration.""" +from collections.abc import Awaitable, Callable +from unittest.mock import AsyncMock + +import pytest +from python_snoo.containers import SnooData + from homeassistant.components.snoo.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME @@ -36,3 +42,13 @@ async def async_init_integration(hass: HomeAssistant) -> ConfigEntry: await hass.async_block_till_done() return entry + + +def find_update_callback( + mock: AsyncMock, serial_number: str +) -> Callable[[SnooData], Awaitable[None]]: + """Find the update callback for a specific identifier.""" + for call in mock.subscribe.call_args_list: + if call[0][0].serialNumber == serial_number: + return call[0][1] + pytest.fail(f"Callback for identifier {serial_number} not found") diff --git a/tests/components/snoo/conftest.py b/tests/components/snoo/conftest.py index 33642e67ff5..6163fa56b7f 100644 --- a/tests/components/snoo/conftest.py +++ b/tests/components/snoo/conftest.py @@ -5,9 +5,8 @@ from unittest.mock import AsyncMock, patch import pytest from python_snoo.containers import SnooDevice -from python_snoo.snoo import Snoo -from .const import MOCK_AMAZON_AUTH, MOCK_SNOO_AUTH, MOCK_SNOO_DEVICES +from .const import MOCK_SNOO_DEVICES, MOCKED_AUTH @pytest.fixture @@ -19,55 +18,14 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -class MockedSnoo(Snoo): - """Mock the Snoo object.""" - - def __init__(self, email, password, clientsession) -> None: - """Set up a Mocked Snoo.""" - super().__init__(email, password, clientsession) - self.auth_error = None - - async def subscribe(self, device: SnooDevice, function): - """Mock the subscribe function.""" - return AsyncMock() - - async def send_command(self, command: str, device: SnooDevice, **kwargs): - """Mock the send command function.""" - return AsyncMock() - - async def authorize(self): - """Do normal auth flow unless error is patched.""" - if self.auth_error: - raise self.auth_error - return await super().authorize() - - def set_auth_error(self, error: Exception | None): - """Set an error for authentication.""" - self.auth_error = error - - async def auth_amazon(self): - """Mock the amazon auth.""" - return MOCK_AMAZON_AUTH - - async def auth_snoo(self, id_token): - """Mock the snoo auth.""" - return MOCK_SNOO_AUTH - - async def schedule_reauthorization(self, snoo_expiry: int): - """Mock scheduling reauth.""" - return AsyncMock() - - async def get_devices(self) -> list[SnooDevice]: - """Move getting devices.""" - return [SnooDevice.from_dict(dev) for dev in MOCK_SNOO_DEVICES] - - @pytest.fixture(name="bypass_api") -def bypass_api() -> MockedSnoo: +def bypass_api() -> Generator[AsyncMock]: """Bypass the Snoo api.""" - api = MockedSnoo("email", "password", AsyncMock()) with ( - patch("homeassistant.components.snoo.Snoo", return_value=api), - patch("homeassistant.components.snoo.config_flow.Snoo", return_value=api), + patch("homeassistant.components.snoo.Snoo", autospec=True) as mock_client, + patch("homeassistant.components.snoo.config_flow.Snoo", new=mock_client), ): - yield api + client = mock_client.return_value + client.get_devices.return_value = [SnooDevice.from_dict(MOCK_SNOO_DEVICES[0])] + client.authorize.return_value = MOCKED_AUTH + yield client diff --git a/tests/components/snoo/const.py 
b/tests/components/snoo/const.py index c5d53780fa1..2657048afb8 100644 --- a/tests/components/snoo/const.py +++ b/tests/components/snoo/const.py @@ -1,5 +1,9 @@ """Snoo constants for testing.""" +import time + +from python_snoo.containers import AuthorizationInfo, SnooData + MOCK_AMAZON_AUTH = { # This is a JWT with random values. "AccessToken": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhMWIyYzNkNC1lNWY2" @@ -32,3 +36,36 @@ MOCK_SNOO_DEVICES = [ "provisionedAt": "random_time", } ] + +MOCK_SNOO_DATA = SnooData.from_dict( + { + "system_state": "normal", + "sw_version": "v1.14.27", + "state_machine": { + "session_id": "0", + "state": "ONLINE", + "is_active_session": "false", + "since_session_start_ms": -1, + "time_left": -1, + "hold": "off", + "weaning": "off", + "audio": "on", + "up_transition": "NONE", + "down_transition": "NONE", + "sticky_white_noise": "off", + }, + "left_safety_clip": 1, + "right_safety_clip": 1, + "event": "status_requested", + "event_time_ms": int(time.time()), + "rx_signal": {"rssi": -45, "strength": 100}, + } +) + + +MOCKED_AUTH = AuthorizationInfo( + snoo=MOCK_SNOO_AUTH, + aws_access=MOCK_AMAZON_AUTH["AccessToken"], + aws_id=MOCK_AMAZON_AUTH["IdToken"], + aws_refresh=MOCK_AMAZON_AUTH["RefreshToken"], +) diff --git a/tests/components/snoo/test_binary_sensor.py b/tests/components/snoo/test_binary_sensor.py new file mode 100644 index 00000000000..77b2e36c1fe --- /dev/null +++ b/tests/components/snoo/test_binary_sensor.py @@ -0,0 +1,30 @@ +"""Test Snoo Binary Sensors.""" + +from unittest.mock import AsyncMock + +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . import async_init_integration, find_update_callback +from .const import MOCK_SNOO_DATA + + +async def test_binary_sensors(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test binary sensors and check test values are correctly set.""" + await async_init_integration(hass) + assert len(hass.states.async_all("binary_sensor")) == 2 + assert ( + hass.states.get("binary_sensor.test_snoo_left_safety_clip").state + == STATE_UNAVAILABLE + ) + assert ( + hass.states.get("binary_sensor.test_snoo_right_safety_clip").state + == STATE_UNAVAILABLE + ) + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + await hass.async_block_till_done() + assert len(hass.states.async_all("binary_sensor")) == 2 + assert hass.states.get("binary_sensor.test_snoo_left_safety_clip").state == STATE_ON + assert ( + hass.states.get("binary_sensor.test_snoo_right_safety_clip").state == STATE_ON + ) diff --git a/tests/components/snoo/test_config_flow.py b/tests/components/snoo/test_config_flow.py index ffdfb22142d..9e07f011cd4 100644 --- a/tests/components/snoo/test_config_flow.py +++ b/tests/components/snoo/test_config_flow.py @@ -13,11 +13,10 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . 
import create_entry -from .conftest import MockedSnoo async def test_config_flow_success( - hass: HomeAssistant, mock_setup_entry: AsyncMock, bypass_api: MockedSnoo + hass: HomeAssistant, mock_setup_entry: AsyncMock, bypass_api: AsyncMock ) -> None: """Test we create the entry successfully.""" result = await hass.config_entries.flow.async_init( @@ -55,7 +54,7 @@ async def test_config_flow_success( async def test_form_auth_issues( hass: HomeAssistant, mock_setup_entry: AsyncMock, - bypass_api: MockedSnoo, + bypass_api: AsyncMock, exception, error_msg, ) -> None: @@ -64,7 +63,7 @@ async def test_form_auth_issues( DOMAIN, context={"source": config_entries.SOURCE_USER} ) # Set Authorize to fail. - bypass_api.set_auth_error(exception) + bypass_api.authorize.side_effect = exception result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -73,10 +72,9 @@ async def test_form_auth_issues( }, ) # Reset auth back to the original - bypass_api.set_auth_error(None) assert result["type"] == FlowResultType.FORM assert result["errors"] == {"base": error_msg} - + bypass_api.authorize.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -84,7 +82,6 @@ async def test_form_auth_issues( CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result["type"] == FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -96,7 +93,7 @@ async def test_form_auth_issues( async def test_account_already_configured( - hass: HomeAssistant, mock_setup_entry: AsyncMock, bypass_api + hass: HomeAssistant, mock_setup_entry: AsyncMock, bypass_api: AsyncMock ) -> None: """Ensure we abort if the config flow already exists.""" create_entry(hass) diff --git a/tests/components/snoo/test_event.py b/tests/components/snoo/test_event.py new file mode 100644 index 00000000000..41cb386a599 --- /dev/null +++ b/tests/components/snoo/test_event.py @@ -0,0 +1,45 @@ +"""Test Snoo Events.""" + +from unittest.mock import AsyncMock + +from freezegun import freeze_time + +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . 
import async_init_integration, find_update_callback +from .const import MOCK_SNOO_DATA + + +@freeze_time("2025-01-01 12:00:00") +async def test_events(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test events and check test values are correctly set.""" + await async_init_integration(hass) + assert len(hass.states.async_all("event")) == 1 + assert hass.states.get("event.test_snoo_snoo_event").state == STATE_UNAVAILABLE + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + await hass.async_block_till_done() + assert len(hass.states.async_all("event")) == 1 + assert ( + hass.states.get("event.test_snoo_snoo_event").state + == "2025-01-01T12:00:00.000+00:00" + ) + + +@freeze_time("2025-01-01 12:00:00") +async def test_events_data_on_startup( + hass: HomeAssistant, bypass_api: AsyncMock +) -> None: + """Test events and check test values are correctly set if data exists on first update.""" + + def update_status(_): + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + + bypass_api.get_status.side_effect = update_status + await async_init_integration(hass) + await hass.async_block_till_done() + assert len(hass.states.async_all("event")) == 1 + assert ( + hass.states.get("event.test_snoo_snoo_event").state + == "2025-01-01T12:00:00.000+00:00" + ) diff --git a/tests/components/snoo/test_init.py b/tests/components/snoo/test_init.py index 06f420b6518..72c4b6fb8ab 100644 --- a/tests/components/snoo/test_init.py +++ b/tests/components/snoo/test_init.py @@ -1,14 +1,32 @@ """Test init for Snoo.""" +from unittest.mock import AsyncMock + +from python_snoo.exceptions import SnooAuthException + +from homeassistant.components.snoo import SnooDeviceError from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from . import async_init_integration -from .conftest import MockedSnoo -async def test_async_setup_entry(hass: HomeAssistant, bypass_api: MockedSnoo) -> None: +async def test_async_setup_entry(hass: HomeAssistant, bypass_api: AsyncMock) -> None: """Test a successful setup entry.""" entry = await async_init_integration(hass) assert len(hass.states.async_all("sensor")) == 2 assert entry.state == ConfigEntryState.LOADED + + +async def test_cannot_auth(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test that we are put into retry when we fail to auth.""" + bypass_api.authorize.side_effect = SnooAuthException + entry = await async_init_integration(hass) + assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_failed_devices(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test that we are put into retry when we fail to get devices.""" + bypass_api.get_devices.side_effect = SnooDeviceError + entry = await async_init_integration(hass) + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/snoo/test_select.py b/tests/components/snoo/test_select.py new file mode 100644 index 00000000000..e00721b2ab8 --- /dev/null +++ b/tests/components/snoo/test_select.py @@ -0,0 +1,75 @@ +"""Test Snoo Selects.""" + +import copy +from unittest.mock import AsyncMock + +import pytest +from python_snoo.containers import SnooDevice, SnooLevels, SnooStates + +from homeassistant.components.select import SERVICE_SELECT_OPTION +from homeassistant.components.snoo.select import SnooCommandException +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from . 
import async_init_integration, find_update_callback +from .const import MOCK_SNOO_DATA + + +async def test_select(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test select and check test values are correctly set.""" + await async_init_integration(hass) + assert len(hass.states.async_all("select")) == 1 + assert hass.states.get("select.test_snoo_intensity").state == STATE_UNAVAILABLE + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + await hass.async_block_till_done() + assert len(hass.states.async_all("select")) == 1 + assert hass.states.get("select.test_snoo_intensity").state == "stop" + + +async def test_update_success(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test changing values for select entities.""" + await async_init_integration(hass) + + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + assert hass.states.get("select.test_snoo_intensity").state == "stop" + + async def update_level(device: SnooDevice, level: SnooStates, _hold: bool = False): + new_data = copy.deepcopy(MOCK_SNOO_DATA) + new_data.state_machine.level = SnooLevels(level.value) + find_update_callback(bypass_api, device.serialNumber)(new_data) + + bypass_api.set_level.side_effect = update_level + await hass.services.async_call( + "select", + SERVICE_SELECT_OPTION, + service_data={"option": "level1"}, + blocking=True, + target={"entity_id": "select.test_snoo_intensity"}, + ) + + assert bypass_api.set_level.assert_called_once + assert hass.states.get("select.test_snoo_intensity").state == "level1" + + +async def test_update_failed(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test failing to change values for select entities.""" + await async_init_integration(hass) + + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + assert hass.states.get("select.test_snoo_intensity").state == "stop" + + bypass_api.set_level.side_effect = SnooCommandException + with pytest.raises( + HomeAssistantError, match="Error while updating Intensity to level1" + ): + await hass.services.async_call( + "select", + SERVICE_SELECT_OPTION, + service_data={"option": "level1"}, + blocking=True, + target={"entity_id": "select.test_snoo_intensity"}, + ) + + assert bypass_api.set_level.assert_called_once + assert hass.states.get("select.test_snoo_intensity").state == "stop" diff --git a/tests/components/snoo/test_sensor.py b/tests/components/snoo/test_sensor.py new file mode 100644 index 00000000000..96a22e548b8 --- /dev/null +++ b/tests/components/snoo/test_sensor.py @@ -0,0 +1,22 @@ +"""Test Snoo Sensors.""" + +from unittest.mock import AsyncMock + +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant + +from . 
import async_init_integration, find_update_callback +from .const import MOCK_SNOO_DATA + + +async def test_sensors(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test sensors and check test values are correctly set.""" + await async_init_integration(hass) + assert len(hass.states.async_all("sensor")) == 2 + assert hass.states.get("sensor.test_snoo_state").state == STATE_UNAVAILABLE + assert hass.states.get("sensor.test_snoo_time_left").state == STATE_UNAVAILABLE + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + await hass.async_block_till_done() + assert len(hass.states.async_all("sensor")) == 2 + assert hass.states.get("sensor.test_snoo_state").state == "stop" + assert hass.states.get("sensor.test_snoo_time_left").state == STATE_UNKNOWN diff --git a/tests/components/snoo/test_switch.py b/tests/components/snoo/test_switch.py new file mode 100644 index 00000000000..2343ff6c0d8 --- /dev/null +++ b/tests/components/snoo/test_switch.py @@ -0,0 +1,88 @@ +"""Test Snoo Switches.""" + +import copy +from unittest.mock import AsyncMock + +import pytest +from python_snoo.containers import SnooDevice +from python_snoo.exceptions import SnooCommandException + +from homeassistant.components.switch import ( + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from . import async_init_integration, find_update_callback +from .const import MOCK_SNOO_DATA + + +async def test_switch(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test switch and check test values are correctly set.""" + await async_init_integration(hass) + assert len(hass.states.async_all("switch")) == 2 + assert hass.states.get("switch.test_snoo_level_lock").state == STATE_UNAVAILABLE + assert ( + hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_UNAVAILABLE + ) + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + await hass.async_block_till_done() + assert len(hass.states.async_all("switch")) == 2 + assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_OFF + assert hass.states.get("switch.test_snoo_level_lock").state == STATE_OFF + + +async def test_update_success(hass: HomeAssistant, bypass_api: AsyncMock) -> None: + """Test changing values for switch entities.""" + await async_init_integration(hass) + + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_OFF + + async def set_sticky_white_noise(device: SnooDevice, state: bool): + new_data = copy.deepcopy(MOCK_SNOO_DATA) + new_data.state_machine.sticky_white_noise = "off" if not state else "on" + find_update_callback(bypass_api, device.serialNumber)(new_data) + + bypass_api.set_sticky_white_noise.side_effect = set_sticky_white_noise + await hass.services.async_call( + "switch", + SERVICE_TOGGLE, + blocking=True, + target={"entity_id": "switch.test_snoo_sleepytime_sounds"}, + ) + + assert bypass_api.set_sticky_white_noise.assert_called_once + assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_ON + + +@pytest.mark.parametrize( + ("command", "error_str"), + [ + (SERVICE_TURN_ON, "Turning Sleepytime sounds on failed"), + (SERVICE_TURN_OFF, "Turning Sleepytime sounds off failed"), + ], +) +async def test_update_failed( + hass: HomeAssistant, bypass_api: AsyncMock, command: str, error_str: str +) -> None: + 
"""Test failing to change values for switch entities.""" + await async_init_integration(hass) + + find_update_callback(bypass_api, "random_num")(MOCK_SNOO_DATA) + assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_OFF + + bypass_api.set_sticky_white_noise.side_effect = SnooCommandException + with pytest.raises(HomeAssistantError, match=error_str): + await hass.services.async_call( + "switch", + command, + blocking=True, + target={"entity_id": "switch.test_snoo_sleepytime_sounds"}, + ) + + assert bypass_api.set_level.assert_called_once + assert hass.states.get("switch.test_snoo_sleepytime_sounds").state == STATE_OFF diff --git a/tests/components/sonos/test_config_flow.py b/tests/components/sonos/test_config_flow.py index 70605092da1..8454b4ad673 100644 --- a/tests/components/sonos/test_config_flow.py +++ b/tests/components/sonos/test_config_flow.py @@ -123,6 +123,22 @@ async def test_zeroconf_form( assert len(mock_manager.mock_calls) == 2 +async def test_zeroconf_form_not_ipv4( + hass: HomeAssistant, zeroconf_payload: ZeroconfServiceInfo +) -> None: + """Test we pass Zeroconf discoveries to the manager.""" + mock_manager = hass.data[DATA_SONOS_DISCOVERY_MANAGER] = MagicMock() + zeroconf_payload.ip_address = ip_address("2001:db8:3333:4444:5555:6666:7777:8888") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf_payload, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "not_ipv4_address" + assert mock_manager.call_count == 0 + + async def test_ssdp_discovery(hass: HomeAssistant, soco) -> None: """Test that SSDP discoveries create a config flow.""" diff --git a/tests/components/sonos/test_init.py b/tests/components/sonos/test_init.py index a7ad2f4cb82..c6be606eb20 100644 --- a/tests/components/sonos/test_init.py +++ b/tests/components/sonos/test_init.py @@ -455,3 +455,32 @@ async def test_async_poll_manual_hosts_8( assert "media_player.garage" in entity_registry.entities assert "media_player.studio" in entity_registry.entities await hass.async_block_till_done(wait_background_tasks=True) + + +async def _setup_hass_ipv6_address_not_supported(hass: HomeAssistant): + await async_setup_component( + hass, + sonos.DOMAIN, + { + "sonos": { + "media_player": { + "interface_addr": "127.0.0.1", + "hosts": ["2001:db8:3333:4444:5555:6666:7777:8888"], + } + } + }, + ) + await hass.async_block_till_done() + + +async def test_ipv6_not_supported( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Tests that invalid ipv4 addresses do not generate stack dump.""" + with caplog.at_level(logging.DEBUG): + caplog.clear() + await _setup_hass_ipv6_address_not_supported(hass) + await hass.async_block_till_done() + assert "invalid ip_address received" in caplog.text + assert "2001:db8:3333:4444:5555:6666:7777:8888" in caplog.text diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index cec40c997a7..78d88a1ea98 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -692,6 +692,7 @@ async def test_select_source_line_in_tv( "play_uri": 1, "play_uri_uri": "x-sonosapi-radio:ST%3aetc", "play_uri_title": "James Taylor Radio", + "play_uri_meta": 'James Taylor Radioobject.item.audioItem.audioBroadcast.#stationSA_RINCON60423_X_#Svc60423-99999999-Token', }, ), ( @@ -700,6 +701,16 @@ async def test_select_source_line_in_tv( "play_uri": 1, "play_uri_uri": 
"x-sonosapi-hls:Api%3atune%3aliveAudio%3ajazzcafe%3aetc", "play_uri_title": "66 - Watercolors", + "play_uri_meta": '66 - Watercolorsobject.item.audioItem.audioBroadcastSA_RINCON9479_X_#Svc9479-99999999-Token', + }, + ), + ( + "American Tall Tales", + { + "play_uri": 1, + "play_uri_uri": "x-rincon-cpcontainer:101340c8reftitle%C9F27_com?sid=239&flags=16584&sn=5", + "play_uri_title": "American Tall Tales", + "play_uri_meta": 'American Tall Talesobject.item.audioItem.audioBookSA_RINCON61191_X_#Svc6-0-Token', }, ), ], @@ -726,6 +737,7 @@ async def test_select_source_play_uri( soco_mock.play_uri.assert_called_with( result.get("play_uri_uri"), title=result.get("play_uri_title"), + meta=result.get("play_uri_meta"), timeout=LONG_SERVICE_TIMEOUT, ) diff --git a/tests/components/squeezebox/conftest.py b/tests/components/squeezebox/conftest.py index 9ca750808c5..769e611bf28 100644 --- a/tests/components/squeezebox/conftest.py +++ b/tests/components/squeezebox/conftest.py @@ -163,7 +163,7 @@ async def mock_async_browse( "title": "Fake Item 2", "id": FAKE_VALID_ITEM_ID + "_2", "hasitems": media_type == "favorites", - "isaudio": True, + "isaudio": False, "item_type": child_types[media_type], "image_url": "http://lms.internal:9000/html/images/favorites.png", "url": "file:///var/lib/squeezeboxserver/music/track_2.mp3", @@ -269,6 +269,7 @@ def mock_pysqueezebox_player(uuid: str) -> MagicMock: mock_player.title = None mock_player.image_url = None mock_player.model = "SqueezeLite" + mock_player.creator = "Ralph Irving & Adrian Smith" return mock_player @@ -309,7 +310,27 @@ async def configure_squeezebox_media_player_platform( ) -> None: """Configure a squeezebox config entry with appropriate mocks for media_player.""" with ( - patch("homeassistant.components.squeezebox.PLATFORMS", [Platform.MEDIA_PLAYER]), + patch( + "homeassistant.components.squeezebox.PLATFORMS", + [Platform.MEDIA_PLAYER], + ), + patch("homeassistant.components.squeezebox.Server", return_value=lms), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + +async def configure_squeezebox_media_player_button_platform( + hass: HomeAssistant, + config_entry: MockConfigEntry, + lms: MagicMock, +) -> None: + """Configure a squeezebox config entry with appropriate mocks for media_player.""" + with ( + patch( + "homeassistant.components.squeezebox.PLATFORMS", + [Platform.BUTTON], + ), patch("homeassistant.components.squeezebox.Server", return_value=lms), ): await hass.config_entries.async_setup(config_entry.entry_id) @@ -325,6 +346,15 @@ async def configured_player( return (await lms.async_get_players())[0] +@pytest.fixture +async def configured_player_with_button( + hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock +) -> MagicMock: + """Fixture mocking calls to pysqueezebox Player from a configured squeezebox.""" + await configure_squeezebox_media_player_button_platform(hass, config_entry, lms) + return (await lms.async_get_players())[0] + + @pytest.fixture async def configured_players( hass: HomeAssistant, config_entry: MockConfigEntry, lms_factory: MagicMock diff --git a/tests/components/squeezebox/snapshots/test_media_player.ambr b/tests/components/squeezebox/snapshots/test_media_player.ambr index 34d6ae16af8..c0633035a84 100644 --- a/tests/components/squeezebox/snapshots/test_media_player.ambr +++ b/tests/components/squeezebox/snapshots/test_media_player.ambr @@ -24,7 +24,7 @@ 'is_new': False, 'labels': set({ }), - 'manufacturer': 'Ralph Irving', + 
'manufacturer': 'Ralph Irving & Adrian Smith', 'model': 'SqueezeLite', 'model_id': None, 'name': 'Test Player', diff --git a/tests/components/squeezebox/test_button.py b/tests/components/squeezebox/test_button.py new file mode 100644 index 00000000000..16ced65be61 --- /dev/null +++ b/tests/components/squeezebox/test_button.py @@ -0,0 +1,23 @@ +"""Tests for the squeezebox button component.""" + +from unittest.mock import MagicMock + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + + +async def test_squeezebox_press( + hass: HomeAssistant, configured_player_with_button: MagicMock +) -> None: + """Test press service call.""" + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.test_player_preset_1"}, + blocking=True, + ) + + configured_player_with_button.async_query.assert_called_with( + "button", "preset_1.single" + ) diff --git a/tests/components/stookwijzer/conftest.py b/tests/components/stookwijzer/conftest.py index 40582dc4be3..dd7f2a7bbc3 100644 --- a/tests/components/stookwijzer/conftest.py +++ b/tests/components/stookwijzer/conftest.py @@ -1,6 +1,7 @@ """Fixtures for Stookwijzer integration tests.""" from collections.abc import Generator +from typing import Required, TypedDict from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -12,6 +13,14 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry +class Forecast(TypedDict): + """Typed Stookwijzer forecast dict.""" + + datetime: Required[str] + advice: str | None + final: bool | None + + @pytest.fixture def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" @@ -80,6 +89,28 @@ def mock_stookwijzer() -> Generator[MagicMock]: client.windspeed_ms = 2.5 client.windspeed_bft = 2 client.advice = "code_yellow" + client.async_get_forecast.return_value = ( + Forecast( + datetime="2025-02-12T17:00:00+01:00", + advice="code_yellow", + final=True, + ), + Forecast( + datetime="2025-02-12T23:00:00+01:00", + advice="code_yellow", + final=True, + ), + Forecast( + datetime="2025-02-13T05:00:00+01:00", + advice="code_orange", + final=False, + ), + Forecast( + datetime="2025-02-13T11:00:00+01:00", + advice="code_orange", + final=False, + ), + ) yield stookwijzer_mock diff --git a/tests/components/stookwijzer/snapshots/test_diagnostics.ambr b/tests/components/stookwijzer/snapshots/test_diagnostics.ambr index e2535d54466..452b5bd0a30 100644 --- a/tests/components/stookwijzer/snapshots/test_diagnostics.ambr +++ b/tests/components/stookwijzer/snapshots/test_diagnostics.ambr @@ -3,6 +3,28 @@ dict({ 'advice': 'code_yellow', 'air_quality_index': 2, + 'forecast': list([ + dict({ + 'advice': 'code_yellow', + 'datetime': '2025-02-12T17:00:00+01:00', + 'final': True, + }), + dict({ + 'advice': 'code_yellow', + 'datetime': '2025-02-12T23:00:00+01:00', + 'final': True, + }), + dict({ + 'advice': 'code_orange', + 'datetime': '2025-02-13T05:00:00+01:00', + 'final': False, + }), + dict({ + 'advice': 'code_orange', + 'datetime': '2025-02-13T11:00:00+01:00', + 'final': False, + }), + ]), 'windspeed_ms': 2.5, }) # --- diff --git a/tests/components/stream/conftest.py b/tests/components/stream/conftest.py index 39e4de13fed..296505271c0 100644 --- a/tests/components/stream/conftest.py +++ b/tests/components/stream/conftest.py @@ -27,6 +27,8 @@ from homeassistant.components.stream.worker import StreamState from .common 
import generate_h264_video, stream_teardown +_LOGGER = logging.getLogger(__name__) + TEST_TIMEOUT = 7.0 # Lower than 9s home assistant timeout @@ -44,7 +46,7 @@ class WorkerSync: def resume(self): """Allow the worker thread to finalize the stream.""" - logging.debug("waking blocked worker") + _LOGGER.debug("waking blocked worker") self._event.set() def blocking_discontinuity(self, stream_state: StreamState): @@ -52,7 +54,7 @@ class WorkerSync: # Worker is ending the stream, which clears all output buffers. # Block the worker thread until the test has a chance to verify # the segments under test. - logging.debug("blocking worker") + _LOGGER.debug("blocking worker") if self._event: self._event.wait() diff --git a/tests/components/stream/test_ll_hls.py b/tests/components/stream/test_ll_hls.py index 443103fdf92..1eb638237af 100644 --- a/tests/components/stream/test_ll_hls.py +++ b/tests/components/stream/test_ll_hls.py @@ -202,7 +202,7 @@ async def test_ll_hls_stream( datetime_re = re.compile(r"#EXT-X-PROGRAM-DATE-TIME:(?P.+)") inf_re = re.compile(r"#EXTINF:(?P[0-9]{1,}.[0-9]{3,}),") # keep track of which tests were done (indexed by re) - tested = {regex: False for regex in (part_re, datetime_re, inf_re)} + tested = dict.fromkeys((part_re, datetime_re, inf_re), False) # keep track of times and durations along playlist for checking consistency part_durations = [] segment_duration = 0 diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index 2be972cc6a2..276b4109652 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -56,6 +56,8 @@ from .test_ll_hls import TEST_PART_DURATION from tests.components.camera.common import EMPTY_8_6_JPEG, mock_turbo_jpeg +_LOGGER = logging.getLogger(__name__) + STREAM_SOURCE = "some-stream-source" # Formats here are arbitrary, not exercised by tests AUDIO_STREAM_FORMAT = "mp3" @@ -229,7 +231,7 @@ class FakePyAvBuffer: return def mux(self, packet): - logging.debug("Muxed packet: %s", packet) + _LOGGER.debug("Muxed packet: %s", packet) self.capture_packets.append(packet) def __str__(self) -> str: diff --git a/tests/components/sunweg/__init__.py b/tests/components/sunweg/__init__.py index 1453483a3fd..d9dac10eeb6 100644 --- a/tests/components/sunweg/__init__.py +++ b/tests/components/sunweg/__init__.py @@ -1 +1 @@ -"""Tests for the sunweg component.""" +"""Tests for the Sun WEG integration.""" diff --git a/tests/components/sunweg/common.py b/tests/components/sunweg/common.py deleted file mode 100644 index 096113f6609..00000000000 --- a/tests/components/sunweg/common.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Common functions needed to setup tests for Sun WEG.""" - -from homeassistant.components.sunweg.const import CONF_PLANT_ID, DOMAIN -from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_USERNAME - -from tests.common import MockConfigEntry - -SUNWEG_USER_INPUT = { - CONF_USERNAME: "username", - CONF_PASSWORD: "password", -} - -SUNWEG_MOCK_ENTRY = MockConfigEntry( - domain=DOMAIN, - unique_id=0, - data={ - CONF_USERNAME: "user@email.com", - CONF_PASSWORD: "password", - CONF_PLANT_ID: 0, - CONF_NAME: "Name", - }, -) diff --git a/tests/components/sunweg/conftest.py b/tests/components/sunweg/conftest.py deleted file mode 100644 index db94b9cc5c8..00000000000 --- a/tests/components/sunweg/conftest.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Conftest for SunWEG tests.""" - -from datetime import datetime - -import pytest -from sunweg.device import MPPT, Inverter, Phase, String -from 
sunweg.plant import Plant - - -@pytest.fixture -def string_fixture() -> String: - """Define String fixture.""" - return String("STR1", 450.3, 23.4, 0) - - -@pytest.fixture -def mppt_fixture(string_fixture) -> MPPT: - """Define MPPT fixture.""" - mppt = MPPT("mppt") - mppt.strings.append(string_fixture) - return mppt - - -@pytest.fixture -def phase_fixture() -> Phase: - """Define Phase fixture.""" - return Phase("PhaseA", 120.0, 3.2, 0, 0) - - -@pytest.fixture -def inverter_fixture(phase_fixture, mppt_fixture) -> Inverter: - """Define inverter fixture.""" - inverter = Inverter( - 21255, - "INVERSOR01", - "J63T233018RE074", - 23.2, - 0.0, - 0.0, - "MWh", - 0, - "kWh", - 0.0, - 1, - 0, - "kW", - ) - inverter.phases.append(phase_fixture) - inverter.mppts.append(mppt_fixture) - return inverter - - -@pytest.fixture -def plant_fixture(inverter_fixture) -> Plant: - """Define Plant fixture.""" - plant = Plant( - 123456, - "Plant #123", - 29.5, - 0.5, - 0, - 12.786912, - 24.0, - "kWh", - 332.2, - 0.012296, - datetime(2023, 2, 16, 14, 22, 37), - ) - plant.inverters.append(inverter_fixture) - return plant - - -@pytest.fixture -def plant_fixture_alternative(inverter_fixture) -> Plant: - """Define Plant fixture.""" - plant = Plant( - 123456, - "Plant #123", - 29.5, - 0.5, - 0, - 12.786912, - 24.0, - "kWh", - 332.2, - 0.012296, - None, - ) - plant.inverters.append(inverter_fixture) - return plant diff --git a/tests/components/sunweg/test_config_flow.py b/tests/components/sunweg/test_config_flow.py deleted file mode 100644 index 8103003d7fb..00000000000 --- a/tests/components/sunweg/test_config_flow.py +++ /dev/null @@ -1,223 +0,0 @@ -"""Tests for the Sun WEG server config flow.""" - -from unittest.mock import patch - -from sunweg.api import APIHelper, SunWegApiError - -from homeassistant import config_entries -from homeassistant.components.sunweg.const import CONF_PLANT_ID, DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType - -from .common import SUNWEG_MOCK_ENTRY, SUNWEG_USER_INPUT - -from tests.common import MockConfigEntry - - -async def test_show_authenticate_form(hass: HomeAssistant) -> None: - """Test that the setup form is served.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - -async def test_incorrect_login(hass: HomeAssistant) -> None: - """Test that it shows the appropriate error when an incorrect username/password/server is entered.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch.object(APIHelper, "authenticate", return_value=False): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "invalid_auth"} - - -async def test_server_unavailable(hass: HomeAssistant) -> None: - """Test when the SunWEG server don't respond.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch.object( - APIHelper, "authenticate", side_effect=SunWegApiError("Internal Server Error") - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert 
result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "timeout_connect"} - - -async def test_reauth(hass: HomeAssistant, plant_fixture, inverter_fixture) -> None: - """Test reauth flow.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - - entries = hass.config_entries.async_entries() - assert len(entries) == 1 - assert entries[0].data[CONF_USERNAME] == SUNWEG_MOCK_ENTRY.data[CONF_USERNAME] - assert entries[0].data[CONF_PASSWORD] == SUNWEG_MOCK_ENTRY.data[CONF_PASSWORD] - - result = await mock_entry.start_reauth_flow(hass) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - - with patch.object(APIHelper, "authenticate", return_value=False): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=SUNWEG_USER_INPUT, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": "invalid_auth"} - - with patch.object( - APIHelper, "authenticate", side_effect=SunWegApiError("Internal Server Error") - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=SUNWEG_USER_INPUT, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - assert result["errors"] == {"base": "timeout_connect"} - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object(APIHelper, "listPlants", return_value=[plant_fixture]), - patch.object(APIHelper, "plant", return_value=plant_fixture), - patch.object(APIHelper, "inverter", return_value=inverter_fixture), - patch.object(APIHelper, "complete_inverter"), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input=SUNWEG_USER_INPUT, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" - - entries = hass.config_entries.async_entries() - - assert len(entries) == 1 - assert entries[0].data[CONF_USERNAME] == SUNWEG_USER_INPUT[CONF_USERNAME] - assert entries[0].data[CONF_PASSWORD] == SUNWEG_USER_INPUT[CONF_PASSWORD] - - -async def test_no_plants_on_account(hass: HomeAssistant) -> None: - """Test registering an integration with wrong auth then with no plants available.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with patch.object(APIHelper, "authenticate", return_value=False): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "invalid_auth"} - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object(APIHelper, "listPlants", return_value=[]), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "no_plants" - - -async def test_multiple_plant_ids(hass: HomeAssistant, plant_fixture) -> None: - """Test registering an integration and finishing flow with an selected plant_id.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object( - APIHelper, "listPlants", 
return_value=[plant_fixture, plant_fixture] - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "plant" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_PLANT_ID: 123456} - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_USERNAME] == SUNWEG_USER_INPUT[CONF_USERNAME] - assert result["data"][CONF_PASSWORD] == SUNWEG_USER_INPUT[CONF_PASSWORD] - assert result["data"][CONF_PLANT_ID] == 123456 - - -async def test_one_plant_on_account(hass: HomeAssistant, plant_fixture) -> None: - """Test registering an integration and finishing flow with current plant_id.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object( - APIHelper, - "listPlants", - return_value=[plant_fixture], - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_USERNAME] == SUNWEG_USER_INPUT[CONF_USERNAME] - assert result["data"][CONF_PASSWORD] == SUNWEG_USER_INPUT[CONF_PASSWORD] - assert result["data"][CONF_PLANT_ID] == 123456 - - -async def test_existing_plant_configured(hass: HomeAssistant, plant_fixture) -> None: - """Test entering an existing plant_id.""" - entry = MockConfigEntry(domain=DOMAIN, unique_id=123456) - entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object( - APIHelper, - "listPlants", - return_value=[plant_fixture], - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], SUNWEG_USER_INPUT - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/sunweg/test_init.py b/tests/components/sunweg/test_init.py index 6cbe38a128b..964b48aebcb 100644 --- a/tests/components/sunweg/test_init.py +++ b/tests/components/sunweg/test_init.py @@ -1,209 +1,79 @@ -"""Tests for the Sun WEG init.""" +"""Tests for the Sun WEG integration.""" -import json -from unittest.mock import MagicMock, patch - -from sunweg.api import APIHelper, SunWegApiError - -from homeassistant.components.sunweg import SunWEGData -from homeassistant.components.sunweg.const import DOMAIN, DeviceType -from homeassistant.components.sunweg.sensor.sensor_entity_description import ( - SunWEGSensorEntityDescription, +from homeassistant.components.sunweg import DOMAIN +from homeassistant.config_entries import ( + SOURCE_IGNORE, + ConfigEntryDisabler, + ConfigEntryState, ) -from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component +from homeassistant.helpers import issue_registry as ir -from .common import SUNWEG_MOCK_ENTRY +from tests.common import MockConfigEntry -async def test_methods(hass: HomeAssistant, plant_fixture, inverter_fixture) -> None: - """Test methods.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - - with ( - patch.object(APIHelper, "authenticate", return_value=True), - patch.object(APIHelper, "listPlants", 
return_value=[plant_fixture]), - patch.object(APIHelper, "plant", return_value=plant_fixture), - patch.object(APIHelper, "inverter", return_value=inverter_fixture), - patch.object(APIHelper, "complete_inverter"), - ): - assert await async_setup_component(hass, DOMAIN, mock_entry.data) - await hass.async_block_till_done() - assert await hass.config_entries.async_unload(mock_entry.entry_id) - - -async def test_setup_wrongpass(hass: HomeAssistant) -> None: - """Test setup with wrong pass.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - with patch.object(APIHelper, "authenticate", return_value=False): - assert await async_setup_component(hass, DOMAIN, mock_entry.data) - await hass.async_block_till_done() - - -async def test_setup_error_500(hass: HomeAssistant) -> None: - """Test setup with wrong pass.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - with patch.object( - APIHelper, "authenticate", side_effect=SunWegApiError("Error 500") - ): - assert await async_setup_component(hass, DOMAIN, mock_entry.data) - await hass.async_block_till_done() - - -async def test_sunwegdata_update_exception() -> None: - """Test SunWEGData exception on update.""" - api = MagicMock() - api.plant = MagicMock(side_effect=json.decoder.JSONDecodeError("Message", "Doc", 1)) - data = SunWEGData(api, 0) - data.update() - assert data.data is None - - -async def test_sunwegdata_update_success(plant_fixture) -> None: - """Test SunWEGData success on update.""" - api = MagicMock() - api.plant = MagicMock(return_value=plant_fixture) - api.complete_inverter = MagicMock() - data = SunWEGData(api, 0) - data.update() - assert data.data.id == plant_fixture.id - assert data.data.name == plant_fixture.name - assert data.data.kwh_per_kwp == plant_fixture.kwh_per_kwp - assert data.data.last_update == plant_fixture.last_update - assert data.data.performance_rate == plant_fixture.performance_rate - assert data.data.saving == plant_fixture.saving - assert len(data.data.inverters) == 1 - - -async def test_sunwegdata_update_success_alternative(plant_fixture_alternative) -> None: - """Test SunWEGData success on update.""" - api = MagicMock() - api.plant = MagicMock(return_value=plant_fixture_alternative) - api.complete_inverter = MagicMock() - data = SunWEGData(api, 0) - data.update() - assert data.data.id == plant_fixture_alternative.id - assert data.data.name == plant_fixture_alternative.name - assert data.data.kwh_per_kwp == plant_fixture_alternative.kwh_per_kwp - assert data.data.last_update == plant_fixture_alternative.last_update - assert data.data.performance_rate == plant_fixture_alternative.performance_rate - assert data.data.saving == plant_fixture_alternative.saving - assert len(data.data.inverters) == 1 - - -async def test_sunwegdata_get_api_value_none(plant_fixture) -> None: - """Test SunWEGData none return on get_api_value.""" - api = MagicMock() - data = SunWEGData(api, 123456) - data.data = plant_fixture - assert data.get_api_value("variable", DeviceType.INVERTER, 0, "deep_name") is None - assert data.get_api_value("variable", DeviceType.STRING, 21255, "deep_name") is None - - -async def test_sunwegdata_get_data_drop_threshold() -> None: - """Test SunWEGData get_data with drop threshold.""" - api = MagicMock() - data = SunWEGData(api, 123456) - data.get_api_value = MagicMock() - entity_description = SunWEGSensorEntityDescription( - api_variable_key="variable", key="key", previous_value_drop_threshold=0.1 +async def test_sunweg_repair_issue( + hass: HomeAssistant, issue_registry: 
ir.IssueRegistry +) -> None: + """Test the Sun WEG configuration entry loading/unloading handles the repair.""" + config_entry_1 = MockConfigEntry( + title="Example 1", + domain=DOMAIN, ) - data.get_api_value.return_value = 3.0 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (3.0, None) - data.get_api_value.return_value = 2.91 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (3.0, None) - data.get_api_value.return_value = 2.8 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (2.8, None) + config_entry_1.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_1.entry_id) + await hass.async_block_till_done() + assert config_entry_1.state is ConfigEntryState.LOADED - -async def test_sunwegdata_get_data_never_reset() -> None: - """Test SunWEGData get_data with never reset.""" - api = MagicMock() - data = SunWEGData(api, 123456) - data.get_api_value = MagicMock() - entity_description = SunWEGSensorEntityDescription( - api_variable_key="variable", key="key", never_resets=True + # Add a second one + config_entry_2 = MockConfigEntry( + title="Example 2", + domain=DOMAIN, ) - data.get_api_value.return_value = 3.0 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (3.0, None) - data.get_api_value.return_value = 0 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (3.0, None) - data.get_api_value.return_value = 2.8 - assert data.get_data( - api_variable_key=entity_description.api_variable_key, - api_variable_unit=entity_description.api_variable_unit, - deep_name=None, - device_type=DeviceType.TOTAL, - inverter_id=0, - 
name=entity_description.name, - native_unit_of_measurement=entity_description.native_unit_of_measurement, - never_resets=entity_description.never_resets, - previous_value_drop_threshold=entity_description.previous_value_drop_threshold, - ) == (2.8, None) + config_entry_2.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_2.entry_id) + await hass.async_block_till_done() + assert config_entry_2.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) -async def test_reauth_started(hass: HomeAssistant) -> None: - """Test reauth flow started.""" - mock_entry = SUNWEG_MOCK_ENTRY - mock_entry.add_to_hass(hass) - with patch.object(APIHelper, "authenticate", return_value=False): - await async_setup_component(hass, DOMAIN, {}) - await hass.async_block_till_done() - assert mock_entry.state is ConfigEntryState.SETUP_ERROR - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - assert flows[0]["step_id"] == "reauth_confirm" + # Add an ignored entry + config_entry_3 = MockConfigEntry( + source=SOURCE_IGNORE, + domain=DOMAIN, + ) + config_entry_3.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_3.entry_id) + await hass.async_block_till_done() + + assert config_entry_3.state is ConfigEntryState.NOT_LOADED + + # Add a disabled entry + config_entry_4 = MockConfigEntry( + disabled_by=ConfigEntryDisabler.USER, + domain=DOMAIN, + ) + config_entry_4.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry_4.entry_id) + await hass.async_block_till_done() + + assert config_entry_4.state is ConfigEntryState.NOT_LOADED + + # Remove the first one + await hass.config_entries.async_remove(config_entry_1.entry_id) + await hass.async_block_till_done() + + assert config_entry_1.state is ConfigEntryState.NOT_LOADED + assert config_entry_2.state is ConfigEntryState.LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) + + # Remove the second one + await hass.config_entries.async_remove(config_entry_2.entry_id) + await hass.async_block_till_done() + + assert config_entry_1.state is ConfigEntryState.NOT_LOADED + assert config_entry_2.state is ConfigEntryState.NOT_LOADED + assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None + + # Check the ignored and disabled entries are removed + assert not hass.config_entries.async_entries(DOMAIN) diff --git a/tests/components/switchbot/__init__.py b/tests/components/switchbot/__init__.py index 4d6794b962f..715073aa891 100644 --- a/tests/components/switchbot/__init__.py +++ b/tests/components/switchbot/__init__.py @@ -294,3 +294,95 @@ REMOTE_SERVICE_INFO = BluetoothServiceInfoBleak( connectable=False, tx_power=-127, ) + + +WOHUB2_SERVICE_INFO = BluetoothServiceInfoBleak( + name="WoHub2", + manufacturer_data={ + 2409: b"\xe7\x06\x1dx\x99y\x00\xffg\xe2\xbf]\x84\x04\x9a,\x00", + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"v\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + address="AA:BB:CC:DD:EE:FF", + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="WoHub2", + manufacturer_data={ + 2409: b"\xe7\x06\x1dx\x99y\x00\xffg\xe2\xbf]\x84\x04\x9a,\x00", + }, + service_data={"0000fd3d-0000-1000-8000-00805f9b34fb": b"v\x00"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "WoHub2"), + time=0, + connectable=True, + tx_power=-127, +) + + +WOCURTAIN3_SERVICE_INFO = BluetoothServiceInfoBleak( + name="WoCurtain3", + 
address="AA:BB:CC:DD:EE:FF", + manufacturer_data={2409: b"\xcf;Zwu\x0c\x19\x0b\x00\x11D\x006"}, + service_data={"00000d00-0000-1000-8000-00805f9b34fb": b"{\xc06\x00\x11D"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="WoCurtain3", + manufacturer_data={2409: b"\xcf;Zwu\x0c\x19\x0b\x00\x11D\x006"}, + service_data={"00000d00-0000-1000-8000-00805f9b34fb": b"{\xc06\x00\x11D"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "WoCurtain3"), + time=0, + connectable=True, + tx_power=-127, +) + + +WOBLINDTILT_SERVICE_INFO = BluetoothServiceInfoBleak( + name="WoBlindTilt", + address="AA:BB:CC:DD:EE:FF", + manufacturer_data={2409: b"\xfbgA`\x98\xe8\x1d%2\x11\x84"}, + service_data={"00000d00-0000-1000-8000-00805f9b34fb": b"x\x00*"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="WoBlindTilt", + manufacturer_data={2409: b"\xfbgA`\x98\xe8\x1d%2\x11\x84"}, + service_data={"00000d00-0000-1000-8000-00805f9b34fb": b"x\x00*"}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device("AA:BB:CC:DD:EE:FF", "WoBlindTilt"), + time=0, + connectable=True, + tx_power=-127, +) + + +def make_advertisement( + address: str, manufacturer_data: bytes, service_data: bytes +) -> BluetoothServiceInfoBleak: + """Make a dummy advertisement.""" + return BluetoothServiceInfoBleak( + name="Test Device", + address=address, + manufacturer_data={2409: manufacturer_data}, + service_data={"00000d00-0000-1000-8000-00805f9b34fb": service_data}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + rssi=-60, + source="local", + advertisement=generate_advertisement_data( + local_name="Test Device", + manufacturer_data={2409: manufacturer_data}, + service_data={"00000d00-0000-1000-8000-00805f9b34fb": service_data}, + service_uuids=["cba20d00-224d-11e6-9fb8-0002a5d5c51b"], + ), + device=generate_ble_device(address, "Test Device"), + time=0, + connectable=True, + tx_power=-127, + ) diff --git a/tests/components/switchbot/conftest.py b/tests/components/switchbot/conftest.py index 44f68a1c8ae..aff94626a68 100644 --- a/tests/components/switchbot/conftest.py +++ b/tests/components/switchbot/conftest.py @@ -2,7 +2,26 @@ import pytest +from homeassistant.components.switchbot.const import DOMAIN +from homeassistant.const import CONF_ADDRESS, CONF_NAME, CONF_SENSOR_TYPE + +from tests.common import MockConfigEntry + @pytest.fixture(autouse=True) def mock_bluetooth(enable_bluetooth: None) -> None: """Auto mock bluetooth.""" + + +@pytest.fixture +def mock_entry_factory(): + """Fixture to create a MockConfigEntry with a customizable sensor type.""" + return lambda sensor_type="curtain": MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: sensor_type, + }, + unique_id="aabbccddeeff", + ) diff --git a/tests/components/switchbot/test_cover.py b/tests/components/switchbot/test_cover.py new file mode 100644 index 00000000000..8810963f63d --- /dev/null +++ b/tests/components/switchbot/test_cover.py @@ -0,0 +1,327 @@ +"""Test the switchbot covers.""" + +from collections.abc import Callable +from unittest.mock import AsyncMock, patch + +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_CURRENT_TILT_POSITION, + ATTR_POSITION, + ATTR_TILT_POSITION, + DOMAIN as 
COVER_DOMAIN, + CoverState, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_COVER, + SERVICE_CLOSE_COVER_TILT, + SERVICE_OPEN_COVER, + SERVICE_OPEN_COVER_TILT, + SERVICE_SET_COVER_POSITION, + SERVICE_SET_COVER_TILT_POSITION, + SERVICE_STOP_COVER, + SERVICE_STOP_COVER_TILT, +) +from homeassistant.core import HomeAssistant, State + +from . import WOBLINDTILT_SERVICE_INFO, WOCURTAIN3_SERVICE_INFO, make_advertisement + +from tests.common import MockConfigEntry, mock_restore_cache +from tests.components.bluetooth import inject_bluetooth_service_info + + +async def test_curtain3_setup( + hass: HomeAssistant, mock_entry_factory: Callable[[str], MockConfigEntry] +) -> None: + """Test setting up the Curtain3.""" + inject_bluetooth_service_info(hass, WOCURTAIN3_SERVICE_INFO) + + entry = mock_entry_factory(sensor_type="curtain") + + entity_id = "cover.test_name" + mock_restore_cache( + hass, + [ + State( + entity_id, + CoverState.OPEN, + {ATTR_CURRENT_POSITION: 50}, + ) + ], + ) + + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN + assert state.attributes[ATTR_CURRENT_POSITION] == 50 + + +async def test_curtain3_controlling( + hass: HomeAssistant, mock_entry_factory: Callable[[str], MockConfigEntry] +) -> None: + """Test Curtain3 controlling.""" + inject_bluetooth_service_info(hass, WOCURTAIN3_SERVICE_INFO) + + entry = mock_entry_factory(sensor_type="curtain") + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotCurtain.open", + new=AsyncMock(return_value=True), + ) as mock_open, + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotCurtain.close", + new=AsyncMock(return_value=True), + ) as mock_close, + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotCurtain.stop", + new=AsyncMock(return_value=True), + ) as mock_stop, + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotCurtain.set_position", + new=AsyncMock(return_value=True), + ) as mock_set_position, + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_id = "cover.test_name" + address = "AA:BB:CC:DD:EE:FF" + service_data = b"{\xc06\x00\x11D" + + # Test open + manufacturer_data = b"\xcf;Zwu\x0c\x19\x0b\x05\x11D\x006" + await hass.services.async_call( + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + inject_bluetooth_service_info( + hass, make_advertisement(address, manufacturer_data, service_data) + ) + await hass.async_block_till_done() + + mock_open.assert_awaited_once() + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN + assert state.attributes[ATTR_CURRENT_POSITION] == 95 + + # Test close + manufacturer_data = b"\xcf;Zwu\x0c\x19\x0b\x58\x11D\x006" + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + inject_bluetooth_service_info( + hass, make_advertisement(address, manufacturer_data, service_data) + ) + await hass.async_block_till_done() + + mock_close.assert_awaited_once() + state = hass.states.get(entity_id) + assert state.state == CoverState.CLOSED + assert state.attributes[ATTR_CURRENT_POSITION] == 12 + + # Test stop + manufacturer_data = b"\xcf;Zwu\x0c\x19\x0b\x3c\x11D\x006" + await hass.services.async_call( + COVER_DOMAIN, SERVICE_STOP_COVER, 
{ATTR_ENTITY_ID: entity_id}, blocking=True + ) + inject_bluetooth_service_info( + hass, make_advertisement(address, manufacturer_data, service_data) + ) + await hass.async_block_till_done() + + mock_stop.assert_awaited_once() + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN + assert state.attributes[ATTR_CURRENT_POSITION] == 40 + + # Test set position + manufacturer_data = b"\xcf;Zwu\x0c\x19\x0b(\x11D\x006" + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: entity_id, ATTR_POSITION: 50}, + blocking=True, + ) + inject_bluetooth_service_info( + hass, make_advertisement(address, manufacturer_data, service_data) + ) + await hass.async_block_till_done() + + mock_set_position.assert_awaited_once() + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN + assert state.attributes[ATTR_CURRENT_POSITION] == 60 + + +async def test_blindtilt_setup( + hass: HomeAssistant, mock_entry_factory: Callable[[str], MockConfigEntry] +) -> None: + """Test setting up the blindtilt.""" + inject_bluetooth_service_info(hass, WOBLINDTILT_SERVICE_INFO) + + entry = mock_entry_factory(sensor_type="blind_tilt") + entity_id = "cover.test_name" + mock_restore_cache( + hass, + [ + State( + entity_id, + CoverState.OPEN, + {ATTR_CURRENT_TILT_POSITION: 40}, + ) + ], + ) + + entry.add_to_hass(hass) + with patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.update", + new=AsyncMock(return_value=True), + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 40 + + +async def test_blindtilt_controlling( + hass: HomeAssistant, mock_entry_factory: Callable[[str], MockConfigEntry] +) -> None: + """Test blindtilt controlling.""" + inject_bluetooth_service_info(hass, WOBLINDTILT_SERVICE_INFO) + + entry = mock_entry_factory(sensor_type="blind_tilt") + entry.add_to_hass(hass) + info = { + "motionDirection": { + "opening": False, + "closing": False, + "up": False, + "down": False, + }, + } + with ( + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.get_basic_info", + new=AsyncMock(return_value=info), + ), + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.open", + new=AsyncMock(return_value=True), + ) as mock_open, + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.close", + new=AsyncMock(return_value=True), + ) as mock_close, + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.stop", + new=AsyncMock(return_value=True), + ) as mock_stop, + patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.set_position", + new=AsyncMock(return_value=True), + ) as mock_set_position, + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + entity_id = "cover.test_name" + address = "AA:BB:CC:DD:EE:FF" + service_data = b"x\x00*" + + # Test open + manufacturer_data = b"\xfbgA`\x98\xe8\x1d%F\x12\x85" + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + with patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.get_basic_info", + return_value=info, + ): + inject_bluetooth_service_info( + hass, make_advertisement(address, manufacturer_data, service_data) + 
) + await hass.async_block_till_done() + + mock_open.assert_awaited_once() + + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 70 + + # Test close + manufacturer_data = b"\xfbgA`\x98\xe8\x1d%\x0f\x12\x85" + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + with patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.get_basic_info", + return_value=info, + ): + inject_bluetooth_service_info( + hass, make_advertisement(address, manufacturer_data, service_data) + ) + await hass.async_block_till_done() + + mock_close.assert_awaited_once() + state = hass.states.get(entity_id) + assert state.state == CoverState.CLOSED + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 15 + + # Test stop + manufacturer_data = b"\xfbgA`\x98\xe8\x1d%\n\x12\x85" + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER_TILT, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + with patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.get_basic_info", + return_value=info, + ): + inject_bluetooth_service_info( + hass, make_advertisement(address, manufacturer_data, service_data) + ) + await hass.async_block_till_done() + + mock_stop.assert_awaited_once() + state = hass.states.get(entity_id) + assert state.state == CoverState.CLOSED + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 10 + + # Test set position + manufacturer_data = b"\xfbgA`\x98\xe8\x1d%2\x12\x85" + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_TILT_POSITION, + {ATTR_ENTITY_ID: entity_id, ATTR_TILT_POSITION: 50}, + blocking=True, + ) + with patch( + "homeassistant.components.switchbot.cover.switchbot.SwitchbotBlindTilt.get_basic_info", + return_value=info, + ): + inject_bluetooth_service_info( + hass, make_advertisement(address, manufacturer_data, service_data) + ) + await hass.async_block_till_done() + + mock_set_position.assert_awaited_once() + state = hass.states.get(entity_id) + assert state.state == CoverState.OPEN + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 diff --git a/tests/components/switchbot/test_sensor.py b/tests/components/switchbot/test_sensor.py index 6a7111a054e..5fd270b3393 100644 --- a/tests/components/switchbot/test_sensor.py +++ b/tests/components/switchbot/test_sensor.py @@ -25,6 +25,7 @@ from . 
import ( LEAK_SERVICE_INFO, REMOTE_SERVICE_INFO, WOHAND_SERVICE_INFO, + WOHUB2_SERVICE_INFO, WOMETERTHPC_SERVICE_INFO, WORELAY_SWITCH_1PM_SERVICE_INFO, ) @@ -234,3 +235,61 @@ async def test_remote(hass: HomeAssistant) -> None: assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_hub2_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the sensor for WoHub2.""" + await async_setup_component(hass, DOMAIN, {}) + inject_bluetooth_service_info(hass, WOHUB2_SERVICE_INFO) + + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "AA:BB:CC:DD:EE:FF", + CONF_NAME: "test-name", + CONF_SENSOR_TYPE: "hub2", + }, + unique_id="aabbccddeeff", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 5 + + temperature_sensor = hass.states.get("sensor.test_name_temperature") + temperature_sensor_attrs = temperature_sensor.attributes + assert temperature_sensor.state == "26.4" + assert temperature_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Temperature" + assert temperature_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "°C" + assert temperature_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + humidity_sensor = hass.states.get("sensor.test_name_humidity") + humidity_sensor_attrs = humidity_sensor.attributes + assert humidity_sensor.state == "44" + assert humidity_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Humidity" + assert humidity_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert humidity_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + light_level_sensor = hass.states.get("sensor.test_name_light_level") + light_level_sensor_attrs = light_level_sensor.attributes + assert light_level_sensor.state == "4" + assert light_level_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Light level" + assert light_level_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "Level" + + light_level_sensor = hass.states.get("sensor.test_name_illuminance") + light_level_sensor_attrs = light_level_sensor.attributes + assert light_level_sensor.state == "30" + assert light_level_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Illuminance" + assert light_level_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "lx" + + rssi_sensor = hass.states.get("sensor.test_name_bluetooth_signal") + rssi_sensor_attrs = rssi_sensor.attributes + assert rssi_sensor.state == "-60" + assert rssi_sensor_attrs[ATTR_FRIENDLY_NAME] == "test-name Bluetooth signal" + assert rssi_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "dBm" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/synology_dsm/test_backup.py b/tests/components/synology_dsm/test_backup.py index 24cfe29f52b..db0062b45bf 100644 --- a/tests/components/synology_dsm/test_backup.py +++ b/tests/components/synology_dsm/test_backup.py @@ -4,9 +4,13 @@ from io import StringIO from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from synology_dsm.api.file_station.models import SynoFileFile, SynoFileSharedFolder -from synology_dsm.exceptions import SynologyDSMAPIErrorException +from synology_dsm.exceptions import ( + SynologyDSMAPIErrorException, + SynologyDSMRequestException, +) from homeassistant.components.backup import ( DOMAIN as BACKUP_DOMAIN, @@ -279,6 
+283,50 @@ async def test_agents_on_unload( } +async def test_agents_on_changed_update_success( + hass: HomeAssistant, + setup_dsm_with_filestation: MagicMock, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, +) -> None: + """Test backup agents when the coordinator update success state changes.""" + client = await hass_ws_client(hass) + + # config entry is loaded + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + assert response["success"] + assert len(response["result"]["agents"]) == 2 + + # coordinator update was successful + freezer.tick(910) # 15 min interval + 10s + await hass.async_block_till_done(wait_background_tasks=True) + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + assert response["success"] + assert len(response["result"]["agents"]) == 2 + + # coordinator update was unsuccessful + setup_dsm_with_filestation.update.side_effect = SynologyDSMRequestException( + OSError() + ) + freezer.tick(910) + await hass.async_block_till_done(wait_background_tasks=True) + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + assert response["success"] + assert len(response["result"]["agents"]) == 1 + + # coordinator update was successful again + setup_dsm_with_filestation.update.side_effect = None + freezer.tick(910) + await hass.async_block_till_done(wait_background_tasks=True) + await client.send_json_auto_id({"type": "backup/agents/info"}) + response = await client.receive_json() + assert response["success"] + assert len(response["result"]["agents"]) == 2 + + async def test_agents_list_backups( hass: HomeAssistant, setup_dsm_with_filestation: MagicMock, @@ -338,7 +386,7 @@ async def test_agents_list_backups_error( "backups": [], "last_attempted_automatic_backup": None, "last_completed_automatic_backup": None, - "last_non_idle_event": None, + "last_action_event": None, "next_automatic_backup": None, "next_automatic_backup_additional": False, "state": "idle", diff --git a/tests/components/system_bridge/test_init.py b/tests/components/system_bridge/test_init.py index 7632a0c8157..25ccbdeb46c 100644 --- a/tests/components/system_bridge/test_init.py +++ b/tests/components/system_bridge/test_init.py @@ -81,3 +81,53 @@ async def test_migration_minor_future_version(hass: HomeAssistant) -> None: assert config_entry.minor_version == config_entry_minor_version assert config_entry.data == config_entry_data assert config_entry.state is ConfigEntryState.LOADED + + +async def test_setup_timeout(hass: HomeAssistant) -> None: + """Test setup with timeout error.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=FIXTURE_UUID, + data=FIXTURE_USER_INPUT, + version=SystemBridgeConfigFlow.VERSION, + minor_version=SystemBridgeConfigFlow.MINOR_VERSION, + ) + + with patch( + "systembridgeconnector.version.Version.check_supported", + side_effect=TimeoutError, + ): + config_entry.add_to_hass(hass) + result = await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert result is False + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_coordinator_get_data_timeout(hass: HomeAssistant) -> None: + """Test coordinator handling timeout during get_data.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=FIXTURE_UUID, + data=FIXTURE_USER_INPUT, + version=SystemBridgeConfigFlow.VERSION, + minor_version=SystemBridgeConfigFlow.MINOR_VERSION,
+ ) + + with ( + patch( + "systembridgeconnector.version.Version.check_supported", + return_value=True, + ), + patch( + "homeassistant.components.system_bridge.coordinator.SystemBridgeDataUpdateCoordinator.async_get_data", + side_effect=TimeoutError, + ), + ): + config_entry.add_to_hass(hass) + result = await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert result is False + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/systemmonitor/snapshots/test_sensor.ambr b/tests/components/systemmonitor/snapshots/test_sensor.ambr index 1ee9067a528..8108e4777c8 100644 --- a/tests/components/systemmonitor/snapshots/test_sensor.ambr +++ b/tests/components/systemmonitor/snapshots/test_sensor.ambr @@ -114,34 +114,34 @@ # name: test_sensor[System Monitor Last boot - state] '2024-02-24T15:00:00+00:00' # --- -# name: test_sensor[System Monitor Load (15m) - attributes] +# name: test_sensor[System Monitor Load (15 min) - attributes] ReadOnlyDict({ - 'friendly_name': 'System Monitor Load (15m)', + 'friendly_name': 'System Monitor Load (15 min)', 'icon': 'mdi:cpu-64-bit', 'state_class': , }) # --- -# name: test_sensor[System Monitor Load (15m) - state] +# name: test_sensor[System Monitor Load (15 min) - state] '3' # --- -# name: test_sensor[System Monitor Load (1m) - attributes] +# name: test_sensor[System Monitor Load (1 min) - attributes] ReadOnlyDict({ - 'friendly_name': 'System Monitor Load (1m)', + 'friendly_name': 'System Monitor Load (1 min)', 'icon': 'mdi:cpu-64-bit', 'state_class': , }) # --- -# name: test_sensor[System Monitor Load (1m) - state] +# name: test_sensor[System Monitor Load (1 min) - state] '1' # --- -# name: test_sensor[System Monitor Load (5m) - attributes] +# name: test_sensor[System Monitor Load (5 min) - attributes] ReadOnlyDict({ - 'friendly_name': 'System Monitor Load (5m)', + 'friendly_name': 'System Monitor Load (5 min)', 'icon': 'mdi:cpu-64-bit', 'state_class': , }) # --- -# name: test_sensor[System Monitor Load (5m) - state] +# name: test_sensor[System Monitor Load (5 min) - state] '2' # --- # name: test_sensor[System Monitor Memory free - attributes] diff --git a/tests/components/tado/__init__.py b/tests/components/tado/__init__.py index 11d199f01a1..e6b6257e6ea 100644 --- a/tests/components/tado/__init__.py +++ b/tests/components/tado/__init__.py @@ -1 +1 @@ -"""Tests for the tado integration.""" +"""Tests for the Tado integration.""" diff --git a/tests/components/tado/conftest.py b/tests/components/tado/conftest.py new file mode 100644 index 00000000000..1aa62b218a2 --- /dev/null +++ b/tests/components/tado/conftest.py @@ -0,0 +1,50 @@ +"""Fixtures for Tado tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from PyTado.http import DeviceActivationStatus +import pytest + +from homeassistant.components.tado import CONF_REFRESH_TOKEN, DOMAIN + +from tests.common import MockConfigEntry, load_json_object_fixture + + +@pytest.fixture +def mock_tado_api() -> Generator[MagicMock]: + """Mock the Tado API.""" + with ( + patch("homeassistant.components.tado.Tado") as mock_tado, + patch("homeassistant.components.tado.config_flow.Tado", new=mock_tado), + ): + client = mock_tado.return_value + client.device_verification_url.return_value = ( + "https://login.tado.com/oauth2/device?user_code=TEST" + ) + client.device_activation_status.return_value = DeviceActivationStatus.COMPLETED + client.get_me.return_value = 
load_json_object_fixture("me.json", DOMAIN) + client.get_refresh_token.return_value = "refresh" + yield client + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock the setup entry.""" + with patch( + "homeassistant.components.tado.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_REFRESH_TOKEN: "refresh", + }, + unique_id="1", + version=2, + ) diff --git a/tests/components/tado/fixtures/device_authorize.json b/tests/components/tado/fixtures/device_authorize.json new file mode 100644 index 00000000000..aacd171fafd --- /dev/null +++ b/tests/components/tado/fixtures/device_authorize.json @@ -0,0 +1,8 @@ +{ + "device_code": "ABCD", + "expires_in": 300, + "interval": 5, + "user_code": "TEST", + "verification_uri": "https://login.tado.com/oauth2/device", + "verification_uri_complete": "https://login.tado.com/oauth2/device?user_code=TEST" +} diff --git a/tests/components/tado/test_config_flow.py b/tests/components/tado/test_config_flow.py index 19acb0aecbd..2fd8e6a0468 100644 --- a/tests/components/tado/test_config_flow.py +++ b/tests/components/tado/test_config_flow.py @@ -1,20 +1,20 @@ """Test the Tado config flow.""" -from http import HTTPStatus from ipaddress import ip_address -from unittest.mock import MagicMock, patch +import threading +from unittest.mock import AsyncMock, MagicMock, patch -import PyTado +from PyTado.http import DeviceActivationStatus import pytest -import requests -from homeassistant import config_entries -from homeassistant.components.tado.config_flow import NoHomes +from homeassistant.components.tado.config_flow import TadoException from homeassistant.components.tado.const import ( CONF_FALLBACK, + CONF_REFRESH_TOKEN, CONST_OVERLAY_TADO_DEFAULT, DOMAIN, ) +from homeassistant.config_entries import SOURCE_HOMEKIT, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -26,92 +26,186 @@ from homeassistant.helpers.service_info.zeroconf import ( from tests.common import MockConfigEntry -def _get_mock_tado_api(get_me=None) -> MagicMock: - mock_tado = MagicMock() - if isinstance(get_me, Exception): - type(mock_tado).get_me = MagicMock(side_effect=get_me) - else: - type(mock_tado).get_me = MagicMock(return_value=get_me) - return mock_tado +async def test_full_flow( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full flow of the config flow.""" + + event = threading.Event() + + def mock_tado_api_device_activation() -> None: + # Simulate the device activation process + event.wait(timeout=5) + + mock_tado_api.device_activation = mock_tado_api_device_activation + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + + event.set() + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_full_flow_reauth( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: 
AsyncMock, +) -> None: + """Test the full config flow when reauthenticating.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="ABC-123-DEF-456", + data={CONF_REFRESH_TOKEN: "totally_refresh_for_reauth"}, + ) + entry.add_to_hass(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + # No user input yet; the confirm form is shown again + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + event = threading.Event() + + def mock_tado_api_device_activation() -> None: + # Simulate the device activation process + event.wait(timeout=5) + + mock_tado_api.device_activation = mock_tado_api_device_activation + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "user" + + event.set() + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + + +async def test_auth_timeout( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the auth timeout.""" + mock_tado_api.device_activation_status.return_value = DeviceActivationStatus.PENDING + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == "timeout" + + mock_tado_api.device_activation_status.return_value = ( + DeviceActivationStatus.COMPLETED + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "timeout" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "home name" + assert result["data"] == {CONF_REFRESH_TOKEN: "refresh"} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_no_homes(hass: HomeAssistant, mock_tado_api: MagicMock) -> None: + """Test the config flow when the account has no homes.""" + mock_tado_api.get_me.return_value["homes"] = [] + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == "finish_login" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_homes" + + +async def test_tado_creation(hass: HomeAssistant) -> None: + """Test we handle an exception when creating the Tado object.""" + + with patch( + "homeassistant.components.tado.config_flow.Tado", + side_effect=TadoException("Test exception"), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" @pytest.mark.parametrize( ("exception", "error"), [ - (KeyError, "invalid_auth"), - (RuntimeError, "cannot_connect"), - (ValueError, "unknown"), + (Exception, "timeout"), +
(TadoException, "timeout"), ], ) -async def test_form_exceptions( - hass: HomeAssistant, exception: Exception, error: str +async def test_wait_for_login_exception( + hass: HomeAssistant, + mock_tado_api: MagicMock, + exception: Exception, + error: str, ) -> None: - """Test we handle Form Exceptions.""" + """Test that an exception in wait for login is handled properly.""" + mock_tado_api.device_activation.side_effect = exception + result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - # Test a retry to recover, upon failure - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "myhome" - assert result["data"] == { - "username": "test-username", - "password": "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 + # @joostlek: I think the timeout step is not rightfully named, but heck, it works + assert result["type"] is FlowResultType.SHOW_PROGRESS_DONE + assert result["step_id"] == error -async def test_options_flow(hass: HomeAssistant) -> None: +async def test_options_flow( + hass: HomeAssistant, + mock_tado_api: MagicMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: """Test config flow options.""" - entry = MockConfigEntry(domain=DOMAIN, data={"username": "test-username"}) - entry.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - with patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init( - entry.entry_id, context={"source": config_entries.SOURCE_USER} - ) + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" @@ -119,125 +213,17 @@ async def test_options_flow(hass: HomeAssistant) -> None: result["flow_id"], {CONF_FALLBACK: CONST_OVERLAY_TADO_DEFAULT}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == {CONF_FALLBACK: CONST_OVERLAY_TADO_DEFAULT} -async def test_create_entry(hass: HomeAssistant) -> None: - """Test we can setup though the user path.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": 
config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "myhome" - assert result["data"] == { - "username": "test-username", - "password": "test-password", - } - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_invalid_auth(hass: HomeAssistant) -> None: - """Test we handle invalid auth.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - response_mock = MagicMock() - type(response_mock).status_code = HTTPStatus.UNAUTHORIZED - mock_tado_api = _get_mock_tado_api( - get_me=requests.HTTPError(response=response_mock) - ) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_auth"} - - -async def test_form_cannot_connect(hass: HomeAssistant) -> None: - """Test we handle cannot connect error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - response_mock = MagicMock() - type(response_mock).status_code = HTTPStatus.INTERNAL_SERVER_ERROR - mock_tado_api = _get_mock_tado_api( - get_me=requests.HTTPError(response=response_mock) - ) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "cannot_connect"} - - -async def test_no_homes(hass: HomeAssistant) -> None: - """Test we handle no homes error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - mock_tado_api = _get_mock_tado_api(get_me={"homes": []}) - - with patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"username": "test-username", "password": "test-password"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "no_homes"} - - -async def test_form_homekit(hass: HomeAssistant) -> None: +async def test_homekit(hass: HomeAssistant, mock_tado_api: MagicMock) -> None: """Test that we abort from homekit if tado is already setup.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_HOMEKIT}, + context={"source": SOURCE_HOMEKIT}, data=ZeroconfServiceInfo( ip_address=ip_address("127.0.0.1"), ip_addresses=[ip_address("127.0.0.1")], @@ -249,13 +235,18 @@ async def test_form_homekit(hass: HomeAssistant) -> None: ), ) assert 
result["type"] is FlowResultType.FORM - assert result["errors"] == {} - flow = next( - flow - for flow in hass.config_entries.flow.async_progress() - if flow["flow_id"] == result["flow_id"] - ) - assert flow["context"]["unique_id"] == "AA:BB:CC:DD:EE:FF" + assert result["step_id"] == "homekit_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == "1" + + +async def test_homekit_already_setup( + hass: HomeAssistant, mock_tado_api: MagicMock +) -> None: + """Test that we abort from homekit if tado is already setup.""" entry = MockConfigEntry( domain=DOMAIN, data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"} @@ -264,7 +255,7 @@ async def test_form_homekit(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_HOMEKIT}, + context={"source": SOURCE_HOMEKIT}, data=ZeroconfServiceInfo( ip_address=ip_address("127.0.0.1"), ip_addresses=[ip_address("127.0.0.1")], @@ -276,77 +267,4 @@ async def test_form_homekit(hass: HomeAssistant) -> None: ), ) assert result["type"] is FlowResultType.ABORT - - -@pytest.mark.parametrize( - ("exception", "error"), - [ - (PyTado.exceptions.TadoWrongCredentialsException, "invalid_auth"), - (RuntimeError, "cannot_connect"), - (NoHomes, "no_homes"), - (ValueError, "unknown"), - ], -) -async def test_reconfigure_flow( - hass: HomeAssistant, exception: Exception, error: str -) -> None: - """Test re-configuration flow.""" - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "username": "test-username", - "password": "test-password", - "home_id": 1, - }, - unique_id="unique_id", - ) - entry.add_to_hass(hass) - - result = await entry.start_reconfigure_flow(hass) - - assert result["type"] is FlowResultType.FORM - - with patch( - "homeassistant.components.tado.config_flow.Tado", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": error} - - mock_tado_api = _get_mock_tado_api(get_me={"homes": [{"id": 1, "name": "myhome"}]}) - with ( - patch( - "homeassistant.components.tado.config_flow.Tado", - return_value=mock_tado_api, - ), - patch( - "homeassistant.components.tado.async_setup_entry", - return_value=True, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: "test-password", - }, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reconfigure_successful" - entry = hass.config_entries.async_get_entry(entry.entry_id) - assert entry - assert entry.title == "Mock Title" - assert entry.data == { - "username": "test-username", - "password": "test-password", - "home_id": 1, - } + assert result["reason"] == "already_configured" diff --git a/tests/components/tado/test_helper.py b/tests/components/tado/test_helper.py index da959c2124a..7f798e3797c 100644 --- a/tests/components/tado/test_helper.py +++ b/tests/components/tado/test_helper.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch from PyTado.interface import Tado import pytest -from homeassistant.components.tado import TadoDataUpdateCoordinator +from homeassistant.components.tado import CONF_REFRESH_TOKEN, TadoDataUpdateCoordinator from 
homeassistant.components.tado.const import ( CONST_OVERLAY_MANUAL, CONST_OVERLAY_TADO_DEFAULT, @@ -28,13 +28,13 @@ def entry(request: pytest.FixtureRequest) -> MockConfigEntry: request.param if hasattr(request, "param") else CONST_OVERLAY_TADO_DEFAULT ) return MockConfigEntry( - version=1, - minor_version=1, + version=2, domain=DOMAIN, title="Tado", data={ CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password", + CONF_REFRESH_TOKEN: "test-refresh", }, options={ "fallback": fallback, diff --git a/tests/components/tado/test_init.py b/tests/components/tado/test_init.py new file mode 100644 index 00000000000..2f2ccacf3c0 --- /dev/null +++ b/tests/components/tado/test_init.py @@ -0,0 +1,30 @@ +"""Test the Tado integration.""" + +from homeassistant.components.tado import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_v1_migration(hass: HomeAssistant) -> None: + """Test migration from v1 to v2 config entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_USERNAME: "test", + CONF_PASSWORD: "test", + }, + unique_id="1", + version=1, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.version == 2 + assert CONF_USERNAME not in entry.data + assert CONF_PASSWORD not in entry.data + + assert entry.state is ConfigEntryState.SETUP_ERROR + assert len(hass.config_entries.flow.async_progress()) == 1 diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index 5bf87dbed33..6fd333dff51 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -2,8 +2,7 @@ import requests_mock -from homeassistant.components.tado import DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.components.tado import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -178,9 +177,16 @@ async def async_init_integration( "https://my.tado.com/api/v2/homes/1/zones/1/state", text=load_fixture(zone_1_state_fixture), ) + m.post( + "https://login.tado.com/oauth2/token", + text=load_fixture(token_fixture), + ) entry = MockConfigEntry( domain=DOMAIN, - data={CONF_USERNAME: "mock", CONF_PASSWORD: "mock"}, + version=2, + data={ + CONF_REFRESH_TOKEN: "mock-token", + }, options={"fallback": "NEXT_TIME_BLOCK"}, ) entry.add_to_hass(hass) diff --git a/tests/components/tankerkoenig/const.py b/tests/components/tankerkoenig/const.py index 2c28753a7f3..9a2ecb3a2be 100644 --- a/tests/components/tankerkoenig/const.py +++ b/tests/components/tankerkoenig/const.py @@ -2,7 +2,7 @@ from aiotankerkoenig import PriceInfo, Station, Status -from homeassistant.components.tankerkoenig.const import CONF_FUEL_TYPES, CONF_STATIONS +from homeassistant.components.tankerkoenig.const import CONF_STATIONS from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -98,7 +98,6 @@ PRICES_MISSING_FUELTYPE = { CONFIG_DATA = { CONF_NAME: "Home", CONF_API_KEY: "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx", - CONF_FUEL_TYPES: ["e5"], CONF_LOCATION: {CONF_LATITUDE: 51.0, CONF_LONGITUDE: 13.0}, CONF_RADIUS: 2.0, CONF_STATIONS: [ diff --git a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr index b5b33d7c246..71d9d9c75f8 100644 --- 
a/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr +++ b/tests/components/tankerkoenig/snapshots/test_diagnostics.ambr @@ -12,9 +12,6 @@ 'entry': dict({ 'data': dict({ 'api_key': '**REDACTED**', - 'fuel_types': list([ - 'e5', - ]), 'location': dict({ 'latitude': '**REDACTED**', 'longitude': '**REDACTED**', diff --git a/tests/components/tankerkoenig/test_config_flow.py b/tests/components/tankerkoenig/test_config_flow.py index bb1e943bbb9..967470c2c16 100644 --- a/tests/components/tankerkoenig/test_config_flow.py +++ b/tests/components/tankerkoenig/test_config_flow.py @@ -4,11 +4,7 @@ from unittest.mock import AsyncMock, patch from aiotankerkoenig.exceptions import TankerkoenigInvalidKeyError -from homeassistant.components.tankerkoenig.const import ( - CONF_FUEL_TYPES, - CONF_STATIONS, - DOMAIN, -) +from homeassistant.components.tankerkoenig.const import CONF_STATIONS, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_API_KEY, @@ -30,7 +26,6 @@ from tests.common import MockConfigEntry MOCK_USER_DATA = { CONF_NAME: "Home", CONF_API_KEY: "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx", - CONF_FUEL_TYPES: ["e5"], CONF_LOCATION: {CONF_LATITUDE: 51.0, CONF_LONGITUDE: 13.0}, CONF_RADIUS: 2.0, } @@ -81,7 +76,6 @@ async def test_user(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"][CONF_NAME] == "Home" assert result["data"][CONF_API_KEY] == "269534f6-xxxx-xxxx-xxxx-yyyyzzzzxxxx" - assert result["data"][CONF_FUEL_TYPES] == ["e5"] assert result["data"][CONF_LOCATION] == {"latitude": 51.0, "longitude": 13.0} assert result["data"][CONF_RADIUS] == 2.0 assert result["data"][CONF_STATIONS] == [ diff --git a/tests/components/template/conftest.py b/tests/components/template/conftest.py index bdca84ba071..86a30535e92 100644 --- a/tests/components/template/conftest.py +++ b/tests/components/template/conftest.py @@ -1,5 +1,7 @@ """template conftest.""" +from enum import Enum + import pytest from homeassistant.core import HomeAssistant, ServiceCall @@ -9,6 +11,13 @@ from homeassistant.setup import async_setup_component from tests.common import assert_setup_component, async_mock_service +class ConfigurationStyle(Enum): + """Configuration Styles for template testing.""" + + LEGACY = "Legacy" + MODERN = "Modern" + + @pytest.fixture def calls(hass: HomeAssistant) -> list[ServiceCall]: """Track calls to a mock service.""" diff --git a/tests/components/template/snapshots/test_switch.ambr b/tests/components/template/snapshots/test_switch.ambr index c240a9436a0..909110fdbc8 100644 --- a/tests/components/template/snapshots/test_switch.ambr +++ b/tests/components/template/snapshots/test_switch.ambr @@ -1,5 +1,18 @@ # serializer version: 1 -# name: test_setup_config_entry +# name: test_setup_config_entry[state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'switch.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_setup_config_entry[value_template] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'My template', diff --git a/tests/components/template/test_blueprint.py b/tests/components/template/test_blueprint.py index dd008a27822..66630ecf739 100644 --- a/tests/components/template/test_blueprint.py +++ b/tests/components/template/test_blueprint.py @@ -16,10 +16,10 @@ from homeassistant.components.blueprint import ( DomainBlueprints, ) from 
homeassistant.components.template import DOMAIN, SERVICE_RELOAD -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import Context, HomeAssistant, callback from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from homeassistant.util import yaml as yaml_util +from homeassistant.util import dt as dt_util, yaml as yaml_util from tests.common import async_mock_service @@ -212,6 +212,61 @@ async def test_reload_template_when_blueprint_changes(hass: HomeAssistant) -> No assert not_inverted.state == "on" +async def test_trigger_event_sensor( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test event sensor blueprint.""" + blueprint = "test_event_sensor.yaml" + assert await async_setup_component( + hass, + "template", + { + "template": [ + { + "use_blueprint": { + "path": blueprint, + "input": { + "event_type": "my_custom_event", + "event_data": {"foo": "bar"}, + }, + }, + "name": "My Custom Event", + }, + ] + }, + ) + + context = Context() + now = dt_util.utcnow() + with patch("homeassistant.util.dt.now", return_value=now): + hass.bus.async_fire( + "my_custom_event", {"foo": "bar", "beer": 2}, context=context + ) + await hass.async_block_till_done() + + date_state = hass.states.get("sensor.my_custom_event") + assert date_state is not None + assert date_state.state == now.isoformat(timespec="seconds") + data = date_state.attributes.get("data") + assert data is not None + assert data != "" + assert data.get("foo") == "bar" + assert data.get("beer") == 2 + + inverted_foo_template = template.helpers.blueprint_in_template( + hass, "sensor.my_custom_event" + ) + assert inverted_foo_template == blueprint + + inverted_binary_sensor_blueprint_entity_ids = ( + template.helpers.templates_with_blueprint(hass, blueprint) + ) + assert len(inverted_binary_sensor_blueprint_entity_ids) == 1 + + with pytest.raises(BlueprintInUse): + await template.async_get_blueprints(hass).async_remove_blueprint(blueprint) + + async def test_domain_blueprint(hass: HomeAssistant) -> None: """Test DomainBlueprint services.""" reload_handler_calls = async_mock_service(hass, DOMAIN, SERVICE_RELOAD) @@ -262,7 +317,8 @@ async def test_invalid_blueprint( ) assert "more than one platform defined per blueprint" in caplog.text - assert await template.async_get_blueprints(hass).async_get_blueprints() == {} + blueprints = await template.async_get_blueprints(hass).async_get_blueprints() + assert "invalid.yaml" not in blueprints async def test_no_blueprint(hass: HomeAssistant) -> None: diff --git a/tests/components/template/test_config.py b/tests/components/template/test_config.py new file mode 100644 index 00000000000..b14ff0efa5a --- /dev/null +++ b/tests/components/template/test_config.py @@ -0,0 +1,50 @@ +"""Test Template config.""" + +from __future__ import annotations + +import pytest +import voluptuous as vol + +from homeassistant.components.template.config import CONFIG_SECTION_SCHEMA +from homeassistant.core import HomeAssistant + + +@pytest.mark.parametrize( + "config", + [ + { + "trigger": {"trigger": "event", "event_type": "my_event"}, + "button": { + "press": { + "service": "test.automation", + "data_template": {"caller": "{{ this.entity_id }}"}, + }, + "device_class": "restart", + "unique_id": "test", + "name": "test", + "icon": "mdi:test", + }, + }, + { + "trigger": {"trigger": "event", "event_type": "my_event"}, + "action": { + "service": "test.automation", + "data_template": {"caller": "{{ this.entity_id }}"}, + }, 
+ "button": { + "press": { + "service": "test.automation", + "data_template": {"caller": "{{ this.entity_id }}"}, + }, + "device_class": "restart", + "unique_id": "test", + "name": "test", + "icon": "mdi:test", + }, + }, + ], +) +async def test_invalid_schema(hass: HomeAssistant, config: dict) -> None: + """Test invalid config schemas.""" + with pytest.raises(vol.Invalid): + CONFIG_SECTION_SCHEMA(config) diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index 2c9b81e7c91..21d740b165b 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -16,6 +16,36 @@ from homeassistant.helpers import device_registry as dr from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator +SWITCH_BEFORE_OPTIONS = { + "name": "test_template_switch", + "template_type": "switch", + "turn_off": [{"event": "test_template_switch", "event_data": {"event": "off"}}], + "turn_on": [{"event": "test_template_switch", "event_data": {"event": "on"}}], + "value_template": "{{ now().minute % 2 == 0 }}", +} + + +SWITCH_AFTER_OPTIONS = { + "name": "test_template_switch", + "template_type": "switch", + "turn_off": [{"event": "test_template_switch", "event_data": {"event": "off"}}], + "turn_on": [{"event": "test_template_switch", "event_data": {"event": "on"}}], + "state": "{{ now().minute % 2 == 0 }}", + "value_template": "{{ now().minute % 2 == 0 }}", +} + +SENSOR_OPTIONS = { + "name": "test_template_sensor", + "template_type": "sensor", + "state": "{{ 'a' if now().minute % 2 == 0 else 'b' }}", +} + +BINARY_SENSOR_OPTIONS = { + "name": "test_template_sensor", + "template_type": "binary_sensor", + "state": "{{ now().minute % 2 == 0 else }}", +} + @pytest.mark.parametrize( ( diff --git a/tests/components/template/test_entity.py b/tests/components/template/test_entity.py new file mode 100644 index 00000000000..67a85839982 --- /dev/null +++ b/tests/components/template/test_entity.py @@ -0,0 +1,17 @@ +"""Test abstract template entity.""" + +import pytest + +from homeassistant.components.template import entity as abstract_entity +from homeassistant.core import HomeAssistant + + +async def test_template_entity_not_implemented(hass: HomeAssistant) -> None: + """Test abstract template entity raises not implemented error.""" + + entity = abstract_entity.AbstractTemplateEntity(None) + with pytest.raises(NotImplementedError): + _ = entity.referenced_blueprint + + with pytest.raises(NotImplementedError): + entity._render_script_variables() diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index a94ec233f81..1a739b4921e 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -4,7 +4,7 @@ from typing import Any import pytest -from homeassistant.components import light +from homeassistant.components import light, template from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP_KELVIN, @@ -17,6 +17,7 @@ from homeassistant.components.light import ( ColorMode, LightEntityFeature, ) +from homeassistant.components.template.light import rewrite_legacy_to_modern_conf from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -26,8 +27,12 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.template import Template from homeassistant.setup import 
async_setup_component +from .conftest import ConfigurationStyle + from tests.common import assert_setup_component # Represent for light's availability @@ -154,10 +159,245 @@ OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG = { } -async def async_setup_light( +TEST_MISSING_KEY_CONFIG = { + "turn_on": { + "service": "light.turn_on", + "entity_id": "light.test_state", + }, + "set_level": { + "service": "light.turn_on", + "data_template": { + "entity_id": "light.test_state", + "brightness": "{{brightness}}", + }, + }, +} + + +TEST_ON_ACTION_WITH_TRANSITION_CONFIG = { + "turn_on": { + "service": "test.automation", + "data_template": { + "transition": "{{transition}}", + }, + }, + "turn_off": { + "service": "light.turn_off", + "entity_id": "light.test_state", + }, + "set_level": { + "service": "light.turn_on", + "data_template": { + "entity_id": "light.test_state", + "brightness": "{{brightness}}", + "transition": "{{transition}}", + }, + }, +} + + +TEST_OFF_ACTION_WITH_TRANSITION_CONFIG = { + "turn_on": { + "service": "light.turn_on", + "entity_id": "light.test_state", + }, + "turn_off": { + "service": "test.automation", + "data_template": { + "transition": "{{transition}}", + }, + }, + "set_level": { + "service": "light.turn_on", + "data_template": { + "entity_id": "light.test_state", + "brightness": "{{brightness}}", + "transition": "{{transition}}", + }, + }, +} + + +TEST_ALL_COLORS_NO_TEMPLATE_CONFIG = { + "set_hs": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "h": "{{h}}", + "s": "{{s}}", + }, + }, + "set_temperature": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "color_temp": "{{color_temp}}", + }, + }, + "set_rgb": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + }, + }, + "set_rgbw": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + "w": "{{w}}", + }, + }, + "set_rgbww": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "r": "{{r}}", + "g": "{{g}}", + "b": "{{b}}", + "cw": "{{cw}}", + "ww": "{{ww}}", + }, + }, +} + + +TEST_UNIQUE_ID_CONFIG = { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "unique_id": "not-so-unique-anymore", +} + + +@pytest.mark.parametrize( + ("old_attr", "new_attr", "attr_template"), + [ + ( + "value_template", + "state", + "{{ 1 == 1 }}", + ), + ( + "rgb_template", + "rgb", + "{{ (255,255,255) }}", + ), + ( + "rgbw_template", + "rgbw", + "{{ (255,255,255,255) }}", + ), + ( + "rgbww_template", + "rgbww", + "{{ (255,255,255,255,255) }}", + ), + ( + "effect_list_template", + "effect_list", + "{{ ['a', 'b'] }}", + ), + ( + "effect_template", + "effect", + "{{ 'a' }}", + ), + ( + "level_template", + "level", + "{{ 255 }}", + ), + ( + "max_mireds_template", + "max_mireds", + "{{ 255 }}", + ), + ( + "min_mireds_template", + "min_mireds", + "{{ 255 }}", + ), + ( + "supports_transition_template", + "supports_transition", + "{{ True }}", + ), + ( + "temperature_template", + "temperature", + "{{ 255 }}", + ), + ( + "white_value_template", + "white_value", + "{{ 255 }}", + ), + ( + "hs_template", + "hs", + "{{ (255, 255) }}", + ), + ( + "color_template", + "hs", + "{{ (255, 255) }}", + ), + ], +) +async def test_legacy_to_modern_config( + hass: HomeAssistant, old_attr: str, new_attr: str, attr_template: str +) -> None: + """Test the conversion of legacy template to modern template.""" + 
config = { + "foo": { + "friendly_name": "foo bar", + "unique_id": "foo-bar-light", + "icon_template": "{{ 'mdi.abc' }}", + "entity_picture_template": "{{ 'mypicture.jpg' }}", + "availability_template": "{{ 1 == 1 }}", + old_attr: attr_template, + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + } + } + altered_configs = rewrite_legacy_to_modern_conf(hass, config) + + assert len(altered_configs) == 1 + + assert [ + { + "availability": Template("{{ 1 == 1 }}", hass), + "icon": Template("{{ 'mdi.abc' }}", hass), + "name": Template("foo bar", hass), + "object_id": "foo", + "picture": Template("{{ 'mypicture.jpg' }}", hass), + "turn_off": { + "data_template": { + "action": "turn_off", + "caller": "{{ this.entity_id }}", + }, + "service": "test.automation", + }, + "turn_on": { + "data_template": { + "action": "turn_on", + "caller": "{{ this.entity_id }}", + }, + "service": "test.automation", + }, + "unique_id": "foo-bar-light", + new_attr: Template(attr_template, hass), + } + ] == altered_configs + + +async def async_setup_legacy_format( hass: HomeAssistant, count: int, light_config: dict[str, Any] ) -> None: - """Do setup of light integration.""" + """Do setup of light integration via legacy format.""" config = {"light": {"platform": "template", "lights": light_config}} with assert_setup_component(count, light.DOMAIN): @@ -172,12 +412,291 @@ async def async_setup_light( await hass.async_block_till_done() -@pytest.fixture -async def setup_light( +async def async_setup_legacy_format_with_attribute( + hass: HomeAssistant, + count: int, + attribute: str, + attribute_template: str, + extra_config: dict, +) -> None: + """Do setup of a legacy light that has a single templated attribute.""" + extra = {attribute: attribute_template} if attribute and attribute_template else {} + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **extra_config, + "value_template": "{{ 1 == 1 }}", + **extra, + } + }, + ) + + +async def async_setup_new_format( hass: HomeAssistant, count: int, light_config: dict[str, Any] +) -> None: + """Do setup of light integration via new format.""" + config = {"template": {"light": light_config}} + + with assert_setup_component(count, template.DOMAIN): + assert await async_setup_component( + hass, + template.DOMAIN, + config, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + +async def async_setup_modern_format_with_attribute( + hass: HomeAssistant, + count: int, + attribute: str, + attribute_template: str, + extra_config: dict, +) -> None: + """Do setup of a legacy light that has a single templated attribute.""" + extra = {attribute: attribute_template} if attribute and attribute_template else {} + await async_setup_new_format( + hass, + count, + { + "name": "test_template_light", + **extra_config, + "state": "{{ 1 == 1 }}", + **extra, + }, + ) + + +@pytest.fixture +async def setup_light( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + light_config: dict[str, Any], ) -> None: """Do setup of light integration.""" - await async_setup_light(hass, count, light_config) + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format(hass, count, light_config) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format(hass, count, light_config) + + +@pytest.fixture +async def setup_state_light( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + state_template: str, +): + """Do setup of light integration.""" + if style == ConfigurationStyle.LEGACY: + 
await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "value_template": state_template, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format( + hass, + count, + { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "name": "test_template_light", + "state": state_template, + }, + ) + + +@pytest.fixture +async def setup_single_attribute_light( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + attribute: str, + attribute_template: str, + extra_config: dict, +) -> None: + """Do setup of light integration.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format_with_attribute( + hass, count, attribute, attribute_template, extra_config + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format_with_attribute( + hass, count, attribute, attribute_template, extra_config + ) + + +@pytest.fixture +async def setup_single_action_light( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + extra_config: dict, +) -> None: + """Do setup of light integration.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format_with_attribute( + hass, count, "", "", extra_config + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format_with_attribute( + hass, count, "", "", extra_config + ) + + +@pytest.fixture +async def setup_light_with_effects( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + effect_list_template: str, + effect_template: str, +) -> None: + """Do setup of light with effects.""" + common = { + "set_effect": { + "service": "test.automation", + "data_template": { + "action": "set_effect", + "caller": "{{ this.entity_id }}", + "entity_id": "test.test_state", + "effect": "{{effect}}", + }, + }, + } + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "value_template": "{{true}}", + **common, + "effect_list_template": effect_list_template, + "effect_template": effect_template, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format( + hass, + count, + { + "name": "test_template_light", + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "state": "{{true}}", + **common, + "effect_list": effect_list_template, + "effect": effect_template, + }, + ) + + +@pytest.fixture +async def setup_light_with_mireds( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + attribute: str, + attribute_template: str, +) -> None: + """Do setup of light that uses mireds.""" + common = { + "set_temperature": { + "service": "light.turn_on", + "data_template": { + "entity_id": "light.test_state", + "color_temp": "{{color_temp}}", + }, + }, + attribute: attribute_template, + } + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "value_template": "{{ 1 == 1 }}", + **common, + "temperature_template": "{{200}}", + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format( + hass, + count, + { + "name": "test_template_light", + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "state": "{{ 1 == 1 }}", + **common, + "temperature": "{{200}}", + }, + ) + + +@pytest.fixture +async def setup_light_with_transition_template( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + transition_template: str, +) -> None: + """Do setup of light 
that uses mireds.""" + common = { + "set_effect": { + "service": "test.automation", + "data_template": { + "entity_id": "test.test_state", + "effect": "{{effect}}", + }, + }, + } + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + "test_template_light": { + **OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG, + "value_template": "{{ 1 == 1 }}", + **common, + "effect_list_template": "{{ ['Disco', 'Police'] }}", + "effect_template": "{{ None }}", + "supports_transition_template": transition_template, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_new_format( + hass, + count, + { + "name": "test_template_light", + **OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG, + "state": "{{ 1 == 1 }}", + **common, + "effect_list": "{{ ['Disco', 'Police'] }}", + "effect": "{{ None }}", + "supports_transition": transition_template, + }, + ) @pytest.mark.parametrize("count", [1]) @@ -186,18 +705,15 @@ async def setup_light( [(0, [ColorMode.BRIGHTNESS])], ) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{states.test['big.fat...']}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) +@pytest.mark.parametrize("state_template", ["{{states.test['big.fat...']}}"]) async def test_template_state_invalid( - hass: HomeAssistant, supported_features, supported_color_modes, setup_light + hass: HomeAssistant, supported_features, supported_color_modes, setup_state_light ) -> None: """Test template state with render error.""" state = hass.states.get("light.test_template_light") @@ -209,17 +725,14 @@ async def test_template_state_invalid( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{ states.light.test_state.state }}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) -async def test_template_state_text(hass: HomeAssistant, setup_light) -> None: +@pytest.mark.parametrize("state_template", ["{{ states.light.test_state.state }}"]) +async def test_template_state_text(hass: HomeAssistant, setup_state_light) -> None: """Test the state text of a template.""" set_state = STATE_ON hass.states.async_set("light.test_state", set_state) @@ -242,7 +755,14 @@ async def test_template_state_text(hass: HomeAssistant, setup_light) -> None: @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("value_template", "expected_state", "expected_color_mode"), + "style", + [ + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, + ], +) +@pytest.mark.parametrize( + ("state_template", "expected_state", "expected_color_mode"), [ ( "{{ 1 == 1 }}", @@ -256,21 +776,13 @@ async def test_template_state_text(hass: HomeAssistant, setup_light) -> None: ), ], ) -async def test_templatex_state_boolean( +async def test_legacy_template_state_boolean( hass: HomeAssistant, expected_color_mode, expected_state, - count, - value_template, + setup_state_light, ) -> None: """Test the setting of the state with boolean on.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": value_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.state == expected_state assert state.attributes.get("color_mode") == expected_color_mode @@ -280,48 +792,56 @@ async def test_templatex_state_boolean( 
@pytest.mark.parametrize("count", [0]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "value_template": "{%- if false -%}", + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "bad name here": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "value_template": "{{ 1== 1}}", + } + }, + ConfigurationStyle.LEGACY, + ), + ( + {"test_template_light": "Invalid"}, + ConfigurationStyle.LEGACY, + ), + ( + { **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{%- if false -%}", - } - }, - { - "bad name here": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{ 1== 1}}", - } - }, - {"test_template_light": "Invalid"}, + "name": "test_template_light", + "state": "{%- if false -%}", + }, + ConfigurationStyle.MODERN, + ), ], ) -async def test_template_syntax_error(hass: HomeAssistant, setup_light) -> None: - """Test templating syntax error.""" +async def test_template_config_errors(hass: HomeAssistant, setup_light) -> None: + """Test template light configuration errors.""" assert hass.states.async_all("light") == [] @pytest.mark.parametrize( - ("light_config", "count"), + ("light_config", "style", "count"), [ ( - { - "light_one": { - "value_template": "{{ 1== 1}}", - "turn_on": { - "service": "light.turn_on", - "entity_id": "light.test_state", - }, - "set_level": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "brightness": "{{brightness}}", - }, - }, - } - }, + {"light_one": {"value_template": "{{ 1== 1}}", **TEST_MISSING_KEY_CONFIG}}, + ConfigurationStyle.LEGACY, + 0, + ), + ( + {"name": "light_one", "state": "{{ 1== 1}}", **TEST_MISSING_KEY_CONFIG}, + ConfigurationStyle.MODERN, 0, ), ], @@ -336,18 +856,15 @@ async def test_missing_key(hass: HomeAssistant, count, setup_light) -> None: @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{states.light.test_state.state}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) +@pytest.mark.parametrize("state_template", ["{{ states.light.test_state.state }}"]) async def test_on_action( - hass: HomeAssistant, setup_light, calls: list[ServiceCall] + hass: HomeAssistant, setup_state_light, calls: list[ServiceCall] ) -> None: """Test on action.""" hass.states.async_set("light.test_state", STATE_OFF) @@ -378,32 +895,26 @@ async def test_on_action( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { - "value_template": "{{states.light.test_state.state}}", - "turn_on": { - "service": "test.automation", - "data_template": { - "transition": "{{transition}}", - }, - }, - "turn_off": { - "service": "light.turn_off", - "entity_id": "light.test_state", - }, - "supports_transition_template": "{{true}}", - "set_level": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "brightness": "{{brightness}}", - "transition": "{{transition}}", - }, - }, - } - }, + ( + { + "test_template_light": { + "value_template": "{{states.light.test_state.state}}", + **TEST_ON_ACTION_WITH_TRANSITION_CONFIG, + "supports_transition_template": "{{true}}", + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", + **TEST_ON_ACTION_WITH_TRANSITION_CONFIG, + "supports_transition": "{{true}}", + }, + 
ConfigurationStyle.MODERN, + ), ], ) async def test_on_action_with_transition( @@ -437,13 +948,23 @@ async def test_on_action_with_transition( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - } - }, + }, + ConfigurationStyle.MODERN, + ), ], ) async def test_on_action_optimistic( @@ -497,18 +1018,15 @@ async def test_on_action_optimistic( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{states.light.test_state.state}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) +@pytest.mark.parametrize("state_template", ["{{ states.light.test_state.state }}"]) async def test_off_action( - hass: HomeAssistant, setup_light, calls: list[ServiceCall] + hass: HomeAssistant, setup_state_light, calls: list[ServiceCall] ) -> None: """Test off action.""" hass.states.async_set("light.test_state", STATE_ON) @@ -538,32 +1056,27 @@ async def test_off_action( @pytest.mark.parametrize("count", [(1)]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { - "value_template": "{{states.light.test_state.state}}", - "turn_on": { - "service": "light.turn_on", - "entity_id": "light.test_state", - }, - "turn_off": { - "service": "test.automation", - "data_template": { - "transition": "{{transition}}", - }, - }, - "supports_transition_template": "{{true}}", - "set_level": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "brightness": "{{brightness}}", - "transition": "{{transition}}", - }, - }, - } - }, + ( + { + "test_template_light": { + "value_template": "{{states.light.test_state.state}}", + **TEST_OFF_ACTION_WITH_TRANSITION_CONFIG, + "supports_transition_template": "{{true}}", + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", + "state": "{{states.light.test_state.state}}", + **TEST_OFF_ACTION_WITH_TRANSITION_CONFIG, + "supports_transition": "{{true}}", + }, + ConfigurationStyle.MODERN, + ), ], ) async def test_off_action_with_transition( @@ -596,13 +1109,23 @@ async def test_off_action_with_transition( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - } - }, + }, + ConfigurationStyle.MODERN, + ), ], ) async def test_off_action_optimistic( @@ -632,19 +1155,16 @@ async def test_off_action_optimistic( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) +@pytest.mark.parametrize("state_template", ["{{1 == 1}}"]) async def test_level_action_no_template( hass: HomeAssistant, - setup_light, + setup_state_light, calls: list[ServiceCall], ) -> None: """Test setting brightness with optimistic template.""" @@ -671,9 +1191,18 @@ async def 
test_level_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_level", "level_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "level_template"), + (ConfigurationStyle.MODERN, "level"), + ], +) +@pytest.mark.parametrize( + ("expected_level", "attribute_template", "expected_color_mode"), [ (255, "{{255}}", ColorMode.BRIGHTNESS), (None, "{{256}}", ColorMode.BRIGHTNESS), @@ -690,20 +1219,11 @@ async def test_level_action_no_template( ) async def test_level_template( hass: HomeAssistant, - expected_level, - expected_color_mode, - count, - level_template, + expected_level: Any, + expected_color_mode: ColorMode, + setup_single_attribute_light, ) -> None: """Test the template for the level.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "level_template": level_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("brightness") == expected_level assert state.state == STATE_ON @@ -712,9 +1232,18 @@ async def test_level_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_temp", "temperature_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "temperature_template"), + (ConfigurationStyle.MODERN, "temperature"), + ], +) +@pytest.mark.parametrize( + ("expected_temp", "attribute_template", "expected_color_mode"), [ (500, "{{500}}", ColorMode.COLOR_TEMP), (None, "{{501}}", ColorMode.COLOR_TEMP), @@ -727,20 +1256,11 @@ async def test_level_template( ) async def test_temperature_template( hass: HomeAssistant, - expected_temp, - expected_color_mode, - count, - temperature_template, + expected_temp: Any, + expected_color_mode: ColorMode, + setup_single_attribute_light, ) -> None: """Test the template for the temperature.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "temperature_template": temperature_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("color_temp") == expected_temp assert state.state == STATE_ON @@ -749,21 +1269,19 @@ async def test_temperature_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [(1, OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + "style", [ - { - "test_template_light": { - **OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_temperature_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting temperature with optimistic template.""" @@ -793,43 +1311,53 @@ async def test_temperature_action_no_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style", "entity_id"), [ - 
{ - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "friendly_name": "Template light", + "value_template": "{{ 1 == 1 }}", + } + }, + ConfigurationStyle.LEGACY, + "light.test_template_light", + ), + ( + { **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "friendly_name": "Template light", - "value_template": "{{ 1 == 1 }}", - } - }, + "name": "Template light", + "state": "{{ 1 == 1 }}", + }, + ConfigurationStyle.MODERN, + "light.template_light", + ), ], ) -async def test_friendly_name(hass: HomeAssistant, setup_light) -> None: +async def test_friendly_name(hass: HomeAssistant, entity_id: str, setup_light) -> None: """Test the accessibility of the friendly_name attribute.""" - state = hass.states.get("light.test_template_light") + state = hass.states.get(entity_id) assert state is not None assert state.attributes.get("friendly_name") == "Template light" -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [(1, OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "friendly_name": "Template light", - "value_template": "{{ 1 == 1 }}", - "icon_template": ( - "{% if states.light.test_state.state %}mdi:check{% endif %}" - ), - } - }, + (ConfigurationStyle.LEGACY, "icon_template"), + (ConfigurationStyle.MODERN, "icon"), ], ) -async def test_icon_template(hass: HomeAssistant, setup_light) -> None: +@pytest.mark.parametrize( + "attribute_template", ["{% if states.light.test_state.state %}mdi:check{% endif %}"] +) +async def test_icon_template(hass: HomeAssistant, setup_single_attribute_light) -> None: """Test icon template.""" state = hass.states.get("light.test_template_light") assert state.attributes.get("icon") == "" @@ -842,23 +1370,23 @@ async def test_icon_template(hass: HomeAssistant, setup_light) -> None: assert state.attributes["icon"] == "mdi:check" -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [(1, OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "friendly_name": "Template light", - "value_template": "{{ 1 == 1 }}", - "entity_picture_template": ( - "{% if states.light.test_state.state %}/local/light.png{% endif %}" - ), - } - }, + (ConfigurationStyle.LEGACY, "entity_picture_template"), + (ConfigurationStyle.MODERN, "picture"), ], ) -async def test_entity_picture_template(hass: HomeAssistant, setup_light) -> None: +@pytest.mark.parametrize( + "attribute_template", + ["{% if states.light.test_state.state %}/local/light.png{% endif %}"], +) +async def test_entity_picture_template( + hass: HomeAssistant, setup_single_attribute_light +) -> None: """Test entity_picture template.""" state = hass.states.get("light.test_template_light") assert state.attributes.get("entity_picture") == "" @@ -871,21 +1399,21 @@ async def test_entity_picture_template(hass: HomeAssistant, setup_light) -> None assert state.attributes["entity_picture"] == "/local/light.png" -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [ - { - "test_template_light": { - **OPTIMISTIC_LEGACY_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + (1, OPTIMISTIC_LEGACY_COLOR_LIGHT_CONFIG), + ], +) +@pytest.mark.parametrize( + "style", + [ + ConfigurationStyle.LEGACY, 
], ) async def test_legacy_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting color with optimistic template.""" @@ -913,24 +1441,25 @@ async def test_legacy_color_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), [ - { - "test_template_light": { - **OPTIMISTIC_HS_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + (1, OPTIMISTIC_HS_COLOR_LIGHT_CONFIG), + ], +) +@pytest.mark.parametrize( + "style", + [ + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_hs_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: - """Test setting hs color with optimistic template.""" + """Test setting color with optimistic template.""" state = hass.states.get("light.test_template_light") assert state.attributes.get("hs_color") is None @@ -955,21 +1484,20 @@ async def test_hs_color_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), + [(1, OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG)], +) +@pytest.mark.parametrize( + "style", [ - { - "test_template_light": { - **OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_rgb_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting rgb color with optimistic template.""" @@ -998,21 +1526,20 @@ async def test_rgb_color_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), + [(1, OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG)], +) +@pytest.mark.parametrize( + "style", [ - { - "test_template_light": { - **OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_rgbw_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting rgbw color with optimistic template.""" @@ -1045,21 +1572,20 @@ async def test_rgbw_color_action_no_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config"), + [(1, OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG)], +) +@pytest.mark.parametrize( + "style", [ - { - "test_template_light": { - **OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - } - }, + ConfigurationStyle.LEGACY, + ConfigurationStyle.MODERN, ], ) async def test_rgbww_color_action_no_template( hass: HomeAssistant, - setup_light, + setup_single_action_light, calls: list[ServiceCall], ) -> None: """Test setting rgbww color with optimistic template.""" @@ -1123,7 +1649,7 @@ async def test_legacy_color_template( "color_template": color_template, } } - await async_setup_light(hass, count, light_config) + await async_setup_legacy_format(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("hs_color") == expected_hs assert state.state == STATE_ON @@ -1132,9 +1658,18 @@ async def 
test_legacy_color_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_hs", "hs_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_HS_COLOR_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "hs_template"), + (ConfigurationStyle.MODERN, "hs"), + ], +) +@pytest.mark.parametrize( + ("expected_hs", "attribute_template", "expected_color_mode"), [ ((360, 100), "{{(360, 100)}}", ColorMode.HS), ((360, 100), "(360, 100)", ColorMode.HS), @@ -1152,18 +1687,9 @@ async def test_hs_template( hass: HomeAssistant, expected_hs, expected_color_mode, - count, - hs_template, + setup_single_attribute_light, ) -> None: """Test the template for the color.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_HS_COLOR_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "hs_template": hs_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("hs_color") == expected_hs assert state.state == STATE_ON @@ -1172,9 +1698,18 @@ async def test_hs_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_rgb", "rgb_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "rgb_template"), + (ConfigurationStyle.MODERN, "rgb"), + ], +) +@pytest.mark.parametrize( + ("expected_rgb", "attribute_template", "expected_color_mode"), [ ((160, 78, 192), "{{(160, 78, 192)}}", ColorMode.RGB), ((160, 78, 192), "{{[160, 78, 192]}}", ColorMode.RGB), @@ -1193,18 +1728,9 @@ async def test_rgb_template( hass: HomeAssistant, expected_rgb, expected_color_mode, - count, - rgb_template, + setup_single_attribute_light, ) -> None: """Test the template for the color.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_RGB_COLOR_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "rgb_template": rgb_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("rgb_color") == expected_rgb assert state.state == STATE_ON @@ -1213,9 +1739,18 @@ async def test_rgb_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_rgbw", "rgbw_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "rgbw_template"), + (ConfigurationStyle.MODERN, "rgbw"), + ], +) +@pytest.mark.parametrize( + ("expected_rgbw", "attribute_template", "expected_color_mode"), [ ((160, 78, 192, 25), "{{(160, 78, 192, 25)}}", ColorMode.RGBW), ((160, 78, 192, 25), "{{[160, 78, 192, 25]}}", ColorMode.RGBW), @@ -1235,18 +1770,9 @@ async def test_rgbw_template( hass: HomeAssistant, expected_rgbw, expected_color_mode, - count, - rgbw_template, + setup_single_attribute_light, ) -> None: """Test the template for the color.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_RGBW_COLOR_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "rgbw_template": rgbw_template, - } - } - await async_setup_light(hass, count, light_config) state = 
hass.states.get("light.test_template_light") assert state.attributes.get("rgbw_color") == expected_rgbw assert state.state == STATE_ON @@ -1255,9 +1781,18 @@ async def test_rgbw_template( assert state.attributes["supported_features"] == 0 -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_rgbww", "rgbww_template", "expected_color_mode"), + ("count", "extra_config"), [(1, OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "rgbww_template"), + (ConfigurationStyle.MODERN, "rgbww"), + ], +) +@pytest.mark.parametrize( + ("expected_rgbww", "attribute_template", "expected_color_mode"), [ ((160, 78, 192, 25, 55), "{{(160, 78, 192, 25, 55)}}", ColorMode.RGBWW), ((160, 78, 192, 25, 55), "(160, 78, 192, 25, 55)", ColorMode.RGBWW), @@ -1282,18 +1817,9 @@ async def test_rgbww_template( hass: HomeAssistant, expected_rgbww, expected_color_mode, - count, - rgbww_template, + setup_single_attribute_light, ) -> None: """Test the template for the color.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "rgbww_template": rgbww_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state.attributes.get("rgbww_color") == expected_rgbww assert state.state == STATE_ON @@ -1304,59 +1830,27 @@ async def test_rgbww_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light": { + ( + { + "test_template_light": { + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "value_template": "{{1 == 1}}", + **TEST_ALL_COLORS_NO_TEMPLATE_CONFIG, + } + }, + ConfigurationStyle.LEGACY, + ), + ( + { + "name": "test_template_light", **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{1 == 1}}", - "set_hs": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "h": "{{h}}", - "s": "{{s}}", - }, - }, - "set_temperature": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "set_rgb": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "r": "{{r}}", - "g": "{{g}}", - "b": "{{b}}", - }, - }, - "set_rgbw": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "r": "{{r}}", - "g": "{{g}}", - "b": "{{b}}", - "w": "{{w}}", - }, - }, - "set_rgbww": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "r": "{{r}}", - "g": "{{g}}", - "b": "{{b}}", - "cw": "{{cw}}", - "ww": "{{ww}}", - }, - }, - } - }, + "state": "{{1 == 1}}", + **TEST_ALL_COLORS_NO_TEMPLATE_CONFIG, + }, + ConfigurationStyle.MODERN, + ), ], ) async def test_all_colors_mode_no_template( @@ -1554,29 +2048,21 @@ async def test_all_colors_mode_no_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +@pytest.mark.parametrize( + ("effect_list_template", "effect_template", "effect", "expected"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{true}}", - "set_effect": { - "service": "test.automation", - "data_template": { - "action": "set_effect", - "caller": "{{ this.entity_id }}", - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - 
"effect_list_template": "{{ ['Disco', 'Police'] }}", - "effect_template": "{{ 'Disco' }}", - } - }, + ("{{ ['Disco', 'Police'] }}", "{{ 'Disco' }}", "Disco", "Disco"), + ("{{ ['Disco', 'Police'] }}", "{{ 'None' }}", "RGB", None), ], ) -async def test_effect_action_valid_effect( - hass: HomeAssistant, setup_light, calls: list[ServiceCall] +async def test_effect_action( + hass: HomeAssistant, + effect: str, + expected: Any, + setup_light_with_effects, + calls: list[ServiceCall], ) -> None: """Test setting valid effect with template.""" state = hass.states.get("light.test_template_light") @@ -1585,64 +2071,24 @@ async def test_effect_action_valid_effect( await hass.services.async_call( light.DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_EFFECT: "Disco"}, + {ATTR_ENTITY_ID: "light.test_template_light", ATTR_EFFECT: effect}, blocking=True, ) assert len(calls) == 1 assert calls[-1].data["action"] == "set_effect" assert calls[-1].data["caller"] == "light.test_template_light" - assert calls[-1].data["effect"] == "Disco" + assert calls[-1].data["effect"] == effect state = hass.states.get("light.test_template_light") assert state is not None - assert state.attributes.get("effect") == "Disco" + assert state.attributes.get("effect") == expected -@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize(("count", "effect_template"), [(1, "{{ None }}")]) @pytest.mark.parametrize( - "light_config", - [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "value_template": "{{true}}", - "set_effect": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_list_template": "{{ ['Disco', 'Police'] }}", - "effect_template": "{{ None }}", - } - }, - ], + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] ) -async def test_effect_action_invalid_effect( - hass: HomeAssistant, setup_light, calls: list[ServiceCall] -) -> None: - """Test setting invalid effect with template.""" - state = hass.states.get("light.test_template_light") - assert state is not None - - await hass.services.async_call( - light.DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_template_light", ATTR_EFFECT: "RGB"}, - blocking=True, - ) - - assert len(calls) == 1 - assert calls[0].data["effect"] == "RGB" - - state = hass.states.get("light.test_template_light") - assert state is not None - assert state.attributes.get("effect") is None - - -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( ("expected_effect_list", "effect_list_template"), [ @@ -1663,31 +2109,21 @@ async def test_effect_action_invalid_effect( ], ) async def test_effect_list_template( - hass: HomeAssistant, expected_effect_list, count, effect_list_template + hass: HomeAssistant, expected_effect_list, setup_light_with_effects ) -> None: """Test the template for the effect list.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "set_effect": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_template": "{{ None }}", - "effect_list_template": effect_list_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None assert state.attributes.get("effect_list") == expected_effect_list -@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("count", 
"effect_list_template"), + [(1, "{{ ['Strobe color', 'Police', 'Christmas', 'RGB', 'Random Loop'] }}")], +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) @pytest.mark.parametrize( ("expected_effect", "effect_template"), [ @@ -1699,27 +2135,9 @@ async def test_effect_list_template( ], ) async def test_effect_template( - hass: HomeAssistant, expected_effect, count, effect_template + hass: HomeAssistant, expected_effect, setup_light_with_effects ) -> None: """Test the template for the effect.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "set_effect": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_list_template": ( - "{{ ['Strobe color', 'Police', 'Christmas', 'RGB', 'Random Loop'] }}" - ), - "effect_template": effect_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None assert state.attributes.get("effect") == expected_effect @@ -1727,7 +2145,14 @@ async def test_effect_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_min_mireds", "min_mireds_template"), + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "min_mireds_template"), + (ConfigurationStyle.MODERN, "min_mireds"), + ], +) +@pytest.mark.parametrize( + ("expected_min_mireds", "attribute_template"), [ (118, "{{118}}"), (153, "{{x - 12}}"), @@ -1738,25 +2163,9 @@ async def test_effect_template( ], ) async def test_min_mireds_template( - hass: HomeAssistant, expected_min_mireds, count, min_mireds_template + hass: HomeAssistant, expected_min_mireds, setup_light_with_mireds ) -> None: """Test the template for the min mireds.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "set_temperature": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "temperature_template": "{{200}}", - "min_mireds_template": min_mireds_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None assert state.attributes.get("min_mireds") == expected_min_mireds @@ -1764,7 +2173,14 @@ async def test_min_mireds_template( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_max_mireds", "max_mireds_template"), + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "max_mireds_template"), + (ConfigurationStyle.MODERN, "max_mireds"), + ], +) +@pytest.mark.parametrize( + ("expected_max_mireds", "attribute_template"), [ (488, "{{488}}"), (500, "{{x - 12}}"), @@ -1775,33 +2191,26 @@ async def test_min_mireds_template( ], ) async def test_max_mireds_template( - hass: HomeAssistant, expected_max_mireds, count, max_mireds_template + hass: HomeAssistant, expected_max_mireds, setup_light_with_mireds ) -> None: """Test the template for the max mireds.""" - light_config = { - "test_template_light": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "value_template": "{{ 1 == 1 }}", - "set_temperature": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "temperature_template": "{{200}}", - "max_mireds_template": max_mireds_template, - } - } - await async_setup_light(hass, count, 
light_config) state = hass.states.get("light.test_template_light") assert state is not None assert state.attributes.get("max_mireds") == expected_max_mireds -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - ("expected_supports_transition", "supports_transition_template"), + ("count", "extra_config"), [(1, OPTIMISTIC_COLOR_TEMP_LIGHT_CONFIG)] +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "supports_transition_template"), + (ConfigurationStyle.MODERN, "supports_transition"), + ], +) +@pytest.mark.parametrize( + ("expected_supports_transition", "attribute_template"), [ (True, "{{true}}"), (True, "{{1 == 1}}"), @@ -1812,28 +2221,9 @@ async def test_max_mireds_template( ], ) async def test_supports_transition_template( - hass: HomeAssistant, - expected_supports_transition, - count, - supports_transition_template, + hass: HomeAssistant, expected_supports_transition, setup_single_attribute_light ) -> None: """Test the template for the supports transition.""" - light_config = { - "test_template_light": { - "value_template": "{{ 1 == 1 }}", - "turn_on": {"service": "light.turn_on", "entity_id": "light.test_state"}, - "turn_off": {"service": "light.turn_off", "entity_id": "light.test_state"}, - "set_temperature": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "supports_transition_template": supports_transition_template, - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") expected_value = 1 @@ -1847,36 +2237,16 @@ async def test_supports_transition_template( ) != expected_value -@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("count", "transition_template"), [(1, "{{ states('sensor.test') }}")] +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) async def test_supports_transition_template_updates( - hass: HomeAssistant, count: int + hass: HomeAssistant, setup_light_with_transition_template ) -> None: """Test the template for the supports transition dynamically.""" - light_config = { - "test_template_light": { - "value_template": "{{ 1 == 1 }}", - "turn_on": {"service": "light.turn_on", "entity_id": "light.test_state"}, - "turn_off": {"service": "light.turn_off", "entity_id": "light.test_state"}, - "set_temperature": { - "service": "light.turn_on", - "data_template": { - "entity_id": "light.test_state", - "color_temp": "{{color_temp}}", - }, - }, - "set_effect": { - "service": "test.automation", - "data_template": { - "entity_id": "test.test_state", - "effect": "{{effect}}", - }, - }, - "effect_list_template": "{{ ['Disco', 'Police'] }}", - "effect_template": "{{ None }}", - "supports_transition_template": "{{ states('sensor.test') }}", - } - } - await async_setup_light(hass, count, light_config) state = hass.states.get("light.test_template_light") assert state is not None @@ -1901,22 +2271,25 @@ async def test_supports_transition_template_updates( assert supported_features == LightEntityFeature.EFFECT -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config", "attribute_template"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "availability_template": ( - "{{ is_state('availability_boolean.state', 'on') }}" - ), - } - }, + ( + 1, + OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "{{ is_state('availability_boolean.state', 'on') }}", + ) + ], +) 
+@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "availability_template"), + (ConfigurationStyle.MODERN, "availability"), ], ) async def test_available_template_with_entities( - hass: HomeAssistant, setup_light + hass: HomeAssistant, setup_single_attribute_light ) -> None: """Test availability templates with values from other entities.""" # When template returns true.. @@ -1934,20 +2307,25 @@ async def test_available_template_with_entities( assert hass.states.get("light.test_template_light").state == STATE_UNAVAILABLE -@pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("count", "extra_config", "attribute_template"), [ - { - "test_template_light": { - **OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, - "availability_template": "{{ x - 12 }}", - } - }, + ( + 1, + OPTIMISTIC_BRIGHTNESS_LIGHT_CONFIG, + "{{ x - 12 }}", + ) + ], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "availability_template"), + (ConfigurationStyle.MODERN, "availability"), ], ) async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, setup_light, caplog_setup_text + hass: HomeAssistant, setup_single_attribute_light, caplog_setup_text ) -> None: """Test that an invalid availability keeps the device available.""" assert hass.states.get("light.test_template_light").state != STATE_UNAVAILABLE @@ -1956,20 +2334,73 @@ async def test_invalid_availability_template_keeps_component_available( @pytest.mark.parametrize("count", [1]) @pytest.mark.parametrize( - "light_config", + ("light_config", "style"), [ - { - "test_template_light_01": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "unique_id": "not-so-unique-anymore", + ( + { + "test_template_light_01": TEST_UNIQUE_ID_CONFIG, + "test_template_light_02": TEST_UNIQUE_ID_CONFIG, }, - "test_template_light_02": { - **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, - "unique_id": "not-so-unique-anymore", - }, - }, + ConfigurationStyle.LEGACY, + ), + ( + [ + { + "name": "test_template_light_01", + **TEST_UNIQUE_ID_CONFIG, + }, + { + "name": "test_template_light_02", + **TEST_UNIQUE_ID_CONFIG, + }, + ], + ConfigurationStyle.MODERN, + ), ], ) async def test_unique_id(hass: HomeAssistant, setup_light) -> None: """Test unique_id option only creates one light per id.""" assert len(hass.states.async_all("light")) == 1 + + +async def test_nested_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test unique_id option creates one light per nested id.""" + + with assert_setup_component(1, template.DOMAIN): + assert await async_setup_component( + hass, + template.DOMAIN, + { + "template": { + "unique_id": "x", + "light": [ + { + "name": "test_a", + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "unique_id": "a", + }, + { + "name": "test_b", + **OPTIMISTIC_ON_OFF_LIGHT_CONFIG, + "unique_id": "b", + }, + ], + } + }, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + assert len(hass.states.async_all("light")) == 2 + + entry = entity_registry.async_get("light.test_a") + assert entry + assert entry.unique_id == "x-a" + + entry = entity_registry.async_get("light.test_b") + assert entry + assert entry.unique_id == "x-b" diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index ec96245b4d0..f73a943e752 100644 --- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -330,7 +330,10 @@ async def test_trigger_number(hass: 
HomeAssistant) -> None: "max": "{{ trigger.event.data.max_beers }}", "step": "{{ trigger.event.data.step }}", "unit_of_measurement": "beer", - "set_value": {"event": "test_number_event"}, + "set_value": { + "event": "test_number_event", + "event_data": {"entity_id": "{{ this.entity_id }}"}, + }, "optimistic": True, }, ], @@ -379,6 +382,9 @@ async def test_trigger_number(hass: HomeAssistant) -> None: ) assert len(events) == 1 assert events[0].event_type == "test_number_event" + entity_id = events[0].data.get("entity_id") + assert entity_id is not None + assert entity_id == "number.hello_name" def _verify( diff --git a/tests/components/template/test_select.py b/tests/components/template/test_select.py index 5b4723a3034..59ab45aeb36 100644 --- a/tests/components/template/test_select.py +++ b/tests/components/template/test_select.py @@ -264,6 +264,7 @@ async def test_templates_with_entities( async def test_trigger_select(hass: HomeAssistant) -> None: """Test trigger based template select.""" events = async_capture_events(hass, "test_number_event") + action_events = async_capture_events(hass, "action_event") assert await setup.async_setup_component( hass, "template", @@ -274,13 +275,23 @@ async def test_trigger_select(hass: HomeAssistant) -> None: { "unique_id": "listening-test-event", "trigger": {"platform": "event", "event_type": "test_event"}, + "variables": {"beer": "{{ trigger.event.data.beer }}"}, + "action": [ + {"event": "action_event", "event_data": {"beer": "{{ beer }}"}} + ], "select": [ { "name": "Hello Name", "unique_id": "hello_name-id", "state": "{{ trigger.event.data.beer }}", "options": "{{ trigger.event.data.beers }}", - "select_option": {"event": "test_number_event"}, + "select_option": { + "event": "test_number_event", + "event_data": { + "entity_id": "{{ this.entity_id }}", + "beer": "{{ beer }}", + }, + }, "optimistic": True, }, ], @@ -308,6 +319,12 @@ async def test_trigger_select(hass: HomeAssistant) -> None: assert state.state == "duff" assert state.attributes["options"] == ["duff", "alamo"] + assert len(action_events) == 1 + assert action_events[0].event_type == "action_event" + beer = action_events[0].data.get("beer") + assert beer is not None + assert beer == "duff" + await hass.services.async_call( SELECT_DOMAIN, SELECT_SERVICE_SELECT_OPTION, @@ -316,6 +333,13 @@ async def test_trigger_select(hass: HomeAssistant) -> None: ) assert len(events) == 1 assert events[0].event_type == "test_number_event" + entity_id = events[0].data.get("entity_id") + assert entity_id is not None + assert entity_id == "select.hello_name" + + beer = events[0].data.get("beer") + assert beer is not None + assert beer == "duff" def _verify( diff --git a/tests/components/template/test_switch.py b/tests/components/template/test_switch.py index 2fc0f29acaf..d8877851efe 100644 --- a/tests/components/template/test_switch.py +++ b/tests/components/template/test_switch.py @@ -1,11 +1,14 @@ """The tests for the Template switch platform.""" +from typing import Any + import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant import setup -from homeassistant.components import template +from homeassistant.components import switch, template from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.components.template.switch import rewrite_legacy_to_modern_conf +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -15,36 +18,240 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) 
from homeassistant.core import CoreState, HomeAssistant, ServiceCall, State +from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.template import Template from homeassistant.setup import async_setup_component +from .conftest import ConfigurationStyle + from tests.common import ( MockConfigEntry, assert_setup_component, mock_component, mock_restore_cache, ) +from tests.typing import WebSocketGenerator -OPTIMISTIC_SWITCH_CONFIG = { - "turn_on": { - "service": "test.automation", - "data_template": { - "action": "turn_on", - "caller": "{{ this.entity_id }}", - }, +TEST_OBJECT_ID = "test_template_switch" +TEST_ENTITY_ID = f"switch.{TEST_OBJECT_ID}" +TEST_STATE_ENTITY_ID = "switch.test_state" + +SWITCH_TURN_ON = { + "service": "test.automation", + "data_template": { + "action": "turn_on", + "caller": "{{ this.entity_id }}", }, - "turn_off": { - "service": "test.automation", - "data_template": { - "action": "turn_off", - "caller": "{{ this.entity_id }}", - }, +} +SWITCH_TURN_OFF = { + "service": "test.automation", + "data_template": { + "action": "turn_off", + "caller": "{{ this.entity_id }}", }, } +SWITCH_ACTIONS = { + "turn_on": SWITCH_TURN_ON, + "turn_off": SWITCH_TURN_OFF, +} +NAMED_SWITCH_ACTIONS = { + **SWITCH_ACTIONS, + "name": TEST_OBJECT_ID, +} +UNIQUE_ID_CONFIG = { + **SWITCH_ACTIONS, + "unique_id": "not-so-unique-anymore", +} +async def async_setup_legacy_format( + hass: HomeAssistant, count: int, switch_config: dict[str, Any] +) -> None: + """Do setup of switch integration via legacy format.""" + config = {"switch": {"platform": "template", "switches": switch_config}} + + with assert_setup_component(count, switch.DOMAIN): + assert await async_setup_component( + hass, + switch.DOMAIN, + config, + ) + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + +async def async_setup_modern_format( + hass: HomeAssistant, count: int, switch_config: dict[str, Any] +) -> None: + """Do setup of switch integration via modern format.""" + config = {"template": {"switch": switch_config}} + + with assert_setup_component(count, template.DOMAIN): + assert await async_setup_component( + hass, + template.DOMAIN, + config, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + +@pytest.fixture +async def setup_switch( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + switch_config: dict[str, Any], +) -> None: + """Do setup of switch integration.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format(hass, count, switch_config) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format(hass, count, switch_config) + + +@pytest.fixture +async def setup_state_switch( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + state_template: str, +): + """Do setup of switch integration using a state template.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + TEST_OBJECT_ID: { + **SWITCH_ACTIONS, + "value_template": state_template, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format( + hass, + count, + { + **NAMED_SWITCH_ACTIONS, + "state": state_template, + }, + ) + + +@pytest.fixture +async def setup_single_attribute_switch( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, + attribute: str, + attribute_template: str, +) -> 
None: + """Do setup of switch integration testing a single attribute.""" + extra = {attribute: attribute_template} if attribute and attribute_template else {} + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + TEST_OBJECT_ID: { + **SWITCH_ACTIONS, + "value_template": "{{ 1 == 1 }}", + **extra, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format( + hass, + count, + { + **NAMED_SWITCH_ACTIONS, + "state": "{{ 1 == 1 }}", + **extra, + }, + ) + + +@pytest.fixture +async def setup_optimistic_switch( + hass: HomeAssistant, + count: int, + style: ConfigurationStyle, +) -> None: + """Do setup of an optimistic switch.""" + if style == ConfigurationStyle.LEGACY: + await async_setup_legacy_format( + hass, + count, + { + TEST_OBJECT_ID: { + **SWITCH_ACTIONS, + } + }, + ) + elif style == ConfigurationStyle.MODERN: + await async_setup_modern_format( + hass, + count, + { + **NAMED_SWITCH_ACTIONS, + }, + ) + + +async def test_legacy_to_modern_config(hass: HomeAssistant) -> None: + """Test the conversion of legacy template to modern template.""" + config = { + "foo": { + "friendly_name": "foo bar", + "value_template": "{{ 1 == 1 }}", + "unique_id": "foo-bar-switch", + "icon_template": "{{ 'mdi.abc' }}", + "entity_picture_template": "{{ 'mypicture.jpg' }}", + "availability_template": "{{ 1 == 1 }}", + **SWITCH_ACTIONS, + } + } + altered_configs = rewrite_legacy_to_modern_conf(hass, config) + + assert len(altered_configs) == 1 + assert [ + { + "availability": Template("{{ 1 == 1 }}", hass), + "icon": Template("{{ 'mdi.abc' }}", hass), + "name": Template("foo bar", hass), + "object_id": "foo", + "picture": Template("{{ 'mypicture.jpg' }}", hass), + "turn_off": SWITCH_TURN_OFF, + "turn_on": SWITCH_TURN_ON, + "unique_id": "foo-bar-switch", + "state": Template("{{ 1 == 1 }}", hass), + } + ] == altered_configs + + +@pytest.mark.parametrize(("count", "state_template"), [(1, "{{ True }}")]) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_setup(hass: HomeAssistant, setup_state_switch) -> None: + """Test template.""" + state = hass.states.get(TEST_ENTITY_ID) + assert state is not None + assert state.name == TEST_OBJECT_ID + assert state.state == STATE_ON + + +@pytest.mark.parametrize("state_key", ["value_template", "state"]) async def test_setup_config_entry( hass: HomeAssistant, + state_key: str, snapshot: SnapshotAssertion, ) -> None: """Test the config flow.""" @@ -60,7 +267,7 @@ async def test_setup_config_entry( domain=template.DOMAIN, options={ "name": "My template", - "value_template": "{{ states('switch.one') }}", + state_key: "{{ states('switch.one') }}", "template_type": SWITCH_DOMAIN, }, title="My template", @@ -75,200 +282,148 @@ async def test_setup_config_entry( assert state == snapshot -async def test_template_state_text(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("state_key", ["value_template", "state"]) +async def test_flow_preview( + hass: HomeAssistant, + state_key: str, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the config flow preview.""" + client = await hass_ws_client(hass) + + result = await hass.config_entries.flow.async_init( + template.DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": SWITCH_DOMAIN}, + ) + await hass.async_block_till_done() + assert result["type"] is 
FlowResultType.FORM + assert result["step_id"] == SWITCH_DOMAIN + assert result["errors"] is None + assert result["preview"] == "template" + + await client.send_json_auto_id( + { + "type": "template/start_preview", + "flow_id": result["flow_id"], + "flow_type": "config_flow", + "user_input": {"name": "My template", state_key: "{{ 'on' }}"}, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] is None + + msg = await client.receive_json() + assert msg["event"]["state"] == "on" + + +@pytest.mark.parametrize( + ("count", "state_template"), [(1, "{{ states.switch.test_state.state }}")] +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_template_state_text(hass: HomeAssistant, setup_state_switch) -> None: """Test the state text of a template.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - hass.states.async_set("switch.test_state", STATE_ON) - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_ON - hass.states.async_set("switch.test_state", STATE_OFF) + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_OFF) await hass.async_block_till_done() - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_OFF -async def test_template_state_boolean_on(hass: HomeAssistant) -> None: - """Test the setting of the state with boolean on.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ 1 == 1 }}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") - assert state.state == STATE_ON +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("expected", "state_template"), + [ + (STATE_ON, "{{ 1 == 1 }}"), + (STATE_OFF, "{{ 1 == 2 }}"), + ], +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_template_state_boolean( + hass: HomeAssistant, expected: str, setup_state_switch +) -> None: + """Test the setting of the state with boolean template.""" + state = hass.states.get(TEST_ENTITY_ID) + assert state.state == expected -async def test_template_state_boolean_off(hass: HomeAssistant) -> None: - """Test the setting of the state with off.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ 1 == 2 }}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") - assert state.state == STATE_OFF - - -async def 
test_icon_template(hass: HomeAssistant) -> None: - """Test icon template.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", - "icon_template": ( - "{% if states.switch.test_state.state %}" - "mdi:check" - "{% endif %}" - ), - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") +@pytest.mark.parametrize( + ("count", "attribute_template"), + [(1, "{% if states.switch.test_state.state %}mdi:check{% endif %}")], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "icon_template"), + (ConfigurationStyle.MODERN, "icon"), + ], +) +async def test_icon_template( + hass: HomeAssistant, setup_single_attribute_switch +) -> None: + """Test icon template.""" + state = hass.states.get(TEST_ENTITY_ID) assert state.attributes.get("icon") == "" - hass.states.async_set("switch.test_state", STATE_ON) + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.attributes["icon"] == "mdi:check" -async def test_entity_picture_template(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("count", "attribute_template"), + [(1, "{% if states.switch.test_state.state %}/local/switch.png{% endif %}")], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "entity_picture_template"), + (ConfigurationStyle.MODERN, "picture"), + ], +) +async def test_entity_picture_template( + hass: HomeAssistant, setup_single_attribute_switch +) -> None: """Test entity_picture template.""" - with assert_setup_component(1, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", - "entity_picture_template": ( - "{% if states.switch.test_state.state %}" - "/local/switch.png" - "{% endif %}" - ), - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.attributes.get("entity_picture") == "" - hass.states.async_set("switch.test_state", STATE_ON) + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.attributes["entity_picture"] == "/local/switch.png" -async def test_template_syntax_error(hass: HomeAssistant) -> None: +@pytest.mark.parametrize(("count", "state_template"), [(0, "{% if rubbish %}")]) +@pytest.mark.parametrize( + "style", + [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN], +) +async def test_template_syntax_error(hass: HomeAssistant, setup_state_switch) -> None: """Test templating syntax error.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - 
"value_template": "{% if rubbish %}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - assert hass.states.async_all("switch") == [] -async def test_invalid_name_does_not_create(hass: HomeAssistant) -> None: - """Test invalid name.""" +async def test_invalid_legacy_slug_does_not_create(hass: HomeAssistant) -> None: + """Test invalid legacy slug.""" with assert_setup_component(0, "switch"): assert await async_setup_component( hass, @@ -278,7 +433,7 @@ async def test_invalid_name_does_not_create(hass: HomeAssistant) -> None: "platform": "template", "switches": { "test INVALID switch": { - **OPTIMISTIC_SWITCH_CONFIG, + **SWITCH_ACTIONS, "value_template": "{{ rubbish }", } }, @@ -293,19 +448,32 @@ async def test_invalid_name_does_not_create(hass: HomeAssistant) -> None: assert hass.states.async_all("switch") == [] -async def test_invalid_switch_does_not_create(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( + { + "template": {"switch": "Invalid"}, + }, + template.DOMAIN, + ), + ( + { + "switch": { + "platform": "template", + "switches": {TEST_OBJECT_ID: "Invalid"}, + } + }, + switch.DOMAIN, + ), + ], +) +async def test_invalid_switch_does_not_create( + hass: HomeAssistant, config: dict, domain: str +) -> None: """Test invalid switch.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": {"test_template_switch": "Invalid"}, - } - }, - ) + with assert_setup_component(0, domain): + assert await async_setup_component(hass, domain, config) await hass.async_block_till_done() await hass.async_start() @@ -314,12 +482,33 @@ async def test_invalid_switch_does_not_create(hass: HomeAssistant) -> None: assert hass.states.async_all("switch") == [] -async def test_no_switches_does_not_create(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("config", "domain", "count"), + [ + ( + { + "template": {"switch": []}, + }, + template.DOMAIN, + 1, + ), + ( + { + "switch": { + "platform": "template", + } + }, + switch.DOMAIN, + 0, + ), + ], +) +async def test_no_switches_does_not_create( + hass: HomeAssistant, config: dict, domain: str, count: int +) -> None: """Test if there are no switches no creation.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, "switch", {"switch": {"platform": "template"}} - ) + with assert_setup_component(count, domain): + assert await async_setup_component(hass, domain, config) await hass.async_block_till_done() await hass.async_start() @@ -328,239 +517,254 @@ async def test_no_switches_does_not_create(hass: HomeAssistant) -> None: assert hass.states.async_all("switch") == [] -async def test_missing_on_does_not_create(hass: HomeAssistant) -> None: - """Test missing on.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, - "switch", +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - "value_template": "{{ states.switch.test_state.state }}", - "not_on": { - "service": "switch.turn_on", - "entity_id": "switch.test_state", - }, - "turn_off": { - "service": "switch.turn_off", - "entity_id": "switch.test_state", - }, - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - assert hass.states.async_all("switch") == 
[] - - -async def test_missing_off_does_not_create(hass: HomeAssistant) -> None: - """Test missing off.""" - with assert_setup_component(0, "switch"): - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - "value_template": "{{ states.switch.test_state.state }}", - "turn_on": { - "service": "switch.turn_on", - "entity_id": "switch.test_state", - }, - "not_off": { - "service": "switch.turn_off", - "entity_id": "switch.test_state", - }, - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() - await hass.async_block_till_done() - - assert hass.states.async_all("switch") == [] - - -async def test_on_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: - """Test on action.""" - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", + "template": { + "switch": { + "not_on": SWITCH_TURN_ON, + "turn_off": SWITCH_TURN_OFF, + "state": "{{ states.switch.test_state.state }}", } }, - } - }, - ) + }, + template.DOMAIN, + ), + ( + { + "switch": { + "platform": "template", + "switches": { + TEST_OBJECT_ID: { + "not_on": SWITCH_TURN_ON, + "turn_off": SWITCH_TURN_OFF, + "value_template": "{{ states.switch.test_state.state }}", + } + }, + } + }, + switch.DOMAIN, + ), + ], +) +async def test_missing_on_does_not_create( + hass: HomeAssistant, config: dict, domain: str +) -> None: + """Test missing on.""" + with assert_setup_component(0, domain): + assert await async_setup_component(hass, domain, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() - hass.states.async_set("switch.test_state", STATE_OFF) + assert hass.states.async_all("switch") == [] + + +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( + { + "template": { + "switch": { + "turn_on": SWITCH_TURN_ON, + "not_off": SWITCH_TURN_OFF, + "state": "{{ states.switch.test_state.state }}", + } + }, + }, + template.DOMAIN, + ), + ( + { + "switch": { + "platform": "template", + "switches": { + TEST_OBJECT_ID: { + "turn_on": SWITCH_TURN_ON, + "not_off": SWITCH_TURN_OFF, + "value_template": "{{ states.switch.test_state.state }}", + } + }, + } + }, + switch.DOMAIN, + ), + ], +) +async def test_missing_off_does_not_create( + hass: HomeAssistant, config: dict, domain: str +) -> None: + """Test missing off.""" + with assert_setup_component(0, domain): + assert await async_setup_component(hass, domain, config) + + await hass.async_block_till_done() + await hass.async_start() await hass.async_block_till_done() - state = hass.states.get("switch.test_template_switch") + assert hass.states.async_all("switch") == [] + + +@pytest.mark.parametrize( + ("count", "state_template"), [(1, "{{ states('switch.test_state') }}")] +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_on_action( + hass: HomeAssistant, setup_state_switch, calls: list[ServiceCall] +) -> None: + """Test on action.""" + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_OFF) + await hass.async_block_till_done() + + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_OFF await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_template_switch"}, + {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) assert len(calls) == 1 
assert calls[-1].data["action"] == "turn_on" - assert calls[-1].data["caller"] == "switch.test_template_switch" + assert calls[-1].data["caller"] == TEST_ENTITY_ID +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) async def test_on_action_optimistic( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, setup_optimistic_switch, calls: list[ServiceCall] ) -> None: """Test on action in optimistic mode.""" - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - } - }, - } - }, - ) - - await hass.async_start() + hass.states.async_set(TEST_ENTITY_ID, STATE_OFF) await hass.async_block_till_done() - hass.states.async_set("switch.test_template_switch", STATE_OFF) - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_OFF await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_template_switch"}, + {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_ON assert len(calls) == 1 assert calls[-1].data["action"] == "turn_on" - assert calls[-1].data["caller"] == "switch.test_template_switch" + assert calls[-1].data["caller"] == TEST_ENTITY_ID -async def test_off_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +@pytest.mark.parametrize( + ("count", "state_template"), [(1, "{{ states.switch.test_state.state }}")] +) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) +async def test_off_action( + hass: HomeAssistant, setup_state_switch, calls: list[ServiceCall] +) -> None: """Test off action.""" - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ states.switch.test_state.state }}", - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - hass.states.async_set("switch.test_state", STATE_ON) - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_template_switch"}, + {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) assert len(calls) == 1 assert calls[-1].data["action"] == "turn_off" - assert calls[-1].data["caller"] == "switch.test_template_switch" + assert calls[-1].data["caller"] == TEST_ENTITY_ID +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + "style", [ConfigurationStyle.LEGACY, ConfigurationStyle.MODERN] +) async def test_off_action_optimistic( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, setup_optimistic_switch, calls: list[ServiceCall] ) -> None: """Test off action in optimistic mode.""" - assert await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - } - }, - } - }, - ) - - await hass.async_start() + 
hass.states.async_set(TEST_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - hass.states.async_set("switch.test_template_switch", STATE_ON) - await hass.async_block_till_done() - - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_template_switch"}, + {ATTR_ENTITY_ID: TEST_ENTITY_ID}, blocking=True, ) - state = hass.states.get("switch.test_template_switch") + state = hass.states.get(TEST_ENTITY_ID) assert state.state == STATE_OFF assert len(calls) == 1 assert calls[-1].data["action"] == "turn_off" - assert calls[-1].data["caller"] == "switch.test_template_switch" + assert calls[-1].data["caller"] == TEST_ENTITY_ID -async def test_restore_state(hass: HomeAssistant) -> None: +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( + { + "switch": { + "platform": "template", + "switches": { + "s1": { + **SWITCH_ACTIONS, + }, + "s2": { + **SWITCH_ACTIONS, + }, + }, + } + }, + switch.DOMAIN, + ), + ( + { + "template": { + "switch": [ + { + "name": "s1", + **SWITCH_ACTIONS, + }, + { + "name": "s2", + **SWITCH_ACTIONS, + }, + ], + } + }, + template.DOMAIN, + ), + ], +) +async def test_restore_state( + hass: HomeAssistant, count: int, domain: str, config: dict[str, Any] +) -> None: """Test state restoration.""" mock_restore_cache( hass, @@ -573,23 +777,9 @@ async def test_restore_state(hass: HomeAssistant) -> None: hass.set_state(CoreState.starting) mock_component(hass, "recorder") - await async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "s1": { - **OPTIMISTIC_SWITCH_CONFIG, - }, - "s2": { - **OPTIMISTIC_SWITCH_CONFIG, - }, - }, - } - }, - ) + with assert_setup_component(count, domain): + await async_setup_component(hass, domain, config) + await hass.async_block_till_done() state = hass.states.get("switch.s1") @@ -601,100 +791,157 @@ async def test_restore_state(hass: HomeAssistant) -> None: assert state.state == STATE_OFF -async def test_available_template_with_entities(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("count", "attribute_template"), + [(1, "{{ is_state('switch.test_state', 'on') }}")], +) +@pytest.mark.parametrize( + ("style", "attribute"), + [ + (ConfigurationStyle.LEGACY, "availability_template"), + (ConfigurationStyle.MODERN, "availability"), + ], +) +async def test_available_template_with_entities( + hass: HomeAssistant, setup_single_attribute_switch +) -> None: """Test availability templates with values from other entities.""" - await setup.async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ 1 == 1 }}", - "availability_template": ( - "{{ is_state('availability_state.state', 'on') }}" - ), - } - }, - } - }, - ) - - await hass.async_block_till_done() - await hass.async_start() + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_ON) await hass.async_block_till_done() - hass.states.async_set("availability_state.state", STATE_ON) + assert hass.states.get(TEST_ENTITY_ID).state != STATE_UNAVAILABLE + + hass.states.async_set(TEST_STATE_ENTITY_ID, STATE_OFF) await hass.async_block_till_done() - assert hass.states.get("switch.test_template_switch").state != STATE_UNAVAILABLE - - hass.states.async_set("availability_state.state", STATE_OFF) - await 
hass.async_block_till_done() - - assert hass.states.get("switch.test_template_switch").state == STATE_UNAVAILABLE + assert hass.states.get(TEST_ENTITY_ID).state == STATE_UNAVAILABLE +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("config", "domain"), + [ + ( + { + "switch": { + "platform": "template", + "switches": { + TEST_OBJECT_ID: { + **SWITCH_ACTIONS, + "value_template": "{{ true }}", + "availability_template": "{{ x - 12 }}", + } + }, + } + }, + switch.DOMAIN, + ), + ( + { + "template": { + "switch": { + **NAMED_SWITCH_ACTIONS, + "state": "{{ true }}", + "availability": "{{ x - 12 }}", + }, + } + }, + template.DOMAIN, + ), + ], +) async def test_invalid_availability_template_keeps_component_available( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + count: int, + config: dict[str, Any], + domain: str, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that an invalid availability keeps the device available.""" - await setup.async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch": { - **OPTIMISTIC_SWITCH_CONFIG, - "value_template": "{{ true }}", - "availability_template": "{{ x - 12 }}", - } - }, - } - }, - ) + with assert_setup_component(count, domain): + await async_setup_component(hass, domain, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() - assert hass.states.get("switch.test_template_switch").state != STATE_UNAVAILABLE + assert hass.states.get(TEST_ENTITY_ID).state != STATE_UNAVAILABLE assert "UndefinedError: 'x' is undefined" in caplog.text -async def test_unique_id(hass: HomeAssistant) -> None: - """Test unique_id option only creates one switch per id.""" - await setup.async_setup_component( - hass, - "switch", - { - "switch": { - "platform": "template", - "switches": { - "test_template_switch_01": { - **OPTIMISTIC_SWITCH_CONFIG, - "unique_id": "not-so-unique-anymore", - "value_template": "{{ true }}", - }, - "test_template_switch_02": { - **OPTIMISTIC_SWITCH_CONFIG, - "unique_id": "not-so-unique-anymore", - "value_template": "{{ false }}", - }, +@pytest.mark.parametrize("count", [1]) +@pytest.mark.parametrize( + ("switch_config", "style"), + [ + ( + { + "test_template_switch_01": UNIQUE_ID_CONFIG, + "test_template_switch_02": UNIQUE_ID_CONFIG, + }, + ConfigurationStyle.LEGACY, + ), + ( + [ + { + "name": "test_template_switch_01", + **UNIQUE_ID_CONFIG, }, - } - }, - ) + { + "name": "test_template_switch_02", + **UNIQUE_ID_CONFIG, + }, + ], + ConfigurationStyle.MODERN, + ), + ], +) +async def test_unique_id(hass: HomeAssistant, setup_switch) -> None: + """Test unique_id option only creates one switch per id.""" + assert len(hass.states.async_all("switch")) == 1 + + +async def test_nested_unique_id( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test a template unique_id propagates to switch unique_ids.""" + with assert_setup_component(1, template.DOMAIN): + assert await async_setup_component( + hass, + template.DOMAIN, + { + "template": { + "unique_id": "x", + "switch": [ + { + **SWITCH_ACTIONS, + "name": "test_a", + "unique_id": "a", + "state": "{{ true }}", + }, + { + **SWITCH_ACTIONS, + "name": "test_b", + "unique_id": "b", + "state": "{{ true }}", + }, + ], + }, + }, + ) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() - assert len(hass.states.async_all("switch")) == 1 + assert len(hass.states.async_all("switch")) == 2 
+ + entry = entity_registry.async_get("switch.test_a") + assert entry + assert entry.unique_id == "x-a" + + entry = entity_registry.async_get("switch.test_b") + assert entry + assert entry.unique_id == "x-b" async def test_device_id( @@ -720,7 +967,7 @@ async def test_device_id( domain=template.DOMAIN, options={ "name": "My template", - "value_template": "{{ true }}", + "state": "{{ true }}", "template_type": "switch", "device_id": device_entry.id, }, diff --git a/tests/components/template/test_template_entity.py b/tests/components/template/test_template_entity.py index c09a09750fe..d66fc2710c9 100644 --- a/tests/components/template/test_template_entity.py +++ b/tests/components/template/test_template_entity.py @@ -9,7 +9,7 @@ from homeassistant.helpers import template async def test_template_entity_requires_hass_set(hass: HomeAssistant) -> None: """Test template entity requires hass to be set before accepting templates.""" - entity = template_entity.TemplateEntity(hass) + entity = template_entity.TemplateEntity(None) with pytest.raises(ValueError, match="^hass cannot be None"): entity.add_template_attribute("_hello", template.Template("Hello")) diff --git a/tests/components/template/test_trigger_entity.py b/tests/components/template/test_trigger_entity.py new file mode 100644 index 00000000000..99aa2d65df9 --- /dev/null +++ b/tests/components/template/test_trigger_entity.py @@ -0,0 +1,13 @@ +"""Test trigger template entity.""" + +from homeassistant.components.template import trigger_entity +from homeassistant.components.template.coordinator import TriggerUpdateCoordinator +from homeassistant.core import HomeAssistant + + +async def test_reference_blueprints_is_none(hass: HomeAssistant) -> None: + """Test that a trigger entity has no referenced blueprint.""" + coordinator = TriggerUpdateCoordinator(hass, {}) + entity = trigger_entity.TriggerEntity(hass, coordinator, {}) + + assert entity.referenced_blueprint is None diff --git a/tests/components/template/test_weather.py b/tests/components/template/test_weather.py index 081028b6f5b..5db6a000ccc 100644 --- a/tests/components/template/test_weather.py +++ b/tests/components/template/test_weather.py @@ -928,3 +928,65 @@ async def test_trigger_entity_restore_state_fail( state = hass.states.get("weather.test") assert state.state == STATE_UNKNOWN assert state.attributes.get("temperature") is None + + +async def test_new_style_template_state_text(hass: HomeAssistant) -> None: + """Test the state text of a new-style weather template.""" + assert await async_setup_component( + hass, + "weather", + { + "weather": [ + {"weather": {"platform": "demo"}}, + ] + }, + ) + assert await async_setup_component( + hass, + "template", + { + "template": { + "weather": { + "name": "test", + "attribution_template": "{{ states('sensor.attribution') }}", + "condition_template": "sunny", + "temperature_template": "{{ states('sensor.temperature') | float }}", + "humidity_template": "{{ states('sensor.humidity') | int }}", + "pressure_template": "{{ states('sensor.pressure') }}", + "wind_speed_template": "{{ states('sensor.windspeed') }}", + "wind_bearing_template": "{{ states('sensor.windbearing') }}", + "ozone_template": "{{ states('sensor.ozone') }}", + "visibility_template": "{{ states('sensor.visibility') }}", + "wind_gust_speed_template": "{{ states('sensor.wind_gust_speed') }}", + "cloud_coverage_template": "{{ states('sensor.cloud_coverage') }}", + "dew_point_template": "{{ states('sensor.dew_point') }}", + "apparent_temperature_template": "{{ 
states('sensor.apparent_temperature') }}", + }, + }, + }, + ) + + for attr, v_attr, value in ( + ( + "sensor.attribution", + ATTR_ATTRIBUTION, + "The custom attribution", + ), + ("sensor.temperature", ATTR_WEATHER_TEMPERATURE, 22.3), + ("sensor.humidity", ATTR_WEATHER_HUMIDITY, 60), + ("sensor.pressure", ATTR_WEATHER_PRESSURE, 1000), + ("sensor.windspeed", ATTR_WEATHER_WIND_SPEED, 20), + ("sensor.windbearing", ATTR_WEATHER_WIND_BEARING, 180), + ("sensor.ozone", ATTR_WEATHER_OZONE, 25), + ("sensor.visibility", ATTR_WEATHER_VISIBILITY, 4.6), + ("sensor.wind_gust_speed", ATTR_WEATHER_WIND_GUST_SPEED, 30), + ("sensor.cloud_coverage", ATTR_WEATHER_CLOUD_COVERAGE, 75), + ("sensor.dew_point", ATTR_WEATHER_DEW_POINT, 2.2), + ("sensor.apparent_temperature", ATTR_WEATHER_APPARENT_TEMPERATURE, 25), + ): + hass.states.async_set(attr, value) + await hass.async_block_till_done() + state = hass.states.get("weather.test") + assert state is not None + assert state.state == "sunny" + assert state.attributes.get(v_attr) == value diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 06d2b54c936..10b01caca96 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -1,4 +1,4 @@ -"""Fixtures for Tessie.""" +"""Fixtures for Tesla Fleet.""" from __future__ import annotations @@ -113,7 +113,7 @@ def mock_products() -> Generator[AsyncMock]: def mock_vehicle_state() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle", + "tesla_fleet_api.tesla.VehicleFleet.vehicle", return_value=VEHICLE_ONLINE, ) as mock_vehicle: yield mock_vehicle @@ -123,7 +123,7 @@ def mock_vehicle_state() -> Generator[AsyncMock]: def mock_vehicle_data() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle_data", + "tesla_fleet_api.tesla.VehicleFleet.vehicle_data", return_value=VEHICLE_DATA, ) as mock_vehicle_data: yield mock_vehicle_data @@ -133,7 +133,7 @@ def mock_vehicle_data() -> Generator[AsyncMock]: def mock_wake_up() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific wake_up method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.wake_up", + "tesla_fleet_api.tesla.VehicleFleet.wake_up", return_value=VEHICLE_ONLINE, ) as mock_wake_up: yield mock_wake_up @@ -143,7 +143,7 @@ def mock_wake_up() -> Generator[AsyncMock]: def mock_live_status() -> Generator[AsyncMock]: """Mock Tesla Fleet API Energy Specific live_status method.""" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.live_status", + "tesla_fleet_api.tesla.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -153,7 +153,7 @@ def mock_live_status() -> Generator[AsyncMock]: def mock_site_info() -> Generator[AsyncMock]: """Mock Tesla Fleet API Energy Specific site_info method.""" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.site_info", + "tesla_fleet_api.tesla.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status @@ -182,7 +182,7 @@ def mock_request(): def mock_energy_history(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.energy_history", + "tesla_fleet_api.tesla.EnergySite.energy_history", 
return_value=ENERGY_HISTORY, ) as mock_live_status: yield mock_live_status @@ -192,7 +192,7 @@ def mock_energy_history(): def mock_signed_command() -> Generator[AsyncMock]: """Mock Tesla Fleet Api signed_command method.""" with patch( - "homeassistant.components.tesla_fleet.VehicleSigned.signed_command", + "tesla_fleet_api.tesla.VehicleSigned.signed_command", return_value=COMMAND_OK, ) as mock_signed_command: yield mock_signed_command diff --git a/tests/components/tesla_fleet/test_button.py b/tests/components/tesla_fleet/test_button.py index ef1cfd90357..d43f7448379 100644 --- a/tests/components/tesla_fleet/test_button.py +++ b/tests/components/tesla_fleet/test_button.py @@ -56,7 +56,7 @@ async def test_press( await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) with patch( - f"homeassistant.components.tesla_fleet.VehicleSpecific.{func}", + f"tesla_fleet_api.tesla.VehicleFleet.{func}", return_value=COMMAND_OK, ) as command: await hass.services.async_call( @@ -85,7 +85,7 @@ async def test_press_signing_error( with ( patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), patch( - "homeassistant.components.tesla_fleet.VehicleSigned.flash_lights", + "tesla_fleet_api.tesla.VehicleSigned.flash_lights", side_effect=NotOnWhitelistFault, ), pytest.raises(HomeAssistantError) as error, diff --git a/tests/components/tesla_fleet/test_climate.py b/tests/components/tesla_fleet/test_climate.py index b45e5259a5c..fae79c795c2 100644 --- a/tests/components/tesla_fleet/test_climate.py +++ b/tests/components/tesla_fleet/test_climate.py @@ -257,7 +257,7 @@ async def test_invalid_error( with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", side_effect=InvalidCommand, ) as mock_on, pytest.raises( @@ -285,7 +285,7 @@ async def test_errors( with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=response, ) as mock_on, pytest.raises(HomeAssistantError), @@ -308,7 +308,7 @@ async def test_ignored_error( await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) entity_id = "climate.test_climate" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_IGNORED_REASON, ) as mock_on: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_cover.py b/tests/components/tesla_fleet/test_cover.py index ac5307b2fdd..15d14f34a87 100644 --- a/tests/components/tesla_fleet/test_cover.py +++ b/tests/components/tesla_fleet/test_cover.py @@ -89,7 +89,7 @@ async def test_cover_services( # Vent Windows entity_id = "cover.test_windows" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.window_control", + "tesla_fleet_api.tesla.VehicleFleet.window_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -118,7 +118,7 @@ async def test_cover_services( # Charge Port Door entity_id = "cover.test_charge_port_door" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -133,7 +133,7 @@ async def test_cover_services( assert state.state == CoverState.OPEN with patch( - 
"homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_close", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -150,7 +150,7 @@ async def test_cover_services( # Frunk entity_id = "cover.test_frunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.tesla.VehicleFleet.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -167,7 +167,7 @@ async def test_cover_services( # Trunk entity_id = "cover.test_trunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.tesla.VehicleFleet.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -196,7 +196,7 @@ async def test_cover_services( # Sunroof entity_id = "cover.test_sunroof" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", + "tesla_fleet_api.tesla.VehicleFleet.sun_roof_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_lock.py b/tests/components/tesla_fleet/test_lock.py index 00b77aefcaf..ac9a7b49b55 100644 --- a/tests/components/tesla_fleet/test_lock.py +++ b/tests/components/tesla_fleet/test_lock.py @@ -59,7 +59,7 @@ async def test_lock_services( entity_id = "lock.test_lock" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.door_lock", + "tesla_fleet_api.tesla.VehicleFleet.door_lock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -73,7 +73,7 @@ async def test_lock_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.door_unlock", + "tesla_fleet_api.tesla.VehicleFleet.door_unlock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -97,7 +97,7 @@ async def test_lock_services( ) with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.tesla.VehicleFleet.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_media_player.py b/tests/components/tesla_fleet/test_media_player.py index 4c833e7499f..b2900d96c80 100644 --- a/tests/components/tesla_fleet/test_media_player.py +++ b/tests/components/tesla_fleet/test_media_player.py @@ -88,7 +88,7 @@ async def test_media_player_services( entity_id = "media_player.test_media_player" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.adjust_volume", + "tesla_fleet_api.tesla.VehicleFleet.adjust_volume", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -102,7 +102,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.tesla.VehicleFleet.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -117,7 +117,7 @@ async def test_media_player_services( # This test will fail without the previous call to pause playback with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.tesla.VehicleFleet.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -131,7 +131,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_next_track", 
+ "tesla_fleet_api.tesla.VehicleFleet.media_next_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -144,7 +144,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.media_prev_track", + "tesla_fleet_api.tesla.VehicleFleet.media_prev_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_number.py b/tests/components/tesla_fleet/test_number.py index 8551a99ee29..4ade98852c8 100644 --- a/tests/components/tesla_fleet/test_number.py +++ b/tests/components/tesla_fleet/test_number.py @@ -57,7 +57,7 @@ async def test_number_services( entity_id = "number.test_charge_current" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.set_charging_amps", + "tesla_fleet_api.tesla.VehicleFleet.set_charging_amps", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -72,7 +72,7 @@ async def test_number_services( entity_id = "number.test_charge_limit" with patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.set_charge_limit", + "tesla_fleet_api.tesla.VehicleFleet.set_charge_limit", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -87,7 +87,7 @@ async def test_number_services( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.backup", + "tesla_fleet_api.tesla.EnergySite.backup", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -105,7 +105,7 @@ async def test_number_services( entity_id = "number.energy_site_off_grid_reserve" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.tesla.EnergySite.off_grid_vehicle_charging_reserve", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_select.py b/tests/components/tesla_fleet/test_select.py index 902b28ddb7a..f06d67041c9 100644 --- a/tests/components/tesla_fleet/test_select.py +++ b/tests/components/tesla_fleet/test_select.py @@ -61,11 +61,11 @@ async def test_select_services( entity_id = "select.test_seat_heater_front_left" with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.remote_seat_heater_request", + "tesla_fleet_api.tesla.VehicleFleet.remote_seat_heater_request", return_value=COMMAND_OK, ) as remote_seat_heater_request, patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_OK, ) as auto_conditioning_start, ): @@ -83,11 +83,11 @@ async def test_select_services( entity_id = "select.test_steering_wheel_heater" with ( patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.remote_steering_wheel_heat_level_request", + "tesla_fleet_api.tesla.VehicleFleet.remote_steering_wheel_heat_level_request", return_value=COMMAND_OK, ) as remote_steering_wheel_heat_level_request, patch( - "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.tesla.VehicleFleet.auto_conditioning_start", return_value=COMMAND_OK, ) as auto_conditioning_start, ): @@ -104,7 +104,7 @@ async def test_select_services( entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.operation", + "tesla_fleet_api.tesla.EnergySite.operation", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ 
-122,7 +122,7 @@ async def test_select_services( entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.tesla_fleet.EnergySpecific.grid_import_export", + "tesla_fleet_api.tesla.EnergySite.grid_import_export", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/tesla_fleet/test_switch.py b/tests/components/tesla_fleet/test_switch.py index fba4fc05cc4..022c3a0ab18 100644 --- a/tests/components/tesla_fleet/test_switch.py +++ b/tests/components/tesla_fleet/test_switch.py @@ -71,41 +71,41 @@ async def test_switch_offline( @pytest.mark.parametrize( ("name", "on", "off"), [ - ("test_charge", "VehicleSpecific.charge_start", "VehicleSpecific.charge_stop"), + ("test_charge", "VehicleFleet.charge_start", "VehicleFleet.charge_stop"), ( "test_auto_seat_climate_left", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", ), ( "test_auto_seat_climate_right", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", + "VehicleFleet.remote_auto_seat_climate_request", ), ( "test_auto_steering_wheel_heater", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", + "VehicleFleet.remote_auto_steering_wheel_heat_climate_request", + "VehicleFleet.remote_auto_steering_wheel_heat_climate_request", ), ( "test_defrost", - "VehicleSpecific.set_preconditioning_max", - "VehicleSpecific.set_preconditioning_max", + "VehicleFleet.set_preconditioning_max", + "VehicleFleet.set_preconditioning_max", ), ( "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "EnergySite.storm_mode", + "EnergySite.storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "EnergySite.grid_import_export", + "EnergySite.grid_import_export", ), ( "test_sentry_mode", - "VehicleSpecific.set_sentry_mode", - "VehicleSpecific.set_sentry_mode", + "VehicleFleet.set_sentry_mode", + "VehicleFleet.set_sentry_mode", ), ], ) @@ -122,7 +122,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.tesla_fleet.{on}", + f"tesla_fleet_api.tesla.{on}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -136,7 +136,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.tesla_fleet.{off}", + f"tesla_fleet_api.tesla.{off}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index e89bab9eff1..0152543e512 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -25,7 +25,7 @@ from .const import ( def mock_metadata(): """Mock Tesla Fleet Api metadata method.""" with patch( - "homeassistant.components.teslemetry.Teslemetry.metadata", return_value=METADATA + "tesla_fleet_api.teslemetry.Teslemetry.metadata", return_value=METADATA ) as mock_products: yield mock_products @@ -34,7 +34,7 @@ def mock_metadata(): def mock_products(): """Mock Tesla Fleet Api products method.""" with patch( - "homeassistant.components.teslemetry.Teslemetry.products", return_value=PRODUCTS + 
"tesla_fleet_api.teslemetry.Teslemetry.products", return_value=PRODUCTS ) as mock_products: yield mock_products @@ -43,7 +43,7 @@ def mock_products(): def mock_vehicle_data() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.vehicle_data", + "tesla_fleet_api.teslemetry.Vehicle.vehicle_data", return_value=VEHICLE_DATA, ) as mock_vehicle_data: yield mock_vehicle_data @@ -53,7 +53,7 @@ def mock_vehicle_data() -> Generator[AsyncMock]: def mock_legacy(): """Mock Tesla Fleet Api products method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.pre2021", return_value=True + "tesla_fleet_api.teslemetry.Vehicle.pre2021", return_value=True ) as mock_pre2021: yield mock_pre2021 @@ -62,7 +62,7 @@ def mock_legacy(): def mock_wake_up(): """Mock Tesla Fleet API Vehicle Specific wake_up method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.wake_up", + "tesla_fleet_api.teslemetry.Vehicle.wake_up", return_value=WAKE_UP_ONLINE, ) as mock_wake_up: yield mock_wake_up @@ -72,7 +72,7 @@ def mock_wake_up(): def mock_vehicle() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle method.""" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.vehicle", + "tesla_fleet_api.teslemetry.Vehicle.vehicle", return_value=WAKE_UP_ONLINE, ) as mock_vehicle: yield mock_vehicle @@ -82,7 +82,7 @@ def mock_vehicle() -> Generator[AsyncMock]: def mock_request(): """Mock Tesla Fleet API Vehicle Specific class.""" with patch( - "homeassistant.components.teslemetry.Teslemetry._request", + "tesla_fleet_api.teslemetry.Teslemetry._request", return_value=COMMAND_OK, ) as mock_request: yield mock_request @@ -92,7 +92,7 @@ def mock_request(): def mock_live_status(): """Mock Teslemetry Energy Specific live_status method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.live_status", + "tesla_fleet_api.tesla.energysite.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -102,7 +102,7 @@ def mock_live_status(): def mock_site_info(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.site_info", + "tesla_fleet_api.tesla.energysite.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status @@ -112,7 +112,7 @@ def mock_site_info(): def mock_energy_history(): """Mock Teslemetry Energy Specific site_info method.""" with patch( - "homeassistant.components.teslemetry.EnergySpecific.energy_history", + "tesla_fleet_api.tesla.energysite.EnergySite.energy_history", return_value=ENERGY_HISTORY, ) as mock_live_status: yield mock_live_status @@ -122,7 +122,7 @@ def mock_energy_history(): def mock_add_listener(): """Mock Teslemetry Stream listen method.""" with patch( - "homeassistant.components.teslemetry.TeslemetryStream.async_add_listener", + "teslemetry_stream.TeslemetryStream.async_add_listener", ) as mock_add_listener: mock_add_listener.listeners = [] @@ -165,7 +165,7 @@ def mock_stream_update_config(): def mock_stream_connected(): """Mock Teslemetry Stream listen method.""" with patch( - "homeassistant.components.teslemetry.TeslemetryStream.connected", + "teslemetry_stream.TeslemetryStream.connected", return_value=True, ) as mock_stream_connected: yield mock_stream_connected diff --git a/tests/components/teslemetry/const.py b/tests/components/teslemetry/const.py 
index 40d55dab71f..31915630951 100644 --- a/tests/components/teslemetry/const.py +++ b/tests/components/teslemetry/const.py @@ -18,7 +18,6 @@ VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN) LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN) SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN) ENERGY_HISTORY = load_json_object_fixture("energy_history.json", DOMAIN) -METADATA = load_json_object_fixture("metadata.json", DOMAIN) COMMAND_OK = {"response": {"result": True, "reason": ""}} COMMAND_REASON = {"response": {"result": False, "reason": "already closed"}} @@ -52,7 +51,7 @@ METADATA = { "proxy": False, "access": True, "polling": True, - "firmware": "2024.44.25", + "firmware": "2026.0.0", } }, } diff --git a/tests/components/teslemetry/fixtures/metadata.json b/tests/components/teslemetry/fixtures/metadata.json deleted file mode 100644 index 60282afc934..00000000000 --- a/tests/components/teslemetry/fixtures/metadata.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "uid": "abc-123", - "region": "NA", - "scopes": [ - "openid", - "offline_access", - "user_data", - "vehicle_device_data", - "vehicle_cmds", - "vehicle_charging_cmds", - "energy_device_data", - "energy_cmds" - ], - "vehicles": { - "LRW3F7EK4NC700000": { - "access": true, - "polling": true, - "proxy": true, - "firmware": "2024.44.25" - } - } -} diff --git a/tests/components/teslemetry/fixtures/vehicle_data.json b/tests/components/teslemetry/fixtures/vehicle_data.json index 0cd238c4e52..051c7199d00 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data.json +++ b/tests/components/teslemetry/fixtures/vehicle_data.json @@ -192,7 +192,7 @@ "api_version": 71, "autopark_state_v2": "unavailable", "calendar_supported": true, - "car_version": "2024.44.25 06f534d46010", + "car_version": "2026.0.0 06f534d46010", "center_display_state": 0, "dashcam_clip_save_available": true, "dashcam_state": "Recording", diff --git a/tests/components/teslemetry/snapshots/test_binary_sensor.ambr b/tests/components/teslemetry/snapshots/test_binary_sensor.ambr index 6a6e9826dc2..a295dc16344 100644 --- a/tests/components/teslemetry/snapshots/test_binary_sensor.ambr +++ b/tests/components/teslemetry/snapshots/test_binary_sensor.ambr @@ -1371,6 +1371,147 @@ 'state': 'unknown', }) # --- +# name: test_binary_sensor[binary_sensor.test_located_at_favorite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_located_at_favorite', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Located at favorite', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'located_at_favorite', + 'unique_id': 'LRW3F7EK4NC700000-located_at_favorite', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_favorite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at favorite', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_favorite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_binary_sensor[binary_sensor.test_located_at_home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_located_at_home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Located at home', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'located_at_home', + 'unique_id': 'LRW3F7EK4NC700000-located_at_home', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at home', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_work-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_located_at_work', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Located at work', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'located_at_work', + 'unique_id': 'LRW3F7EK4NC700000-located_at_work', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_located_at_work-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at work', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_work', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_binary_sensor[binary_sensor.test_offroad_lightbar-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1490,7 +1631,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Pin to drive enabled', + 'original_name': 'PIN to Drive enabled', 'platform': 'teslemetry', 'previous_unique_id': None, 'supported_features': 0, @@ -1502,7 +1643,7 @@ # name: test_binary_sensor[binary_sensor.test_pin_to_drive_enabled-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pin to drive enabled', + 'friendly_name': 'Test PIN to Drive enabled', }), 'context': , 'entity_id': 'binary_sensor.test_pin_to_drive_enabled', @@ -2801,6 +2942,45 @@ 'state': 'unknown', }) # --- +# name: test_binary_sensor_refresh[binary_sensor.test_located_at_favorite-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at favorite', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_favorite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_located_at_home-statealt] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'friendly_name': 'Test Located at home', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_located_at_work-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Located at work', + }), + 'context': , + 'entity_id': 'binary_sensor.test_located_at_work', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_binary_sensor_refresh[binary_sensor.test_offroad_lightbar-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -2830,7 +3010,7 @@ # name: test_binary_sensor_refresh[binary_sensor.test_pin_to_drive_enabled-statealt] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pin to drive enabled', + 'friendly_name': 'Test PIN to Drive enabled', }), 'context': , 'entity_id': 'binary_sensor.test_pin_to_drive_enabled', diff --git a/tests/components/teslemetry/snapshots/test_climate.ambr b/tests/components/teslemetry/snapshots/test_climate.ambr index 4c265c00cb8..e0e68f23c79 100644 --- a/tests/components/teslemetry/snapshots/test_climate.ambr +++ b/tests/components/teslemetry/snapshots/test_climate.ambr @@ -1,10 +1,4 @@ # serializer version: 1 -# name: test_asleep_or_offline[HomeAssistantError] - 'Timed out trying to wake up vehicle' -# --- -# name: test_asleep_or_offline[InvalidCommand] - 'Failed to wake up vehicle: The data request or command is unknown.' -# --- # name: test_climate[climate.test_cabin_overheat_protection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -78,6 +72,10 @@ }), 'area_id': None, 'capabilities': dict({ + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'hvac_modes': list([ , , @@ -113,7 +111,7 @@ 'original_name': 'Climate', 'platform': 'teslemetry', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': , 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, @@ -123,6 +121,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 30.0, + 'fan_mode': 'off', + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'friendly_name': 'Test Climate', 'hvac_modes': list([ , @@ -137,7 +140,7 @@ 'dog', 'camp', ]), - 'supported_features': , + 'supported_features': , 'temperature': 22.0, }), 'context': , @@ -220,6 +223,10 @@ }), 'area_id': None, 'capabilities': dict({ + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'hvac_modes': list([ , , @@ -255,7 +262,7 @@ 'original_name': 'Climate', 'platform': 'teslemetry', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': , 'unique_id': 'LRW3F7EK4NC700000-driver_temp', 'unit_of_measurement': None, @@ -265,6 +272,11 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 30.0, + 'fan_mode': 'off', + 'fan_modes': list([ + 'off', + 'bioweapon', + ]), 'friendly_name': 'Test Climate', 'hvac_modes': list([ , @@ -279,7 +291,7 @@ 'dog', 'camp', ]), - 'supported_features': , + 'supported_features': , 'temperature': 22.0, }), 'context': , @@ -297,7 +309,9 @@ 'area_id': None, 'capabilities': dict({ 'hvac_modes': list([ + , , + , ]), 'max_temp': 40, 'min_temp': 30, @@ -339,6 +353,7 @@ 'capabilities': dict({ 'hvac_modes': list([ , + , ]), 'max_temp': 28.0, 'min_temp': 15.0, @@ -374,3 +389,85 @@ # name: test_invalid_error[error] 'Command returned exception: The data request or command is unknown.' 
# --- +# name: test_select_streaming[climate.test_cabin_overheat_protection] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- +# name: test_select_streaming[climate.test_climate LHD] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 26.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': None, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat_cool', + }) +# --- +# name: test_select_streaming[climate.test_climate] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 26.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': None, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat_cool', + }) +# --- diff --git a/tests/components/teslemetry/snapshots/test_diagnostics.ambr b/tests/components/teslemetry/snapshots/test_diagnostics.ambr index 56a8f759a21..a39e8a0ff74 100644 --- a/tests/components/teslemetry/snapshots/test_diagnostics.ambr +++ b/tests/components/teslemetry/snapshots/test_diagnostics.ambr @@ -375,7 +375,7 @@ 'vehicle_state_api_version': 71, 'vehicle_state_autopark_state_v2': 'unavailable', 'vehicle_state_calendar_supported': True, - 'vehicle_state_car_version': '2024.44.25 06f534d46010', + 'vehicle_state_car_version': '2026.0.0 06f534d46010', 'vehicle_state_center_display_state': 0, 'vehicle_state_dashcam_clip_save_available': True, 'vehicle_state_dashcam_state': 'Recording', diff --git a/tests/components/teslemetry/snapshots/test_media_player.ambr b/tests/components/teslemetry/snapshots/test_media_player.ambr index 663e91a502c..7f721b95289 100644 --- a/tests/components/teslemetry/snapshots/test_media_player.ambr +++ b/tests/components/teslemetry/snapshots/test_media_player.ambr @@ -47,7 +47,7 @@ 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', 'source': 'Audible', 'supported_features': , - 'volume_level': 0.16129355359011466, + 'volume_level': 0.16129354838709678, }), 'context': , 'entity_id': 'media_player.test_media_player', @@ -64,10 +64,12 @@ 'friendly_name': 'Test Media player', 'media_album_name': '', 'media_artist': '', + 'media_duration': 0.0, 'media_playlist': '', 'media_title': '', 'source': 'Spotify', 'supported_features': , + 'volume_level': 0.0, }), 'context': , 'entity_id': 'media_player.test_media_player', @@ -125,7 +127,43 @@ 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', 'source': 'Audible', 'supported_features': , - 'volume_level': 0.16129355359011466, + 'volume_level': 0.16129354838709678, + }), + 'context': , + 'entity_id': 'media_player.test_media_player', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) 
+# --- +# name: test_update_streaming[off] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Test Media player', + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.test_media_player', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_update_streaming[on] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Test Media player', + 'media_album_name': 'Test Album', + 'media_artist': 'Test Artist', + 'media_duration': 60, + 'media_position': 5, + 'source': 'Spotify', + 'supported_features': , + 'volume_level': 0.1935483870967742, }), 'context': , 'entity_id': 'media_player.test_media_player', diff --git a/tests/components/teslemetry/snapshots/test_switch.ambr b/tests/components/teslemetry/snapshots/test_switch.ambr index f9997133044..0586b454a91 100644 --- a/tests/components/teslemetry/snapshots/test_switch.ambr +++ b/tests/components/teslemetry/snapshots/test_switch.ambr @@ -495,3 +495,21 @@ 'state': 'off', }) # --- +# name: test_switch_streaming[switch.test_auto_seat_climate_left] + 'on' +# --- +# name: test_switch_streaming[switch.test_auto_seat_climate_right] + 'off' +# --- +# name: test_switch_streaming[switch.test_auto_steering_wheel_heater] + 'on' +# --- +# name: test_switch_streaming[switch.test_charge] + 'on' +# --- +# name: test_switch_streaming[switch.test_defrost] + 'off' +# --- +# name: test_switch_streaming[switch.test_sentry_mode] + 'on' +# --- diff --git a/tests/components/teslemetry/snapshots/test_update.ambr b/tests/components/teslemetry/snapshots/test_update.ambr index 1c7d525af86..391d81c086e 100644 --- a/tests/components/teslemetry/snapshots/test_update.ambr +++ b/tests/components/teslemetry/snapshots/test_update.ambr @@ -41,7 +41,7 @@ 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', 'friendly_name': 'Test Update', 'in_progress': False, - 'installed_version': '2024.44.25', + 'installed_version': '2026.0.0', 'latest_version': '2024.12.0.0', 'release_summary': None, 'release_url': None, @@ -117,3 +117,128 @@ 'state': 'off', }) # --- +# name: test_update_streaming[downloading] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.1.1', + 'latest_version': '2025.2.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_streaming[installing] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.1.1', + 'latest_version': '2025.2.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_streaming[ready] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.1.1', + 'latest_version': '2025.2.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_streaming[restored] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.2.1', + 'latest_version': '2025.1.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_update_streaming[updated] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/teslemetry/icon.png', + 'friendly_name': 'Test Update', + 'in_progress': False, + 'installed_version': '2025.2.1', + 'latest_version': '2025.1.1', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.test_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/teslemetry/test_button.py b/tests/components/teslemetry/test_button.py index 75f94342f1e..46db33ce913 100644 --- a/tests/components/teslemetry/test_button.py +++ b/tests/components/teslemetry/test_button.py @@ -42,7 +42,7 @@ async def test_press(hass: HomeAssistant, name: str, func: str) -> None: await setup_platform(hass, [Platform.BUTTON]) with patch( - f"homeassistant.components.teslemetry.VehicleSpecific.{func}", + f"tesla_fleet_api.teslemetry.Vehicle.{func}", return_value=COMMAND_OK, ) as command: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 33f2e134806..27bed45c51f 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -2,10 +2,10 @@ from unittest.mock import AsyncMock, patch -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import InvalidCommand +from teslemetry_stream import Signal from homeassistant.components.climate import ( ATTR_HVAC_MODE, @@ -24,15 +24,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er -from . import assert_entities, setup_platform +from . 
import assert_entities, reload_platform, setup_platform from .const import ( COMMAND_ERRORS, COMMAND_IGNORED_REASON, METADATA_NOSCOPE, VEHICLE_DATA_ALT, - VEHICLE_DATA_ASLEEP, - WAKE_UP_ASLEEP, - WAKE_UP_ONLINE, ) @@ -41,6 +38,7 @@ async def test_climate( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" @@ -195,6 +193,7 @@ async def test_climate_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" @@ -211,7 +210,7 @@ async def test_invalid_error(hass: HomeAssistant, snapshot: SnapshotAssertion) - with ( patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", side_effect=InvalidCommand, ) as mock_on, pytest.raises(HomeAssistantError) as error, @@ -235,7 +234,7 @@ async def test_errors(hass: HomeAssistant, response: str) -> None: with ( patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", return_value=response, ) as mock_on, pytest.raises(HomeAssistantError), @@ -257,7 +256,7 @@ async def test_ignored_error( await setup_platform(hass, [Platform.CLIMATE]) entity_id = "climate.test_climate" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.auto_conditioning_start", + "tesla_fleet_api.teslemetry.Vehicle.auto_conditioning_start", return_value=COMMAND_IGNORED_REASON, ) as mock_on: await hass.services.async_call( @@ -269,71 +268,12 @@ async def test_ignored_error( mock_on.assert_called_once() -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_asleep_or_offline( - hass: HomeAssistant, - mock_vehicle_data: AsyncMock, - mock_wake_up: AsyncMock, - mock_vehicle: AsyncMock, - freezer: FrozenDateTimeFactory, - snapshot: SnapshotAssertion, -) -> None: - """Tests asleep is handled.""" - - mock_vehicle_data.return_value = VEHICLE_DATA_ASLEEP - await setup_platform(hass, [Platform.CLIMATE]) - entity_id = "climate.test_climate" - - # Run a command but fail trying to wake up the vehicle - mock_wake_up.side_effect = InvalidCommand - with pytest.raises(HomeAssistantError) as error: - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - assert str(error.value) == snapshot(name="InvalidCommand") - mock_wake_up.assert_called_once() - - mock_wake_up.side_effect = None - mock_wake_up.reset_mock() - - # Run a command but timeout trying to wake up the vehicle - mock_wake_up.return_value = WAKE_UP_ASLEEP - mock_vehicle.return_value = WAKE_UP_ASLEEP - with ( - patch("homeassistant.components.teslemetry.helpers.asyncio.sleep"), - pytest.raises(HomeAssistantError) as error, - ): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: [entity_id]}, - blocking=True, - ) - assert str(error.value) == snapshot(name="HomeAssistantError") - mock_wake_up.assert_called_once() - mock_vehicle.assert_called() - - mock_wake_up.reset_mock() - mock_vehicle.reset_mock() - mock_wake_up.return_value = WAKE_UP_ONLINE - mock_vehicle.return_value = WAKE_UP_ONLINE - - # Run a command and wake up the vehicle immediately - await hass.services.async_call( - CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True - ) - await 
hass.async_block_till_done() - mock_wake_up.assert_called_once() - - async def test_climate_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_metadata: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" mock_metadata.return_value = METADATA_NOSCOPE @@ -363,3 +303,47 @@ {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 20}, blocking=True, ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select_streaming( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_vehicle_data: AsyncMock, + mock_add_listener: AsyncMock, +) -> None: + """Tests that the climate entities with streaming are correct.""" + + entry = await setup_platform(hass, [Platform.CLIMATE]) + + # Stream update + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.INSIDE_TEMP: 26, + Signal.HVAC_AC_ENABLED: True, + Signal.CLIMATE_KEEPER_MODE: "ClimateKeeperModeOn", + Signal.RIGHT_HAND_DRIVE: True, + Signal.HVAC_LEFT_TEMPERATURE_REQUEST: 22, + Signal.HVAC_RIGHT_TEMPERATURE_REQUEST: 21, + Signal.CABIN_OVERHEAT_PROTECTION_MODE: "CabinOverheatProtectionModeStateOn", + Signal.CABIN_OVERHEAT_PROTECTION_TEMPERATURE_LIMIT: 35, + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + assert hass.states.get("climate.test_climate") == snapshot( + name="climate.test_climate LHD" + ) + + await reload_platform(hass, entry, [Platform.CLIMATE]) + + # Assert the entities restored their values + for entity_id in ( + "climate.test_climate", + "climate.test_cabin_overheat_protection", + ): + assert hass.states.get(entity_id) == snapshot(name=entity_id) diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py index 14af1e732fe..e3933931c9f 100644 --- a/tests/components/teslemetry/test_cover.py +++ b/tests/components/teslemetry/test_cover.py @@ -75,7 +75,7 @@ async def test_cover_services( # Vent Windows entity_id = "cover.test_windows" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.window_control", + "tesla_fleet_api.teslemetry.Vehicle.window_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -104,7 +104,7 @@ async def test_cover_services( # Charge Port Door entity_id = "cover.test_charge_port_door" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -119,7 +119,7 @@ async def test_cover_services( assert state.state == CoverState.OPEN with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_close", + "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_close", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -136,7 +136,7 @@ async def test_cover_services( # Frunk entity_id = "cover.test_frunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.teslemetry.Vehicle.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -153,7 +153,7 @@ async def test_cover_services( # Trunk entity_id = "cover.test_trunk" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.actuate_trunk", + "tesla_fleet_api.teslemetry.Vehicle.actuate_trunk", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -182,7 +182,7 
@@ async def test_cover_services( # Sunroof entity_id = "cover.test_sunroof" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", + "tesla_fleet_api.teslemetry.Vehicle.sun_roof_control", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 5481e6cc034..fcf9c76c939 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -2,17 +2,14 @@ from unittest.mock import AsyncMock -from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import ( InvalidToken, SubscriptionRequired, TeslaFleetError, - VehicleOffline, ) -from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.models import TeslemetryData from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, Platform @@ -22,8 +19,6 @@ from homeassistant.helpers import device_registry as dr from . import setup_platform from .const import VEHICLE_DATA_ALT -from tests.common import async_fire_time_changed - ERRORS = [ (InvalidToken, ConfigEntryState.SETUP_ERROR), (SubscriptionRequired, ConfigEntryState.SETUP_ERROR), @@ -69,22 +64,6 @@ async def test_devices( assert device == snapshot(name=f"{device.identifiers}") -async def test_vehicle_refresh_offline( - hass: HomeAssistant, mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory -) -> None: - """Test coordinator refresh with an error.""" - entry = await setup_platform(hass, [Platform.CLIMATE]) - assert entry.state is ConfigEntryState.LOADED - mock_vehicle_data.assert_called_once() - mock_vehicle_data.reset_mock() - - mock_vehicle_data.side_effect = VehicleOffline - freezer.tick(VEHICLE_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - mock_vehicle_data.assert_called_once() - - @pytest.mark.parametrize(("side_effect", "state"), ERRORS) async def test_vehicle_refresh_error( hass: HomeAssistant, diff --git a/tests/components/teslemetry/test_lock.py b/tests/components/teslemetry/test_lock.py index 848eee82c39..a74d613859f 100644 --- a/tests/components/teslemetry/test_lock.py +++ b/tests/components/teslemetry/test_lock.py @@ -57,7 +57,7 @@ async def test_lock_services( entity_id = "lock.test_lock" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.door_lock", + "tesla_fleet_api.teslemetry.Vehicle.door_lock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_lock_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.door_unlock", + "tesla_fleet_api.teslemetry.Vehicle.door_unlock", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -95,7 +95,7 @@ async def test_lock_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.charge_port_door_open", + "tesla_fleet_api.teslemetry.Vehicle.charge_port_door_open", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_media_player.py b/tests/components/teslemetry/test_media_player.py index ae462bfd026..ab8f21ceda4 100644 --- a/tests/components/teslemetry/test_media_player.py +++ b/tests/components/teslemetry/test_media_player.py @@ -2,7 +2,9 @@ from unittest.mock import AsyncMock, patch +import pytest from syrupy.assertion 
import SnapshotAssertion +from teslemetry_stream import Signal from homeassistant.components.media_player import ( ATTR_MEDIA_VOLUME_LEVEL, @@ -18,7 +20,7 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import assert_entities, assert_entities_alt, setup_platform +from . import assert_entities, assert_entities_alt, reload_platform, setup_platform from .const import COMMAND_OK, METADATA_NOSCOPE, VEHICLE_DATA_ALT @@ -26,6 +28,7 @@ async def test_media_player( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the media player entities are correct.""" @@ -38,6 +41,7 @@ async def test_media_player_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the media player entities are correct.""" @@ -51,6 +55,7 @@ async def test_media_player_noscope( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_metadata: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the media player entities are correct without required scope.""" @@ -62,6 +67,7 @@ async def test_media_player_noscope( async def test_media_player_services( hass: HomeAssistant, snapshot: SnapshotAssertion, + mock_legacy: AsyncMock, ) -> None: """Tests that the media player services work.""" @@ -70,7 +76,7 @@ async def test_media_player_services( entity_id = "media_player.test_media_player" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.adjust_volume", + "tesla_fleet_api.teslemetry.Vehicle.adjust_volume", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -84,7 +90,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.teslemetry.Vehicle.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -99,7 +105,7 @@ async def test_media_player_services( # This test will fail without the previous call to pause playback with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_toggle_playback", + "tesla_fleet_api.teslemetry.Vehicle.media_toggle_playback", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -113,7 +119,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_next_track", + "tesla_fleet_api.teslemetry.Vehicle.media_next_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -126,7 +132,7 @@ async def test_media_player_services( call.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.media_prev_track", + "tesla_fleet_api.teslemetry.Vehicle.media_prev_track", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -137,3 +143,62 @@ async def test_media_player_services( ) state = hass.states.get(entity_id) call.assert_called_once() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_update_streaming( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_vehicle_data: AsyncMock, + mock_add_listener: AsyncMock, +) -> None: + """Tests that the media player entities with streaming are correct.""" + + entry = await setup_platform(hass, [Platform.MEDIA_PLAYER]) + + # Stream update + 
mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.CENTER_DISPLAY: "Off", + Signal.MEDIA_PLAYBACK_STATUS: None, + Signal.MEDIA_PLAYBACK_SOURCE: None, + Signal.MEDIA_AUDIO_VOLUME: None, + Signal.MEDIA_NOW_PLAYING_DURATION: None, + Signal.MEDIA_NOW_PLAYING_ELAPSED: None, + Signal.MEDIA_NOW_PLAYING_ARTIST: None, + Signal.MEDIA_NOW_PLAYING_ALBUM: None, + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("media_player.test_media_player") + assert state == snapshot(name="off") + + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.CENTER_DISPLAY: "Driving", + Signal.MEDIA_PLAYBACK_STATUS: "Playing", + Signal.MEDIA_PLAYBACK_SOURCE: "Spotify", + Signal.MEDIA_AUDIO_VOLUME: 2, + Signal.MEDIA_NOW_PLAYING_DURATION: 60000, + Signal.MEDIA_NOW_PLAYING_ELAPSED: 5000, + Signal.MEDIA_NOW_PLAYING_ARTIST: "Test Artist", + Signal.MEDIA_NOW_PLAYING_ALBUM: "Test Album", + }, + "createdAt": "2024-10-04T10:55:17.000Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("media_player.test_media_player") + assert state == snapshot(name="on") + + await reload_platform(hass, entry, [Platform.MEDIA_PLAYER]) + + # Ensure the restored state is the same as the previous state + state = hass.states.get("media_player.test_media_player") + assert state == snapshot(name="on") diff --git a/tests/components/teslemetry/test_number.py b/tests/components/teslemetry/test_number.py index 95eed5a3f1e..2c45631a060 100644 --- a/tests/components/teslemetry/test_number.py +++ b/tests/components/teslemetry/test_number.py @@ -42,7 +42,7 @@ async def test_number_services( entity_id = "number.test_charge_current" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_charging_amps", + "tesla_fleet_api.teslemetry.Vehicle.set_charging_amps", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -57,7 +57,7 @@ async def test_number_services( entity_id = "number.test_charge_limit" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_charge_limit", + "tesla_fleet_api.teslemetry.Vehicle.set_charge_limit", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -72,7 +72,7 @@ async def test_number_services( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.backup", + "tesla_fleet_api.teslemetry.EnergySite.backup", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -90,7 +90,7 @@ async def test_number_services( entity_id = "number.energy_site_off_grid_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.teslemetry.EnergySite.off_grid_vehicle_charging_reserve", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_select.py b/tests/components/teslemetry/test_select.py index c49e83803cd..b17b52903fa 100644 --- a/tests/components/teslemetry/test_select.py +++ b/tests/components/teslemetry/test_select.py @@ -41,7 +41,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.test_seat_heater_front_left" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.remote_seat_heater_request", + "tesla_fleet_api.teslemetry.Vehicle.remote_seat_heater_request", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -56,7 +56,7 @@ async 
def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.test_steering_wheel_heater" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.remote_steering_wheel_heat_level_request", + "tesla_fleet_api.teslemetry.Vehicle.remote_steering_wheel_heat_level_request", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.teslemetry.EnergySpecific.operation", + "tesla_fleet_api.teslemetry.EnergySite.operation", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -89,7 +89,7 @@ async def test_select_services(hass: HomeAssistant, mock_vehicle_data) -> None: entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.teslemetry.EnergySpecific.grid_import_export", + "tesla_fleet_api.teslemetry.EnergySite.grid_import_export", return_value=COMMAND_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/teslemetry/test_sensor.py b/tests/components/teslemetry/test_sensor.py index a488ebc8a06..213811f6ea0 100644 --- a/tests/components/teslemetry/test_sensor.py +++ b/tests/components/teslemetry/test_sensor.py @@ -31,9 +31,7 @@ async def test_sensors( freezer.move_to("2024-01-01 00:00:00+00:00") # Force the vehicle to use polling - with patch( - "homeassistant.components.teslemetry.VehicleSpecific.pre2021", return_value=True - ): + with patch("tesla_fleet_api.teslemetry.Vehicle.pre2021", return_value=True): entry = await setup_platform(hass, [Platform.SENSOR]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) @@ -72,7 +70,7 @@ async def test_sensors_streaming( Signal.AC_CHARGING_ENERGY_IN: 10, Signal.AC_CHARGING_POWER: 2, Signal.CHARGING_CABLE_TYPE: None, - Signal.TIME_TO_FULL_CHARGE: 10, + Signal.TIME_TO_FULL_CHARGE: 0.166666667, Signal.MINUTES_TO_ARRIVAL: None, }, "createdAt": "2024-10-04T10:45:17.537Z", diff --git a/tests/components/teslemetry/test_services.py b/tests/components/teslemetry/test_services.py index a5b55f5dcc5..bcf5407999f 100644 --- a/tests/components/teslemetry/test_services.py +++ b/tests/components/teslemetry/test_services.py @@ -51,7 +51,7 @@ async def test_services( ).device_id with patch( - "homeassistant.components.teslemetry.VehicleSpecific.navigation_gps_request", + "tesla_fleet_api.teslemetry.Vehicle.navigation_gps_request", return_value=COMMAND_OK, ) as navigation_gps_request: await hass.services.async_call( @@ -66,7 +66,7 @@ async def test_services( navigation_gps_request.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_scheduled_charging", + "tesla_fleet_api.teslemetry.Vehicle.set_scheduled_charging", return_value=COMMAND_OK, ) as set_scheduled_charging: await hass.services.async_call( @@ -93,7 +93,7 @@ async def test_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_scheduled_departure", + "tesla_fleet_api.teslemetry.Vehicle.set_scheduled_departure", return_value=COMMAND_OK, ) as set_scheduled_departure: await hass.services.async_call( @@ -138,7 +138,7 @@ async def test_services( ) with patch( - "homeassistant.components.teslemetry.VehicleSpecific.set_valet_mode", + "tesla_fleet_api.teslemetry.Vehicle.set_valet_mode", return_value=COMMAND_OK, ) as set_valet_mode: await hass.services.async_call( @@ -154,7 +154,7 @@ async def test_services( set_valet_mode.assert_called_once() 
with patch( - "homeassistant.components.teslemetry.VehicleSpecific.speed_limit_activate", + "tesla_fleet_api.teslemetry.Vehicle.speed_limit_activate", return_value=COMMAND_OK, ) as speed_limit_activate: await hass.services.async_call( @@ -170,7 +170,7 @@ async def test_services( speed_limit_activate.assert_called_once() with patch( - "homeassistant.components.teslemetry.VehicleSpecific.speed_limit_deactivate", + "tesla_fleet_api.teslemetry.Vehicle.speed_limit_deactivate", return_value=COMMAND_OK, ) as speed_limit_deactivate: await hass.services.async_call( @@ -186,7 +186,7 @@ async def test_services( speed_limit_deactivate.assert_called_once() with patch( - "homeassistant.components.teslemetry.EnergySpecific.time_of_use_settings", + "tesla_fleet_api.teslemetry.EnergySite.time_of_use_settings", return_value=COMMAND_OK, ) as set_time_of_use: await hass.services.async_call( @@ -202,7 +202,7 @@ async def test_services( with ( patch( - "homeassistant.components.teslemetry.EnergySpecific.time_of_use_settings", + "tesla_fleet_api.teslemetry.EnergySite.time_of_use_settings", return_value=COMMAND_ERROR, ) as set_time_of_use, pytest.raises(HomeAssistantError), diff --git a/tests/components/teslemetry/test_switch.py b/tests/components/teslemetry/test_switch.py index 6a1ddb430ce..6b31a28db59 100644 --- a/tests/components/teslemetry/test_switch.py +++ b/tests/components/teslemetry/test_switch.py @@ -4,6 +4,7 @@ from unittest.mock import AsyncMock, patch import pytest from syrupy.assertion import SnapshotAssertion +from teslemetry_stream import Signal from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, @@ -14,7 +15,7 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import assert_entities, assert_entities_alt, setup_platform +from . 
import assert_entities, assert_entities_alt, reload_platform, setup_platform from .const import COMMAND_OK, VEHICLE_DATA_ALT @@ -22,6 +23,7 @@ async def test_switch( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the switch entities are correct.""" @@ -34,6 +36,7 @@ async def test_switch_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the switch entities are correct.""" @@ -46,41 +49,41 @@ async def test_switch_alt( @pytest.mark.parametrize( ("name", "on", "off"), [ - ("test_charge", "VehicleSpecific.charge_start", "VehicleSpecific.charge_stop"), + ("test_charge", "Vehicle.charge_start", "Vehicle.charge_stop"), ( "test_auto_seat_climate_left", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", ), ( "test_auto_seat_climate_right", - "VehicleSpecific.remote_auto_seat_climate_request", - "VehicleSpecific.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", + "Vehicle.remote_auto_seat_climate_request", ), ( "test_auto_steering_wheel_heater", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", - "VehicleSpecific.remote_auto_steering_wheel_heat_climate_request", + "Vehicle.remote_auto_steering_wheel_heat_climate_request", + "Vehicle.remote_auto_steering_wheel_heat_climate_request", ), ( "test_defrost", - "VehicleSpecific.set_preconditioning_max", - "VehicleSpecific.set_preconditioning_max", + "Vehicle.set_preconditioning_max", + "Vehicle.set_preconditioning_max", ), ( "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "EnergySite.storm_mode", + "EnergySite.storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "EnergySite.grid_import_export", + "EnergySite.grid_import_export", ), ( "test_sentry_mode", - "VehicleSpecific.set_sentry_mode", - "VehicleSpecific.set_sentry_mode", + "Vehicle.set_sentry_mode", + "Vehicle.set_sentry_mode", ), ], ) @@ -93,7 +96,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.teslemetry.{on}", + f"tesla_fleet_api.teslemetry.{on}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -107,7 +110,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.teslemetry.{off}", + f"tesla_fleet_api.teslemetry.{off}", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -119,3 +122,47 @@ async def test_switch_services( state = hass.states.get(entity_id) assert state.state == STATE_OFF call.assert_called_once() + + +async def test_switch_streaming( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + mock_add_listener: AsyncMock, +) -> None: + """Tests that the switch entities with streaming are correct.""" + + entry = await setup_platform(hass, [Platform.SWITCH]) + + # Stream update + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SENTRY_MODE: "SentryModeStateIdle", + Signal.AUTO_SEAT_CLIMATE_LEFT: True, + Signal.AUTO_SEAT_CLIMATE_RIGHT: False, + Signal.HVAC_STEERING_WHEEL_HEAT_AUTO: True, + Signal.DEFROST_MODE: 
"DefrostModeStateOff", + Signal.DETAILED_CHARGE_STATE: "DetailedChargeStateCharging", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + # Reload the entry + await reload_platform(hass, entry, [Platform.SWITCH]) + + # Assert the entities restored their values + for entity_id in ( + "switch.test_sentry_mode", + "switch.test_auto_seat_climate_left", + "switch.test_auto_seat_climate_right", + "switch.test_auto_steering_wheel_heater", + "switch.test_defrost", + "switch.test_charge", + ): + state = hass.states.get(entity_id) + assert state.state == snapshot(name=entity_id) diff --git a/tests/components/teslemetry/test_update.py b/tests/components/teslemetry/test_update.py index 448f31afd67..af6c9d847f1 100644 --- a/tests/components/teslemetry/test_update.py +++ b/tests/components/teslemetry/test_update.py @@ -4,7 +4,9 @@ import copy from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy.assertion import SnapshotAssertion +from teslemetry_stream import Signal from homeassistant.components.teslemetry.coordinator import VEHICLE_INTERVAL from homeassistant.components.teslemetry.update import INSTALLING @@ -13,7 +15,7 @@ from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from . import assert_entities, setup_platform +from . import assert_entities, reload_platform, setup_platform from .const import COMMAND_OK, VEHICLE_DATA, VEHICLE_DATA_ALT from tests.common import async_fire_time_changed @@ -23,6 +25,7 @@ async def test_update( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, + mock_legacy: AsyncMock, ) -> None: """Tests that the update entities are correct.""" @@ -35,6 +38,7 @@ async def test_update_alt( snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data: AsyncMock, + mock_legacy: AsyncMock, ) -> None: """Tests that the update entities are correct.""" @@ -48,6 +52,7 @@ async def test_update_services( mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, snapshot: SnapshotAssertion, + mock_legacy: AsyncMock, ) -> None: """Tests that the update services work.""" @@ -56,7 +61,7 @@ async def test_update_services( entity_id = "update.test_update" with patch( - "homeassistant.components.teslemetry.VehicleSpecific.schedule_software_update", + "tesla_fleet_api.teslemetry.Vehicle.schedule_software_update", return_value=COMMAND_OK, ) as call: await hass.services.async_call( @@ -78,3 +83,90 @@ async def test_update_services( state = hass.states.get(entity_id) assert state.attributes["in_progress"] == 1 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_update_streaming( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_vehicle_data: AsyncMock, + mock_add_listener: AsyncMock, +) -> None: + """Tests that the select entities with streaming are correct.""" + + entry = await setup_platform(hass, [Platform.UPDATE]) + + # Stream update + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SOFTWARE_UPDATE_DOWNLOAD_PERCENT_COMPLETE: 50, + Signal.SOFTWARE_UPDATE_INSTALLATION_PERCENT_COMPLETE: None, + Signal.SOFTWARE_UPDATE_SCHEDULED_START_TIME: None, + Signal.SOFTWARE_UPDATE_VERSION: "2025.2.1", + Signal.VERSION: "2025.1.1", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + + state = 
hass.states.get("update.test_update") + assert state == snapshot(name="downloading") + + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SOFTWARE_UPDATE_DOWNLOAD_PERCENT_COMPLETE: 100, + Signal.SOFTWARE_UPDATE_INSTALLATION_PERCENT_COMPLETE: 1, + Signal.SOFTWARE_UPDATE_SCHEDULED_START_TIME: None, + Signal.SOFTWARE_UPDATE_VERSION: "2025.2.1", + Signal.VERSION: "2025.1.1", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state == snapshot(name="ready") + + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SOFTWARE_UPDATE_DOWNLOAD_PERCENT_COMPLETE: 100, + Signal.SOFTWARE_UPDATE_INSTALLATION_PERCENT_COMPLETE: 50, + Signal.SOFTWARE_UPDATE_SCHEDULED_START_TIME: None, + Signal.SOFTWARE_UPDATE_VERSION: "2025.2.1", + Signal.VERSION: "2025.1.1", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state == snapshot(name="installing") + + mock_add_listener.send( + { + "vin": VEHICLE_DATA_ALT["response"]["vin"], + "data": { + Signal.SOFTWARE_UPDATE_DOWNLOAD_PERCENT_COMPLETE: None, + Signal.SOFTWARE_UPDATE_INSTALLATION_PERCENT_COMPLETE: None, + Signal.SOFTWARE_UPDATE_SCHEDULED_START_TIME: None, + Signal.SOFTWARE_UPDATE_VERSION: "", + Signal.VERSION: "2025.2.1", + }, + "createdAt": "2024-10-04T10:45:17.537Z", + } + ) + await hass.async_block_till_done() + state = hass.states.get("update.test_update") + assert state == snapshot(name="updated") + + await reload_platform(hass, entry, [Platform.UPDATE]) + + state = hass.states.get("update.test_update") + assert state == snapshot(name="restored") diff --git a/tests/components/tessie/conftest.py b/tests/components/tessie/conftest.py index e0aba73af17..5fb844ff6b4 100644 --- a/tests/components/tessie/conftest.py +++ b/tests/components/tessie/conftest.py @@ -85,7 +85,7 @@ def mock_request(): def mock_live_status(): """Mock Tesla Fleet API EnergySpecific live_status method.""" with patch( - "homeassistant.components.tessie.EnergySpecific.live_status", + "tesla_fleet_api.tessie.EnergySite.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), ) as mock_live_status: yield mock_live_status @@ -95,7 +95,7 @@ def mock_live_status(): def mock_site_info(): """Mock Tesla Fleet API EnergySpecific site_info method.""" with patch( - "homeassistant.components.tessie.EnergySpecific.site_info", + "tesla_fleet_api.tessie.EnergySite.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status diff --git a/tests/components/tessie/test_number.py b/tests/components/tessie/test_number.py index 0fb13779183..69bbe1c9087 100644 --- a/tests/components/tessie/test_number.py +++ b/tests/components/tessie/test_number.py @@ -67,7 +67,7 @@ async def test_numbers( entity_id = "number.energy_site_backup_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.backup", + "tesla_fleet_api.tessie.EnergySite.backup", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -85,7 +85,7 @@ async def test_numbers( entity_id = "number.energy_site_off_grid_reserve" with patch( - "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", + "tesla_fleet_api.tessie.EnergySite.off_grid_vehicle_charging_reserve", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( diff --git 
a/tests/components/tessie/test_select.py b/tests/components/tessie/test_select.py index c78923fbf5b..64380d363fc 100644 --- a/tests/components/tessie/test_select.py +++ b/tests/components/tessie/test_select.py @@ -52,7 +52,7 @@ async def test_select( # Test site operation mode entity_id = "select.energy_site_operation_mode" with patch( - "homeassistant.components.teslemetry.EnergySpecific.operation", + "tesla_fleet_api.tessie.EnergySite.operation", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -71,7 +71,7 @@ async def test_select( # Test site export mode entity_id = "select.energy_site_allow_export" with patch( - "homeassistant.components.teslemetry.EnergySpecific.grid_import_export", + "tesla_fleet_api.tessie.EnergySite.grid_import_export", return_value=TEST_RESPONSE, ) as call: await hass.services.async_call( @@ -129,7 +129,7 @@ async def test_errors(hass: HomeAssistant) -> None: # Test changing energy select with unknown error with ( patch( - "homeassistant.components.tessie.EnergySpecific.operation", + "tesla_fleet_api.tessie.EnergySite.operation", side_effect=UnsupportedVehicle, ) as mock_set, pytest.raises(HomeAssistantError) as error, diff --git a/tests/components/tessie/test_switch.py b/tests/components/tessie/test_switch.py index 690ad7d1ab4..f58468edfb7 100644 --- a/tests/components/tessie/test_switch.py +++ b/tests/components/tessie/test_switch.py @@ -61,13 +61,13 @@ async def test_switches( [ ( "energy_site_storm_watch", - "EnergySpecific.storm_mode", - "EnergySpecific.storm_mode", + "storm_mode", + "storm_mode", ), ( "energy_site_allow_charging_from_grid", - "EnergySpecific.grid_import_export", - "EnergySpecific.grid_import_export", + "grid_import_export", + "grid_import_export", ), ], ) @@ -80,7 +80,7 @@ async def test_switch_services( entity_id = f"switch.{name}" with patch( - f"homeassistant.components.teslemetry.{on}", + f"tesla_fleet_api.tessie.EnergySite.{on}", return_value=RESPONSE_OK, ) as call: await hass.services.async_call( @@ -94,7 +94,7 @@ async def test_switch_services( call.assert_called_once() with patch( - f"homeassistant.components.teslemetry.{off}", + f"tesla_fleet_api.tessie.EnergySite.{off}", return_value=RESPONSE_OK, ) as call: await hass.services.async_call( diff --git a/tests/components/todo/__init__.py b/tests/components/todo/__init__.py index 53772ab144e..239b586d366 100644 --- a/tests/components/todo/__init__.py +++ b/tests/components/todo/__init__.py @@ -34,6 +34,13 @@ class MockTodoListEntity(TodoListEntity): """Delete an item in the To-do list.""" self._attr_todo_items = [item for item in self.items if item.uid not in uids] + async def async_update_todo_item(self, item: TodoItem) -> None: + """Update an item in the To-do list.""" + for idx, existing_item in enumerate(self.items): + if existing_item.uid == item.uid: + self._attr_todo_items[idx] = item + break + async def create_mock_platform( hass: HomeAssistant, diff --git a/tests/components/todo/test_init.py b/tests/components/todo/test_init.py index 8e8c010f758..11ef3d6f044 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -7,8 +7,6 @@ import zoneinfo import pytest import voluptuous as vol -from homeassistant.components import conversation -from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.todo import ( ATTR_DESCRIPTION, ATTR_DUE_DATE, @@ -22,7 +20,6 @@ from homeassistant.components.todo import ( TodoListEntity, TodoListEntityFeature, TodoServices, - intent as 
todo_intent, ) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES @@ -32,10 +29,9 @@ from homeassistant.exceptions import ( ServiceNotSupported, ServiceValidationError, ) -from homeassistant.helpers import intent from homeassistant.setup import async_setup_component -from . import MockTodoListEntity, create_mock_platform +from . import create_mock_platform from tests.typing import WebSocketGenerator @@ -989,116 +985,6 @@ async def test_move_item_unsupported( assert resp.get("error", {}).get("code") == "not_supported" -async def test_add_item_intent( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, -) -> None: - """Test adding items to lists using an intent.""" - assert await async_setup_component(hass, "homeassistant", {}) - await todo_intent.async_setup_intents(hass) - - entity1 = MockTodoListEntity() - entity1._attr_name = "List 1" - entity1.entity_id = "todo.list_1" - - entity2 = MockTodoListEntity() - entity2._attr_name = "List 2" - entity2.entity_id = "todo.list_2" - - await create_mock_platform(hass, [entity1, entity2]) - - # Add to first list - response = await intent.async_handle( - hass, - "test", - todo_intent.INTENT_LIST_ADD_ITEM, - {ATTR_ITEM: {"value": " beer "}, "name": {"value": "list 1"}}, - assistant=conversation.DOMAIN, - ) - assert response.response_type == intent.IntentResponseType.ACTION_DONE - assert response.success_results[0].name == "list 1" - assert response.success_results[0].type == intent.IntentResponseTargetType.ENTITY - assert response.success_results[0].id == entity1.entity_id - - assert len(entity1.items) == 1 - assert len(entity2.items) == 0 - assert entity1.items[0].summary == "beer" # summary is trimmed - assert entity1.items[0].status == TodoItemStatus.NEEDS_ACTION - entity1.items.clear() - - # Add to second list - response = await intent.async_handle( - hass, - "test", - todo_intent.INTENT_LIST_ADD_ITEM, - {ATTR_ITEM: {"value": "cheese"}, "name": {"value": "List 2"}}, - assistant=conversation.DOMAIN, - ) - assert response.response_type == intent.IntentResponseType.ACTION_DONE - - assert len(entity1.items) == 0 - assert len(entity2.items) == 1 - assert entity2.items[0].summary == "cheese" - assert entity2.items[0].status == TodoItemStatus.NEEDS_ACTION - - # List name is case insensitive - response = await intent.async_handle( - hass, - "test", - todo_intent.INTENT_LIST_ADD_ITEM, - {ATTR_ITEM: {"value": "wine"}, "name": {"value": "lIST 2"}}, - assistant=conversation.DOMAIN, - ) - assert response.response_type == intent.IntentResponseType.ACTION_DONE - - assert len(entity1.items) == 0 - assert len(entity2.items) == 2 - assert entity2.items[1].summary == "wine" - assert entity2.items[1].status == TodoItemStatus.NEEDS_ACTION - - # Should fail if lists are not exposed - async_expose_entity(hass, conversation.DOMAIN, entity1.entity_id, False) - async_expose_entity(hass, conversation.DOMAIN, entity2.entity_id, False) - with pytest.raises(intent.MatchFailedError) as err: - await intent.async_handle( - hass, - "test", - todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "cookies"}, "name": {"value": "list 1"}}, - assistant=conversation.DOMAIN, - ) - assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT - - # Missing list - with pytest.raises(intent.MatchFailedError): - await intent.async_handle( - hass, - "test", - todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "wine"}, "name": {"value": "This list does not exist"}}, - 
assistant=conversation.DOMAIN, - ) - - # Fail with empty name/item - with pytest.raises(intent.InvalidSlotInfo): - await intent.async_handle( - hass, - "test", - todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "wine"}, "name": {"value": ""}}, - assistant=conversation.DOMAIN, - ) - - with pytest.raises(intent.InvalidSlotInfo): - await intent.async_handle( - hass, - "test", - todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": ""}, "name": {"value": "list 1"}}, - assistant=conversation.DOMAIN, - ) - - async def test_remove_completed_items_service( hass: HomeAssistant, test_entity: TodoListEntity, diff --git a/tests/components/todo/test_intent.py b/tests/components/todo/test_intent.py new file mode 100644 index 00000000000..3f86347d1b7 --- /dev/null +++ b/tests/components/todo/test_intent.py @@ -0,0 +1,292 @@ +"""Tests for the todo intents.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components import conversation +from homeassistant.components.homeassistant.exposed_entities import async_expose_entity +from homeassistant.components.todo import ( + ATTR_ITEM, + DOMAIN, + TodoItem, + TodoItemStatus, + TodoListEntity, + intent as todo_intent, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_NAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent +from homeassistant.setup import async_setup_component + +from . import MockTodoListEntity, create_mock_platform + +from tests.common import async_mock_service +from tests.typing import WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def setup_intents(hass: HomeAssistant) -> None: + """Set up the intents.""" + assert await async_setup_component(hass, "homeassistant", {}) + await todo_intent.async_setup_intents(hass) + + +async def test_add_item_intent( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test adding items to lists using an intent.""" + assert await async_setup_component(hass, "homeassistant", {}) + await todo_intent.async_setup_intents(hass) + + entity1 = MockTodoListEntity() + entity1._attr_name = "List 1" + entity1.entity_id = "todo.list_1" + + entity2 = MockTodoListEntity() + entity2._attr_name = "List 2" + entity2.entity_id = "todo.list_2" + + await create_mock_platform(hass, [entity1, entity2]) + + # Add to first list + response = await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {ATTR_ITEM: {"value": " beer "}, "name": {"value": "list 1"}}, + assistant=conversation.DOMAIN, + ) + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.success_results[0].name == "list 1" + assert response.success_results[0].type == intent.IntentResponseTargetType.ENTITY + assert response.success_results[0].id == entity1.entity_id + + assert len(entity1.items) == 1 + assert len(entity2.items) == 0 + assert entity1.items[0].summary == "beer" # summary is trimmed + assert entity1.items[0].status == TodoItemStatus.NEEDS_ACTION + entity1.items.clear() + + # Add to second list + response = await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {ATTR_ITEM: {"value": "cheese"}, "name": {"value": "List 2"}}, + assistant=conversation.DOMAIN, + ) + assert response.response_type == intent.IntentResponseType.ACTION_DONE + + assert len(entity1.items) == 0 + assert len(entity2.items) == 1 + assert entity2.items[0].summary == "cheese" + assert entity2.items[0].status == TodoItemStatus.NEEDS_ACTION + + # List 
name is case insensitive + response = await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {ATTR_ITEM: {"value": "wine"}, "name": {"value": "lIST 2"}}, + assistant=conversation.DOMAIN, + ) + assert response.response_type == intent.IntentResponseType.ACTION_DONE + + assert len(entity1.items) == 0 + assert len(entity2.items) == 2 + assert entity2.items[1].summary == "wine" + assert entity2.items[1].status == TodoItemStatus.NEEDS_ACTION + + # Should fail if lists are not exposed + async_expose_entity(hass, conversation.DOMAIN, entity1.entity_id, False) + async_expose_entity(hass, conversation.DOMAIN, entity2.entity_id, False) + with pytest.raises(intent.MatchFailedError) as err: + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {"item": {"value": "cookies"}, "name": {"value": "list 1"}}, + assistant=conversation.DOMAIN, + ) + assert err.value.result.no_match_reason == intent.MatchFailedReason.ASSISTANT + + # Missing list + with pytest.raises(intent.MatchFailedError): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {"item": {"value": "wine"}, "name": {"value": "This list does not exist"}}, + assistant=conversation.DOMAIN, + ) + + # Fail with empty name/item + with pytest.raises(intent.InvalidSlotInfo): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {"item": {"value": "wine"}, "name": {"value": ""}}, + assistant=conversation.DOMAIN, + ) + + with pytest.raises(intent.InvalidSlotInfo): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + {"item": {"value": ""}, "name": {"value": "list 1"}}, + assistant=conversation.DOMAIN, + ) + + +async def test_add_item_intent_errors( + hass: HomeAssistant, + test_entity: TodoListEntity, +) -> None: + """Test errors with the add item intent.""" + test_entity._attr_name = "List 1" + await create_mock_platform(hass, [test_entity]) + + # Try to add item in list that does not exist + with pytest.raises(intent.MatchFailedError): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + { + ATTR_ITEM: {"value": "wine"}, + ATTR_NAME: {"value": "This list does not exist"}, + }, + assistant=conversation.DOMAIN, + ) + + # Mock the get_entity method to return None + hass.data[DOMAIN].get_entity = lambda entity_id: None + + # Try to add item in a list that exists but get_entity returns None + with pytest.raises(intent.IntentHandleError, match="No to-do list: List 1"): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_ADD_ITEM, + { + ATTR_ITEM: {"value": "wine"}, + ATTR_NAME: {"value": "List 1"}, + }, + assistant=conversation.DOMAIN, + ) + + +async def test_complete_item_intent( + hass: HomeAssistant, +) -> None: + """Test the complete item intent.""" + entity1 = MockTodoListEntity( + [ + TodoItem(summary="beer", uid="1", status=TodoItemStatus.NEEDS_ACTION), + TodoItem(summary="wine", uid="2", status=TodoItemStatus.NEEDS_ACTION), + ] + ) + entity1._attr_name = "List 1" + entity1.entity_id = "todo.list_1" + + # Add entities to hass + config_entry = await create_mock_platform(hass, [entity1]) + assert config_entry.state is ConfigEntryState.LOADED + + assert len(entity1.items) == 2 + assert entity1.items[0].status == TodoItemStatus.NEEDS_ACTION + + # Complete item + async_mock_service(hass, DOMAIN, todo_intent.INTENT_LIST_COMPLETE_ITEM) + response = await intent.async_handle( + hass, + DOMAIN, + todo_intent.INTENT_LIST_COMPLETE_ITEM, + {ATTR_ITEM: {"value": 
"beer"}, ATTR_NAME: {"value": "list 1"}}, + assistant=conversation.DOMAIN, + ) + assert response.response_type == intent.IntentResponseType.ACTION_DONE + + assert len(entity1.items) == 2 + assert entity1.items[0].status == TodoItemStatus.COMPLETED + + +async def test_complete_item_intent_errors( + hass: HomeAssistant, + test_entity: TodoListEntity, +) -> None: + """Test errors with the complete item intent.""" + entity1 = MockTodoListEntity( + [ + TodoItem(summary="beer", uid="1", status=TodoItemStatus.COMPLETED), + ] + ) + entity1._attr_name = "List 1" + entity1.entity_id = "todo.list_1" + + # Add entities to hass + await create_mock_platform(hass, [entity1]) + + # Try to complete item in list that does not exist + with pytest.raises(intent.MatchFailedError): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_COMPLETE_ITEM, + { + ATTR_ITEM: {"value": "wine"}, + ATTR_NAME: {"value": "This list does not exist"}, + }, + assistant=conversation.DOMAIN, + ) + + # Try to complete item that does not exist + with pytest.raises(intent.IntentHandleError): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_COMPLETE_ITEM, + {ATTR_ITEM: {"value": "bread"}, ATTR_NAME: {"value": "list 1"}}, + assistant=conversation.DOMAIN, + ) + + # Item is already completed + with pytest.raises(intent.IntentHandleError): + await intent.async_handle( + hass, + "test", + todo_intent.INTENT_LIST_COMPLETE_ITEM, + {ATTR_ITEM: {"value": "beer"}, ATTR_NAME: {"value": "list 1"}}, + assistant=conversation.DOMAIN, + ) + + +async def test_complete_item_intent_ha_errors( + hass: HomeAssistant, + test_entity: TodoListEntity, +) -> None: + """Test error handling of HA errors with the complete item intent.""" + test_entity._attr_name = "List 1" + test_entity.entity_id = "todo.list_1" + await create_mock_platform(hass, [test_entity]) + + # Mock the get_entity method to return None + with ( + patch( + "homeassistant.helpers.entity_component.EntityComponent.get_entity", + return_value=None, + ), + pytest.raises(intent.IntentHandleError), + ): + await intent.async_handle( + hass, + DOMAIN, + todo_intent.INTENT_LIST_COMPLETE_ITEM, + {ATTR_ITEM: {"value": "wine"}, ATTR_NAME: {"value": "List 1"}}, + assistant=conversation.DOMAIN, + ) diff --git a/tests/components/totalconnect/test_button.py b/tests/components/totalconnect/test_button.py index 80de004be1d..87764e55186 100644 --- a/tests/components/totalconnect/test_button.py +++ b/tests/components/totalconnect/test_button.py @@ -11,12 +11,7 @@ from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import ( - RESPONSE_ZONE_BYPASS_FAILURE, - RESPONSE_ZONE_BYPASS_SUCCESS, - TOTALCONNECT_REQUEST, - setup_platform, -) +from .common import setup_platform from tests.common import snapshot_platform @@ -34,12 +29,23 @@ async def test_entity_registry( await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) -@pytest.mark.parametrize("entity_id", [ZONE_BYPASS_ID, PANEL_BYPASS_ID]) -async def test_bypass_button(hass: HomeAssistant, entity_id: str) -> None: +@pytest.mark.parametrize( + ("entity_id", "tcc_request"), + [ + (ZONE_BYPASS_ID, "total_connect_client.zone.TotalConnectZone.bypass"), + ( + PANEL_BYPASS_ID, + "total_connect_client.location.TotalConnectLocation.zone_bypass_all", + ), + ], +) +async def test_bypass_button( + hass: HomeAssistant, entity_id: str, tcc_request: str +) -> None: """Test pushing a bypass button.""" - responses 
= [RESPONSE_ZONE_BYPASS_FAILURE, RESPONSE_ZONE_BYPASS_SUCCESS] + responses = [FailedToBypassZone, None] await setup_platform(hass, BUTTON) - with patch(TOTALCONNECT_REQUEST, side_effect=responses) as mock_request: + with patch(tcc_request, side_effect=responses) as mock_request: # try to bypass, but fails with pytest.raises(FailedToBypassZone): await hass.services.async_call( diff --git a/tests/components/totalconnect/test_config_flow.py b/tests/components/totalconnect/test_config_flow.py index f5020394bce..b7ac42c84b5 100644 --- a/tests/components/totalconnect/test_config_flow.py +++ b/tests/components/totalconnect/test_config_flow.py @@ -28,6 +28,7 @@ from .common import ( TOTALCONNECT_REQUEST, TOTALCONNECT_REQUEST_TOKEN, USERNAME, + init_integration, ) from tests.common import MockConfigEntry @@ -219,42 +220,19 @@ async def test_no_locations(hass: HomeAssistant) -> None: async def test_options_flow(hass: HomeAssistant) -> None: """Test config flow options.""" - config_entry = MockConfigEntry( - domain=DOMAIN, - data=CONFIG_DATA, - unique_id=USERNAME, + config_entry = await init_integration(hass) + result = await hass.config_entries.options.async_init(config_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={AUTO_BYPASS: True, CODE_REQUIRED: False} ) - config_entry.add_to_hass(hass) - responses = [ - RESPONSE_SESSION_DETAILS, - RESPONSE_PARTITION_DETAILS, - RESPONSE_GET_ZONE_DETAILS_SUCCESS, - RESPONSE_DISARMED, - RESPONSE_DISARMED, - RESPONSE_DISARMED, - ] + assert result["type"] is FlowResultType.CREATE_ENTRY + assert config_entry.options == {AUTO_BYPASS: True, CODE_REQUIRED: False} + await hass.async_block_till_done() - with ( - patch(TOTALCONNECT_REQUEST, side_effect=responses), - patch(TOTALCONNECT_GET_CONFIG, side_effect=None), - patch(TOTALCONNECT_REQUEST_TOKEN, side_effect=None), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={AUTO_BYPASS: True, CODE_REQUIRED: False} - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert config_entry.options == {AUTO_BYPASS: True, CODE_REQUIRED: False} - await hass.async_block_till_done() - - assert await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/tractive/test_device_tracker.py b/tests/components/tractive/test_device_tracker.py index ff78173ef7b..ff9c7ca88ef 100644 --- a/tests/components/tractive/test_device_tracker.py +++ b/tests/components/tractive/test_device_tracker.py @@ -59,3 +59,31 @@ async def test_source_type_phone( hass.states.get("device_tracker.test_pet_tracker").attributes["source_type"] is SourceType.BLUETOOTH ) + + +async def test_source_type_gps( + hass: HomeAssistant, + mock_tractive_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test if the source type is GPS when the location sensor is KNOWN WIFI.""" + await init_integration(hass, mock_config_entry) + + mock_tractive_client.send_position_event( + mock_config_entry, + { + "tracker_id": 
"device_id_123", + "position": { + "latlong": [22.333, 44.555], + "accuracy": 99, + "sensor_used": "KNOWN_WIFI", + }, + }, + ) + mock_tractive_client.send_hardware_event(mock_config_entry) + await hass.async_block_till_done() + + assert ( + hass.states.get("device_tracker.test_pet_tracker").attributes["source_type"] + is SourceType.GPS + ) diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index 921cab4cba2..99c698771f7 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -14,9 +14,11 @@ import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.tts import ( CONF_LANG, + DATA_TTS_MANAGER, DOMAIN as TTS_DOMAIN, PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, Provider, + ResultStream, TextToSpeechEntity, TtsAudioType, Voice, @@ -263,3 +265,33 @@ async def mock_config_entry_setup( await hass.async_block_till_done() return config_entry + + +class MockResultStream(ResultStream): + """Mock result stream.""" + + test_set_message: str | None = None + + def __init__(self, hass: HomeAssistant, extension: str, data: bytes) -> None: + """Initialize the result stream.""" + super().__init__( + token="test-token", + extension=extension, + content_type=f"audio/mock-{extension}", + engine="test-engine", + use_file_cache=True, + language="en", + options={}, + _manager=hass.data[DATA_TTS_MANAGER], + ) + hass.data[DATA_TTS_MANAGER].token_to_stream[self.token] = self + self._mock_data = data + + @callback + def async_set_message(self, message: str) -> None: + """Set message to be generated.""" + self.test_set_message = message + + async def async_stream_result(self): + """Stream the result.""" + yield self._mock_data diff --git a/tests/components/tts/test_entity.py b/tests/components/tts/test_entity.py new file mode 100644 index 00000000000..d82ec6a5d2b --- /dev/null +++ b/tests/components/tts/test_entity.py @@ -0,0 +1,144 @@ +"""Tests for the TTS entity.""" + +import pytest + +from homeassistant.components import tts +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant, State + +from .common import ( + DEFAULT_LANG, + SUPPORT_LANGUAGES, + TEST_DOMAIN, + MockTTSEntity, + mock_config_entry_setup, +) + +from tests.common import mock_restore_cache + + +class DefaultEntity(tts.TextToSpeechEntity): + """Test entity.""" + + _attr_supported_languages = SUPPORT_LANGUAGES + _attr_default_language = DEFAULT_LANG + + +async def test_default_entity_attributes() -> None: + """Test default entity attributes.""" + entity = DefaultEntity() + + assert entity.hass is None + assert entity.default_language == DEFAULT_LANG + assert entity.supported_languages == SUPPORT_LANGUAGES + assert entity.supported_options is None + assert entity.default_options is None + assert entity.async_get_supported_voices("test") is None + + +async def test_restore_state( + hass: HomeAssistant, + mock_tts_entity: MockTTSEntity, +) -> None: + """Test we restore state in the integration.""" + entity_id = f"{tts.DOMAIN}.{TEST_DOMAIN}" + timestamp = "2023-01-01T23:59:59+00:00" + mock_restore_cache(hass, (State(entity_id, timestamp),)) + + config_entry = await mock_config_entry_setup(hass, mock_tts_entity) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + state = hass.states.get(entity_id) + assert state + assert state.state == timestamp + + +async def test_tts_entity_subclass_properties( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test 
for errors when subclasses of the TextToSpeechEntity are missing required properties.""" + + class TestClass1(tts.TextToSpeechEntity): + _attr_default_language = DEFAULT_LANG + _attr_supported_languages = SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass1()) + + class TestClass2(tts.TextToSpeechEntity): + @property + def default_language(self) -> str: + return DEFAULT_LANG + + @property + def supported_languages(self) -> list[str]: + return SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass2()) + + assert all(record.exc_info is None for record in caplog.records) + + caplog.clear() + + class TestClass3(tts.TextToSpeechEntity): + _attr_default_language = DEFAULT_LANG + + await mock_config_entry_setup(hass, TestClass3()) + + assert ( + "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() + + class TestClass4(tts.TextToSpeechEntity): + _attr_supported_languages = SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass4()) + + assert ( + "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() + + class TestClass5(tts.TextToSpeechEntity): + @property + def default_language(self) -> str: + return DEFAULT_LANG + + await mock_config_entry_setup(hass, TestClass5()) + + assert ( + "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() + + class TestClass6(tts.TextToSpeechEntity): + @property + def supported_languages(self) -> list[str]: + return SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass6()) + + assert ( + "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 4d0767cddf3..4e17bc68a5e 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -20,15 +20,15 @@ from homeassistant.components.media_player import ( ) from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant, State +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from .common import ( DEFAULT_LANG, - SUPPORT_LANGUAGES, TEST_DOMAIN, + MockResultStream, MockTTS, MockTTSEntity, MockTTSProvider, @@ -38,37 +38,12 @@ from .common import ( retrieve_media, ) -from tests.common import ( - MockModule, - async_mock_service, - mock_integration, - mock_platform, - mock_restore_cache, -) +from tests.common import MockModule, async_mock_service, mock_integration, mock_platform from tests.typing import ClientSessionGenerator, WebSocketGenerator ORIG_WRITE_TAGS = tts.SpeechManager.write_tags -class DefaultEntity(tts.TextToSpeechEntity): - """Test entity.""" - - _attr_supported_languages = SUPPORT_LANGUAGES 
- _attr_default_language = DEFAULT_LANG - - -async def test_default_entity_attributes() -> None: - """Test default entity attributes.""" - entity = DefaultEntity() - - assert entity.hass is None - assert entity.default_language == DEFAULT_LANG - assert entity.supported_languages == SUPPORT_LANGUAGES - assert entity.supported_options is None - assert entity.default_options is None - assert entity.async_get_supported_voices("test") is None - - async def test_config_entry_unload( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -120,24 +95,6 @@ async def test_config_entry_unload( assert state is None -async def test_restore_state( - hass: HomeAssistant, - mock_tts_entity: MockTTSEntity, -) -> None: - """Test we restore state in the integration.""" - entity_id = f"{tts.DOMAIN}.{TEST_DOMAIN}" - timestamp = "2023-01-01T23:59:59+00:00" - mock_restore_cache(hass, (State(entity_id, timestamp),)) - - config_entry = await mock_config_entry_setup(hass, mock_tts_entity) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.LOADED - state = hass.states.get(entity_id) - assert state - assert state.state == timestamp - - @pytest.mark.parametrize( "setup", ["mock_setup", "mock_config_entry_setup"], indirect=True ) @@ -211,7 +168,7 @@ async def test_service( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" @@ -273,7 +230,7 @@ async def test_service_default_language( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / ( @@ -337,7 +294,7 @@ async def test_service_default_special_language( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" @@ -397,7 +354,7 @@ async def test_service_language( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" @@ -513,7 +470,7 @@ async def test_service_options( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / ( @@ -597,7 +554,7 @@ async def test_service_default_options( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / ( @@ -671,7 +628,7 @@ async def test_merge_default_service_options( assert await get_media_source_url( hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] ) == ("/api/tts_proxy/test_token.mp3") - await hass.async_block_till_done() + await 
hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / ( @@ -786,7 +743,7 @@ async def test_service_clear_cache( # To make sure the file is persisted assert len(calls) == 1 await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) assert ( mock_tts_cache_dir / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" @@ -1197,7 +1154,7 @@ async def test_service_get_tts_error( assert len(calls) == 1 assert ( await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) @@ -1420,29 +1377,6 @@ def test_resolve_engine(hass: HomeAssistant, setup: str, engine_id: str) -> None assert tts.async_resolve_engine(hass, None) is None -@pytest.mark.parametrize( - ("setup", "engine_id"), - [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), - ], - indirect=["setup"], -) -async def test_support_options(hass: HomeAssistant, setup: str, engine_id: str) -> None: - """Test supporting options.""" - assert await tts.async_support_options(hass, engine_id, "en_US") is True - assert await tts.async_support_options(hass, engine_id, "nl") is False - assert ( - await tts.async_support_options( - hass, engine_id, "en_US", {"invalid_option": "yo"} - ) - is False - ) - - with pytest.raises(HomeAssistantError): - await tts.async_support_options(hass, "non-existing") - - async def test_legacy_fetching_in_async( hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: @@ -1835,99 +1769,15 @@ async def test_async_convert_audio_error(hass: HomeAssistant) -> None: """Test that ffmpeg failing during audio conversion will raise an error.""" assert await async_setup_component(hass, ffmpeg.DOMAIN, {}) + async def bad_data_gen(): + yield bytes(0) + with pytest.raises(RuntimeError): # Simulate a bad WAV file - await tts.async_convert_audio(hass, "wav", bytes(0), "mp3") - - -async def test_ttsentity_subclass_properties( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test for errors when subclasses of the TextToSpeechEntity are missing required properties.""" - - class TestClass1(tts.TextToSpeechEntity): - _attr_default_language = DEFAULT_LANG - _attr_supported_languages = SUPPORT_LANGUAGES - - await mock_config_entry_setup(hass, TestClass1()) - - class TestClass2(tts.TextToSpeechEntity): - @property - def default_language(self) -> str: - return DEFAULT_LANG - - @property - def supported_languages(self) -> list[str]: - return SUPPORT_LANGUAGES - - await mock_config_entry_setup(hass, TestClass2()) - - assert all(record.exc_info is None for record in caplog.records) - - caplog.clear() - - class TestClass3(tts.TextToSpeechEntity): - _attr_default_language = DEFAULT_LANG - - await mock_config_entry_setup(hass, TestClass3()) - - assert ( - "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" - in [ - str(record.exc_info[1]) - for record in caplog.records - if record.exc_info is not None - ] - ) - caplog.clear() - - class TestClass4(tts.TextToSpeechEntity): - _attr_supported_languages = SUPPORT_LANGUAGES - - await mock_config_entry_setup(hass, TestClass4()) - - assert ( - "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" - in [ - str(record.exc_info[1]) - for record in caplog.records - if record.exc_info is not None 
- ] - ) - caplog.clear() - - class TestClass5(tts.TextToSpeechEntity): - @property - def default_language(self) -> str: - return DEFAULT_LANG - - await mock_config_entry_setup(hass, TestClass5()) - - assert ( - "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" - in [ - str(record.exc_info[1]) - for record in caplog.records - if record.exc_info is not None - ] - ) - caplog.clear() - - class TestClass6(tts.TextToSpeechEntity): - @property - def supported_languages(self) -> list[str]: - return SUPPORT_LANGUAGES - - await mock_config_entry_setup(hass, TestClass6()) - - assert ( - "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" - in [ - str(record.exc_info[1]) - for record in caplog.records - if record.exc_info is not None - ] - ) + async for _chunk in tts._async_convert_audio( + hass, "wav", bad_data_gen(), "mp3" + ): + pass async def test_default_engine_prefer_entity( @@ -1986,3 +1836,102 @@ async def test_default_engine_prefer_cloud_entity( provider_engine = tts.async_resolve_engine(hass, "test") assert provider_engine == "test" assert tts.async_default_engine(hass) == "tts.cloud_tts_entity" + + +async def test_stream(hass: HomeAssistant, mock_tts_entity: MockTTSEntity) -> None: + """Test creating streams.""" + await mock_config_entry_setup(hass, mock_tts_entity) + stream = tts.async_create_stream(hass, mock_tts_entity.entity_id) + assert stream.language == mock_tts_entity.default_language + assert stream.options == (mock_tts_entity.default_options or {}) + assert tts.async_get_stream(hass, stream.token) is stream + + data = b"beer" + stream2 = MockResultStream(hass, "wav", data) + assert tts.async_get_stream(hass, stream2.token) is stream2 + assert stream2.extension == "wav" + result_data = b"".join([chunk async for chunk in stream2.async_stream_result()]) + assert result_data == data + + +async def test_tts_cache() -> None: + """Test TTSCache.""" + + async def data_gen(queue: asyncio.Queue[bytes | None | Exception]): + while chunk := await queue.get(): + if isinstance(chunk, Exception): + raise chunk + yield chunk + + queue = asyncio.Queue() + cache = tts.TTSCache("test-key", "mp3", data_gen(queue)) + assert cache.cache_key == "test-key" + assert cache.extension == "mp3" + + for i in range(10): + queue.put_nowait(f"{i}".encode()) + queue.put_nowait(None) + + assert await cache.async_load_data() == b"0123456789" + + with pytest.raises(RuntimeError): + await cache.async_load_data() + + # When data is loaded, we get it all in 1 chunk + cur = 0 + async for chunk in cache.async_stream_data(): + assert chunk == b"0123456789" + cur += 1 + assert cur == 1 + + # Show we can stream the data while it's still being generated + async def consume_cache(cache: tts.TTSCache): + return b"".join([chunk async for chunk in cache.async_stream_data()]) + + queue = asyncio.Queue() + cache = tts.TTSCache("test-key", "mp3", data_gen(queue)) + + load_data_task = asyncio.create_task(cache.async_load_data()) + consume_pre_data_loaded_task = asyncio.create_task(consume_cache(cache)) + queue.put_nowait(b"0") + await asyncio.sleep(0) + queue.put_nowait(b"1") + await asyncio.sleep(0) + consume_mid_data_task = asyncio.create_task(consume_cache(cache)) + queue.put_nowait(b"2") + await asyncio.sleep(0) + queue.put_nowait(None) + consume_post_data_loaded_task = asyncio.create_task(consume_cache(cache)) + await asyncio.sleep(0) + assert await load_data_task == b"012" + assert await 
consume_post_data_loaded_task == b"012" + assert await consume_mid_data_task == b"012" + assert await consume_pre_data_loaded_task == b"012" + + # Now with errors + async def consume_cache(cache: tts.TTSCache): + return b"".join([chunk async for chunk in cache.async_stream_data()]) + + queue = asyncio.Queue() + cache = tts.TTSCache("test-key", "mp3", data_gen(queue)) + + load_data_task = asyncio.create_task(cache.async_load_data()) + consume_pre_data_loaded_task = asyncio.create_task(consume_cache(cache)) + queue.put_nowait(b"0") + await asyncio.sleep(0) + queue.put_nowait(b"1") + await asyncio.sleep(0) + consume_mid_data_task = asyncio.create_task(consume_cache(cache)) + queue.put_nowait(ValueError("Boom!")) + await asyncio.sleep(0) + queue.put_nowait(None) + consume_post_data_loaded_task = asyncio.create_task(consume_cache(cache)) + await asyncio.sleep(0) + with pytest.raises(ValueError): + assert await load_data_task == b"012" + with pytest.raises(ValueError): + assert await consume_post_data_loaded_task == b"012" + with pytest.raises(ValueError): + assert await consume_mid_data_task == b"012" + with pytest.raises(ValueError): + assert await consume_pre_data_loaded_task == b"012" diff --git a/tests/components/tts/test_media_source.py b/tests/components/tts/test_media_source.py index d90923b02ab..9e50cc6b512 100644 --- a/tests/components/tts/test_media_source.py +++ b/tests/components/tts/test_media_source.py @@ -268,7 +268,7 @@ async def test_generate_media_source_id_and_media_source_id_to_kwargs( "message": "hello", "language": "en_US", "options": {"age": 5}, - "cache": True, + "use_file_cache": True, } kwargs = { @@ -284,7 +284,7 @@ async def test_generate_media_source_id_and_media_source_id_to_kwargs( "message": "hello", "language": "en_US", "options": {"age": [5, 6]}, - "cache": True, + "use_file_cache": True, } kwargs = { @@ -300,5 +300,5 @@ async def test_generate_media_source_id_and_media_source_id_to_kwargs( "message": "hello", "language": "en_US", "options": {"age": {"k1": [5, 6], "k2": "v2"}}, - "cache": True, + "use_file_cache": True, } diff --git a/tests/components/uk_transport/test_sensor.py b/tests/components/uk_transport/test_sensor.py index a4a9aea18c8..ba547c5eecc 100644 --- a/tests/components/uk_transport/test_sensor.py +++ b/tests/components/uk_transport/test_sensor.py @@ -8,6 +8,7 @@ import requests_mock from homeassistant.components.uk_transport.sensor import ( ATTR_ATCOCODE, ATTR_CALLING_AT, + ATTR_LAST_UPDATED, ATTR_LOCALITY, ATTR_NEXT_BUSES, ATTR_NEXT_TRAINS, @@ -90,3 +91,4 @@ async def test_train(hass: HomeAssistant) -> None: == "London Waterloo" ) assert train_state.attributes[ATTR_NEXT_TRAINS][0]["estimated"] == "06:13" + assert train_state.attributes[ATTR_LAST_UPDATED] == "2017-07-10T06:10:05+01:00" diff --git a/tests/components/usb/__init__.py b/tests/components/usb/__init__.py index 96d671d0958..6db0cea1ffe 100644 --- a/tests/components/usb/__init__.py +++ b/tests/components/usb/__init__.py @@ -1,44 +1,29 @@ """Tests for the USB Discovery integration.""" -from homeassistant.components.usb.models import USBDevice +from unittest.mock import patch -conbee_device = USBDevice( - device="/dev/cu.usbmodemDE24338801", - vid="1CF1", - pid="0030", - serial_number="DE2433880", - manufacturer="dresden elektronik ingenieurtechnik GmbH", - description="ConBee II", -) -slae_sh_device = USBDevice( - device="/dev/cu.usbserial-110", - vid="10C4", - pid="EA60", - serial_number="00_12_4B_00_22_98_88_7F", - manufacturer="Silicon Labs", - description="slae.sh cc2652rb stick - 
slaesh's iot stuff", -) -electro_lama_device = USBDevice( - device="/dev/cu.usbserial-110", - vid="1A86", - pid="7523", - serial_number=None, - manufacturer=None, - description="USB2.0-Serial", -) -skyconnect_macos_correct = USBDevice( - device="/dev/cu.SLAB_USBtoUART", - vid="10C4", - pid="EA60", - serial_number="9ab1da1ea4b3ed11956f4eaca7669f5d", - manufacturer="Nabu Casa", - description="SkyConnect v1.0", -) -skyconnect_macos_incorrect = USBDevice( - device="/dev/cu.usbserial-2110", - vid="10C4", - pid="EA60", - serial_number="9ab1da1ea4b3ed11956f4eaca7669f5d", - manufacturer="Nabu Casa", - description="SkyConnect v1.0", -) +from aiousbwatcher import InotifyNotAvailableError +import pytest + +from homeassistant.components.usb import async_request_scan as usb_async_request_scan +from homeassistant.core import HomeAssistant + + +@pytest.fixture(name="force_usb_polling_watcher") +def force_usb_polling_watcher(): + """Patch the USB integration to not use inotify and fall back to polling.""" + with patch( + "homeassistant.components.usb.AIOUSBWatcher.async_start", + side_effect=InotifyNotAvailableError, + ): + yield + + +def patch_scanned_serial_ports(**kwargs) -> None: + """Patch the USB integration's list of scanned serial ports.""" + return patch("homeassistant.components.usb.scan_serial_ports", **kwargs) + + +async def async_request_scan(hass: HomeAssistant) -> None: + """Request a USB scan.""" + return await usb_async_request_scan(hass) diff --git a/tests/components/usb/test_init.py b/tests/components/usb/test_init.py index 9730dba53d7..3a56e929b22 100644 --- a/tests/components/usb/test_init.py +++ b/tests/components/usb/test_init.py @@ -7,31 +7,40 @@ import os from typing import Any from unittest.mock import MagicMock, Mock, call, patch, sentinel -from aiousbwatcher import InotifyNotAvailableError import pytest from homeassistant.components import usb -from homeassistant.components.usb.utils import usb_device_from_port +from homeassistant.components.usb.models import USBDevice from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.helpers.service_info.usb import UsbServiceInfo from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util -from . import conbee_device, slae_sh_device +from . 
import ( + force_usb_polling_watcher, # noqa: F401 + patch_scanned_serial_ports, +) from tests.common import async_fire_time_changed, import_and_test_deprecated_constant from tests.typing import WebSocketGenerator - -@pytest.fixture(name="aiousbwatcher_no_inotify") -def aiousbwatcher_no_inotify(): - """Patch AIOUSBWatcher to not use inotify.""" - with patch( - "homeassistant.components.usb.AIOUSBWatcher.async_start", - side_effect=InotifyNotAvailableError, - ): - yield +conbee_device = USBDevice( + device="/dev/cu.usbmodemDE24338801", + vid="1CF1", + pid="0030", + serial_number="DE2433880", + manufacturer="dresden elektronik ingenieurtechnik GmbH", + description="ConBee II", +) +slae_sh_device = USBDevice( + device="/dev/cu.usbserial-110", + vid="10C4", + pid="EA60", + serial_number="00_12_4B_00_22_98_88_7F", + manufacturer="Silicon Labs", + description="slae.sh cc2652rb stick - slaesh's iot stuff", +) async def test_aiousbwatcher_discovery( @@ -40,11 +49,11 @@ async def test_aiousbwatcher_discovery( """Test that aiousbwatcher can discover a device without raising an exception.""" new_usb = [{"domain": "test1", "vid": "3039"}, {"domain": "test2", "vid": "0FA0"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -63,7 +72,7 @@ async def test_aiousbwatcher_discovery( with ( patch("sys.platform", "linux"), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch( "homeassistant.components.usb.AIOUSBWatcher", return_value=MockAIOUSBWatcher ), @@ -81,11 +90,11 @@ async def test_aiousbwatcher_discovery( await hass.async_block_till_done() assert len(mock_config_flow.mock_calls) == 1 - mock_comports.append( - MagicMock( + mock_ports.append( + USBDevice( device=slae_sh_device.device, - vid=4000, - pid=4000, + vid="0FA0", + pid="0FA0", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -107,7 +116,7 @@ async def test_aiousbwatcher_discovery( await hass.async_block_till_done() -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_polling_discovery( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -115,19 +124,19 @@ async def test_polling_discovery( new_usb = [{"domain": "test1", "vid": "3039"}] mock_comports_found_device = asyncio.Event() - def get_comports() -> list: - nonlocal mock_comports + def scan_serial_ports() -> list: + nonlocal mock_ports # Only "find" a device after a few invocations - if len(mock_comports.mock_calls) < 5: + if len(mock_ports.mock_calls) < 5: return [] mock_comports_found_device.set() return [ - MagicMock( + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -141,9 +150,7 @@ async def test_polling_discovery( timedelta(seconds=0.01), ), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch( - "homeassistant.components.usb.comports", side_effect=get_comports - ) as mock_comports, + 
patch_scanned_serial_ports(side_effect=scan_serial_ports) as mock_ports, patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -163,16 +170,16 @@ async def test_polling_discovery( await hass.async_block_till_done() -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_removal_by_aiousbwatcher_before_started(hass: HomeAssistant) -> None: """Test a device is removed by the aiousbwatcher before started.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -181,13 +188,13 @@ async def test_removal_by_aiousbwatcher_before_started(hass: HomeAssistant) -> N with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) await hass.async_block_till_done() - with patch("homeassistant.components.usb.comports", return_value=[]): + with patch_scanned_serial_ports(return_value=[]): hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() @@ -197,18 +204,18 @@ async def test_removal_by_aiousbwatcher_before_started(hass: HomeAssistant) -> N await hass.async_block_till_done() -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a device is discovered from websocket scan.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -217,7 +224,7 @@ async def test_discovered_by_websocket_scan( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -234,7 +241,7 @@ async def test_discovered_by_websocket_scan( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_limited_by_description_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -243,11 +250,11 @@ async def test_discovered_by_websocket_scan_limited_by_description_matcher( {"domain": "test1", "vid": "3039", "pid": "3039", "description": "*2652*"} ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, 
description=slae_sh_device.description, @@ -256,7 +263,7 @@ async def test_discovered_by_websocket_scan_limited_by_description_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -273,7 +280,7 @@ async def test_discovered_by_websocket_scan_limited_by_description_matcher( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_most_targeted_matcher_wins( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -283,11 +290,11 @@ async def test_most_targeted_matcher_wins( {"domain": "more", "vid": "3039", "pid": "3039", "description": "*2652*"}, ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -296,7 +303,7 @@ async def test_most_targeted_matcher_wins( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -313,7 +320,7 @@ async def test_most_targeted_matcher_wins( assert mock_config_flow.mock_calls[0][1][0] == "more" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_rejected_by_description_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -322,11 +329,11 @@ async def test_discovered_by_websocket_scan_rejected_by_description_matcher( {"domain": "test1", "vid": "3039", "pid": "3039", "description": "*not_it*"} ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -335,7 +342,7 @@ async def test_discovered_by_websocket_scan_rejected_by_description_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -351,7 +358,7 @@ async def test_discovered_by_websocket_scan_rejected_by_description_matcher( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -365,11 +372,11 @@ async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( } ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, 
- vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -378,7 +385,7 @@ async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -395,7 +402,7 @@ async def test_discovered_by_websocket_scan_limited_by_serial_number_matcher( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -404,11 +411,11 @@ async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( {"domain": "test1", "vid": "3039", "pid": "3039", "serial_number": "123*"} ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -417,7 +424,7 @@ async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -433,7 +440,7 @@ async def test_discovered_by_websocket_scan_rejected_by_serial_number_matcher( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -447,11 +454,11 @@ async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( } ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=conbee_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, @@ -460,7 +467,7 @@ async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -477,7 +484,7 @@ async def test_discovered_by_websocket_scan_limited_by_manufacturer_matcher( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( hass: HomeAssistant, hass_ws_client: 
WebSocketGenerator ) -> None: @@ -491,11 +498,11 @@ async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( } ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=conbee_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, @@ -504,7 +511,7 @@ async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -520,7 +527,7 @@ async def test_discovered_by_websocket_scan_rejected_by_manufacturer_matcher( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -529,11 +536,11 @@ async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( {"domain": "test1", "vid": "3039", "pid": "3039", "serial_number": "123*"} ] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=conbee_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=None, manufacturer=None, description=None, @@ -542,7 +549,7 @@ async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -558,18 +565,18 @@ async def test_discovered_by_websocket_rejected_with_empty_serial_number_only( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_scan_match_vid_only( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a device is discovered from websocket scan only matching vid.""" new_usb = [{"domain": "test1", "vid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -578,7 +585,7 @@ async def test_discovered_by_websocket_scan_match_vid_only( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -595,18 +602,18 @@ async def test_discovered_by_websocket_scan_match_vid_only( assert mock_config_flow.mock_calls[0][1][0] == "test1" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def 
test_discovered_by_websocket_scan_match_vid_wrong_pid( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a device is discovered from websocket scan only matching vid but wrong pid.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "9999"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -615,7 +622,7 @@ async def test_discovered_by_websocket_scan_match_vid_wrong_pid( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -631,15 +638,15 @@ async def test_discovered_by_websocket_scan_match_vid_wrong_pid( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_discovered_by_websocket_no_vid_pid( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a device is discovered from websocket scan with no vid or pid.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "9999"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, vid=None, pid=None, @@ -651,7 +658,7 @@ async def test_discovered_by_websocket_no_vid_pid( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -667,18 +674,18 @@ async def test_discovered_by_websocket_no_vid_pid( assert len(mock_config_flow.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_non_matching_discovered_by_scanner_after_started( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test a websocket scan that does not match.""" new_usb = [{"domain": "test1", "vid": "4444", "pid": "4444"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -687,7 +694,7 @@ async def test_non_matching_discovered_by_scanner_after_started( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -709,11 +716,11 @@ async def test_aiousbwatcher_on_wsl_fallback_without_throwing_exception( """Test that aiousbwatcher on WSL failure results in fallback to scanning without raising an exception.""" new_usb = [{"domain": "test1", "vid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - 
pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -722,7 +729,7 @@ async def test_aiousbwatcher_on_wsl_fallback_without_throwing_exception( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -743,17 +750,17 @@ async def test_discovered_by_aiousbwatcher_before_started(hass: HomeAssistant) - """Test a device is discovered since aiousbwatcher is now running.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ) ] - initial_mock_comports = [] + initial_ports = [] aiousbwatcher_callback = None def async_register_callback(callback): @@ -766,9 +773,7 @@ async def test_discovered_by_aiousbwatcher_before_started(hass: HomeAssistant) - with ( patch("sys.platform", "linux"), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch( - "homeassistant.components.usb.comports", return_value=initial_mock_comports - ), + patch_scanned_serial_ports(return_value=initial_ports), patch( "homeassistant.components.usb.AIOUSBWatcher", return_value=MockAIOUSBWatcher ), @@ -782,7 +787,7 @@ async def test_discovered_by_aiousbwatcher_before_started(hass: HomeAssistant) - assert len(mock_config_flow.mock_calls) == 0 - initial_mock_comports.extend(mock_comports) + initial_ports.extend(mock_ports) aiousbwatcher_callback() await hass.async_block_till_done() @@ -874,18 +879,18 @@ def test_human_readable_device_name() -> None: assert "8A2A" in name -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_async_is_plugged_in( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test async_is_plugged_in.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -899,7 +904,7 @@ async def test_async_is_plugged_in( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -909,7 +914,7 @@ async def test_async_is_plugged_in( assert not usb.async_is_plugged_in(hass, matcher) with ( - patch("homeassistant.components.usb.comports", return_value=mock_comports), + patch_scanned_serial_ports(return_value=mock_ports), patch.object(hass.config_entries.flow, "async_init"), ): ws_client = await hass_ws_client(hass) @@ -920,7 +925,7 @@ async def test_async_is_plugged_in( assert usb.async_is_plugged_in(hass, matcher) -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") 
+@pytest.mark.usefixtures("force_usb_polling_watcher") @pytest.mark.parametrize( "matcher", [ @@ -940,7 +945,7 @@ async def test_async_is_plugged_in_case_enforcement( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -952,7 +957,7 @@ async def test_async_is_plugged_in_case_enforcement( usb.async_is_plugged_in(hass, matcher) -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_web_socket_triggers_discovery_request_callbacks( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -961,7 +966,7 @@ async def test_web_socket_triggers_discovery_request_callbacks( with ( patch("homeassistant.components.usb.async_get_usb", return_value=[]), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -987,7 +992,7 @@ async def test_web_socket_triggers_discovery_request_callbacks( assert len(mock_callback.mock_calls) == 1 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_initial_scan_callback( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -997,7 +1002,7 @@ async def test_initial_scan_callback( with ( patch("homeassistant.components.usb.async_get_usb", return_value=[]), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1023,7 +1028,7 @@ async def test_initial_scan_callback( cancel_2() -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_cancel_initial_scan_callback( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1032,7 +1037,7 @@ async def test_cancel_initial_scan_callback( with ( patch("homeassistant.components.usb.async_get_usb", return_value=[]), - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), patch.object(hass.config_entries.flow, "async_init"), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1049,18 +1054,18 @@ async def test_cancel_initial_scan_callback( assert len(mock_callback.mock_calls) == 0 -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") async def test_resolve_serial_by_id( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test the discovery data resolves to serial/by-id.""" new_usb = [{"domain": "test1", "vid": "3039", "pid": "3039"}] - mock_comports = [ - MagicMock( + mock_ports = [ + USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, @@ -1069,7 +1074,7 @@ async def test_resolve_serial_by_id( with ( patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=mock_comports), + 
patch_scanned_serial_ports(return_value=mock_ports), patch( "homeassistant.components.usb.get_serial_by_id", return_value="/dev/serial/by-id/bla", @@ -1091,73 +1096,73 @@ async def test_resolve_serial_by_id( assert mock_config_flow.mock_calls[0][2]["data"].device == "/dev/serial/by-id/bla" -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") @pytest.mark.parametrize( "ports", [ [ - MagicMock( + USBDevice( device="/dev/cu.usbserial-2120", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.usbserial-1120", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.SLAB_USBtoUART", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.SLAB_USBtoUART2", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ), ], [ - MagicMock( + USBDevice( device="/dev/cu.SLAB_USBtoUART2", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.SLAB_USBtoUART", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.usbserial-1120", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ), - MagicMock( + USBDevice( device="/dev/cu.usbserial-2120", - vid=0x3039, - pid=0x3039, + vid="3039", + pid="3039", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, @@ -1177,7 +1182,7 @@ async def test_cp2102n_ordering_on_macos( with ( patch("sys.platform", "darwin"), patch("homeassistant.components.usb.async_get_usb", return_value=new_usb), - patch("homeassistant.components.usb.comports", return_value=ports), + patch_scanned_serial_ports(return_value=ports), patch.object(hass.config_entries.flow, "async_init") as mock_config_flow, ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1224,34 +1229,31 @@ def test_deprecated_constants( ) -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") @patch("homeassistant.components.usb.REQUEST_SCAN_COOLDOWN", 0) async def test_register_port_event_callback( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test the registration of a port event callback.""" - port1 = Mock( + port1 = USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ) - port2 = Mock( + port2 = USBDevice( device=conbee_device.device, - vid=12346, - 
pid=12346, + vid="303A", + pid="303A", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ) - port1_usb = usb_device_from_port(port1) - port2_usb = usb_device_from_port(port2) - ws_client = await hass_ws_client(hass) mock_callback1 = Mock() @@ -1259,7 +1261,7 @@ async def test_register_port_event_callback( # Start off with no ports with ( - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1270,13 +1272,13 @@ async def test_register_port_event_callback( assert mock_callback2.mock_calls == [] # Add two new ports - with patch("homeassistant.components.usb.comports", return_value=[port1, port2]): + with patch_scanned_serial_ports(return_value=[port1, port2]): await ws_client.send_json({"id": 1, "type": "usb/scan"}) response = await ws_client.receive_json() assert response["success"] - assert mock_callback1.mock_calls == [call({port1_usb, port2_usb}, set())] - assert mock_callback2.mock_calls == [call({port1_usb, port2_usb}, set())] + assert mock_callback1.mock_calls == [call({port1, port2}, set())] + assert mock_callback2.mock_calls == [call({port1, port2}, set())] # Cancel the second callback cancel2() @@ -1286,20 +1288,20 @@ async def test_register_port_event_callback( mock_callback2.reset_mock() # Remove port 2 - with patch("homeassistant.components.usb.comports", return_value=[port1]): + with patch_scanned_serial_ports(return_value=[port1]): await ws_client.send_json({"id": 2, "type": "usb/scan"}) response = await ws_client.receive_json() assert response["success"] await hass.async_block_till_done() - assert mock_callback1.mock_calls == [call(set(), {port2_usb})] + assert mock_callback1.mock_calls == [call(set(), {port2})] assert mock_callback2.mock_calls == [] # The second callback was unregistered mock_callback1.reset_mock() mock_callback2.reset_mock() # Keep port 2 removed - with patch("homeassistant.components.usb.comports", return_value=[port1]): + with patch_scanned_serial_ports(return_value=[port1]): await ws_client.send_json({"id": 3, "type": "usb/scan"}) response = await ws_client.receive_json() assert response["success"] @@ -1310,17 +1312,17 @@ async def test_register_port_event_callback( assert mock_callback2.mock_calls == [] # Unplug one and plug in the other - with patch("homeassistant.components.usb.comports", return_value=[port2]): + with patch_scanned_serial_ports(return_value=[port2]): await ws_client.send_json({"id": 4, "type": "usb/scan"}) response = await ws_client.receive_json() assert response["success"] await hass.async_block_till_done() - assert mock_callback1.mock_calls == [call({port2_usb}, {port1_usb})] + assert mock_callback1.mock_calls == [call({port2}, {port1})] assert mock_callback2.mock_calls == [] -@pytest.mark.usefixtures("aiousbwatcher_no_inotify") +@pytest.mark.usefixtures("force_usb_polling_watcher") @patch("homeassistant.components.usb.REQUEST_SCAN_COOLDOWN", 0) async def test_register_port_event_callback_failure( hass: HomeAssistant, @@ -1329,27 +1331,24 @@ async def test_register_port_event_callback_failure( ) -> None: """Test port event callback failure handling.""" - port1 = Mock( + port1 = USBDevice( device=slae_sh_device.device, - vid=12345, - pid=12345, + vid="3039", + pid="3039", serial_number=slae_sh_device.serial_number, manufacturer=slae_sh_device.manufacturer, description=slae_sh_device.description, ) - port2 = Mock( + port2 = USBDevice( 
device=conbee_device.device, - vid=12346, - pid=12346, + vid="303A", + pid="303A", serial_number=conbee_device.serial_number, manufacturer=conbee_device.manufacturer, description=conbee_device.description, ) - port1_usb = usb_device_from_port(port1) - port2_usb = usb_device_from_port(port2) - ws_client = await hass_ws_client(hass) mock_callback1 = Mock(side_effect=RuntimeError("Failure 1")) @@ -1357,7 +1356,7 @@ async def test_register_port_event_callback_failure( # Start off with no ports with ( - patch("homeassistant.components.usb.comports", return_value=[]), + patch_scanned_serial_ports(return_value=[]), ): assert await async_setup_component(hass, "usb", {"usb": {}}) @@ -1369,7 +1368,7 @@ async def test_register_port_event_callback_failure( # Add two new ports with ( - patch("homeassistant.components.usb.comports", return_value=[port1, port2]), + patch_scanned_serial_ports(return_value=[port1, port2]), caplog.at_level(logging.ERROR, logger="homeassistant.components.usb"), ): await ws_client.send_json({"id": 1, "type": "usb/scan"}) @@ -1378,8 +1377,8 @@ async def test_register_port_event_callback_failure( await hass.async_block_till_done() # Both were called even though they raised exceptions - assert mock_callback1.mock_calls == [call({port1_usb, port2_usb}, set())] - assert mock_callback2.mock_calls == [call({port1_usb, port2_usb}, set())] + assert mock_callback1.mock_calls == [call({port1, port2}, set())] + assert mock_callback2.mock_calls == [call({port1, port2}, set())] assert caplog.text.count("Error in USB port event callback") == 2 assert "Failure 1" in caplog.text diff --git a/tests/components/vacuum/test_init.py b/tests/components/vacuum/test_init.py index 8ae054b5646..967b9672805 100644 --- a/tests/components/vacuum/test_init.py +++ b/tests/components/vacuum/test_init.py @@ -5,7 +5,6 @@ from __future__ import annotations from enum import Enum from types import ModuleType from typing import Any -from unittest.mock import patch import pytest @@ -25,7 +24,6 @@ from homeassistant.components.vacuum import ( VacuumEntityFeature, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import frame from . 
import MockVacuum, help_async_setup_entry_init, help_async_unload_entry from .common import async_start @@ -326,7 +324,6 @@ async def test_vacuum_not_log_deprecated_state_warning( @pytest.mark.usefixtures("mock_as_custom_component") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_vacuum_log_deprecated_state_warning_using_state_prop( hass: HomeAssistant, config_flow_fixture: None, @@ -356,6 +353,7 @@ async def test_vacuum_log_deprecated_state_warning_using_state_prop( async_setup_entry=help_async_setup_entry_init, async_unload_entry=help_async_unload_entry, ), + built_in=False, ) setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -370,7 +368,6 @@ async def test_vacuum_log_deprecated_state_warning_using_state_prop( @pytest.mark.usefixtures("mock_as_custom_component") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_vacuum_log_deprecated_state_warning_using_attr_state_attr( hass: HomeAssistant, config_flow_fixture: None, @@ -399,6 +396,7 @@ async def test_vacuum_log_deprecated_state_warning_using_attr_state_attr( async_setup_entry=help_async_setup_entry_init, async_unload_entry=help_async_unload_entry, ), + built_in=False, ) setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -427,8 +425,7 @@ async def test_vacuum_log_deprecated_state_warning_using_attr_state_attr( @pytest.mark.usefixtures("mock_as_custom_component") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) -async def test_alarm_control_panel_deprecated_state_does_not_break_state( +async def test_vacuum_deprecated_state_does_not_break_state( hass: HomeAssistant, config_flow_fixture: None, caplog: pytest.LogCaptureFixture, @@ -463,6 +460,7 @@ async def test_alarm_control_panel_deprecated_state_does_not_break_state( async_setup_entry=help_async_setup_entry_init, async_unload_entry=help_async_unload_entry, ), + built_in=False, ) setup_test_component_platform(hass, VACUUM_DOMAIN, [entity], from_config_entry=True) assert await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/vesync/test_select.py b/tests/components/vesync/test_select.py index 30c83c89e0e..c96d687dfd2 100644 --- a/tests/components/vesync/test_select.py +++ b/tests/components/vesync/test_select.py @@ -7,8 +7,10 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.components.vesync.const import NIGHT_LIGHT_LEVEL_DIM -from homeassistant.components.vesync.select import HA_TO_VS_NIGHT_LIGHT_LEVEL_MAP +from homeassistant.components.vesync.const import HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM +from homeassistant.components.vesync.select import ( + HA_TO_VS_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP, +) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -18,24 +20,24 @@ from .common import ENTITY_HUMIDIFIER_300S_NIGHT_LIGHT_SELECT @pytest.mark.parametrize( "install_humidifier_device", ["humidifier_300s"], indirect=True ) -async def test_set_nightlight_level( +async def test_humidifier_set_nightlight_level( hass: HomeAssistant, manager, humidifier_300s, install_humidifier_device ) -> None: - """Test set of night light level.""" + """Test set of humidifier night light level.""" await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: 
ENTITY_HUMIDIFIER_300S_NIGHT_LIGHT_SELECT, - ATTR_OPTION: NIGHT_LIGHT_LEVEL_DIM, + ATTR_OPTION: HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM, }, blocking=True, ) # Assert that setter API was invoked with the expected translated value humidifier_300s.set_night_light_brightness.assert_called_once_with( - HA_TO_VS_NIGHT_LIGHT_LEVEL_MAP[NIGHT_LIGHT_LEVEL_DIM] + HA_TO_VS_HUMIDIFIER_NIGHT_LIGHT_LEVEL_MAP[HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM] ) # Assert that devices were refreshed manager.update_all_devices.assert_called_once() @@ -44,11 +46,13 @@ async def test_set_nightlight_level( @pytest.mark.parametrize( "install_humidifier_device", ["humidifier_300s"], indirect=True ) -async def test_nightlight_level(hass: HomeAssistant, install_humidifier_device) -> None: - """Test the state of night light level select entity.""" +async def test_humidifier_nightlight_level( + hass: HomeAssistant, install_humidifier_device +) -> None: + """Test the state of humidifier night light level select entity.""" # The mocked device has night_light_brightness=50 which is "dim" assert ( hass.states.get(ENTITY_HUMIDIFIER_300S_NIGHT_LIGHT_SELECT).state - == NIGHT_LIGHT_LEVEL_DIM + == HUMIDIFIER_NIGHT_LIGHT_LEVEL_DIM ) diff --git a/tests/components/vodafone_station/test_button.py b/tests/components/vodafone_station/test_button.py index d5f377d3f6f..ade5eb78965 100644 --- a/tests/components/vodafone_station/test_button.py +++ b/tests/components/vodafone_station/test_button.py @@ -2,11 +2,20 @@ from unittest.mock import AsyncMock, patch +from aiovodafone.exceptions import ( + AlreadyLogged, + CannotAuthenticate, + CannotConnect, + GenericLoginError, +) +import pytest from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.vodafone_station.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . 
import setup_integration @@ -46,3 +55,39 @@ async def test_pressing_button( blocking=True, ) mock_vodafone_station_router.restart_router.assert_called_once() + + +@pytest.mark.parametrize( + ("side_effect", "key", "error"), + [ + (CannotConnect, "cannot_execute_action", "CannotConnect()"), + (AlreadyLogged, "cannot_execute_action", "AlreadyLogged()"), + (GenericLoginError, "cannot_execute_action", "GenericLoginError()"), + (CannotAuthenticate, "cannot_authenticate", "CannotAuthenticate()"), + ], +) +async def test_button_fails( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + key: str, + error: str, +) -> None: + """Test button action fails.""" + + await setup_integration(hass, mock_config_entry) + + mock_vodafone_station_router.restart_router.side_effect = side_effect + + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.vodafone_station_m123456789_restart"}, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == key + assert exc_info.value.translation_placeholders == {"error": error} diff --git a/tests/components/vodafone_station/test_config_flow.py b/tests/components/vodafone_station/test_config_flow.py index 68f8247bdf9..0648987eb27 100644 --- a/tests/components/vodafone_station/test_config_flow.py +++ b/tests/components/vodafone_station/test_config_flow.py @@ -228,3 +228,75 @@ async def test_options_flow( assert result["data"] == { CONF_CONSIDER_HOME: 37, } + + +async def test_reconfigure_successful( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the host can be reconfigured.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + # original entry + assert mock_config_entry.data["host"] == "fake_host" + + reconfigure_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "192.168.100.60", + "password": "fake_password", + "username": "fake_username", + }, + ) + + assert reconfigure_result["type"] is FlowResultType.ABORT + assert reconfigure_result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data["host"] == "192.168.100.60" + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (CannotConnect, "cannot_connect"), + (CannotAuthenticate, "invalid_auth"), + (AlreadyLogged, "already_logged"), + (ConnectionResetError, "unknown"), + ], +) +async def test_reconfigure_fails( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + error: str, +) -> None: + """Test that reconfiguring the host fails on error.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + mock_vodafone_station_router.login.side_effect = side_effect + + reconfigure_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "host": "192.168.100.60", + "password": "fake_password", + "username": "fake_username", + }, + ) + + assert reconfigure_result["type"] is FlowResultType.FORM
+ assert reconfigure_result["step_id"] == "reconfigure" + assert reconfigure_result["errors"] == {"base": error} diff --git a/tests/components/vodafone_station/test_init.py b/tests/components/vodafone_station/test_init.py new file mode 100644 index 00000000000..12b3c3dce8f --- /dev/null +++ b/tests/components/vodafone_station/test_init.py @@ -0,0 +1,33 @@ +"""Tests for Vodafone Station init.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.device_tracker import CONF_CONSIDER_HOME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_reload_config_entry_with_options( + hass: HomeAssistant, + mock_vodafone_station_router: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the config entry is reloaded with options.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_CONSIDER_HOME: 37, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_CONSIDER_HOME: 37, + } diff --git a/tests/components/voicerss/test_tts.py b/tests/components/voicerss/test_tts.py index 776c0ac153a..e6a30d7fac2 100644 --- a/tests/components/voicerss/test_tts.py +++ b/tests/components/voicerss/test_tts.py @@ -200,7 +200,7 @@ async def test_service_say_error( assert ( await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) assert len(aioclient_mock.mock_calls) == 1 assert aioclient_mock.mock_calls[0][2] == FORM_DATA @@ -234,7 +234,7 @@ async def test_service_say_timeout( assert ( await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) assert len(aioclient_mock.mock_calls) == 1 assert aioclient_mock.mock_calls[0][2] == FORM_DATA @@ -273,7 +273,7 @@ async def test_service_say_error_msg( assert ( await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) assert len(aioclient_mock.mock_calls) == 1 assert aioclient_mock.mock_calls[0][2] == FORM_DATA diff --git a/tests/components/voip/test_assist_satellite.py b/tests/components/voip/test_assist_satellite.py new file mode 100644 index 00000000000..f3e2611631e --- /dev/null +++ b/tests/components/voip/test_assist_satellite.py @@ -0,0 +1,62 @@ +"""Test the Assist Satellite platform.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.voip.devices import VoIPDevice +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent as intent_helper + + +@pytest.mark.parametrize( + ("intent_args", "message"), + [ + ( + {}, + "0:02:00 timer finished", + ), + ( + {"name": {"value": "pizza"}}, + "pizza finished", + ), + ], +) +async def test_timer_events( + hass: HomeAssistant, voip_device: VoIPDevice, intent_args: dict, message: str ) -> None: + """Test for timer events.""" + + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "minutes": {"value": 2}, + } + | intent_args, + device_id=voip_device.device_id, + ) + + with ( + patch(
"homeassistant.components.voip.assist_satellite.VoipAssistSatellite._resolve_announcement_media_id", + ) as mock_resolve, + patch( + "homeassistant.components.voip.assist_satellite.VoipAssistSatellite.async_announce", + ) as mock_announce, + ): + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_DECREASE_TIMER, + { + "minutes": {"value": 2}, + }, + device_id=voip_device.device_id, + ) + await hass.async_block_till_done(wait_background_tasks=True) + + assert len(mock_resolve.mock_calls) == 1 + assert len(mock_announce.mock_calls) == 1 + assert mock_resolve.mock_calls[0][1][0] == message diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index 3e3e5337417..459ab020336 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -22,10 +22,13 @@ from homeassistant.components.voip.devices import VoIPDevice, VoIPDevices from homeassistant.components.voip.voip import PreRecordMessageProtocol, make_protocol from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import Context, HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import EntityComponent from homeassistant.setup import async_setup_component +from tests.components.tts.common import MockResultStream + _ONE_SECOND = 16000 * 2 # 16Khz 16-bit _MEDIA_ID = "12345" @@ -862,9 +865,23 @@ async def test_announce( & assist_satellite.AssistSatelliteEntityFeature.ANNOUNCE ) + with pytest.raises(HomeAssistantError) as err: + await hass.services.async_call( + "assist_satellite", + "announce", + service_data={"media_id": "http://example.com"}, + blocking=True, + target={ + "entity_id": satellite.entity_id, + }, + ) + assert err.value.translation_domain == "voip" + assert err.value.translation_key == "non_tts_announcement" + announcement = assist_satellite.AssistSatelliteAnnouncement( message="test announcement", media_id=_MEDIA_ID, + tts_token="test-token", original_media_id=_MEDIA_ID, media_id_source="tts", ) @@ -912,6 +929,7 @@ async def test_voip_id_is_ip_address( announcement = assist_satellite.AssistSatelliteAnnouncement( message="test announcement", media_id=_MEDIA_ID, + tts_token="test-token", original_media_id=_MEDIA_ID, media_id_source="tts", ) @@ -964,6 +982,7 @@ async def test_announce_timeout( announcement = assist_satellite.AssistSatelliteAnnouncement( message="test announcement", media_id=_MEDIA_ID, + tts_token="test-token", original_media_id=_MEDIA_ID, media_id_source="tts", ) @@ -1004,6 +1023,7 @@ async def test_start_conversation( announcement = assist_satellite.AssistSatelliteAnnouncement( message="test announcement", media_id=_MEDIA_ID, + tts_token="test-token", original_media_id=_MEDIA_ID, media_id_source="tts", ) @@ -1148,8 +1168,16 @@ async def test_start_conversation_user_doesnt_pick_up( new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", - return_value="test media id", + "homeassistant.components.tts.generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.tts.async_resolve_engine", + return_value="test tts", + ), + patch( + "homeassistant.components.tts.async_create_stream", + return_value=MockResultStream(hass, "wav", b""), ), ): satellite.transport = Mock() diff --git a/tests/components/watergate/snapshots/test_event.ambr 
b/tests/components/watergate/snapshots/test_event.ambr new file mode 100644 index 00000000000..97f453697ca --- /dev/null +++ b/tests/components/watergate/snapshots/test_event.ambr @@ -0,0 +1,111 @@ +# serializer version: 1 +# name: test_event[event.sonic_duration_auto_shut_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'duration_threshold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.sonic_duration_auto_shut_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Duration auto shut-off', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_shut_off_duration', + 'unique_id': 'a63182948ce2896a.auto_shut_off_duration', + 'unit_of_measurement': None, + }) +# --- +# name: test_event[event.sonic_duration_auto_shut_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 'duration_threshold', + ]), + 'friendly_name': 'Sonic Duration auto shut-off', + }), + 'context': , + 'entity_id': 'event.sonic_duration_auto_shut_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_event[event.sonic_volume_auto_shut_off-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'event_types': list([ + 'volume_threshold', + ]), + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'event', + 'entity_category': None, + 'entity_id': 'event.sonic_volume_auto_shut_off', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume auto shut-off', + 'platform': 'watergate', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_shut_off_volume', + 'unique_id': 'a63182948ce2896a.auto_shut_off_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_event[event.sonic_volume_auto_shut_off-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'event_type': None, + 'event_types': list([ + 'volume_threshold', + ]), + 'friendly_name': 'Sonic Volume auto shut-off', + }), + 'context': , + 'entity_id': 'event.sonic_volume_auto_shut_off', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/watergate/test_event.py b/tests/components/watergate/test_event.py new file mode 100644 index 00000000000..6997c3f1fdf --- /dev/null +++ b/tests/components/watergate/test_event.py @@ -0,0 +1,84 @@ +"""Tests for the Watergate event entity platform.""" + +from collections.abc import Generator + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.typing import StateType + +from . 
import init_integration +from .const import MOCK_WEBHOOK_ID + +from tests.common import AsyncMock, MockConfigEntry, patch, snapshot_platform +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_event( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, +) -> None: + """Test states of the event entities.""" + freezer.move_to("2021-01-09 12:00:00+00:00") + with patch("homeassistant.components.watergate.PLATFORMS", [Platform.EVENT]): + await init_integration(hass, mock_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "event_type"), + [ + ("sonic_volume_auto_shut_off", "volume_threshold"), + ("sonic_duration_auto_shut_off", "duration_threshold"), + ], +) +async def test_auto_shut_off_webhook( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + mock_entry: MockConfigEntry, + mock_watergate_client: Generator[AsyncMock], + entity_id: str, + event_type: str, +) -> None: + """Test if the auto shut-off webhook is handled correctly.""" + await init_integration(hass, mock_entry) + + def assert_state(entity_id: str, expected_state: str): + state = hass.states.get(f"event.{entity_id}") + assert state.state == str(expected_state) + + assert_state(entity_id, "unknown") + + telemetry_change_data = { + "type": "auto-shut-off-report", + "data": { + "type": event_type, + "volume": 1500, + "duration": 30, + "timestamp": 1730148016, + }, + } + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{MOCK_WEBHOOK_ID}", json=telemetry_change_data) + + await hass.async_block_till_done() + + def assert_extra_state( + entity_id: str, attribute: str, expected_attribute: StateType + ): + attributes = hass.states.get(f"event.{entity_id}").attributes + assert attributes.get(attribute) == expected_attribute + + assert_extra_state(entity_id, "event_type", event_type) + assert_extra_state(entity_id, "volume", 1500) + assert_extra_state(entity_id, "duration", 30) diff --git a/tests/components/watergate/test_sensor.py b/tests/components/watergate/test_sensor.py index 78e375857ed..0bf883a1955 100644 --- a/tests/components/watergate/test_sensor.py +++ b/tests/components/watergate/test_sensor.py @@ -1,4 +1,4 @@ -"""Tests for the Watergate valve platform.""" +"""Tests for the Watergate sensor platform.""" from collections.abc import Generator diff --git a/tests/components/webdav/conftest.py b/tests/components/webdav/conftest.py index 645e2111364..5fa972e5fae 100644 --- a/tests/components/webdav/conftest.py +++ b/tests/components/webdav/conftest.py @@ -9,7 +9,7 @@ import pytest from homeassistant.components.webdav.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME -from .const import BACKUP_METADATA, MOCK_LIST_WITH_PROPERTIES +from .const import BACKUP_METADATA, MOCK_LIST_FILES from tests.common import MockConfigEntry @@ -58,7 +58,7 @@ def mock_webdav_client() -> Generator[AsyncMock]: mock = mock_webdav_client.return_value mock.check.return_value = True mock.mkdir.return_value = True - mock.list_with_properties.return_value = MOCK_LIST_WITH_PROPERTIES + mock.list_files.return_value = MOCK_LIST_FILES mock.download_iter.side_effect = _download_mock mock.upload_iter.return_value = None mock.clean.return_value = None diff --git
a/tests/components/webdav/const.py b/tests/components/webdav/const.py index 8d6b8ad67d7..0147826a777 100644 --- a/tests/components/webdav/const.py +++ b/tests/components/webdav/const.py @@ -1,7 +1,5 @@ """Constants for WebDAV tests.""" -from aiowebdav2 import Property - BACKUP_METADATA = { "addons": [], "backup_id": "23e64aec", @@ -16,18 +14,7 @@ BACKUP_METADATA = { "size": 34519040, } -MOCK_LIST_WITH_PROPERTIES = { - "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.tar": [], - "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.metadata.json": [ - Property( - namespace="https://home-assistant.io", - name="backup_id", - value="23e64aec", - ), - Property( - namespace="https://home-assistant.io", - name="metadata_version", - value="1", - ), - ], -} +MOCK_LIST_FILES = [ + "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.tar", + "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.metadata.json", +] diff --git a/tests/components/webdav/test_backup.py b/tests/components/webdav/test_backup.py index c20e73cc786..ca20467484f 100644 --- a/tests/components/webdav/test_backup.py +++ b/tests/components/webdav/test_backup.py @@ -6,7 +6,6 @@ from collections.abc import AsyncGenerator from io import StringIO from unittest.mock import Mock, patch -from aiowebdav2 import Property from aiowebdav2.exceptions import UnauthorizedError, WebDavError import pytest @@ -17,7 +16,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.backup import async_initialize_backup from homeassistant.setup import async_setup_component -from .const import BACKUP_METADATA, MOCK_LIST_WITH_PROPERTIES +from .const import BACKUP_METADATA from tests.common import AsyncMock, MockConfigEntry from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -184,7 +183,6 @@ async def test_agents_upload( assert resp.status == 201 assert webdav_client.upload_iter.call_count == 2 - assert webdav_client.set_property_batch.call_count == 1 async def test_agents_download( @@ -211,7 +209,7 @@ async def test_error_on_agents_download( """Test we get not found on a not existing backup on download.""" client = await hass_client() backup_id = BACKUP_METADATA["backup_id"] - webdav_client.list_with_properties.side_effect = [MOCK_LIST_WITH_PROPERTIES, {}] + webdav_client.list_files.return_value = [] resp = await client.get( f"/api/backup/download/{backup_id}?agent_id={DOMAIN}.{mock_config_entry.entry_id}" @@ -262,7 +260,7 @@ async def test_agents_delete_not_found_does_not_throw( webdav_client: AsyncMock, ) -> None: """Test agent delete backup.""" - webdav_client.list_with_properties.return_value = {} + webdav_client.list_files.return_value = {} client = await hass_ws_client(hass) await client.send_json_auto_id( @@ -283,7 +281,7 @@ async def test_agents_backup_not_found( webdav_client: AsyncMock, ) -> None: """Test backup not found.""" - webdav_client.list_with_properties.return_value = [] + webdav_client.list_files.return_value = [] backup_id = BACKUP_METADATA["backup_id"] client = await hass_ws_client(hass) await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) @@ -300,7 +298,7 @@ async def test_raises_on_403( mock_config_entry: MockConfigEntry, ) -> None: """Test we raise on 403.""" - webdav_client.list_with_properties.side_effect = UnauthorizedError( + webdav_client.list_files.side_effect = UnauthorizedError( "https://webdav.example.com" ) backup_id = BACKUP_METADATA["backup_id"] @@ -324,30 +322,3 @@ async def test_listeners_get_cleaned_up(hass: HomeAssistant) -> None: 
remove_listener() assert hass.data.get(DATA_BACKUP_AGENT_LISTENERS) is None - - -async def test_metadata_misses_backup_id( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - webdav_client: AsyncMock, - mock_config_entry: MockConfigEntry, -) -> None: - """Test getting a backup when metadata has backup id property.""" - MOCK_LIST_WITH_PROPERTIES[ - "/Automatic_backup_2025.2.1_2025-02-10_18.31_30202686.metadata.json" - ] = [ - Property( - namespace="homeassistant", - name="metadata_version", - value="1", - ) - ] - webdav_client.list_with_properties.return_value = MOCK_LIST_WITH_PROPERTIES - - backup_id = BACKUP_METADATA["backup_id"] - client = await hass_ws_client(hass) - await client.send_json_auto_id({"type": "backup/details", "backup_id": backup_id}) - response = await client.receive_json() - - assert response["success"] - assert response["result"]["backup"] is None diff --git a/tests/components/webdav/test_config_flow.py b/tests/components/webdav/test_config_flow.py index eb887edb1a1..9204e6eadab 100644 --- a/tests/components/webdav/test_config_flow.py +++ b/tests/components/webdav/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiowebdav2.exceptions import UnauthorizedError +from aiowebdav2.exceptions import MethodNotSupportedError, UnauthorizedError import pytest from homeassistant import config_entries @@ -86,6 +86,7 @@ async def test_form_fail(hass: HomeAssistant, webdav_client: AsyncMock) -> None: ("exception", "expected_error"), [ (UnauthorizedError("https://webdav.demo"), "invalid_auth"), + (MethodNotSupportedError("check", "https://webdav.demo"), "invalid_method"), (Exception("Unexpected error"), "unknown"), ], ) diff --git a/tests/components/webmin/snapshots/test_sensor.ambr b/tests/components/webmin/snapshots/test_sensor.ambr index a2068f662ba..1af5fe46b5c 100644 --- a/tests/components/webmin/snapshots/test_sensor.ambr +++ b/tests/components/webmin/snapshots/test_sensor.ambr @@ -1451,7 +1451,7 @@ 'state': '8794.3125', }) # --- -# name: test_sensor[sensor.192_168_1_1_load_15m-entry] +# name: test_sensor[sensor.192_168_1_1_load_15_min-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1466,7 +1466,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_15m', + 'entity_id': 'sensor.192_168_1_1_load_15_min', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1478,7 +1478,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Load (15m)', + 'original_name': 'Load (15 min)', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, @@ -1487,21 +1487,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_load_15m-state] +# name: test_sensor[sensor.192_168_1_1_load_15_min-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (15m)', + 'friendly_name': '192.168.1.1 Load (15 min)', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_load_15m', + 'entity_id': 'sensor.192_168_1_1_load_15_min', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1.37', }) # --- -# name: test_sensor[sensor.192_168_1_1_load_1m-entry] +# name: test_sensor[sensor.192_168_1_1_load_1_min-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1516,7 +1516,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_1m', + 'entity_id': 'sensor.192_168_1_1_load_1_min', 'has_entity_name': True, 'hidden_by': 
None, 'icon': None, @@ -1528,7 +1528,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Load (1m)', + 'original_name': 'Load (1 min)', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, @@ -1537,21 +1537,21 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_load_1m-state] +# name: test_sensor[sensor.192_168_1_1_load_1_min-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (1m)', + 'friendly_name': '192.168.1.1 Load (1 min)', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_load_1m', + 'entity_id': 'sensor.192_168_1_1_load_1_min', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1.29', }) # --- -# name: test_sensor[sensor.192_168_1_1_load_5m-entry] +# name: test_sensor[sensor.192_168_1_1_load_5_min-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -1566,7 +1566,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.192_168_1_1_load_5m', + 'entity_id': 'sensor.192_168_1_1_load_5_min', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -1578,7 +1578,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Load (5m)', + 'original_name': 'Load (5 min)', 'platform': 'webmin', 'previous_unique_id': None, 'supported_features': 0, @@ -1587,14 +1587,14 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[sensor.192_168_1_1_load_5m-state] +# name: test_sensor[sensor.192_168_1_1_load_5_min-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': '192.168.1.1 Load (5m)', + 'friendly_name': '192.168.1.1 Load (5 min)', 'state_class': , }), 'context': , - 'entity_id': 'sensor.192_168_1_1_load_5m', + 'entity_id': 'sensor.192_168_1_1_load_5_min', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 03e30c11ee9..370aab1067a 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -241,7 +241,7 @@ async def test_pending_msg_peak( instance: http.WebSocketHandler = cast(http.WebSocketHandler, setup_instance) # Fill the queue past the allowed peak - for _ in range(10): + for _ in range(20): instance._send_message({"overload": "message"}) async_fire_time_changed( @@ -251,7 +251,7 @@ async def test_pending_msg_peak( msg = await websocket_client.receive() assert msg.type is WSMsgType.CLOSE assert "Client unable to keep up with pending messages" in caplog.text - assert "Stayed over 5 for 5 seconds" in caplog.text + assert "Stayed over 5 for 10 seconds" in caplog.text assert "overload" in caplog.text diff --git a/tests/components/whirlpool/__init__.py b/tests/components/whirlpool/__init__.py index ca00975941a..97d9b4d61d5 100644 --- a/tests/components/whirlpool/__init__.py +++ b/tests/components/whirlpool/__init__.py @@ -31,5 +31,4 @@ async def init_integration_with_entry( entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - return entry diff --git a/tests/components/whirlpool/conftest.py b/tests/components/whirlpool/conftest.py index c302922fe25..93881d3735a 100644 --- a/tests/components/whirlpool/conftest.py +++ b/tests/components/whirlpool/conftest.py @@ -8,10 +8,7 @@ import whirlpool import whirlpool.aircon from whirlpool.backendselector import Brand, Region -MOCK_SAID1 = "said1" -MOCK_SAID2 = "said2" -MOCK_SAID3 = "said3" 
-MOCK_SAID4 = "said4" +from .const import MOCK_SAID1, MOCK_SAID2, MOCK_SAID3, MOCK_SAID4 @pytest.fixture( @@ -36,7 +33,7 @@ def fixture_brand(request: pytest.FixtureRequest) -> tuple[str, Brand]: return request.param -@pytest.fixture(name="mock_auth_api") +@pytest.fixture(name="mock_auth_api", autouse=True) def fixture_mock_auth_api(): """Set up Auth fixture.""" with ( @@ -50,8 +47,10 @@ def fixture_mock_auth_api(): yield mock_auth -@pytest.fixture(name="mock_appliances_manager_api") -def fixture_mock_appliances_manager_api(): +@pytest.fixture(name="mock_appliances_manager_api", autouse=True) +def fixture_mock_appliances_manager_api( + mock_aircon1_api, mock_aircon2_api, mock_sensor1_api, mock_sensor2_api +): """Set up AppliancesManager fixture.""" with ( mock.patch( @@ -63,28 +62,15 @@ def fixture_mock_appliances_manager_api(): ), ): mock_appliances_manager.return_value.fetch_appliances = AsyncMock() + mock_appliances_manager.return_value.connect = AsyncMock() + mock_appliances_manager.return_value.disconnect = AsyncMock() mock_appliances_manager.return_value.aircons = [ - {"SAID": MOCK_SAID1, "NAME": "TestZone"}, - {"SAID": MOCK_SAID2, "NAME": "TestZone"}, + mock_aircon1_api, + mock_aircon2_api, ] mock_appliances_manager.return_value.washer_dryers = [ - {"SAID": MOCK_SAID3, "NAME": "washer"}, - {"SAID": MOCK_SAID4, "NAME": "dryer"}, - ] - yield mock_appliances_manager - - -@pytest.fixture(name="mock_appliances_manager_laundry_api") -def fixture_mock_appliances_manager_laundry_api(): - """Set up AppliancesManager fixture.""" - with mock.patch( - "homeassistant.components.whirlpool.AppliancesManager" - ) as mock_appliances_manager: - mock_appliances_manager.return_value.fetch_appliances = AsyncMock() - mock_appliances_manager.return_value.aircons = None - mock_appliances_manager.return_value.washer_dryers = [ - {"SAID": MOCK_SAID3, "NAME": "washer"}, - {"SAID": MOCK_SAID4, "NAME": "dryer"}, + mock_sensor1_api, + mock_sensor2_api, ] yield mock_appliances_manager @@ -107,9 +93,11 @@ def fixture_mock_backend_selector_api(): def get_aircon_mock(said): """Get a mock of an air conditioner.""" mock_aircon = mock.Mock(said=said) - mock_aircon.connect = AsyncMock() - mock_aircon.disconnect = AsyncMock() + mock_aircon.name = f"Aircon {said}" mock_aircon.register_attr_callback = MagicMock() + mock_aircon.appliance_info.data_model = "aircon_model" + mock_aircon.appliance_info.category = "aircon" + mock_aircon.appliance_info.model_number = "12345" mock_aircon.get_online.return_value = True mock_aircon.get_power_on.return_value = True mock_aircon.get_mode.return_value = whirlpool.aircon.Mode.Cool @@ -132,13 +120,13 @@ def get_aircon_mock(said): @pytest.fixture(name="mock_aircon1_api", autouse=False) -def fixture_mock_aircon1_api(mock_auth_api, mock_appliances_manager_api): +def fixture_mock_aircon1_api(): """Set up air conditioner API fixture.""" return get_aircon_mock(MOCK_SAID1) @pytest.fixture(name="mock_aircon2_api", autouse=False) -def fixture_mock_aircon2_api(mock_auth_api, mock_appliances_manager_api): +def fixture_mock_aircon2_api(): """Set up air conditioner API fixture.""" return get_aircon_mock(MOCK_SAID2) @@ -168,9 +156,11 @@ def side_effect_function(*args, **kwargs): def get_sensor_mock(said): """Get a mock of a sensor.""" mock_sensor = mock.Mock(said=said) - mock_sensor.connect = AsyncMock() - mock_sensor.disconnect = AsyncMock() + mock_sensor.name = f"WasherDryer {said}" mock_sensor.register_attr_callback = MagicMock() + mock_sensor.appliance_info.data_model = "washer_dryer_model" + 
mock_sensor.appliance_info.category = "washer_dryer" + mock_sensor.appliance_info.model_number = "12345" mock_sensor.get_online.return_value = True mock_sensor.get_machine_state.return_value = ( whirlpool.washerdryer.MachineState.Standby @@ -187,13 +177,13 @@ def get_sensor_mock(said): @pytest.fixture(name="mock_sensor1_api", autouse=False) -def fixture_mock_sensor1_api(mock_auth_api, mock_appliances_manager_laundry_api): +def fixture_mock_sensor1_api(): """Set up sensor API fixture.""" return get_sensor_mock(MOCK_SAID3) @pytest.fixture(name="mock_sensor2_api", autouse=False) -def fixture_mock_sensor2_api(mock_auth_api, mock_appliances_manager_laundry_api): +def fixture_mock_sensor2_api(): """Set up sensor API fixture.""" return get_sensor_mock(MOCK_SAID4) diff --git a/tests/components/whirlpool/const.py b/tests/components/whirlpool/const.py new file mode 100644 index 00000000000..04ea5c0645c --- /dev/null +++ b/tests/components/whirlpool/const.py @@ -0,0 +1,6 @@ +"""Constants for the Whirlpool Sixth Sense integration tests.""" + +MOCK_SAID1 = "said1" +MOCK_SAID2 = "said2" +MOCK_SAID3 = "said3" +MOCK_SAID4 = "said4" diff --git a/tests/components/whirlpool/snapshots/test_diagnostics.ambr b/tests/components/whirlpool/snapshots/test_diagnostics.ambr index ee8abe04bf1..7ffae8bc808 100644 --- a/tests/components/whirlpool/snapshots/test_diagnostics.ambr +++ b/tests/components/whirlpool/snapshots/test_diagnostics.ambr @@ -2,24 +2,32 @@ # name: test_entry_diagnostics dict({ 'appliances': dict({ - 'Washer_dryers': dict({ - 'dryer': dict({ - 'NAME': 'dryer', - 'SAID': '**REDACTED**', - }), - 'washer': dict({ - 'NAME': 'washer', - 'SAID': '**REDACTED**', - }), - }), 'aircons': dict({ - 'TestZone': dict({ - 'NAME': 'TestZone', - 'SAID': '**REDACTED**', + 'Aircon said1': dict({ + 'category': 'aircon', + 'data_model': 'aircon_model', + 'model_number': '12345', + }), + 'Aircon said2': dict({ + 'category': 'aircon', + 'data_model': 'aircon_model', + 'model_number': '12345', }), }), 'ovens': dict({ }), + 'washer_dryers': dict({ + 'WasherDryer said3': dict({ + 'category': 'washer_dryer', + 'data_model': 'washer_dryer_model', + 'model_number': '12345', + }), + 'WasherDryer said4': dict({ + 'category': 'washer_dryer', + 'data_model': 'washer_dryer_model', + 'model_number': '12345', + }), + }), }), 'config_entry': dict({ 'data': dict({ diff --git a/tests/components/whirlpool/test_climate.py b/tests/components/whirlpool/test_climate.py index cdae28f4432..0586d654f7f 100644 --- a/tests/components/whirlpool/test_climate.py +++ b/tests/components/whirlpool/test_climate.py @@ -68,6 +68,7 @@ async def test_no_appliances( ) -> None: """Test the setup of the climate entities when there are no appliances available.""" mock_appliances_manager_api.return_value.aircons = [] + mock_appliances_manager_api.return_value.washer_dryers = [] await init_integration(hass) assert len(hass.states.async_all()) == 0 @@ -75,16 +76,15 @@ async def test_no_appliances( async def test_static_attributes( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_aircon1_api: MagicMock, - mock_aircon_api_instances: MagicMock, ) -> None: """Test static climate attributes.""" await init_integration(hass) - for entity_id in ("climate.said1", "climate.said2"): + for said in ("said1", "said2"): + entity_id = f"climate.{said}" entry = entity_registry.async_get(entity_id) assert entry - assert entry.unique_id == entity_id.split(".")[1] + assert entry.unique_id == said state = hass.states.get(entity_id) assert state is not None @@ -92,7 +92,7 
@@ async def test_static_attributes( assert state.state == HVACMode.COOL attributes = state.attributes - assert attributes[ATTR_FRIENDLY_NAME] == "TestZone" + assert attributes[ATTR_FRIENDLY_NAME] == f"Aircon {said}" assert ( attributes[ATTR_SUPPORTED_FEATURES] @@ -123,7 +123,6 @@ async def test_static_attributes( async def test_dynamic_attributes( hass: HomeAssistant, - mock_aircon_api_instances: MagicMock, mock_aircon1_api: MagicMock, mock_aircon2_api: MagicMock, ) -> None: @@ -212,7 +211,6 @@ async def test_dynamic_attributes( async def test_service_calls( hass: HomeAssistant, - mock_aircon_api_instances: MagicMock, mock_aircon1_api: MagicMock, mock_aircon2_api: MagicMock, ) -> None: diff --git a/tests/components/whirlpool/test_diagnostics.py b/tests/components/whirlpool/test_diagnostics.py index 2a0b2e6fd18..192339156e1 100644 --- a/tests/components/whirlpool/test_diagnostics.py +++ b/tests/components/whirlpool/test_diagnostics.py @@ -1,7 +1,5 @@ """Test Blink diagnostics.""" -from unittest.mock import MagicMock - from syrupy import SnapshotAssertion from syrupy.filters import props @@ -19,9 +17,6 @@ async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, - mock_appliances_manager_api: MagicMock, - mock_aircon1_api: MagicMock, - mock_aircon_api_instances: MagicMock, ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/whirlpool/test_init.py b/tests/components/whirlpool/test_init.py index 8f082ff6294..5f04bf84b9e 100644 --- a/tests/components/whirlpool/test_init.py +++ b/tests/components/whirlpool/test_init.py @@ -21,7 +21,6 @@ async def test_setup( mock_backend_selector_api: MagicMock, region, brand, - mock_aircon_api_instances: MagicMock, ) -> None: """Test setup.""" entry = await init_integration(hass, region[0], brand[0]) @@ -33,7 +32,6 @@ async def test_setup( async def test_setup_region_fallback( hass: HomeAssistant, mock_backend_selector_api: MagicMock, - mock_aircon_api_instances: MagicMock, ) -> None: """Test setup when no region is available on the ConfigEntry. @@ -57,7 +55,6 @@ async def test_setup_brand_fallback( hass: HomeAssistant, region, mock_backend_selector_api: MagicMock, - mock_aircon_api_instances: MagicMock, ) -> None: """Test setup when no brand is available on the ConfigEntry. 
@@ -81,7 +78,6 @@ async def test_setup_brand_fallback( async def test_setup_http_exception( hass: HomeAssistant, mock_auth_api: MagicMock, - mock_aircon_api_instances: MagicMock, ) -> None: """Test setup with an http exception.""" mock_auth_api.return_value.do_auth = AsyncMock( @@ -95,7 +91,6 @@ async def test_setup_http_exception( async def test_setup_auth_failed( hass: HomeAssistant, mock_auth_api: MagicMock, - mock_aircon_api_instances: MagicMock, ) -> None: """Test setup with failed auth.""" mock_auth_api.return_value.do_auth = AsyncMock() @@ -108,7 +103,6 @@ async def test_setup_auth_failed( async def test_setup_auth_account_locked( hass: HomeAssistant, mock_auth_api: MagicMock, - mock_aircon_api_instances: MagicMock, ) -> None: """Test setup with failed auth due to account being locked.""" mock_auth_api.return_value.do_auth.side_effect = AccountLockedError @@ -120,7 +114,6 @@ async def test_setup_auth_account_locked( async def test_setup_fetch_appliances_failed( hass: HomeAssistant, mock_appliances_manager_api: MagicMock, - mock_aircon_api_instances: MagicMock, ) -> None: """Test setup with failed fetch_appliances.""" mock_appliances_manager_api.return_value.fetch_appliances.return_value = False @@ -129,11 +122,7 @@ async def test_setup_fetch_appliances_failed( assert entry.state is ConfigEntryState.SETUP_ERROR -async def test_unload_entry( - hass: HomeAssistant, - mock_aircon_api_instances: MagicMock, - mock_sensor_api_instances: MagicMock, -) -> None: +async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" entry = await init_integration(hass) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 diff --git a/tests/components/whirlpool/test_sensor.py b/tests/components/whirlpool/test_sensor.py index 548025e29bd..95fca331707 100644 --- a/tests/components/whirlpool/test_sensor.py +++ b/tests/components/whirlpool/test_sensor.py @@ -12,14 +12,13 @@ from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import as_timestamp, utc_from_timestamp, utcnow from . 
import init_integration +from .const import MOCK_SAID3, MOCK_SAID4 from tests.common import async_fire_time_changed, mock_restore_cache_with_extra_data async def update_sensor_state( - hass: HomeAssistant, - entity_id: str, - mock_sensor_api_instance: MagicMock, + hass: HomeAssistant, entity_id: str, mock_sensor_api_instance: MagicMock ) -> State: """Simulate an update trigger from the API.""" @@ -46,10 +45,7 @@ def side_effect_function_open_door(*args, **kwargs): async def test_dryer_sensor_values( - hass: HomeAssistant, - mock_sensor_api_instances: MagicMock, - mock_sensor2_api: MagicMock, - entity_registry: er.EntityRegistry, + hass: HomeAssistant, mock_sensor2_api: MagicMock, entity_registry: er.EntityRegistry ) -> None: """Test the sensor value callbacks.""" hass.set_state(CoreState.not_running) @@ -58,14 +54,11 @@ async def test_dryer_sensor_values( hass, ( ( - State( - "sensor.washer_end_time", - "1", - ), + State(f"sensor.washerdryer_{MOCK_SAID3}_end_time", "1"), {"native_value": thetimestamp, "native_unit_of_measurement": None}, ), ( - State("sensor.dryer_end_time", "1"), + State(f"sensor.washerdryer_{MOCK_SAID4}_end_time", "1"), {"native_value": thetimestamp, "native_unit_of_measurement": None}, ), ), @@ -73,7 +66,7 @@ async def test_dryer_sensor_values( await init_integration(hass) - entity_id = "sensor.dryer_state" + entity_id = f"sensor.washerdryer_{MOCK_SAID4}_state" mock_instance = mock_sensor2_api entry = entity_registry.async_get(entity_id) assert entry @@ -83,7 +76,7 @@ async def test_dryer_sensor_values( state = await update_sensor_state(hass, entity_id, mock_instance) assert state is not None - state_id = f"{entity_id.split('_', maxsplit=1)[0]}_end_time" + state_id = f"sensor.washerdryer_{MOCK_SAID3}_end_time" state = hass.states.get(state_id) assert state.state == thetimestamp.isoformat() @@ -110,10 +103,7 @@ async def test_dryer_sensor_values( async def test_washer_sensor_values( - hass: HomeAssistant, - mock_sensor_api_instances: MagicMock, - mock_sensor1_api: MagicMock, - entity_registry: er.EntityRegistry, + hass: HomeAssistant, mock_sensor1_api: MagicMock, entity_registry: er.EntityRegistry ) -> None: """Test the sensor value callbacks.""" hass.set_state(CoreState.not_running) @@ -122,14 +112,11 @@ async def test_washer_sensor_values( hass, ( ( - State( - "sensor.washer_end_time", - "1", - ), + State(f"sensor.washerdryer_{MOCK_SAID3}_end_time", "1"), {"native_value": thetimestamp, "native_unit_of_measurement": None}, ), ( - State("sensor.dryer_end_time", "1"), + State(f"sensor.washerdryer_{MOCK_SAID4}_end_time", "1"), {"native_value": thetimestamp, "native_unit_of_measurement": None}, ), ), @@ -143,7 +130,7 @@ async def test_washer_sensor_values( ) await hass.async_block_till_done() - entity_id = "sensor.washer_state" + entity_id = f"sensor.washerdryer_{MOCK_SAID3}_state" mock_instance = mock_sensor1_api entry = entity_registry.async_get(entity_id) assert entry @@ -153,11 +140,11 @@ async def test_washer_sensor_values( state = await update_sensor_state(hass, entity_id, mock_instance) assert state is not None - state_id = f"{entity_id.split('_', maxsplit=1)[0]}_end_time" + state_id = f"sensor.washerdryer_{MOCK_SAID3}_end_time" state = hass.states.get(state_id) assert state.state == thetimestamp.isoformat() - state_id = f"{entity_id.split('_', maxsplit=1)[0]}_detergent_level" + state_id = f"sensor.washerdryer_{MOCK_SAID3}_detergent_level" entry = entity_registry.async_get(state_id) assert entry assert entry.disabled @@ -277,10 +264,7 @@ async def 
test_washer_sensor_values( assert state.state == "door_open" -async def test_restore_state( - hass: HomeAssistant, - mock_sensor_api_instances: MagicMock, -) -> None: +async def test_restore_state(hass: HomeAssistant) -> None: """Test sensor restore state.""" # Home assistant is not running yet hass.set_state(CoreState.not_running) @@ -289,14 +273,11 @@ async def test_restore_state( hass, ( ( - State( - "sensor.washer_end_time", - "1", - ), + State(f"sensor.washerdryer_{MOCK_SAID3}_end_time", "1"), {"native_value": thetimestamp, "native_unit_of_measurement": None}, ), ( - State("sensor.dryer_end_time", "1"), + State(f"sensor.washerdryer_{MOCK_SAID4}_end_time", "1"), {"native_value": thetimestamp, "native_unit_of_measurement": None}, ), ), @@ -305,20 +286,18 @@ async def test_restore_state( # create and add entry await init_integration(hass) # restore from cache - state = hass.states.get("sensor.washer_end_time") + state = hass.states.get(f"sensor.washerdryer_{MOCK_SAID3}_end_time") assert state.state == thetimestamp.isoformat() - state = hass.states.get("sensor.dryer_end_time") + state = hass.states.get(f"sensor.washerdryer_{MOCK_SAID4}_end_time") assert state.state == thetimestamp.isoformat() async def test_no_restore_state( - hass: HomeAssistant, - mock_sensor_api_instances: MagicMock, - mock_sensor1_api: MagicMock, + hass: HomeAssistant, mock_sensor1_api: MagicMock ) -> None: """Test sensor restore state with no restore.""" # create and add entry - entity_id = "sensor.washer_end_time" + entity_id = f"sensor.washerdryer_{MOCK_SAID3}_end_time" await init_integration(hass) # restore from cache state = hass.states.get(entity_id) @@ -330,11 +309,7 @@ async def test_no_restore_state( @pytest.mark.freeze_time("2022-11-30 00:00:00") -async def test_callback( - hass: HomeAssistant, - mock_sensor_api_instances: MagicMock, - mock_sensor1_api: MagicMock, -) -> None: +async def test_callback(hass: HomeAssistant, mock_sensor1_api: MagicMock) -> None: """Test callback timestamp callback function.""" hass.set_state(CoreState.not_running) thetimestamp: datetime = datetime(2022, 11, 29, 00, 00, 00, 00, UTC) @@ -342,14 +317,11 @@ async def test_callback( hass, ( ( - State( - "sensor.washer_end_time", - "1", - ), + State(f"sensor.washerdryer_{MOCK_SAID3}_end_time", "1"), {"native_value": thetimestamp, "native_unit_of_measurement": None}, ), ( - State("sensor.dryer_end_time", "1"), + State(f"sensor.washerdryer_{MOCK_SAID4}_end_time", "1"), {"native_value": thetimestamp, "native_unit_of_measurement": None}, ), ), @@ -358,12 +330,12 @@ async def test_callback( # create and add entry await init_integration(hass) # restore from cache - state = hass.states.get("sensor.washer_end_time") + state = hass.states.get(f"sensor.washerdryer_{MOCK_SAID3}_end_time") assert state.state == thetimestamp.isoformat() callback = mock_sensor1_api.register_attr_callback.call_args_list[1][0][0] callback() - state = hass.states.get("sensor.washer_end_time") + state = hass.states.get(f"sensor.washerdryer_{MOCK_SAID3}_end_time") assert state.state == thetimestamp.isoformat() mock_sensor1_api.get_machine_state.return_value = MachineState.RunningMainCycle mock_sensor1_api.get_attribute.side_effect = None @@ -371,19 +343,19 @@ async def test_callback( callback() # Test new timestamp when machine starts a cycle. 
- state = hass.states.get("sensor.washer_end_time") + state = hass.states.get(f"sensor.washerdryer_{MOCK_SAID3}_end_time") time = state.state assert state.state != thetimestamp.isoformat() # Test no timestamp change for < 60 seconds time change. mock_sensor1_api.get_attribute.return_value = "65" callback() - state = hass.states.get("sensor.washer_end_time") + state = hass.states.get(f"sensor.washerdryer_{MOCK_SAID3}_end_time") assert state.state == time # Test timestamp change for > 60 seconds. mock_sensor1_api.get_attribute.return_value = "125" callback() - state = hass.states.get("sensor.washer_end_time") + state = hass.states.get(f"sensor.washerdryer_{MOCK_SAID3}_end_time") newtime = utc_from_timestamp(as_timestamp(time) + 65) assert state.state == newtime.isoformat() diff --git a/tests/components/whois/snapshots/test_config_flow.ambr b/tests/components/whois/snapshots/test_config_flow.ambr index 0d99b0596e3..97d6fde6376 100644 --- a/tests/components/whois/snapshots/test_config_flow.ambr +++ b/tests/components/whois/snapshots/test_config_flow.ambr @@ -175,6 +175,94 @@ 'version': 1, }) # --- +# name: test_full_flow_with_error[WhoisPrivateRegistry-private_registry] + FlowResultSnapshot({ + 'context': dict({ + 'source': 'user', + 'unique_id': 'example.com', + }), + 'data': dict({ + 'domain': 'example.com', + }), + 'description': None, + 'description_placeholders': None, + 'flow_id': , + 'handler': 'whois', + 'minor_version': 1, + 'options': dict({ + }), + 'result': ConfigEntrySnapshot({ + 'data': dict({ + 'domain': 'example.com', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'whois', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Example.com', + 'unique_id': 'example.com', + 'version': 1, + }), + 'subentries': tuple( + ), + 'title': 'Example.com', + 'type': , + 'version': 1, + }) +# --- +# name: test_full_flow_with_error[WhoisQuotaExceeded-quota_exceeded] + FlowResultSnapshot({ + 'context': dict({ + 'source': 'user', + 'unique_id': 'example.com', + }), + 'data': dict({ + 'domain': 'example.com', + }), + 'description': None, + 'description_placeholders': None, + 'flow_id': , + 'handler': 'whois', + 'minor_version': 1, + 'options': dict({ + }), + 'result': ConfigEntrySnapshot({ + 'data': dict({ + 'domain': 'example.com', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'whois', + 'entry_id': , + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'subentries': list([ + ]), + 'title': 'Example.com', + 'unique_id': 'example.com', + 'version': 1, + }), + 'subentries': tuple( + ), + 'title': 'Example.com', + 'type': , + 'version': 1, + }) +# --- # name: test_full_user_flow FlowResultSnapshot({ 'context': dict({ diff --git a/tests/components/whois/test_config_flow.py b/tests/components/whois/test_config_flow.py index 35e40c4e809..6ab02887be2 100644 --- a/tests/components/whois/test_config_flow.py +++ b/tests/components/whois/test_config_flow.py @@ -9,6 +9,8 @@ from whois.exceptions import ( UnknownDateFormat, UnknownTld, WhoisCommandFailed, + WhoisPrivateRegistry, + WhoisQuotaExceeded, ) from homeassistant.components.whois.const import DOMAIN @@ -52,6 +54,8 @@ async def test_full_user_flow( (FailedParsingWhoisOutput, "unexpected_response"), (UnknownDateFormat, "unknown_date_format"), (WhoisCommandFailed, 
"whois_command_failed"), + (WhoisPrivateRegistry, "private_registry"), + (WhoisQuotaExceeded, "quota_exceeded"), ], ) async def test_full_flow_with_error( diff --git a/tests/components/withings/snapshots/test_sensor.ambr b/tests/components/withings/snapshots/test_sensor.ambr index ec9fc1ed3fc..f735c506f65 100644 --- a/tests/components/withings/snapshots/test_sensor.ambr +++ b/tests/components/withings/snapshots/test_sensor.ambr @@ -503,6 +503,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -512,7 +515,7 @@ 'supported_features': 0, 'translation_key': 'deep_sleep', 'unique_id': 'withings_12345_sleep_deep_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_deep_sleep-state] @@ -521,14 +524,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Deep sleep', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_deep_sleep', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '5820', + 'state': '1.617', }) # --- # name: test_all_entities[sensor.henk_diastolic_blood_pressure-entry] @@ -1778,7 +1781,7 @@ 'name': None, 'options': dict({ 'sensor': dict({ - 'suggested_display_precision': 1, + 'suggested_display_precision': 2, }), }), 'original_device_class': , @@ -2242,6 +2245,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2251,7 +2257,7 @@ 'supported_features': 0, 'translation_key': 'light_sleep', 'unique_id': 'withings_12345_sleep_light_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_light_sleep-state] @@ -2260,14 +2266,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Light sleep', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_light_sleep', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '10440', + 'state': '2.900', }) # --- # name: test_all_entities[sensor.henk_maximum_heart_rate-entry] @@ -2988,6 +2994,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -2997,7 +3006,7 @@ 'supported_features': 0, 'translation_key': 'rem_sleep', 'unique_id': 'withings_12345_sleep_rem_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_rem_sleep-state] @@ -3006,14 +3015,14 @@ 'device_class': 'duration', 'friendly_name': 'henk REM sleep', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_rem_sleep', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2400', + 'state': '0.667', }) # --- # name: test_all_entities[sensor.henk_skin_temperature-entry] @@ -3616,6 +3625,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3625,7 +3637,7 @@ 'supported_features': 0, 'translation_key': 'time_to_sleep', 'unique_id': 'withings_12345_sleep_tosleep_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_time_to_sleep-state] @@ -3634,14 +3646,14 @@ 
'device_class': 'duration', 'friendly_name': 'henk Time to sleep', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_time_to_sleep', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '540', + 'state': '0.150', }) # --- # name: test_all_entities[sensor.henk_time_to_wakeup-entry] @@ -3668,6 +3680,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3677,7 +3692,7 @@ 'supported_features': 0, 'translation_key': 'time_to_wakeup', 'unique_id': 'withings_12345_sleep_towakeup_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_time_to_wakeup-state] @@ -3686,14 +3701,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Time to wakeup', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_time_to_wakeup', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1140', + 'state': '0.317', }) # --- # name: test_all_entities[sensor.henk_total_calories_burnt_today-entry] @@ -3971,6 +3986,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -3980,7 +3998,7 @@ 'supported_features': 0, 'translation_key': 'wakeup_time', 'unique_id': 'withings_12345_sleep_wakeup_duration_seconds', - 'unit_of_measurement': , + 'unit_of_measurement': , }) # --- # name: test_all_entities[sensor.henk_wakeup_time-state] @@ -3989,14 +4007,14 @@ 'device_class': 'duration', 'friendly_name': 'henk Wakeup time', 'state_class': , - 'unit_of_measurement': , + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.henk_wakeup_time', 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '3060', + 'state': '0.850', }) # --- # name: test_all_entities[sensor.henk_weight-entry] diff --git a/tests/components/wled/snapshots/test_select.ambr b/tests/components/wled/snapshots/test_select.ambr index ca3b0a5dc6e..d3f8fbcc21d 100644 --- a/tests/components/wled/snapshots/test_select.ambr +++ b/tests/components/wled/snapshots/test_select.ambr @@ -99,77 +99,77 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'WLED RGB Light Segment 1 color palette', 'options': list([ - 'Default', - '* Random Cycle', '* Color 1', - '* Colors 1&2', '* Color Gradient', + '* Colors 1&2', '* Colors Only', - 'Party', - 'Cloud', - 'Lava', - 'Ocean', - 'Forest', - 'Rainbow', - 'Rainbow Bands', - 'Sunset', - 'Rivendell', - 'Breeze', - 'Red & Blue', - 'Yellowout', + '* Random Cycle', 'Analogous', - 'Splash', - 'Pastel', - 'Sunset 2', - 'Beach', - 'Vintage', - 'Departure', - 'Landscape', - 'Beech', - 'Sherbet', - 'Hult', - 'Hult 64', - 'Drywet', - 'Jul', - 'Grintage', - 'Rewhi', - 'Tertiary', - 'Fire', - 'Icefire', - 'Cyane', - 'Light Pink', - 'Autumn', - 'Magenta', - 'Magred', - 'Yelmag', - 'Yelblu', - 'Orange & Teal', - 'Tiamat', 'April Night', - 'Orangery', - 'C9', - 'Sakura', - 'Aurora', + 'Aqua Flash', 'Atlantica', + 'Aurora', + 'Aurora 2', + 'Autumn', + 'Beach', + 'Beech', + 'Blink Red', + 'Breeze', + 'C9', 'C9 2', 'C9 New', - 'Temperature', - 'Aurora 2', - 'Retro Clown', 'Candy', - 'Toxy Reaf', + 'Candy2', + 'Cloud', + 'Cyane', + 'Default', + 'Departure', + 'Drywet', 'Fairy Reaf', - 'Semi Blue', - 'Pink Candy', - 'Red Reaf', - 'Aqua Flash', - 'Yelblu Hot', + 'Fire', + 'Forest', + 'Grintage', + 'Hult', + 
'Hult 64', + 'Icefire', + 'Jul', + 'Landscape', + 'Lava', + 'Light Pink', 'Lite Light', + 'Magenta', + 'Magred', + 'Ocean', + 'Orange & Teal', + 'Orangery', + 'Party', + 'Pastel', + 'Pink Candy', + 'Rainbow', + 'Rainbow Bands', + 'Red & Blue', 'Red Flash', - 'Blink Red', + 'Red Reaf', 'Red Shift', 'Red Tide', - 'Candy2', + 'Retro Clown', + 'Rewhi', + 'Rivendell', + 'Sakura', + 'Semi Blue', + 'Sherbet', + 'Splash', + 'Sunset', + 'Sunset 2', + 'Temperature', + 'Tertiary', + 'Tiamat', + 'Toxy Reaf', + 'Vintage', + 'Yelblu', + 'Yelblu Hot', + 'Yellowout', + 'Yelmag', ]), }), 'context': , @@ -187,77 +187,77 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Default', - '* Random Cycle', '* Color 1', - '* Colors 1&2', '* Color Gradient', + '* Colors 1&2', '* Colors Only', - 'Party', - 'Cloud', - 'Lava', - 'Ocean', - 'Forest', - 'Rainbow', - 'Rainbow Bands', - 'Sunset', - 'Rivendell', - 'Breeze', - 'Red & Blue', - 'Yellowout', + '* Random Cycle', 'Analogous', - 'Splash', - 'Pastel', - 'Sunset 2', - 'Beach', - 'Vintage', - 'Departure', - 'Landscape', - 'Beech', - 'Sherbet', - 'Hult', - 'Hult 64', - 'Drywet', - 'Jul', - 'Grintage', - 'Rewhi', - 'Tertiary', - 'Fire', - 'Icefire', - 'Cyane', - 'Light Pink', - 'Autumn', - 'Magenta', - 'Magred', - 'Yelmag', - 'Yelblu', - 'Orange & Teal', - 'Tiamat', 'April Night', - 'Orangery', - 'C9', - 'Sakura', - 'Aurora', + 'Aqua Flash', 'Atlantica', + 'Aurora', + 'Aurora 2', + 'Autumn', + 'Beach', + 'Beech', + 'Blink Red', + 'Breeze', + 'C9', 'C9 2', 'C9 New', - 'Temperature', - 'Aurora 2', - 'Retro Clown', 'Candy', - 'Toxy Reaf', + 'Candy2', + 'Cloud', + 'Cyane', + 'Default', + 'Departure', + 'Drywet', 'Fairy Reaf', - 'Semi Blue', - 'Pink Candy', - 'Red Reaf', - 'Aqua Flash', - 'Yelblu Hot', + 'Fire', + 'Forest', + 'Grintage', + 'Hult', + 'Hult 64', + 'Icefire', + 'Jul', + 'Landscape', + 'Lava', + 'Light Pink', 'Lite Light', + 'Magenta', + 'Magred', + 'Ocean', + 'Orange & Teal', + 'Orangery', + 'Party', + 'Pastel', + 'Pink Candy', + 'Rainbow', + 'Rainbow Bands', + 'Red & Blue', 'Red Flash', - 'Blink Red', + 'Red Reaf', 'Red Shift', 'Red Tide', - 'Candy2', + 'Retro Clown', + 'Rewhi', + 'Rivendell', + 'Sakura', + 'Semi Blue', + 'Sherbet', + 'Splash', + 'Sunset', + 'Sunset 2', + 'Temperature', + 'Tertiary', + 'Tiamat', + 'Toxy Reaf', + 'Vintage', + 'Yelblu', + 'Yelblu Hot', + 'Yellowout', + 'Yelmag', ]), }), 'config_entry_id': , diff --git a/tests/components/wolflink/__init__.py b/tests/components/wolflink/__init__.py index dea7c5195ad..11c82ad9f61 100644 --- a/tests/components/wolflink/__init__.py +++ b/tests/components/wolflink/__init__.py @@ -1 +1,14 @@ """Tests for the Wolf SmartSet Service integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the wolflink integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/wolflink/conftest.py b/tests/components/wolflink/conftest.py new file mode 100644 index 00000000000..5142762b5e4 --- /dev/null +++ b/tests/components/wolflink/conftest.py @@ -0,0 +1,125 @@ +"""Fixtures for Wolflink integration tests.""" + +from __future__ import annotations + +from collections.abc import Generator +from unittest.mock import MagicMock, patch + +import pytest +from wolf_comm import ( + EnergyParameter, + FlowParameter, 
+ FrequencyParameter, + HoursParameter, + ListItem, + ListItemParameter, + PercentageParameter, + PowerParameter, + Pressure, + RPMParameter, + SimpleParameter, + Temperature, + Value, +) + +from homeassistant.components.wolflink.const import ( + DEVICE_GATEWAY, + DEVICE_ID, + DEVICE_NAME, + DOMAIN, +) +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="Wolf SmartSet", + domain=DOMAIN, + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + DEVICE_NAME: "test-device", + DEVICE_GATEWAY: "5678", + DEVICE_ID: "1234", + }, + unique_id="1234", + version=1, + minor_version=2, + ) + + +@pytest.fixture +def mock_wolflink() -> Generator[MagicMock]: + """Return a mocked wolflink client.""" + with ( + patch( + "homeassistant.components.wolflink.WolfClient", autospec=True + ) as wolflink_mock, + patch( + "homeassistant.components.wolflink.config_flow.WolfClient", + new=wolflink_mock, + ), + ): + wolflink = wolflink_mock.return_value + + wolflink.fetch_parameters.return_value = [ + EnergyParameter( + 6002800000, "Energy Parameter", "Heating", 6005200000, 2000 + ), + ListItemParameter( + 8002800000, + "List Item Parameter", + "Heating", + [ListItem("0", "Aus"), ListItem("1", "Ein")], + 8005200000, + 3001, + ), + PowerParameter(5002800000, "Power Parameter", "Heating", 5005200000, 1000), + Pressure(4002800000, "Pressure Parameter", "Heating", 4005200000, 1000), + Temperature(3002800000, "Temperature Parameter", "Solar", 3005200000, 1000), + PercentageParameter( + 2002800000, "Percentage Parameter", "Solar", 2005200000, 1000 + ), + HoursParameter(7002800000, "Hours Parameter", "Heating", 7005200000, 1000), + SimpleParameter(1002800000, "Simple Parameter", "DHW", 1005200000, 1000), + FrequencyParameter( + 9002800000, "Frequency Parameter", "Heating", 9005200000, 1000 + ), + RPMParameter(1000280001, "RPM Parameter", "Heating", 10005200000, 7000), + FlowParameter(1100280001, "Flow Parameter", "Heating", 11005200000, 8000), + HoursParameter(7002800000, "Hours Parameter", "Heating", 7005200000, 1000), + SimpleParameter(1002800000, "Simple Parameter", "DHW", 1005200000, 1000), + ] + + wolflink.fetch_value.return_value = [ + Value(6002800000, "183", 1), + Value(8002800000, "1", 1), + Value(5002800000, "50", 1), + Value(4002800000, "3", 1), + Value(3002800000, "65", 1), + Value(2002800000, "20", 1), + Value(7002800000, "10", 1), + Value(1002800000, "12", 1), + Value(9002800000, "50", 1), + Value(1000280001, "1500", 1), + Value(1100280001, "5", 1), + ] + + yield wolflink + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_wolflink: MagicMock +) -> MockConfigEntry: + """Set up the Wolflink integration for testing.""" + await setup_integration(hass, mock_config_entry) + + return mock_config_entry diff --git a/tests/components/wolflink/snapshots/test_sensor.ambr b/tests/components/wolflink/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c1ff80c9630 --- /dev/null +++ b/tests/components/wolflink/snapshots/test_sensor.ambr @@ -0,0 +1,603 @@ +# serializer version: 1 +# name: test_device_entry + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'config_entries_subentries': , + 'configuration_url': 
'https://www.wolf-smartset.com/', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'wolflink', + '1234', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'WOLF GmbH', + 'model': None, + 'model_id': None, + 'name': 'Wolf SmartSet', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_sensors[sensor.energy_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:6005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Energy Parameter', + 'parameter_id': 6005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 6002800000, + }), + 'context': , + 'entity_id': 'sensor.energy_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '183', + }) +# --- +# name: test_sensors[sensor.flow_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.flow_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flow Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:11005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.flow_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'Flow Parameter', + 'parameter_id': 11005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 1100280001, + }), + 'context': , + 'entity_id': 'sensor.flow_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_sensors[sensor.frequency_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.frequency_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Frequency Parameter', + 'platform': 'wolflink', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:9005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.frequency_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Frequency Parameter', + 'parameter_id': 9005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 9002800000, + }), + 'context': , + 'entity_id': 'sensor.frequency_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensors[sensor.hours_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hours_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:clock', + 'original_name': 'Hours Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:7005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.hours_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hours Parameter', + 'icon': 'mdi:clock', + 'parameter_id': 7005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 7002800000, + }), + 'context': , + 'entity_id': 'sensor.hours_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_sensors[sensor.list_item_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.list_item_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'List Item Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state', + 'unique_id': '1234:8005200000', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.list_item_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'List Item Parameter', + 'parameter_id': 8005200000, + 'parent': 'Heating', + 'value_id': 8002800000, + }), + 'context': , + 'entity_id': 'sensor.list_item_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ein', + }) +# --- +# name: test_sensors[sensor.percentage_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.percentage_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Percentage Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:2005200000', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.percentage_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Percentage Parameter', + 'parameter_id': 2005200000, + 'parent': 'Solar', + 'unit_of_measurement': '%', + 'value_id': 2002800000, + }), + 'context': , + 'entity_id': 'sensor.percentage_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20', + }) +# --- +# name: test_sensors[sensor.power_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.power_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:5005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.power_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Power Parameter', + 'parameter_id': 5005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 5002800000, + }), + 'context': , + 'entity_id': 'sensor.power_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50', + }) +# --- +# name: test_sensors[sensor.pressure_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pressure_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:4005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pressure_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Pressure Parameter', + 'parameter_id': 4005200000, + 'parent': 'Heating', + 'unit_of_measurement': , + 'value_id': 4002800000, + }), + 'context': , + 'entity_id': 'sensor.pressure_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_sensors[sensor.rpm_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.rpm_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RPM Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:10005200000', + 'unit_of_measurement': 'rpm', + }) +# --- +# name: test_sensors[sensor.rpm_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'RPM Parameter', + 'parameter_id': 10005200000, + 'parent': 'Heating', + 'state_class': , + 'unit_of_measurement': 'rpm', + 'value_id': 1000280001, + }), + 'context': , + 'entity_id': 'sensor.rpm_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1500', + }) +# --- +# name: test_sensors[sensor.simple_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.simple_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Simple Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:1005200000', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.simple_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Simple Parameter', + 'parameter_id': 1005200000, + 'parent': 'DHW', + 'value_id': 1002800000, + }), + 'context': , + 'entity_id': 'sensor.simple_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12', + }) +# --- +# name: test_sensors[sensor.temperature_parameter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'config_subentry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.temperature_parameter', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature Parameter', + 'platform': 'wolflink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234:3005200000', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.temperature_parameter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Temperature Parameter', + 'parameter_id': 3005200000, + 'parent': 'Solar', + 'unit_of_measurement': , + 'value_id': 3002800000, + }), + 'context': , + 'entity_id': 'sensor.temperature_parameter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '65', + }) +# --- diff --git a/tests/components/wolflink/test_sensor.py b/tests/components/wolflink/test_sensor.py new file mode 100644 index 00000000000..8fc78f707d5 --- /dev/null +++ b/tests/components/wolflink/test_sensor.py @@ -0,0 +1,45 @@ +"""Test the Wolf SmartSet Service Sensor platform.""" + +from unittest.mock import MagicMock + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant 
+from homeassistant.helpers import device_registry as dr, entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, patch, snapshot_platform + + +async def test_device_entry( + hass: HomeAssistant, + mock_wolflink: MagicMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test device entry creation.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + device = device_registry.async_get_device({(mock_config_entry.domain, "1234")}) + assert device == snapshot + + +async def test_sensors( + hass: HomeAssistant, + mock_wolflink: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test wolflink sensors.""" + + with patch("homeassistant.components.wolflink.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index 018fff33821..125edc547c6 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -121,7 +121,9 @@ def handle_config_entry(hass: HomeAssistant) -> ConfigEntry: @pytest.fixture -async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): +async def init_wyoming_stt( + hass: HomeAssistant, stt_config_entry: ConfigEntry +) -> ConfigEntry: """Initialize Wyoming STT.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -129,9 +131,13 @@ async def init_wyoming_stt(hass: HomeAssistant, stt_config_entry: ConfigEntry): ): await hass.config_entries.async_setup(stt_config_entry.entry_id) + return stt_config_entry + @pytest.fixture -async def init_wyoming_tts(hass: HomeAssistant, tts_config_entry: ConfigEntry): +async def init_wyoming_tts( + hass: HomeAssistant, tts_config_entry: ConfigEntry +) -> ConfigEntry: """Initialize Wyoming TTS.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -139,11 +145,13 @@ async def init_wyoming_tts(hass: HomeAssistant, tts_config_entry: ConfigEntry): ): await hass.config_entries.async_setup(tts_config_entry.entry_id) + return tts_config_entry + @pytest.fixture async def init_wyoming_wake_word( hass: HomeAssistant, wake_word_config_entry: ConfigEntry -): +) -> ConfigEntry: """Initialize Wyoming Wake Word.""" with patch( "homeassistant.components.wyoming.data.load_wyoming_info", @@ -151,6 +159,8 @@ async def init_wyoming_wake_word( ): await hass.config_entries.async_setup(wake_word_config_entry.entry_id) + return wake_word_config_entry + @pytest.fixture async def init_wyoming_intent( diff --git a/tests/components/wyoming/test_tts.py b/tests/components/wyoming/test_tts.py index 263804787b1..6e0edc022c0 100644 --- a/tests/components/wyoming/test_tts.py +++ b/tests/components/wyoming/test_tts.py @@ -150,17 +150,15 @@ async def test_get_tts_audio_connection_lost( hass: HomeAssistant, init_wyoming_tts ) -> None: """Test streaming audio and losing connection.""" - with ( - patch( - "homeassistant.components.wyoming.tts.AsyncTcpClient", - MockAsyncTcpClient([None]), - ), - pytest.raises(HomeAssistantError), + stream = tts.async_create_stream(hass, "tts.test_tts", "en-US") + with patch( + "homeassistant.components.wyoming.tts.AsyncTcpClient", + 
MockAsyncTcpClient([None]), ): - await tts.async_get_media_source_audio( - hass, - tts.generate_media_source_id(hass, "Hello world", "tts.test_tts", "en-US"), - ) + stream.async_set_message("Hello world") + with pytest.raises(HomeAssistantError): + async for _chunk in stream.async_stream_result(): + pass async def test_get_tts_audio_audio_oserror( diff --git a/tests/components/wyoming/test_websocket.py b/tests/components/wyoming/test_websocket.py new file mode 100644 index 00000000000..18b43321354 --- /dev/null +++ b/tests/components/wyoming/test_websocket.py @@ -0,0 +1,58 @@ +"""Websocket tests for Wyoming integration.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from tests.typing import WebSocketGenerator + + +async def test_info( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + init_components, + init_wyoming_stt: ConfigEntry, + init_wyoming_tts: ConfigEntry, + init_wyoming_wake_word: ConfigEntry, + init_wyoming_intent: ConfigEntry, + init_wyoming_handle: ConfigEntry, +) -> None: + """Test info websocket command.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "wyoming/info"}) + + # result + msg = await client.receive_json() + assert msg["success"] + + info = msg.get("result", {}).get("info", {}) + + # stt (speech-to-text) = asr (automated speech recognition) + assert init_wyoming_stt.entry_id in info + asr_info = info[init_wyoming_stt.entry_id].get("asr", []) + assert len(asr_info) == 1 + assert asr_info[0].get("name") == "Test ASR" + + # tts (text-to-speech) + assert init_wyoming_tts.entry_id in info + tts_info = info[init_wyoming_tts.entry_id].get("tts", []) + assert len(tts_info) == 1 + assert tts_info[0].get("name") == "Test TTS" + + # wake word detection + assert init_wyoming_wake_word.entry_id in info + wake_info = info[init_wyoming_wake_word.entry_id].get("wake", []) + assert len(wake_info) == 1 + assert wake_info[0].get("name") == "Test Wake Word" + + # intent recognition + assert init_wyoming_intent.entry_id in info + intent_info = info[init_wyoming_intent.entry_id].get("intent", []) + assert len(intent_info) == 1 + assert intent_info[0].get("name") == "Test Intent" + + # intent handling + assert init_wyoming_handle.entry_id in info + handle_info = info[init_wyoming_handle.entry_id].get("handle", []) + assert len(handle_info) == 1 + assert handle_info[0].get("name") == "Test Handle" diff --git a/tests/components/yandextts/test_tts.py b/tests/components/yandextts/test_tts.py index 77878c2be51..098fc025bf3 100644 --- a/tests/components/yandextts/test_tts.py +++ b/tests/components/yandextts/test_tts.py @@ -223,7 +223,7 @@ async def test_service_say_timeout( assert len(calls) == 1 assert ( await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) assert len(aioclient_mock.mock_calls) == 1 @@ -269,7 +269,7 @@ async def test_service_say_http_error( assert len(calls) == 1 assert ( await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) - == HTTPStatus.NOT_FOUND + == HTTPStatus.INTERNAL_SERVER_ERROR ) diff --git a/tests/components/zha/test_cover.py b/tests/components/zha/test_cover.py index e5d588aa1bf..4bc4d6c97cf 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -81,7 +81,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: cluster = zigpy_device.endpoints[1].window_covering 
cluster.PLUGGED_ATTR_READS = { WCAttrs.current_position_lift_percentage.name: 0, - WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.current_position_tilt_percentage.name: 100, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), } @@ -115,33 +115,33 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert state assert state.state == CoverState.OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 - assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 58 + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 - # test that the state has changed from unavailable to off + # test that the state has changed from open to closed await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 100} ) assert hass.states.get(entity_id).state == CoverState.CLOSED - # test to see if it opens + # test that it opens await send_attributes_report( hass, cluster, {WCAttrs.current_position_lift_percentage.id: 0} ) assert hass.states.get(entity_id).state == CoverState.OPEN - # test that the state remains after tilting to 100% - await send_attributes_report( - hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} - ) - assert hass.states.get(entity_id).state == CoverState.OPEN - - # test to see the state remains after tilting to 0% + # test that the state remains after tilting to 0% (open) await send_attributes_report( hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} ) assert hass.states.get(entity_id).state == CoverState.OPEN - # close from UI + # test that the state remains after tilting to 100% (closed) + await send_attributes_report( + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 100} + ) + assert hass.states.get(entity_id).state == CoverState.OPEN + + # close lift from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True @@ -160,6 +160,11 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSED + # close tilt from UI, needs re-opening first + await send_attributes_report( + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} + ) + assert hass.states.get(entity_id).state == CoverState.OPEN with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -185,7 +190,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSED - # open from UI + # open lift from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True @@ -204,6 +209,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN + # open tilt from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -229,7 +235,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN - # set position UI + # set lift position from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, 
zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -261,6 +267,7 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.OPEN + # set tilt position from UI with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): await hass.services.async_call( COVER_DOMAIN, @@ -281,13 +288,13 @@ async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( - hass, cluster, {WCAttrs.current_position_lift_percentage.id: 35} + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 35} ) assert hass.states.get(entity_id).state == CoverState.CLOSING await send_attributes_report( - hass, cluster, {WCAttrs.current_position_lift_percentage.id: 53} + hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 53} ) assert hass.states.get(entity_id).state == CoverState.OPEN @@ -338,7 +345,7 @@ async def test_cover_failures( # load up cover domain cluster = zigpy_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_tilt_percentage.name: 42, + WCAttrs.current_position_tilt_percentage.name: 100, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, } update_attribute_cache(cluster) @@ -355,7 +362,7 @@ async def test_cover_failures( await send_attributes_report(hass, cluster, {0: 0, 8: 100, 1: 1}) assert hass.states.get(entity_id).state == CoverState.CLOSED - # test to see if it opens + # test that it opens await send_attributes_report(hass, cluster, {0: 1, 8: 0, 1: 100}) assert hass.states.get(entity_id).state == CoverState.OPEN diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index bcdc0c3ce16..ce7b0e0109e 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -13,7 +13,9 @@ from zwave_js_server.model.node import Node from zwave_js_server.model.node.data_model import NodeDataType from zwave_js_server.version import VersionInfo +from homeassistant.components.zwave_js import PLATFORMS from homeassistant.components.zwave_js.const import DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.util.json import JsonArrayType @@ -511,18 +513,25 @@ def aeotec_smart_switch_7_state_fixture() -> NodeDataType: @pytest.fixture(name="listen_block") -def mock_listen_block_fixture(): +def mock_listen_block_fixture() -> asyncio.Event: """Mock a listen block.""" return asyncio.Event() +@pytest.fixture(name="listen_result") +def listen_result_fixture() -> asyncio.Future[None]: + """Mock a listen result.""" + return asyncio.Future() + + @pytest.fixture(name="client") def mock_client_fixture( - controller_state, - controller_node_state, - version_state, - log_config_state, - listen_block, + controller_state: dict[str, Any], + controller_node_state: dict[str, Any], + version_state: dict[str, Any], + log_config_state: dict[str, Any], + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], ): """Mock a client.""" with patch( @@ -537,6 +546,7 @@ def mock_client_fixture( async def listen(driver_ready: asyncio.Event) -> None: driver_ready.set() await listen_block.wait() + await listen_result async def disconnect(): client.connected = False @@ -817,18 +827,29 @@ def nortek_thermostat_removed_event_fixture(client) -> Node: @pytest.fixture(name="integration") -async def integration_fixture(hass: 
HomeAssistant, client) -> MockConfigEntry: +async def integration_fixture( + hass: HomeAssistant, + client: MagicMock, + platforms: list[Platform], +) -> MockConfigEntry: """Set up the zwave_js integration.""" entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + with patch("homeassistant.components.zwave_js.PLATFORMS", platforms): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() client.async_send_command.reset_mock() return entry +@pytest.fixture +def platforms() -> list[Platform]: + """Fixture to specify platforms to test.""" + return PLATFORMS + + @pytest.fixture(name="chain_actuator_zws12") def window_cover_fixture(client, chain_actuator_zws12_state) -> Node: """Mock a window cover node.""" diff --git a/tests/components/zwave_js/fixtures/controller_state.json b/tests/components/zwave_js/fixtures/controller_state.json index d6d9dcacd9e..c3b9de4bdec 100644 --- a/tests/components/zwave_js/fixtures/controller_state.json +++ b/tests/components/zwave_js/fixtures/controller_state.json @@ -23,6 +23,7 @@ ], "sucNodeId": 1, "supportsTimers": false, + "supportsLongRange": true, "isHealNetworkActive": false, "inclusionState": 0, "status": 0 diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index 42c5d59d7ad..f0134c7c43c 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -5,7 +5,7 @@ from http import HTTPStatus from io import BytesIO import json from typing import Any -from unittest.mock import MagicMock, PropertyMock, patch +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch import pytest from zwave_js_server.const import ( @@ -103,6 +103,12 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator CONTROLLER_PATCH_PREFIX = "zwave_js_server.model.controller.Controller" +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [] + + def get_device(hass: HomeAssistant, node): """Get device ID for a node.""" dev_reg = dr.async_get(hass) @@ -168,6 +174,7 @@ async def test_network_status( assert result["client"]["server_version"] == "1.0.0" assert not result["client"]["server_logging_enabled"] assert result["controller"]["inclusion_state"] == InclusionState.IDLE + assert result["controller"]["supports_long_range"] # Try API call with device ID device = device_registry.async_get_device( @@ -1094,52 +1101,27 @@ async def test_provision_smart_start_node( client.async_send_command.return_value = {"success": True} - # Test provisioning entry - await ws_client.send_json( - { - ID: 2, - TYPE: "zwave_js/provision_smart_start_node", - ENTRY_ID: entry.entry_id, - PLANNED_PROVISIONING_ENTRY: { - DSK: "test", - SECURITY_CLASSES: [0], - }, - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - - assert len(client.async_send_command.call_args_list) == 1 - assert client.async_send_command.call_args[0][0] == { - "command": "controller.provision_smart_start_node", - "entry": ProvisioningEntry( - "test", [SecurityClass.S2_UNAUTHENTICATED] - ).to_dict(), + valid_qr_info = { + VERSION: 1, + SECURITY_CLASSES: [0], + DSK: "test", + GENERIC_DEVICE_CLASS: 1, + SPECIFIC_DEVICE_CLASS: 1, + INSTALLER_ICON_TYPE: 1, + MANUFACTURER_ID: 1, + PRODUCT_TYPE: 1, + PRODUCT_ID: 1, + APPLICATION_VERSION: "test", + "name": "test", } - client.async_send_command.reset_mock() - 
client.async_send_command.return_value = {"success": True} - # Test QR provisioning information await ws_client.send_json( { ID: 3, TYPE: "zwave_js/provision_smart_start_node", ENTRY_ID: entry.entry_id, - QR_PROVISIONING_INFORMATION: { - VERSION: 1, - SECURITY_CLASSES: [0], - DSK: "test", - GENERIC_DEVICE_CLASS: 1, - SPECIFIC_DEVICE_CLASS: 1, - INSTALLER_ICON_TYPE: 1, - MANUFACTURER_ID: 1, - PRODUCT_TYPE: 1, - PRODUCT_ID: 1, - APPLICATION_VERSION: "test", - "name": "test", - }, + QR_PROVISIONING_INFORMATION: valid_qr_info, } ) @@ -1170,28 +1152,6 @@ async def test_provision_smart_start_node( client.async_send_command.reset_mock() client.async_send_command.return_value = {"success": True} - # Test QR code string - await ws_client.send_json( - { - ID: 4, - TYPE: "zwave_js/provision_smart_start_node", - ENTRY_ID: entry.entry_id, - QR_CODE_STRING: "90testtesttesttesttesttesttesttesttesttesttesttesttest", - } - ) - - msg = await ws_client.receive_json() - assert msg["success"] - - assert len(client.async_send_command.call_args_list) == 1 - assert client.async_send_command.call_args[0][0] == { - "command": "controller.provision_smart_start_node", - "entry": "90testtesttesttesttesttesttesttesttesttesttesttesttest", - } - - client.async_send_command.reset_mock() - client.async_send_command.return_value = {"success": True} - # Test QR provisioning information with S2 version throws error await ws_client.send_json( { @@ -1242,9 +1202,7 @@ async def test_provision_smart_start_node( ID: 7, TYPE: "zwave_js/provision_smart_start_node", ENTRY_ID: entry.entry_id, - QR_CODE_STRING: ( - "90testtesttesttesttesttesttesttesttesttesttesttesttest" - ), + QR_PROVISIONING_INFORMATION: valid_qr_info, } ) msg = await ws_client.receive_json() @@ -1262,7 +1220,7 @@ async def test_provision_smart_start_node( ID: 8, TYPE: "zwave_js/provision_smart_start_node", ENTRY_ID: entry.entry_id, - QR_CODE_STRING: "90testtesttesttesttesttesttesttesttesttesttesttesttest", + QR_PROVISIONING_INFORMATION: valid_qr_info, } ) msg = await ws_client.receive_json() @@ -5200,6 +5158,242 @@ async def test_get_integration_settings( } +async def test_backup_nvm( + hass: HomeAssistant, + integration, + client, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the backup NVM websocket command.""" + ws_client = await hass_ws_client(hass) + + # Set up mocks for the controller events + controller = client.driver.controller + + # Test subscription and events + with patch.object( + controller, "async_backup_nvm_raw_base64", return_value="test" + ) as mock_backup: + # Send the subscription request + await ws_client.send_json_auto_id( + { + "type": "zwave_js/backup_nvm", + "entry_id": integration.entry_id, + } + ) + + # Verify the finished event with data first + msg = await ws_client.receive_json() + assert msg["type"] == "event" + assert msg["event"]["event"] == "finished" + assert msg["event"]["data"] == "test" + + # Verify subscription success + msg = await ws_client.receive_json() + assert msg["type"] == "result" + assert msg["success"] is True + + # Simulate progress events + event = Event( + "nvm backup progress", + { + "source": "controller", + "event": "nvm backup progress", + "bytesRead": 25, + "total": 100, + }, + ) + controller.receive_event(event) + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "nvm backup progress" + assert msg["event"]["bytesRead"] == 25 + assert msg["event"]["total"] == 100 + + event = Event( + "nvm backup progress", + { + "source": "controller", + "event": "nvm backup progress", + 
"bytesRead": 50, + "total": 100, + }, + ) + controller.receive_event(event) + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "nvm backup progress" + assert msg["event"]["bytesRead"] == 50 + assert msg["event"]["total"] == 100 + + # Wait for the backup to complete + await hass.async_block_till_done() + + # Verify the backup was called + assert mock_backup.called + + # Test backup failure + with patch.object( + controller, + "async_backup_nvm_raw_base64", + side_effect=FailedCommand("failed_command", "Backup failed"), + ): + # Send the subscription request + await ws_client.send_json_auto_id( + { + "type": "zwave_js/backup_nvm", + "entry_id": integration.entry_id, + } + ) + + # Verify error response + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "Backup failed" + + # Test config entry not found + await ws_client.send_json_auto_id( + { + "type": "zwave_js/backup_nvm", + "entry_id": "invalid_entry_id", + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "not_found" + + # Test config entry not loaded + await hass.config_entries.async_unload(integration.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + "type": "zwave_js/backup_nvm", + "entry_id": integration.entry_id, + } + ) + msg = await ws_client.receive_json() + assert msg["error"]["code"] == "not_loaded" + + +async def test_restore_nvm( + hass: HomeAssistant, + integration, + client, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the restore NVM websocket command.""" + ws_client = await hass_ws_client(hass) + + # Set up mocks for the controller events + controller = client.driver.controller + + # Test restore success + with patch.object( + controller, "async_restore_nvm_base64", return_value=None + ) as mock_restore: + # Send the subscription request + await ws_client.send_json_auto_id( + { + "type": "zwave_js/restore_nvm", + "entry_id": integration.entry_id, + "data": "dGVzdA==", # base64 encoded "test" + } + ) + + # Verify the finished event first + msg = await ws_client.receive_json() + assert msg["type"] == "event" + assert msg["event"]["event"] == "finished" + + # Verify subscription success + msg = await ws_client.receive_json() + assert msg["type"] == "result" + assert msg["success"] is True + + # Simulate progress events + event = Event( + "nvm restore progress", + { + "source": "controller", + "event": "nvm restore progress", + "bytesWritten": 25, + "total": 100, + }, + ) + controller.receive_event(event) + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "nvm restore progress" + assert msg["event"]["bytesWritten"] == 25 + assert msg["event"]["total"] == 100 + + event = Event( + "nvm restore progress", + { + "source": "controller", + "event": "nvm restore progress", + "bytesWritten": 50, + "total": 100, + }, + ) + controller.receive_event(event) + msg = await ws_client.receive_json() + assert msg["event"]["event"] == "nvm restore progress" + assert msg["event"]["bytesWritten"] == 50 + assert msg["event"]["total"] == 100 + + # Wait for the restore to complete + await hass.async_block_till_done() + + # Verify the restore was called + assert mock_restore.called + + # Test restore failure + with patch.object( + controller, + "async_restore_nvm_base64", + side_effect=FailedCommand("failed_command", "Restore failed"), + ): + # Send the subscription request + await ws_client.send_json_auto_id( + { + "type": "zwave_js/restore_nvm", + 
"entry_id": integration.entry_id, + "data": "dGVzdA==", # base64 encoded "test" + } + ) + + # Verify error response + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "Restore failed" + + # Test entry_id not found + await ws_client.send_json_auto_id( + { + "type": "zwave_js/restore_nvm", + "entry_id": "invalid_entry_id", + "data": "dGVzdA==", # base64 encoded "test" + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "not_found" + + # Test config entry not loaded + await hass.config_entries.async_unload(integration.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + "type": "zwave_js/restore_nvm", + "entry_id": integration.entry_id, + "data": "dGVzdA==", # base64 encoded "test" + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "not_loaded" + + async def test_cancel_secure_bootstrap_s2( hass: HomeAssistant, client, integration, hass_ws_client: WebSocketGenerator ) -> None: @@ -5340,3 +5534,127 @@ async def test_subscribe_s2_inclusion( msg = await ws_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == ERR_NOT_FOUND + + +async def test_lookup_device( + hass: HomeAssistant, + integration: MockConfigEntry, + client: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test lookup_device websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + + # Create mock device response + mock_device = MagicMock() + mock_device.to_dict.return_value = { + "manufacturer": "Test Manufacturer", + "label": "Test Device", + "description": "Test Device Description", + "devices": [{"productType": 1, "productId": 2}], + "firmwareVersion": {"min": "1.0", "max": "2.0"}, + } + + # Test successful lookup + client.driver.config_manager.lookup_device = AsyncMock(return_value=mock_device) + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: entry.entry_id, + MANUFACTURER_ID: 1, + PRODUCT_TYPE: 2, + PRODUCT_ID: 3, + APPLICATION_VERSION: "1.5", + } + ) + msg = await ws_client.receive_json() + + assert msg["success"] + assert msg["result"] == mock_device.to_dict.return_value + + client.driver.config_manager.lookup_device.assert_called_once_with(1, 2, 3, "1.5") + + # Reset mock + client.driver.config_manager.lookup_device.reset_mock() + + # Test lookup without optional application_version + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: entry.entry_id, + MANUFACTURER_ID: 4, + PRODUCT_TYPE: 5, + PRODUCT_ID: 6, + } + ) + msg = await ws_client.receive_json() + + assert msg["success"] + assert msg["result"] == mock_device.to_dict.return_value + + client.driver.config_manager.lookup_device.assert_called_once_with(4, 5, 6, None) + + # Test device not found + with patch.object( + client.driver.config_manager, + "lookup_device", + return_value=None, + ): + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: entry.entry_id, + MANUFACTURER_ID: 99, + PRODUCT_TYPE: 99, + PRODUCT_ID: 99, + APPLICATION_VERSION: "9.9", + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + assert msg["error"]["message"] == "Device not found" + + # Test sending command with improper entry ID fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: "invalid_entry_id", + 
MANUFACTURER_ID: 1, + PRODUCT_TYPE: 1, + PRODUCT_ID: 1, + APPLICATION_VERSION: "1.0", + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + assert msg["error"]["message"] == "Config entry invalid_entry_id not found" + + # Test FailedCommand exception + error_message = "Failed to execute lookup_device command" + with patch.object( + client.driver.config_manager, + "lookup_device", + side_effect=FailedCommand("lookup_device", error_message), + ): + # Send the subscription request + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/lookup_device", + ENTRY_ID: entry.entry_id, + MANUFACTURER_ID: 1, + PRODUCT_TYPE: 2, + PRODUCT_ID: 3, + APPLICATION_VERSION: "1.0", + } + ) + + # Verify error response + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == error_message + assert msg["error"]["message"] == f"Command failed: {error_message}" diff --git a/tests/components/zwave_js/test_binary_sensor.py b/tests/components/zwave_js/test_binary_sensor.py index 0054439ef1d..657dd337bf9 100644 --- a/tests/components/zwave_js/test_binary_sensor.py +++ b/tests/components/zwave_js/test_binary_sensor.py @@ -1,5 +1,6 @@ """Test the Z-Wave JS binary sensor platform.""" +import pytest from zwave_js_server.event import Event from zwave_js_server.model.node import Node @@ -10,6 +11,7 @@ from homeassistant.const import ( STATE_ON, STATE_UNKNOWN, EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -26,6 +28,12 @@ from .common import ( from tests.common import MockConfigEntry +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.BINARY_SENSOR] + + async def test_low_battery_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, multisensor_6, integration ) -> None: diff --git a/tests/components/zwave_js/test_button.py b/tests/components/zwave_js/test_button.py index b0c06668926..0282a268b54 100644 --- a/tests/components/zwave_js/test_button.py +++ b/tests/components/zwave_js/test_button.py @@ -5,11 +5,17 @@ import pytest from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.zwave_js.const import DOMAIN, SERVICE_REFRESH_VALUE from homeassistant.components.zwave_js.helpers import get_valueless_base_unique_id -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.BUTTON] + + async def test_ping_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/zwave_js/test_climate.py b/tests/components/zwave_js/test_climate.py index 5d711528a28..f312284d897 100644 --- a/tests/components/zwave_js/test_climate.py +++ b/tests/components/zwave_js/test_climate.py @@ -42,6 +42,7 @@ from homeassistant.const import ( ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -56,6 +57,12 @@ from .common import ( ) +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.CLIMATE] + + async def test_thermostat_v2( hass: HomeAssistant, client, diff 
--git a/tests/components/zwave_js/test_cover.py b/tests/components/zwave_js/test_cover.py index b13d4f9787f..13f519725fd 100644 --- a/tests/components/zwave_js/test_cover.py +++ b/tests/components/zwave_js/test_cover.py @@ -2,6 +2,7 @@ import logging +import pytest from zwave_js_server.const import ( CURRENT_STATE_PROPERTY, CURRENT_VALUE_PROPERTY, @@ -35,6 +36,7 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, STATE_UNKNOWN, + Platform, ) from homeassistant.core import HomeAssistant @@ -50,6 +52,12 @@ FIBARO_FGR_223_SHUTTER_COVER_ENTITY = "cover.fgr_223_test_cover" LOGGER.setLevel(logging.DEBUG) +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.COVER] + + async def test_window_cover( hass: HomeAssistant, client, chain_actuator_zws12, integration ) -> None: diff --git a/tests/components/zwave_js/test_event.py b/tests/components/zwave_js/test_event.py index 1db02662f4e..84b1ade2632 100644 --- a/tests/components/zwave_js/test_event.py +++ b/tests/components/zwave_js/test_event.py @@ -3,11 +3,12 @@ from datetime import timedelta from freezegun import freeze_time +import pytest from zwave_js_server.event import Event from homeassistant.components.event import ATTR_EVENT_TYPE from homeassistant.components.zwave_js.const import ATTR_VALUE -from homeassistant.const import STATE_UNKNOWN +from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -15,6 +16,12 @@ BASIC_EVENT_VALUE_ENTITY = "event.honeywell_in_wall_smart_fan_control_event_valu CENTRAL_SCENE_ENTITY = "event.node_51_scene_002" +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.EVENT] + + async def test_basic( hass: HomeAssistant, client, fan_honeywell_39358, integration ) -> None: diff --git a/tests/components/zwave_js/test_events.py b/tests/components/zwave_js/test_events.py index 0bb6376a02b..8cdaef3e63d 100644 --- a/tests/components/zwave_js/test_events.py +++ b/tests/components/zwave_js/test_events.py @@ -6,11 +6,18 @@ import pytest from zwave_js_server.const import CommandClass from zwave_js_server.event import Event +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from tests.common import async_capture_events +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [] + + async def test_scenes( hass: HomeAssistant, hank_binary_switch, integration, client ) -> None: @@ -244,6 +251,7 @@ async def test_notifications( assert events[2].data["command_class_name"] == "Multilevel Switch" +@pytest.mark.parametrize("platforms", [[Platform.SWITCH]]) async def test_value_updated( hass: HomeAssistant, vision_security_zl7432, integration, client ) -> None: diff --git a/tests/components/zwave_js/test_init.py b/tests/components/zwave_js/test_init.py index c575066b57c..91e333f7c7d 100644 --- a/tests/components/zwave_js/test_init.py +++ b/tests/components/zwave_js/test_init.py @@ -3,14 +3,19 @@ import asyncio from copy import deepcopy import logging -from unittest.mock import AsyncMock, call, patch +from typing import Any +from unittest.mock import AsyncMock, MagicMock, call, patch from aiohasupervisor import SupervisorError from aiohasupervisor.models import AddonsOptions import pytest from zwave_js_server.client import Client from zwave_js_server.event import Event -from zwave_js_server.exceptions import 
BaseZwaveJSServerError, InvalidServerVersion +from zwave_js_server.exceptions import ( + BaseZwaveJSServerError, + InvalidServerVersion, + NotConnected, +) from zwave_js_server.model.node import Node from zwave_js_server.model.version import VersionInfo @@ -21,7 +26,7 @@ from homeassistant.components.zwave_js import DOMAIN from homeassistant.components.zwave_js.helpers import get_device_id from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE -from homeassistant.core import HomeAssistant +from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import ( area_registry as ar, device_registry as dr, @@ -32,7 +37,11 @@ from homeassistant.setup import async_setup_component from .common import AIR_TEMPERATURE_SENSOR, EATON_RF9640_ENTITY -from tests.common import MockConfigEntry, async_get_persistent_notifications +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + async_get_persistent_notifications, +) from tests.typing import WebSocketGenerator @@ -127,24 +136,215 @@ async def test_noop_statistics(hass: HomeAssistant, client) -> None: assert not mock_cmd2.called -@pytest.mark.parametrize("error", [BaseZwaveJSServerError("Boom"), Exception("Boom")]) -async def test_listen_failure(hass: HomeAssistant, client, error) -> None: - """Test we handle errors during client listen.""" +async def test_driver_ready_timeout_during_setup( + hass: HomeAssistant, + client: MagicMock, + listen_block: asyncio.Event, +) -> None: + """Test we handle driver ready timeout during setup.""" - async def listen(driver_ready): - """Mock the client listen method.""" - # Set the connect side effect to stop an endless loop on reload. - client.connect.side_effect = BaseZwaveJSServerError("Boom") - raise error + async def listen(driver_ready: asyncio.Event) -> None: + """Mock listen.""" + await listen_block.wait() client.listen.side_effect = listen + + entry = MockConfigEntry( + domain="zwave_js", + data={"url": "ws://test.org", "data_collection_opted_in": True}, + ) + entry.add_to_hass(hass) + assert client.disconnect.call_count == 0 + + with patch("homeassistant.components.zwave_js.DRIVER_READY_TIMEOUT", new=0): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.SETUP_RETRY + assert client.disconnect.call_count == 1 + + +@pytest.mark.parametrize("core_state", [CoreState.running, CoreState.stopping]) +@pytest.mark.parametrize( + ("listen_future_result_method", "listen_future_result"), + [ + ("set_exception", BaseZwaveJSServerError("Boom")), + ("set_exception", Exception("Boom")), + ("set_result", None), + ], +) +async def test_listen_done_during_setup_before_forward_entry( + hass: HomeAssistant, + client: MagicMock, + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], + core_state: CoreState, + listen_future_result_method: str, + listen_future_result: Exception | None, +) -> None: + """Test listen task finishing during setup before forward entry.""" + assert hass.state is CoreState.running + + async def listen(driver_ready: asyncio.Event) -> None: + await listen_block.wait() + await listen_result + async_fire_time_changed(hass, fire_all=True) + + client.listen.side_effect = listen + hass.set_state(core_state) + listen_block.set() + getattr(listen_result, listen_future_result_method)(listen_future_result) + entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) entry.add_to_hass(hass) 
+ assert client.disconnect.call_count == 0 await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.SETUP_RETRY + assert client.disconnect.call_count == 1 + + +async def test_not_connected_during_setup_after_forward_entry( + hass: HomeAssistant, + client: MagicMock, + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], +) -> None: + """Test we handle not connected client during setup after forward entry.""" + + async def send_command_side_effect(*args: Any, **kwargs: Any) -> None: + """Mock send command.""" + listen_block.set() + listen_result.set_result(None) + # Yield to allow the listen task to run + await asyncio.sleep(0) + raise NotConnected("Boom") + + async def listen(driver_ready: asyncio.Event) -> None: + """Mock listen.""" + driver_ready.set() + client.async_send_command.side_effect = send_command_side_effect + await listen_block.wait() + await listen_result + + client.listen.side_effect = listen + + entry = MockConfigEntry( + domain="zwave_js", + data={"url": "ws://test.org", "data_collection_opted_in": True}, + ) + entry.add_to_hass(hass) + assert client.disconnect.call_count == 0 + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.SETUP_RETRY + assert client.disconnect.call_count == 1 + + +@pytest.mark.parametrize("core_state", [CoreState.running, CoreState.stopping]) +@pytest.mark.parametrize( + ("listen_future_result_method", "listen_future_result"), + [ + ("set_exception", BaseZwaveJSServerError("Boom")), + ("set_exception", Exception("Boom")), + ("set_result", None), + ], +) +async def test_listen_done_during_setup_after_forward_entry( + hass: HomeAssistant, + client: MagicMock, + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], + core_state: CoreState, + listen_future_result_method: str, + listen_future_result: Exception | None, +) -> None: + """Test listen task finishing during setup after forward entry.""" + assert hass.state is CoreState.running + + async def send_command_side_effect(*args: Any, **kwargs: Any) -> None: + """Mock send command.""" + listen_block.set() + getattr(listen_result, listen_future_result_method)(listen_future_result) + # Yield to allow the listen task to run + await asyncio.sleep(0) + + async def listen(driver_ready: asyncio.Event) -> None: + """Mock listen.""" + driver_ready.set() + client.async_send_command.side_effect = send_command_side_effect + await listen_block.wait() + await listen_result + + client.listen.side_effect = listen + hass.set_state(core_state) + + entry = MockConfigEntry( + domain="zwave_js", + data={"url": "ws://test.org", "data_collection_opted_in": True}, + ) + entry.add_to_hass(hass) + assert client.disconnect.call_count == 0 + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.SETUP_RETRY + assert client.disconnect.call_count == 1 + + +@pytest.mark.parametrize( + ("core_state", "final_config_entry_state", "disconnect_call_count"), + [ + ( + CoreState.running, + ConfigEntryState.SETUP_RETRY, + 2, + ), # the reload will cause a disconnect call too + ( + CoreState.stopping, + ConfigEntryState.LOADED, + 0, + ), # the home assistant stop event will handle the disconnect + ], +) +@pytest.mark.parametrize( + ("listen_future_result_method", "listen_future_result"), + [ + ("set_exception", BaseZwaveJSServerError("Boom")), + ("set_exception", 
Exception("Boom")), + ("set_result", None), + ], +) +async def test_listen_done_after_setup( + hass: HomeAssistant, + client: MagicMock, + integration: MockConfigEntry, + listen_block: asyncio.Event, + listen_result: asyncio.Future[None], + core_state: CoreState, + listen_future_result_method: str, + listen_future_result: Exception | None, + final_config_entry_state: ConfigEntryState, + disconnect_call_count: int, +) -> None: + """Test listen task finishing after setup.""" + config_entry = integration + assert config_entry.state is ConfigEntryState.LOADED + assert hass.state is CoreState.running + assert client.disconnect.call_count == 0 + + hass.set_state(core_state) + listen_block.set() + getattr(listen_result, listen_future_result_method)(listen_future_result) + await hass.async_block_till_done() + + assert config_entry.state is final_config_entry_state + assert client.disconnect.call_count == disconnect_call_count async def test_new_entity_on_value_added( diff --git a/tests/components/zwave_js/test_siren.py b/tests/components/zwave_js/test_siren.py index 4eb872954d1..d932338f9dc 100644 --- a/tests/components/zwave_js/test_siren.py +++ b/tests/components/zwave_js/test_siren.py @@ -1,5 +1,6 @@ """Test the Z-Wave JS siren platform.""" +import pytest from zwave_js_server.event import Event from homeassistant.components.siren import ( @@ -7,7 +8,7 @@ from homeassistant.components.siren import ( ATTR_TONE, ATTR_VOLUME_LEVEL, ) -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant SIREN_ENTITY = "siren.indoor_siren_6_play_tone_2" @@ -64,6 +65,12 @@ TONE_ID_VALUE_ID = { } +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.SIREN] + + async def test_siren( hass: HomeAssistant, client, aeotec_zw164_siren, integration ) -> None: diff --git a/tests/components/zwave_js/test_update.py b/tests/components/zwave_js/test_update.py index d6683fa24cb..6a4f48a0dc5 100644 --- a/tests/components/zwave_js/test_update.py +++ b/tests/components/zwave_js/test_update.py @@ -658,8 +658,10 @@ async def test_update_entity_delay( assert len(client.async_send_command.call_args_list) == 2 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) - await hass.async_block_till_done(wait_background_tasks=True) + update_interval = timedelta(minutes=5) + freezer.tick(update_interval) + async_fire_time_changed(hass) + await hass.async_block_till_done() nodes: set[int] = set() @@ -668,8 +670,9 @@ async def test_update_entity_delay( assert args["command"] == "controller.get_available_firmware_updates" nodes.add(args["nodeId"]) - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=10)) - await hass.async_block_till_done(wait_background_tasks=True) + freezer.tick(update_interval) + async_fire_time_changed(hass) + await hass.async_block_till_done() assert len(client.async_send_command.call_args_list) == 4 args = client.async_send_command.call_args_list[3][0][0] diff --git a/tests/conftest.py b/tests/conftest.py index 2f7330ebf22..65e3518956e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,6 +16,7 @@ import reprlib from shutil import rmtree import sqlite3 import ssl +import sys import threading from typing import TYPE_CHECKING, Any, cast from unittest.mock import AsyncMock, MagicMock, Mock, _patch, patch @@ -66,7 +67,12 @@ from homeassistant.components.websocket_api.auth import ( # pylint: 
disable-next=hass-component-root-import from homeassistant.components.websocket_api.http import URL from homeassistant.config import YAML_CONFIG_FILE -from homeassistant.config_entries import ConfigEntries, ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ( + ConfigEntries, + ConfigEntry, + ConfigEntryState, + ConfigSubentryData, +) from homeassistant.const import BASE_PLATFORMS, HASSIO_USER_NAME from homeassistant.core import ( Context, @@ -83,6 +89,7 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, floor_registry as fr, + frame, issue_registry as ir, label_registry as lr, recorder as recorder_helper, @@ -428,11 +435,17 @@ def verify_cleanup( @pytest.fixture(autouse=True) -def reset_hass_threading_local_object() -> Generator[None]: - """Reset the _Hass threading.local object for every test case.""" +def reset_globals() -> Generator[None]: + """Reset global objects for every test case.""" yield + + # Reset the _Hass threading.local object ha._hass.__dict__.clear() + # Reset the frame helper globals + frame.async_setup(None) + frame._REPORTED_INTEGRATIONS.clear() + @pytest.fixture(autouse=True, scope="session") def bcrypt_cost() -> Generator[None]: @@ -598,6 +611,7 @@ async def hass( async with async_test_home_assistant(loop, load_registries) as hass: orig_exception_handler = loop.get_exception_handler() loop.set_exception_handler(exc_handle) + frame.async_setup(hass) yield hass @@ -937,6 +951,12 @@ def mqtt_config_entry_data() -> dict[str, Any] | None: return None +@pytest.fixture +def mqtt_config_subentries_data() -> tuple[ConfigSubentryData] | None: + """Fixture to allow overriding MQTT subentries data.""" + return None + + @pytest.fixture def mqtt_config_entry_options() -> dict[str, Any] | None: """Fixture to allow overriding MQTT entry options.""" @@ -1023,6 +1043,7 @@ async def mqtt_mock( mqtt_client_mock: MqttMockPahoClient, mqtt_config_entry_data: dict[str, Any] | None, mqtt_config_entry_options: dict[str, Any] | None, + mqtt_config_subentries_data: tuple[ConfigSubentryData] | None, mqtt_mock_entry: MqttMockHAClientGenerator, ) -> AsyncGenerator[MqttMockHAClient]: """Fixture to mock MQTT component.""" @@ -1035,6 +1056,7 @@ async def _mqtt_mock_entry( mqtt_client_mock: MqttMockPahoClient, mqtt_config_entry_data: dict[str, Any] | None, mqtt_config_entry_options: dict[str, Any] | None, + mqtt_config_subentries_data: tuple[ConfigSubentryData] | None, ) -> AsyncGenerator[MqttMockHAClientGenerator]: """Fixture to mock a delayed setup of the MQTT config entry.""" # Local import to avoid processing MQTT modules when running a testcase @@ -1051,6 +1073,7 @@ async def _mqtt_mock_entry( entry = MockConfigEntry( data=mqtt_config_entry_data, options=mqtt_config_entry_options, + subentries_data=mqtt_config_subentries_data, domain=mqtt.DOMAIN, title="MQTT", version=1, @@ -1165,6 +1188,7 @@ async def mqtt_mock_entry( mqtt_client_mock: MqttMockPahoClient, mqtt_config_entry_data: dict[str, Any] | None, mqtt_config_entry_options: dict[str, Any] | None, + mqtt_config_subentries_data: tuple[ConfigSubentryData] | None, ) -> AsyncGenerator[MqttMockHAClientGenerator]: """Set up an MQTT config entry.""" @@ -1181,7 +1205,11 @@ async def mqtt_mock_entry( return await mqtt_mock_entry(_async_setup_config_entry) async with _mqtt_mock_entry( - hass, mqtt_client_mock, mqtt_config_entry_data, mqtt_config_entry_options + hass, + mqtt_client_mock, + mqtt_config_entry_data, + mqtt_config_entry_options, + mqtt_config_subentries_data, ) as mqtt_mock_entry: yield 
_setup_mqtt_entry @@ -1889,12 +1917,15 @@ def mock_integration_frame(integration_frame_path: str) -> Generator[Mock]: Defaults to calling from `hue` core integration, and can be parametrized with `integration_frame_path`. """ + correct_filename = f"/home/paulus/{integration_frame_path}/light.py" + correct_module_name = f"{integration_frame_path.replace('/', '.')}.light" correct_frame = Mock( filename=f"/home/paulus/{integration_frame_path}/light.py", lineno="23", line="self.light.is_on", ) with ( + patch.dict(sys.modules, {correct_module_name: Mock(__file__=correct_filename)}), patch( "homeassistant.helpers.frame.linecache.getline", return_value=correct_frame.line, diff --git a/tests/helpers/snapshots/test_frame.ambr b/tests/helpers/snapshots/test_frame.ambr new file mode 100644 index 00000000000..e74a4b2947a --- /dev/null +++ b/tests/helpers/snapshots/test_frame.ambr @@ -0,0 +1,82 @@ +# serializer version: 1 +# name: test_report_usage[core default] + list([ + ]) +# --- +# name: test_report_usage[core integration default] + list([ + "Detected that integration 'test_core_integration' test_report_string at homeassistant/components/test_core_integration/light.py, line 23: self.light.is_on. Please create a bug report at https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+test_core_integration%22", + ]) +# --- +# name: test_report_usage[core_behavior ignore] + list([ + ]) +# --- +# name: test_report_usage[core_behavior log] + list([ + 'Detected code that test_report_string. Please report this issue', + ]) +# --- +# name: test_report_usage[core_integration_behavior error] + list([ + "Detected that integration 'test_integration_frame' test_report_string at homeassistant/components/test_integration_frame/light.py, line 23: self.light.is_on. Please create a bug report at https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+test_integration_frame%22", + ]) +# --- +# name: test_report_usage[core_integration_behavior ignore] + list([ + ]) +# --- +# name: test_report_usage[custom integration default] + list([ + "Detected that custom integration 'test_custom_integration' test_report_string at custom_components/test_custom_integration/light.py, line 23: self.light.is_on. Please report it to the author of the 'test_custom_integration' custom integration", + ]) +# --- +# name: test_report_usage[custom integration error] + list([ + "Detected that custom integration 'test_custom_integration' test_report_string at custom_components/test_custom_integration/light.py, line 23: self.light.is_on. Please report it to the author of the 'test_custom_integration' custom integration", + ]) +# --- +# name: test_report_usage[custom integration ignore] + list([ + ]) +# --- +# name: test_report_usage_find_issue_tracker[core integration] + list([ + "Detected that integration 'test_core_integration' test_report_string at homeassistant/components/test_core_integration/light.py, line 23: self.light.is_on. Please create a bug report at https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+test_core_integration%22", + ]) +# --- +# name: test_report_usage_find_issue_tracker[core] + list([ + 'Detected code that test_report_string. Please report this issue', + ]) +# --- +# name: test_report_usage_find_issue_tracker[custom integration] + list([ + "Detected that custom integration 'test_custom_integration' test_report_string at custom_components/test_custom_integration/light.py, line 23: self.light.is_on. 
Please create a bug report at https://blablabla.com", + ]) +# --- +# name: test_report_usage_find_issue_tracker[unknown custom integration] + list([ + "Detected that custom integration 'unknown_custom_integration' test_report_string at custom_components/unknown_custom_integration/light.py, line 23: self.light.is_on. Please report it to the author of the 'unknown_custom_integration' custom integration", + ]) +# --- +# name: test_report_usage_find_issue_tracker_other_thread[core integration] + list([ + "Detected that integration 'test_core_integration' test_report_string at homeassistant/components/test_core_integration/light.py, line 23: self.light.is_on. Please create a bug report at https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+test_core_integration%22", + ]) +# --- +# name: test_report_usage_find_issue_tracker_other_thread[core] + list([ + 'Detected code that test_report_string. Please report this issue', + ]) +# --- +# name: test_report_usage_find_issue_tracker_other_thread[custom integration] + list([ + "Detected that custom integration 'test_custom_integration' test_report_string at custom_components/test_custom_integration/light.py, line 23: self.light.is_on. Please create a bug report at https://blablabla.com", + ]) +# --- +# name: test_report_usage_find_issue_tracker_other_thread[unknown custom integration] + list([ + "Detected that custom integration 'unknown_custom_integration' test_report_string at custom_components/unknown_custom_integration/light.py, line 23: self.light.is_on. Please report it to the author of the 'unknown_custom_integration' custom integration", + ]) +# --- diff --git a/tests/helpers/test_aiohttp_client.py b/tests/helpers/test_aiohttp_client.py index 13cb25bc516..6d2a7e7a8bb 100644 --- a/tests/helpers/test_aiohttp_client.py +++ b/tests/helpers/test_aiohttp_client.py @@ -249,7 +249,6 @@ async def test_get_clientsession_patched_close(hass: HomeAssistant) -> None: assert mock_close.call_count == 0 -@patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()) async def test_warning_close_session_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -292,7 +291,6 @@ async def test_warning_close_session_integration( ) in caplog.text -@patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()) async def test_warning_close_session_custom( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/helpers/test_condition.py b/tests/helpers/test_condition.py index b8c8c8a18c8..aac64f6139a 100644 --- a/tests/helpers/test_condition.py +++ b/tests/helpers/test_condition.py @@ -2080,6 +2080,7 @@ async def test_multiple_zones(hass: HomeAssistant) -> None: assert not test(hass) +@pytest.mark.usefixtures("hass") async def test_extract_entities() -> None: """Test extracting entities.""" assert condition.async_extract_entities( @@ -2153,6 +2154,7 @@ async def test_extract_entities() -> None: } +@pytest.mark.usefixtures("hass") async def test_extract_devices() -> None: """Test extracting devices.""" assert condition.async_extract_devices( diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index 0fc6b582bb5..5d16a9a62fd 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -1,11 +1,11 @@ """Tests for the Somfy config flow.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator from http import HTTPStatus 
import logging import time from typing import Any -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import aiohttp import pytest @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow from homeassistant.helpers.network import NoURLAvailableError -from tests.common import MockConfigEntry, mock_platform +from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -27,6 +27,11 @@ ACCESS_TOKEN_1 = "mock-access-token-1" ACCESS_TOKEN_2 = "mock-access-token-2" AUTHORIZE_URL = "https://example.como/auth/authorize" TOKEN_URL = "https://example.como/auth/token" +MOCK_SECRET_TOKEN_URLSAFE = ( + "token-" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" +) @pytest.fixture @@ -40,6 +45,22 @@ async def local_impl( ) +@pytest.fixture +async def local_impl_pkce( + hass: HomeAssistant, +) -> AsyncGenerator[config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce]: + """Local implementation.""" + assert await setup.async_setup_component(hass, "auth", {}) + with patch( + "homeassistant.helpers.config_entry_oauth2_flow.secrets.token_urlsafe", + return_value=MOCK_SECRET_TOKEN_URLSAFE + + "bbbbbb", # Add some characters that should be removed by the logic. + ): + yield config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce( + hass, TEST_DOMAIN, CLIENT_ID, AUTHORIZE_URL, TOKEN_URL + ) + + @pytest.fixture def flow_handler( hass: HomeAssistant, @@ -963,3 +984,143 @@ async def test_oauth2_without_secret_init( client = await hass_client_no_auth() resp = await client.get("/auth/external/callback?code=abcd&state=qwer") assert resp.status == 400 + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_abort_oauth_with_pkce_rejected( + hass: HomeAssistant, + flow_handler: type[config_entry_oauth2_flow.AbstractOAuth2FlowHandler], + local_impl_pkce: config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce, + hass_client_no_auth: ClientSessionGenerator, +) -> None: + """Check bad oauth token.""" + flow_handler.async_register_implementation(hass, local_impl_pkce) + + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + code_challenge = local_impl_pkce.compute_code_challenge(MOCK_SECRET_TOKEN_URLSAFE) + assert result["type"] == data_entry_flow.FlowResultType.EXTERNAL_STEP + + assert result["url"].startswith(f"{AUTHORIZE_URL}?") + assert f"client_id={CLIENT_ID}" in result["url"] + assert "redirect_uri=https://example.com/auth/external/callback" in result["url"] + assert f"state={state}" in result["url"] + assert "scope=read+write" in result["url"] + assert "response_type=code" in result["url"] + assert f"code_challenge={code_challenge}" in result["url"] + assert "code_challenge_method=S256" in result["url"] + + client = await hass_client_no_auth() + resp = await client.get( + f"/auth/external/callback?error=access_denied&state={state}" + ) + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] == 
data_entry_flow.FlowResultType.ABORT + assert result["reason"] == "user_rejected_authorize" + assert result["description_placeholders"] == {"error": "access_denied"} + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_oauth_with_pkce_adds_code_verifier_to_token_resolve( + hass: HomeAssistant, + flow_handler: type[config_entry_oauth2_flow.AbstractOAuth2FlowHandler], + local_impl_pkce: config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Check pkce flow.""" + + mock_integration( + hass, + MockModule( + domain=TEST_DOMAIN, + async_setup_entry=AsyncMock(return_value=True), + ), + ) + mock_platform(hass, f"{TEST_DOMAIN}.config_flow", None) + flow_handler.async_register_implementation(hass, local_impl_pkce) + + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + code_challenge = local_impl_pkce.compute_code_challenge(MOCK_SECRET_TOKEN_URLSAFE) + assert result["type"] == data_entry_flow.FlowResultType.EXTERNAL_STEP + + assert result["url"].startswith(f"{AUTHORIZE_URL}?") + assert f"client_id={CLIENT_ID}" in result["url"] + assert "redirect_uri=https://example.com/auth/external/callback" in result["url"] + assert f"state={state}" in result["url"] + assert "scope=read+write" in result["url"] + assert "response_type=code" in result["url"] + assert f"code_challenge={code_challenge}" in result["url"] + assert "code_challenge_method=S256" in result["url"] + + # Setup the response when HA tries to fetch a token with the code + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": REFRESH_TOKEN, + "access_token": ACCESS_TOKEN_1, + "type": "bearer", + "expires_in": 60, + }, + ) + + client = await hass_client_no_auth() + # trigger the callback + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + # Verify the token resolve request occurred + assert len(aioclient_mock.mock_calls) == 1 + assert aioclient_mock.mock_calls[0][2] == { + "client_id": CLIENT_ID, + "grant_type": "authorization_code", + "code": "abcd", + "redirect_uri": "https://example.com/auth/external/callback", + "code_verifier": MOCK_SECRET_TOKEN_URLSAFE, + } + + +@pytest.mark.parametrize("code_verifier_length", [40, 129]) +def test_generate_code_verifier_invalid_length(code_verifier_length: int) -> None: + """Test generate_code_verifier with an invalid length.""" + with pytest.raises(ValueError): + config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce.generate_code_verifier( + code_verifier_length + ) + + +@pytest.mark.parametrize("code_verifier", ["", "yyy", "a" * 129]) +def test_compute_code_challenge_invalid_code_verifier(code_verifier: str) -> None: + """Test compute_code_challenge with an invalid code_verifier.""" + with pytest.raises(ValueError): + config_entry_oauth2_flow.LocalOAuth2ImplementationWithPkce.compute_code_challenge( + code_verifier + ) diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 7202cef6f5f..c72295493e8 100644 --- a/tests/helpers/test_config_validation.py +++ 
b/tests/helpers/test_config_validation.py @@ -773,6 +773,7 @@ async def test_dynamic_template_no_hass(hass: HomeAssistant) -> None: await hass.async_add_executor_job(schema, value) +@pytest.mark.usefixtures("hass") def test_template_complex() -> None: """Test template_complex validator.""" schema = vol.Schema(cv.template_complex) @@ -1414,6 +1415,7 @@ def test_key_value_schemas() -> None: schema({"mode": mode, "data": data}) +@pytest.mark.usefixtures("hass") def test_key_value_schemas_with_default() -> None: """Test key value schemas.""" schema = vol.Schema( @@ -1492,6 +1494,7 @@ def test_key_value_schemas_with_default() -> None: ), ], ) +@pytest.mark.usefixtures("hass") def test_script(caplog: pytest.LogCaptureFixture, config: dict, error: str) -> None: """Test script validation is user friendly.""" with pytest.raises(vol.Invalid, match=error): @@ -1570,6 +1573,7 @@ def test_language() -> None: assert schema(value) +@pytest.mark.usefixtures("hass") def test_positive_time_period_template() -> None: """Test positive time period template validation.""" schema = vol.Schema(cv.positive_time_period_template) diff --git a/tests/helpers/test_frame.py b/tests/helpers/test_frame.py index fb98111fd42..e99db76dcbc 100644 --- a/tests/helpers/test_frame.py +++ b/tests/helpers/test_frame.py @@ -1,15 +1,17 @@ """Test the frame helper.""" +from contextlib import AbstractContextManager, nullcontext as does_not_raise from typing import Any from unittest.mock import ANY, Mock, patch import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.core import HomeAssistant from homeassistant.helpers import frame from homeassistant.loader import async_get_integration -from tests.common import extract_stack_to_frame +from tests.common import MockModule, extract_stack_to_frame, mock_integration async def test_extract_frame_integration( @@ -21,7 +23,7 @@ async def test_extract_frame_integration( custom_integration=False, frame=mock_integration_frame, integration="hue", - module=None, + module="homeassistant.components.hue.light", relative_filename="homeassistant/components/hue/light.py", ) @@ -34,8 +36,8 @@ async def test_get_integration_logger( assert logger.name == "homeassistant.components.hue" -@pytest.mark.usefixtures("enable_custom_integrations") -async def test_extract_frame_resolve_module(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("enable_custom_integrations", "hass") +async def test_extract_frame_resolve_module() -> None: """Test extracting the current frame from integration context.""" # pylint: disable-next=import-outside-toplevel from custom_components.test_integration_frame import call_get_integration_frame @@ -51,8 +53,8 @@ async def test_extract_frame_resolve_module(hass: HomeAssistant) -> None: ) -@pytest.mark.usefixtures("enable_custom_integrations") -async def test_get_integration_logger_resolve_module(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("enable_custom_integrations", "hass") +async def test_get_integration_logger_resolve_module() -> None: """Test getting the logger from integration context.""" # pylint: disable-next=import-outside-toplevel from custom_components.test_integration_frame import call_get_integration_logger @@ -159,99 +161,224 @@ async def test_get_integration_logger_no_integration( @pytest.mark.parametrize( - ("integration_frame_path", "keywords", "expected_error", "expected_log"), + ("integration_frame_path", "keywords", "expected_result", "expected_log"), [ pytest.param( "homeassistant/test_core", {}, - True, + 
pytest.raises(RuntimeError, match="test_report_string"), 0, id="core default", ), pytest.param( "homeassistant/components/test_core_integration", {}, - False, + does_not_raise(), 1, id="core integration default", ), pytest.param( "custom_components/test_custom_integration", {}, - False, + does_not_raise(), 1, id="custom integration default", ), pytest.param( "custom_components/test_custom_integration", {"custom_integration_behavior": frame.ReportBehavior.IGNORE}, - False, + does_not_raise(), 0, id="custom integration ignore", ), pytest.param( "custom_components/test_custom_integration", {"custom_integration_behavior": frame.ReportBehavior.ERROR}, - True, + pytest.raises(RuntimeError, match="test_report_string"), 1, id="custom integration error", ), pytest.param( "homeassistant/components/test_integration_frame", {"core_integration_behavior": frame.ReportBehavior.IGNORE}, - False, + does_not_raise(), 0, id="core_integration_behavior ignore", ), pytest.param( "homeassistant/components/test_integration_frame", {"core_integration_behavior": frame.ReportBehavior.ERROR}, - True, + pytest.raises(RuntimeError, match="test_report_string"), 1, id="core_integration_behavior error", ), pytest.param( "homeassistant/test_integration_frame", {"core_behavior": frame.ReportBehavior.IGNORE}, - False, + does_not_raise(), 0, id="core_behavior ignore", ), pytest.param( "homeassistant/test_integration_frame", {"core_behavior": frame.ReportBehavior.LOG}, - False, + does_not_raise(), 1, id="core_behavior log", ), ], ) -@pytest.mark.usefixtures("mock_integration_frame") +@pytest.mark.usefixtures("hass", "mock_integration_frame") async def test_report_usage( caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, keywords: dict[str, Any], - expected_error: bool, + expected_result: AbstractContextManager, expected_log: int, ) -> None: - """Test report.""" + """Test report_usage. + + Note: This test doesn't set up mock integrations, so it will not + find the correct issue tracker URL, and we don't check for that. + """ what = "test_report_string" - errored = False - try: - with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): - frame.report_usage(what, **keywords) - except RuntimeError: - errored = True - - assert errored == expected_error + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()), expected_result: + frame.report_usage(what, **keywords) assert caplog.text.count(what) == expected_log + reports = [ + rec.message for rec in caplog.records if rec.message.startswith("Detected") + ] + assert reports == snapshot -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_report_usage_no_hass() -> None: + """Test report_usage when frame helper is not set up.""" + + with pytest.raises(RuntimeError, match="Frame helper not set up"): + frame.report_usage("blablabla") + + +@pytest.mark.parametrize( + "integration_frame_path", + [ + pytest.param( + "homeassistant/test_core", + id="core", + ), + pytest.param( + "homeassistant/components/test_core_integration", + id="core integration", + ), + pytest.param( + "custom_components/test_custom_integration", + id="custom integration", + ), + pytest.param( + "custom_components/unknown_custom_integration", + id="unknown custom integration", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_report_usage_find_issue_tracker( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test report_usage finds the correct issue tracker. 
+ + Note: The issue tracker is found by loader.async_suggest_report_issue, this + test is a sanity check to ensure async_suggest_report_issue is given the + right parameters. + """ + + what = "test_report_string" + mock_integration(hass, MockModule("test_core_integration")) + mock_integration( + hass, + MockModule( + "test_custom_integration", + partial_manifest={"issue_tracker": "https://blablabla.com"}, + ), + built_in=False, + ) + + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report_usage(what, core_behavior=frame.ReportBehavior.LOG) + + assert caplog.text.count(what) == 1 + reports = [ + rec.message for rec in caplog.records if rec.message.startswith("Detected") + ] + assert reports == snapshot + + +@pytest.mark.parametrize( + "integration_frame_path", + [ + pytest.param( + "homeassistant/test_core", + id="core", + ), + pytest.param( + "homeassistant/components/test_core_integration", + id="core integration", + ), + pytest.param( + "custom_components/test_custom_integration", + id="custom integration", + ), + pytest.param( + "custom_components/unknown_custom_integration", + id="unknown custom integration", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_report_usage_find_issue_tracker_other_thread( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, +) -> None: + """Test report_usage finds the correct issue tracker. + + In this test, we run the report_usage in a separate thread. + + Note: The issue tracker is found by loader.async_suggest_report_issue, this + test is a sanity check to ensure async_suggest_report_issue is given the + right parameters. + """ + + what = "test_report_string" + mock_integration(hass, MockModule("test_core_integration")) + mock_integration( + hass, + MockModule( + "test_custom_integration", + partial_manifest={"issue_tracker": "https://blablabla.com"}, + ), + built_in=False, + ) + + def sync_job() -> None: + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report_usage(what, core_behavior=frame.ReportBehavior.LOG) + + await hass.async_add_executor_job(sync_job) + + assert caplog.text.count(what) == 1 + reports = [ + rec.message for rec in caplog.records if rec.message.startswith("Detected") + ] + assert reports == snapshot + + +@pytest.mark.usefixtures("hass", "mock_integration_frame") async def test_prevent_flooding( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock + caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock ) -> None: """Test to ensure a report is only written once to the log.""" @@ -267,22 +394,22 @@ async def test_prevent_flooding( f"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+{integration}%22" ) - frame.report(what, error_if_core=False) + frame.report_usage(what, core_behavior=frame.ReportBehavior.LOG) assert expected_message in caplog.text assert key in frame._REPORTED_INTEGRATIONS assert len(frame._REPORTED_INTEGRATIONS) == 1 caplog.clear() - frame.report(what, error_if_core=False) + frame.report_usage(what, core_behavior=frame.ReportBehavior.LOG) assert expected_message not in caplog.text assert key in frame._REPORTED_INTEGRATIONS assert len(frame._REPORTED_INTEGRATIONS) == 1 -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +@pytest.mark.usefixtures("hass", "mock_integration_frame") async def test_breaks_in_ha_version( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock + caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock ) -> 
None: """Test to ensure a report is only written once to the log.""" @@ -302,6 +429,7 @@ async def test_breaks_in_ha_version( assert expected_message in caplog.text +@pytest.mark.usefixtures("hass") async def test_report_missing_integration_frame( caplog: pytest.LogCaptureFixture, ) -> None: @@ -312,19 +440,20 @@ async def test_report_missing_integration_frame( "homeassistant.helpers.frame.get_integration_frame", side_effect=frame.MissingIntegrationFrame, ): - frame.report(what, error_if_core=False) + frame.report_usage(what, core_behavior=frame.ReportBehavior.LOG) assert what in caplog.text assert caplog.text.count(what) == 1 caplog.clear() - frame.report(what, error_if_core=False, log_custom_component_only=True) + frame.report_usage(what, core_behavior=frame.ReportBehavior.IGNORE) assert caplog.text == "" @pytest.mark.parametrize("run_count", [1, 2]) # Run this twice to make sure the flood check does not # kick in when error_if_integration=True +@pytest.mark.usefixtures("hass") async def test_report_error_if_integration( caplog: pytest.LogCaptureFixture, run_count: int ) -> None: @@ -361,99 +490,24 @@ async def test_report_error_if_integration( ), ), ): - frame.report("did a bad thing", error_if_integration=True) + frame.report_usage( + "did a bad thing", core_integration_behavior=frame.ReportBehavior.ERROR + ) @pytest.mark.parametrize( - ("integration_frame_path", "keywords", "expected_error", "expected_log"), - [ - pytest.param( - "homeassistant/test_core", - {}, - True, - 0, - id="core default", - ), - pytest.param( - "homeassistant/components/test_core_integration", - {}, - False, - 1, - id="core integration default", - ), - pytest.param( - "custom_components/test_custom_integration", - {}, - False, - 1, - id="custom integration default", - ), - pytest.param( - "custom_components/test_integration_frame", - {"log_custom_component_only": True}, - False, - 1, - id="log_custom_component_only with custom integration", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"log_custom_component_only": True}, - False, - 0, - id="log_custom_component_only with core integration", - ), - pytest.param( - "homeassistant/test_integration_frame", - {"error_if_core": False}, - False, - 1, - id="disable error_if_core", - ), - pytest.param( - "custom_components/test_integration_frame", - {"error_if_integration": True}, - True, - 1, - id="error_if_integration with custom integration", - ), - pytest.param( - "homeassistant/components/test_integration_frame", - {"error_if_integration": True}, - True, - 1, - id="error_if_integration with core integration", - ), - ], -) -@pytest.mark.usefixtures("mock_integration_frame") -async def test_report( - caplog: pytest.LogCaptureFixture, - keywords: dict[str, Any], - expected_error: bool, - expected_log: int, -) -> None: - """Test report.""" - - what = "test_report_string" - - errored = False - try: - with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): - frame.report(what, **keywords) - except RuntimeError: - errored = True - - assert errored == expected_error - - assert caplog.text.count(what) == expected_log - - -@pytest.mark.parametrize( - ("behavior", "integration_domain", "source", "logs_again"), + ( + "behavior", + "integration_domain", + "integration_frame_path", + "source", + "logs_again", + ), [ pytest.param( "core_behavior", None, + "homeassistant", "code that", True, id="core", @@ -461,6 +515,7 @@ async def test_report( pytest.param( "core_behavior", "unknown_integration", + "homeassistant", "code that", True, id="unknown 
integration", @@ -468,6 +523,7 @@ async def test_report( pytest.param( "core_integration_behavior", "sensor", + "homeassistant", "that integration 'sensor'", False, id="core integration", @@ -475,13 +531,32 @@ async def test_report( pytest.param( "custom_integration_behavior", "test_package", + "homeassistant", "that custom integration 'test_package'", False, id="custom integration", ), + # Assert integration_domain has priority over integration found in stack frame + pytest.param( + "core_integration_behavior", + "sensor", + "homeassistant/components/hue", + "that integration 'sensor'", + False, + id="core integration stack mismatch", + ), + # Assert integration_domain has priority over integration found in stack frame + pytest.param( + "custom_integration_behavior", + "test_package", + "custom_components/hue", + "that custom integration 'test_package'", + False, + id="custom integration stack mismatch", + ), ], ) -@pytest.mark.usefixtures("enable_custom_integrations") +@pytest.mark.usefixtures("enable_custom_integrations", "mock_integration_frame") async def test_report_integration_domain( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, @@ -490,7 +565,7 @@ async def test_report_integration_domain( source: str, logs_again: bool, ) -> None: - """Test report.""" + """Test report_usage when integration_domain is specified.""" await async_get_integration(hass, "sensor") await async_get_integration(hass, "test_package") diff --git a/tests/helpers/test_httpx_client.py b/tests/helpers/test_httpx_client.py index 4b9f2fa2bf6..c3b9c1f9de8 100644 --- a/tests/helpers/test_httpx_client.py +++ b/tests/helpers/test_httpx_client.py @@ -100,7 +100,6 @@ async def test_get_async_client_context_manager(hass: HomeAssistant) -> None: assert mock_aclose.call_count == 0 -@patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()) async def test_warning_close_session_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: @@ -144,7 +143,6 @@ async def test_warning_close_session_integration( ) in caplog.text -@patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()) async def test_warning_close_session_custom( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index 630ed3f4fa1..19ada407550 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -181,19 +181,19 @@ async def test_assist_api( assert len(llm.async_get_apis(hass)) == 1 api = await llm.async_get_api(hass, "assist", llm_context) - assert len(api.tools) == 0 + assert [tool.name for tool in api.tools] == ["get_home_state"] # Match all intent_handler.platforms = None api = await llm.async_get_api(hass, "assist", llm_context) - assert len(api.tools) == 1 + assert [tool.name for tool in api.tools] == ["test_intent", "get_home_state"] # Match specific domain intent_handler.platforms = {"light"} api = await llm.async_get_api(hass, "assist", llm_context) - assert len(api.tools) == 1 + assert len(api.tools) == 2 tool = api.tools[0] assert tool.name == "test_intent" assert tool.description == "Execute Home Assistant test_intent intent" @@ -622,6 +622,40 @@ async def test_assist_api_prompt( domain: light state: unavailable areas: Test Area 2 +""" + stateless_exposed_entities_prompt = """An overview of the areas and the devices in this smart home: +- names: Kitchen + domain: light +- names: Living Room + domain: light + areas: Test Area, Alternative name +- names: Test Device, my test light + domain: light + areas: Test 
Area, Alternative name +- names: Test Service + domain: light + areas: Test Area, Alternative name +- names: Test Service + domain: light + areas: Test Area, Alternative name +- names: Test Service + domain: light + areas: Test Area, Alternative name +- names: Test Device 2 + domain: light + areas: Test Area 2 +- names: Test Device 3 + domain: light + areas: Test Area 2 +- names: Test Device 4 + domain: light + areas: Test Area 2 +- names: Unnamed Device + domain: light + areas: Test Area 2 +- names: '1' + domain: light + areas: Test Area 2 """ first_part_prompt = ( "When controlling Home Assistant always call the intent tools. " @@ -640,9 +674,18 @@ async def test_assist_api_prompt( f"""{first_part_prompt} {area_prompt} {no_timer_prompt} -{exposed_entities_prompt}""" +{stateless_exposed_entities_prompt}""" ) + # Verify that the get_home_state tool returns the same results as the exposed_entities_prompt + result = await api.async_call_tool( + llm.ToolInput(tool_name="get_home_state", tool_args={}) + ) + assert result == { + "success": True, + "result": exposed_entities_prompt, + } + # Fake that request is made from a specific device ID with an area llm_context.device_id = device.id area_prompt = ( @@ -654,7 +697,7 @@ async def test_assist_api_prompt( f"""{first_part_prompt} {area_prompt} {no_timer_prompt} -{exposed_entities_prompt}""" +{stateless_exposed_entities_prompt}""" ) # Add floor @@ -669,7 +712,7 @@ async def test_assist_api_prompt( f"""{first_part_prompt} {area_prompt} {no_timer_prompt} -{exposed_entities_prompt}""" +{stateless_exposed_entities_prompt}""" ) # Register device for timers @@ -680,7 +723,7 @@ async def test_assist_api_prompt( assert api.api_prompt == ( f"""{first_part_prompt} {area_prompt} -{exposed_entities_prompt}""" +{stateless_exposed_entities_prompt}""" ) @@ -1267,3 +1310,19 @@ async def test_calendar_get_events_tool(hass: HomeAssistant) -> None: "start_date_time": now, "end_date_time": dt_util.start_of_local_day() + timedelta(days=7), } + + +async def test_no_tools_exposed(hass: HomeAssistant) -> None: + """Test that tools are not exposed when no entities are exposed.""" + assert await async_setup_component(hass, "homeassistant", {}) + context = Context() + llm_context = llm.LLMContext( + platform="test_platform", + context=context, + user_prompt="test_text", + language="*", + assistant="conversation", + device_id=None, + ) + api = await llm.async_get_api(hass, "assist", llm_context) + assert api.tools == [] diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index df589a41daa..4c707590528 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -494,7 +494,7 @@ async def test_calling_service_response_data_in_scopes(hass: HomeAssistant) -> N assert result.variables["my_response"] == expected_var expected_trace = { - "0": [{"variables": {"my_response": expected_var}}], + "0": [{"variables": {"my_response": expected_var, "state": "off"}}], "0/parallel/0/sequence/0": [{"variables": {"state": "off"}}], "0/parallel/0/sequence/1": [ { @@ -1797,7 +1797,7 @@ async def test_wait_in_sequence(hass: HomeAssistant) -> None: assert result.variables["wait"] == expected_var expected_trace = { - "0": [{"variables": {"wait": expected_var}}], + "0": [{"variables": {"wait": expected_var, "state": "off"}}], "0/sequence/0": [{"variables": {"state": "off"}}], "0/sequence/1": [ { @@ -1840,7 +1840,7 @@ async def test_wait_in_parallel(hass: HomeAssistant) -> None: assert "wait" not in result.variables expected_trace = { - "0": [{}], + "0": 
[{"variables": {"state": "off"}}], "0/parallel/0/sequence/0": [{"variables": {"state": "off"}}], "0/parallel/0/sequence/1": [ { @@ -5277,11 +5277,23 @@ async def test_set_variable( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test setting variables in scripts.""" - alias = "variables step" sequence = cv.SCRIPT_SCHEMA( [ - {"alias": alias, "variables": {"variable": "value"}}, - {"action": "test.script", "data": {"value": "{{ variable }}"}}, + {"alias": "variables", "variables": {"x": 1, "y": 1}}, + { + "alias": "scope", + "sequence": [ + {"variables": {"y": 3, "z": 3}}, + { + "action": "test.script", + "data": {"value": "x={{ x }}, y={{ y }}, z={{ z }}"}, + }, + ], + }, + { + "action": "test.script", + "data": {"value": "x={{ x }}, y={{ y }}, z={{ z }}"}, + }, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5291,18 +5303,36 @@ async def test_set_variable( await script_obj.async_run(context=Context()) await hass.async_block_till_done() - assert mock_calls[0].data["value"] == "value" - assert f"Executing step {alias}" in caplog.text + assert len(mock_calls) == 2 + assert mock_calls[0].data["value"] == "x=1, y=3, z=3" + assert mock_calls[1].data["value"] == "x=1, y=3, z=3" + + assert "Executing step variables" in caplog.text expected_trace = { - "0": [{"variables": {"variable": "value"}}], - "1": [ + "0": [{"variables": {"x": 1, "y": 1}}], + "1": [{"variables": {"y": 3, "z": 3}}], + "1/sequence/0": [{"variables": {"y": 3, "z": 3}}], + "1/sequence/1": [ { "result": { "params": { "domain": "test", "service": "script", - "service_data": {"value": "value"}, + "service_data": {"value": "x=1, y=3, z=3"}, + "target": {}, + }, + "running_script": False, + }, + } + ], + "2": [ + { + "result": { + "params": { + "domain": "test", + "service": "script", + "service_data": {"value": "x=1, y=3, z=3"}, "target": {}, }, "running_script": False, @@ -5823,14 +5853,16 @@ async def test_stop_action_subscript( ) +@pytest.mark.parametrize(("var", "response"), [(1, "If: Then"), (2, "Testing 123")]) @pytest.mark.parametrize( - ("var", "response"), - [(1, "If: Then"), (2, "Testing 123")], + ("script_mode", "max_runs"), [("single", 1), ("parallel", 2), ("queued", 2)] ) async def test_stop_action_response_variables( hass: HomeAssistant, var: int, response: str, + script_mode, + max_runs, ) -> None: """Test setting stop response_variable in a subscript.""" sequence = cv.SCRIPT_SCHEMA( @@ -5849,7 +5881,14 @@ async def test_stop_action_response_variables( {"stop": "In the name of love", "response_variable": "output"}, ] ) - script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + script_obj = script.Script( + hass, + sequence, + "Test Name", + "test_domain", + script_mode=script_mode, + max_runs=max_runs, + ) run_vars = MappingProxyType({"var": var}) result = await script_obj.async_run(run_vars, context=Context()) @@ -5899,7 +5938,9 @@ async def test_stop_action_nested_response_variables( "variables": {"var": var, "output": {"value": "Testing 123"}}, } ], - "1": [{"result": {"choice": choice}}], + "1": [ + {"result": {"choice": choice}, "variables": {"output": {"value": response}}} + ], "1/if": [{"result": {"result": if_result}}], "1/if/condition/0": [{"result": {"result": var == 1, "entities": []}}], f"1/{choice}/0": [{"variables": {"output": {"value": response}}}], diff --git a/tests/helpers/test_selector.py b/tests/helpers/test_selector.py index d07bb7458e9..3ddbecaf48d 100644 --- a/tests/helpers/test_selector.py +++ b/tests/helpers/test_selector.py 
@@ -88,6 +88,7 @@ def _test_selector( ({"integration": "zha"}, ("abc123",), (None,)), ({"manufacturer": "mock-manuf"}, ("abc123",), (None,)), ({"model": "mock-model"}, ("abc123",), (None,)), + ({"model_id": "mock-model_id"}, ("abc123",), (None,)), ({"manufacturer": "mock-manuf", "model": "mock-model"}, ("abc123",), (None,)), ( {"integration": "zha", "manufacturer": "mock-manuf", "model": "mock-model"}, @@ -979,6 +980,7 @@ def test_datetime_selector_schema(schema, valid_selections, invalid_selections) ("schema", "valid_selections", "invalid_selections"), [({}, ("abc123", "{{ now() }}"), (None, "{{ incomplete }", "{% if True %}Hi!"))], ) +@pytest.mark.usefixtures("hass") def test_template_selector_schema(schema, valid_selections, invalid_selections) -> None: """Test template selector.""" _test_selector("template", schema, valid_selections, invalid_selections) diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index 142f7a23f81..70ab20e87fa 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -961,7 +961,7 @@ async def test_async_get_all_descriptions_dot_keys(hass: HomeAssistant) -> None: side_effect=service._load_services_files, ) as proxy_load_services_files, patch( - "homeassistant.util.yaml.loader.load_yaml", + "annotatedyaml.loader.load_yaml", side_effect=load_yaml, ) as mock_load_yaml, ): @@ -1033,7 +1033,7 @@ async def test_async_get_all_descriptions_filter(hass: HomeAssistant) -> None: side_effect=service._load_services_files, ) as proxy_load_services_files, patch( - "homeassistant.util.yaml.loader.load_yaml", + "annotatedyaml.loader.load_yaml", side_effect=load_yaml, ) as mock_load_yaml, ): diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index b3a30806cbd..89d1c307fd7 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -15,6 +15,7 @@ from unittest.mock import patch from freezegun import freeze_time import orjson import pytest +from pytest_unordered import unordered from syrupy import SnapshotAssertion import voluptuous as vol @@ -149,6 +150,7 @@ async def test_template_render_info_collision(hass: HomeAssistant) -> None: template_obj.async_render_to_info() +@pytest.mark.usefixtures("hass") def test_template_equality() -> None: """Test template comparison and hashing.""" template_one = template.Template("{{ template_one }}") @@ -5166,6 +5168,7 @@ def test_iif(hass: HomeAssistant) -> None: assert tpl.async_render() == "no" +@pytest.mark.usefixtures("hass") async def test_cache_garbage_collection() -> None: """Test caching a template.""" template_string = ( @@ -5881,6 +5884,75 @@ async def test_floor_areas( assert info.rate_limit is None +async def test_floor_entities( + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test floor_entities function.""" + + # Test non existing floor ID + info = render_to_info(hass, "{{ floor_entities('skyring') }}") + assert_result_info(info, []) + assert info.rate_limit is None + + info = render_to_info(hass, "{{ 'skyring' | floor_entities }}") + assert_result_info(info, []) + assert info.rate_limit is None + + # Test wrong value type + info = render_to_info(hass, "{{ floor_entities(42) }}") + assert_result_info(info, []) + assert info.rate_limit is None + + info = render_to_info(hass, "{{ 42 | floor_entities }}") + assert_result_info(info, []) + assert info.rate_limit is None + + floor = floor_registry.async_create("First floor") + area1 = 
area_registry.async_create("Living room") + area2 = area_registry.async_create("Dining room") + area_registry.async_update(area1.id, floor_id=floor.floor_id) + area_registry.async_update(area2.id, floor_id=floor.floor_id) + + config_entry = MockConfigEntry(domain="light") + config_entry.add_to_hass(hass) + entity_entry = entity_registry.async_get_or_create( + "light", + "hue", + "living_room", + config_entry=config_entry, + ) + entity_registry.async_update_entity(entity_entry.entity_id, area_id=area1.id) + entity_entry = entity_registry.async_get_or_create( + "light", + "hue", + "dining_room", + config_entry=config_entry, + ) + entity_registry.async_update_entity(entity_entry.entity_id, area_id=area2.id) + + # Get entities by floor ID + expected = ["light.hue_living_room", "light.hue_dining_room"] + info = render_to_info(hass, f"{{{{ floor_entities('{floor.floor_id}') }}}}") + assert_result_info(info, expected) + assert info.rate_limit is None + + info = render_to_info(hass, f"{{{{ '{floor.floor_id}' | floor_entities }}}}") + assert_result_info(info, expected) + assert info.rate_limit is None + + # Get entities by floor name + info = render_to_info(hass, f"{{{{ floor_entities('{floor.name}') }}}}") + assert_result_info(info, expected) + assert info.rate_limit is None + + info = render_to_info(hass, f"{{{{ '{floor.name}' | floor_entities }}}}") + assert_result_info(info, expected) + assert info.rate_limit is None + + async def test_labels( hass: HomeAssistant, label_registry: lr.LabelRegistry, @@ -6601,3 +6673,384 @@ async def test_merge_response_not_mutate_original_object( tpl = template.Template(_template, hass) assert tpl.async_render() + + +def test_shuffle(hass: HomeAssistant) -> None: + """Test the shuffle function and filter.""" + assert list( + template.Template("{{ [1, 2, 3] | shuffle }}", hass).async_render() + ) == unordered([1, 2, 3]) + + assert list( + template.Template("{{ shuffle([1, 2, 3]) }}", hass).async_render() + ) == unordered([1, 2, 3]) + + assert list( + template.Template("{{ shuffle(1, 2, 3) }}", hass).async_render() + ) == unordered([1, 2, 3]) + + assert list(template.Template("{{ shuffle([]) }}", hass).async_render()) == [] + + assert list(template.Template("{{ [] | shuffle }}", hass).async_render()) == [] + + # Testing using seed + assert list( + template.Template("{{ shuffle([1, 2, 3], 'seed') }}", hass).async_render() + ) == [2, 3, 1] + + assert list( + template.Template( + "{{ shuffle([1, 2, 3], seed='seed') }}", + hass, + ).async_render() + ) == [2, 3, 1] + + assert list( + template.Template( + "{{ [1, 2, 3] | shuffle('seed') }}", + hass, + ).async_render() + ) == [2, 3, 1] + + assert list( + template.Template( + "{{ [1, 2, 3] | shuffle(seed='seed') }}", + hass, + ).async_render() + ) == [2, 3, 1] + + with pytest.raises(TemplateError): + template.Template("{{ 1 | shuffle }}", hass).async_render() + + with pytest.raises(TemplateError): + template.Template("{{ shuffle() }}", hass).async_render() + + +def test_typeof(hass: HomeAssistant) -> None: + """Test the typeof debug filter/function.""" + assert template.Template("{{ True | typeof }}", hass).async_render() == "bool" + assert template.Template("{{ typeof(True) }}", hass).async_render() == "bool" + + assert template.Template("{{ [1, 2, 3] | typeof }}", hass).async_render() == "list" + assert template.Template("{{ typeof([1, 2, 3]) }}", hass).async_render() == "list" + + assert template.Template("{{ 1 | typeof }}", hass).async_render() == "int" + assert template.Template("{{ typeof(1) }}", hass).async_render() 
== "int" + + assert template.Template("{{ 1.1 | typeof }}", hass).async_render() == "float" + assert template.Template("{{ typeof(1.1) }}", hass).async_render() == "float" + + assert template.Template("{{ None | typeof }}", hass).async_render() == "NoneType" + assert template.Template("{{ typeof(None) }}", hass).async_render() == "NoneType" + + assert ( + template.Template("{{ 'Home Assistant' | typeof }}", hass).async_render() + == "str" + ) + assert ( + template.Template("{{ typeof('Home Assistant') }}", hass).async_render() + == "str" + ) + + +def test_flatten(hass: HomeAssistant) -> None: + """Test the flatten function and filter.""" + assert template.Template( + "{{ flatten([1, [2, [3]], 4, [5 , 6]]) }}", hass + ).async_render() == [1, 2, 3, 4, 5, 6] + + assert template.Template( + "{{ [1, [2, [3]], 4, [5 , 6]] | flatten }}", hass + ).async_render() == [1, 2, 3, 4, 5, 6] + + assert template.Template( + "{{ flatten([1, [2, [3]], 4, [5 , 6]], 1) }}", hass + ).async_render() == [1, 2, [3], 4, 5, 6] + + assert template.Template( + "{{ flatten([1, [2, [3]], 4, [5 , 6]], levels=1) }}", hass + ).async_render() == [1, 2, [3], 4, 5, 6] + + assert template.Template( + "{{ [1, [2, [3]], 4, [5 , 6]] | flatten(1) }}", hass + ).async_render() == [1, 2, [3], 4, 5, 6] + + assert template.Template( + "{{ [1, [2, [3]], 4, [5 , 6]] | flatten(levels=1) }}", hass + ).async_render() == [1, 2, [3], 4, 5, 6] + + assert template.Template("{{ flatten([]) }}", hass).async_render() == [] + + assert template.Template("{{ [] | flatten }}", hass).async_render() == [] + + with pytest.raises(TemplateError): + template.Template("{{ 'string' | flatten }}", hass).async_render() + + with pytest.raises(TemplateError): + template.Template("{{ flatten() }}", hass).async_render() + + +def test_intersect(hass: HomeAssistant) -> None: + """Test the intersect function and filter.""" + assert list( + template.Template( + "{{ intersect([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5]) + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | intersect([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5]) + + assert list( + template.Template( + "{{ intersect(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["b", "c"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | intersect(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["b", "c"]) + + assert ( + template.Template("{{ intersect([], [1, 2, 3]) }}", hass).async_render() == [] + ) + + assert ( + template.Template("{{ [] | intersect([1, 2, 3]) }}", hass).async_render() == [] + ) + + with pytest.raises(TemplateError, match="intersect expected a list, got str"): + template.Template("{{ 'string' | intersect([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="intersect expected a list, got str"): + template.Template("{{ [1, 2, 3] | intersect('string') }}", hass).async_render() + + +def test_difference(hass: HomeAssistant) -> None: + """Test the difference function and filter.""" + assert list( + template.Template( + "{{ difference([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == [10] + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | difference([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == [10] + + assert list( + template.Template( + "{{ difference(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == ["a"] + + assert 
list( + template.Template( + "{{ ['a', 'b', 'c'] | difference(['b', 'c', 'd']) }}", hass + ).async_render() + ) == ["a"] + + assert ( + template.Template("{{ difference([], [1, 2, 3]) }}", hass).async_render() == [] + ) + + assert ( + template.Template("{{ [] | difference([1, 2, 3]) }}", hass).async_render() == [] + ) + + with pytest.raises(TemplateError, match="difference expected a list, got str"): + template.Template("{{ 'string' | difference([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="difference expected a list, got str"): + template.Template("{{ [1, 2, 3] | difference('string') }}", hass).async_render() + + +def test_union(hass: HomeAssistant) -> None: + """Test the union function and filter.""" + assert list( + template.Template( + "{{ union([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5, 10, 11, 99]) + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | union([1, 2, 3, 4, 5, 11, 99]) }}", hass + ).async_render() + ) == unordered([1, 2, 3, 4, 5, 10, 11, 99]) + + assert list( + template.Template( + "{{ union(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "b", "c", "d"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | union(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "b", "c", "d"]) + + assert list( + template.Template("{{ union([], [1, 2, 3]) }}", hass).async_render() + ) == unordered([1, 2, 3]) + + assert list( + template.Template("{{ [] | union([1, 2, 3]) }}", hass).async_render() + ) == unordered([1, 2, 3]) + + with pytest.raises(TemplateError, match="union expected a list, got str"): + template.Template("{{ 'string' | union([1, 2, 3]) }}", hass).async_render() + + with pytest.raises(TemplateError, match="union expected a list, got str"): + template.Template("{{ [1, 2, 3] | union('string') }}", hass).async_render() + + +def test_symmetric_difference(hass: HomeAssistant) -> None: + """Test the symmetric_difference function and filter.""" + assert list( + template.Template( + "{{ symmetric_difference([1, 2, 5, 3, 4, 10], [1, 2, 3, 4, 5, 11, 99]) }}", + hass, + ).async_render() + ) == unordered([10, 11, 99]) + + assert list( + template.Template( + "{{ [1, 2, 5, 3, 4, 10] | symmetric_difference([1, 2, 3, 4, 5, 11, 99]) }}", + hass, + ).async_render() + ) == unordered([10, 11, 99]) + + assert list( + template.Template( + "{{ symmetric_difference(['a', 'b', 'c'], ['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "d"]) + + assert list( + template.Template( + "{{ ['a', 'b', 'c'] | symmetric_difference(['b', 'c', 'd']) }}", hass + ).async_render() + ) == unordered(["a", "d"]) + + assert list( + template.Template( + "{{ symmetric_difference([], [1, 2, 3]) }}", hass + ).async_render() + ) == unordered([1, 2, 3]) + + assert list( + template.Template( + "{{ [] | symmetric_difference([1, 2, 3]) }}", hass + ).async_render() + ) == unordered([1, 2, 3]) + + with pytest.raises( + TemplateError, match="symmetric_difference expected a list, got str" + ): + template.Template( + "{{ 'string' | symmetric_difference([1, 2, 3]) }}", hass + ).async_render() + + with pytest.raises( + TemplateError, match="symmetric_difference expected a list, got str" + ): + template.Template( + "{{ [1, 2, 3] | symmetric_difference('string') }}", hass + ).async_render() + + +def test_md5(hass: HomeAssistant) -> None: + """Test the md5 function and filter.""" + assert ( + template.Template("{{ md5('Home Assistant') }}", 
hass).async_render() + == "3d15e5c102c3413d0337393c3287e006" + ) + + assert ( + template.Template("{{ 'Home Assistant' | md5 }}", hass).async_render() + == "3d15e5c102c3413d0337393c3287e006" + ) + + +def test_sha1(hass: HomeAssistant) -> None: + """Test the sha1 function and filter.""" + assert ( + template.Template("{{ sha1('Home Assistant') }}", hass).async_render() + == "c8fd3bb19b94312664faa619af7729bdbf6e9f8a" + ) + + assert ( + template.Template("{{ 'Home Assistant' | sha1 }}", hass).async_render() + == "c8fd3bb19b94312664faa619af7729bdbf6e9f8a" + ) + + +def test_sha256(hass: HomeAssistant) -> None: + """Test the sha256 function and filter.""" + assert ( + template.Template("{{ sha256('Home Assistant') }}", hass).async_render() + == "2a366abb0cd47f51f3725bf0fb7ebcb4fefa6e20f4971e25fe2bb8da8145ce2b" + ) + + assert ( + template.Template("{{ 'Home Assistant' | sha256 }}", hass).async_render() + == "2a366abb0cd47f51f3725bf0fb7ebcb4fefa6e20f4971e25fe2bb8da8145ce2b" + ) + + +def test_sha512(hass: HomeAssistant) -> None: + """Test the sha512 function and filter.""" + assert ( + template.Template("{{ sha512('Home Assistant') }}", hass).async_render() + == "9e3c2cdd1fbab0037378d37e1baf8a3a4bf92c54b56ad1d459deee30ccbb2acbebd7a3614552ea08992ad27dedeb7b4c5473525ba90cb73dbe8b9ec5f69295bb" + ) + + assert ( + template.Template("{{ 'Home Assistant' | sha512 }}", hass).async_render() + == "9e3c2cdd1fbab0037378d37e1baf8a3a4bf92c54b56ad1d459deee30ccbb2acbebd7a3614552ea08992ad27dedeb7b4c5473525ba90cb73dbe8b9ec5f69295bb" + ) + + +def test_combine(hass: HomeAssistant) -> None: + """Test combine filter and function.""" + assert template.Template( + "{{ {'a': 1, 'b': 2} | combine({'b': 3, 'c': 4}) }}", hass + ).async_render() == {"a": 1, "b": 3, "c": 4} + + assert template.Template( + "{{ combine({'a': 1, 'b': 2}, {'b': 3, 'c': 4}) }}", hass + ).async_render() == {"a": 1, "b": 3, "c": 4} + + assert template.Template( + "{{ combine({'a': 1, 'b': {'x': 1}}, {'b': {'y': 2}, 'c': 4}, recursive=True) }}", + hass, + ).async_render() == {"a": 1, "b": {"x": 1, "y": 2}, "c": 4} + + # Test that recursive=False does not merge nested dictionaries + assert template.Template( + "{{ combine({'a': 1, 'b': {'x': 1}}, {'b': {'y': 2}, 'c': 4}, recursive=False) }}", + hass, + ).async_render() == {"a": 1, "b": {"y": 2}, "c": 4} + + # Test that None values are handled correctly in recursive merge + assert template.Template( + "{{ combine({'a': 1, 'b': none}, {'b': {'y': 2}, 'c': 4}, recursive=True) }}", + hass, + ).async_render() == {"a": 1, "b": {"y": 2}, "c": 4} + + with pytest.raises( + TemplateError, match="combine expected at least 1 argument, got 0" + ): + template.Template("{{ combine() }}", hass).async_render() + + with pytest.raises(TemplateError, match="combine expected a dict, got str"): + template.Template("{{ {'a': 1} | combine('not a dict') }}", hass).async_render() diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index 3ad5754dada..5fd9f9e39fd 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -19,7 +19,7 @@ from homeassistant.exceptions import ( ConfigEntryError, ConfigEntryNotReady, ) -from homeassistant.helpers import frame, update_coordinator +from homeassistant.helpers import update_coordinator from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -638,7 +638,6 @@ async def test_async_config_entry_first_refresh_invalid_state( 
@pytest.mark.usefixtures("mock_integration_frame") -@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_async_config_entry_first_refresh_invalid_state_in_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/snapshots/test_config.ambr b/tests/snapshots/test_config.ambr index 6fcbce7d8d6..7531bf5a663 100644 --- a/tests/snapshots/test_config.ambr +++ b/tests/snapshots/test_config.ambr @@ -434,7 +434,7 @@ # name: test_yaml_error[basic] ''' mapping values are not allowed here - in "configuration.yaml", line 4, column 14 + in "/fixtures/core/config/yaml_errors/basic/configuration.yaml", line 4, column 14 ''' # --- # name: test_yaml_error[basic].1 @@ -448,7 +448,7 @@ # name: test_yaml_error[basic_include] ''' mapping values are not allowed here - in "integrations/iot_domain.yaml", line 3, column 12 + in "/fixtures/core/config/yaml_errors/basic_include/integrations/iot_domain.yaml", line 3, column 12 ''' # --- # name: test_yaml_error[basic_include].1 @@ -462,7 +462,7 @@ # name: test_yaml_error[include_dir_list] ''' mapping values are not allowed here - in "iot_domain/iot_domain_1.yaml", line 3, column 10 + in "/fixtures/core/config/yaml_errors/include_dir_list/iot_domain/iot_domain_1.yaml", line 3, column 10 ''' # --- # name: test_yaml_error[include_dir_list].1 @@ -476,7 +476,7 @@ # name: test_yaml_error[include_dir_merge_list] ''' mapping values are not allowed here - in "iot_domain/iot_domain_1.yaml", line 3, column 12 + in "/fixtures/core/config/yaml_errors/include_dir_merge_list/iot_domain/iot_domain_1.yaml", line 3, column 12 ''' # --- # name: test_yaml_error[include_dir_merge_list].1 @@ -490,7 +490,7 @@ # name: test_yaml_error[packages_include_dir_named] ''' mapping values are not allowed here - in "integrations/adr_0007_1.yaml", line 4, column 9 + in "/fixtures/core/config/yaml_errors/packages_include_dir_named/integrations/adr_0007_1.yaml", line 4, column 9 ''' # --- # name: test_yaml_error[packages_include_dir_named].1 diff --git a/tests/syrupy.py b/tests/syrupy.py index 3c8e398f0f8..e028d5839cb 100644 --- a/tests/syrupy.py +++ b/tests/syrupy.py @@ -109,7 +109,11 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): serializable_data = cls._serializable_issue_registry_entry(data) elif isinstance(data, dict) and "flow_id" in data and "handler" in data: serializable_data = cls._serializable_flow_result(data) - elif isinstance(data, dict) and set(data) == {"conversation_id", "response"}: + elif isinstance(data, dict) and set(data) == { + "conversation_id", + "response", + "continue_conversation", + }: serializable_data = cls._serializable_conversation_result(data) elif isinstance(data, vol.Schema): serializable_data = voluptuous_serialize.convert(data) diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py index 4c6bc930667..7efe25c8428 100644 --- a/tests/test_backup_restore.py +++ b/tests/test_backup_restore.py @@ -15,7 +15,7 @@ from .common import get_test_config_dir @pytest.fixture(autouse=True) -def remove_restore_result_file() -> Generator[None, Any, Any]: +def remove_restore_result_file() -> Generator[None]: """Remove the restore result file.""" yield Path(get_test_config_dir(".HA_RESTORE_RESULT")).unlink(missing_ok=True) diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index f42fbb9f4ef..337e5500718 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -459,3 +459,14 @@ async def test_open_calls_ignored_in_tests(caplog: pytest.LogCaptureFixture) -> 
pass assert "Detected blocking call to open with args" not in caplog.text + + +async def test_protect_loop_set_default_verify_paths( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.set_default_verify_paths calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + context.set_default_verify_paths() + assert "Detected blocking call to set_default_verify_paths" in caplog.text diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 0d7c8614c6f..1fb87ac5ef6 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -572,7 +572,7 @@ async def test_setup_after_deps_not_present(hass: HomeAssistant) -> None: MockModule( domain="second_dep", async_setup=gen_domain_setup("second_dep"), - partial_manifest={"after_dependencies": ["first_dep"]}, + partial_manifest={"after_dependencies": ["first_dep", "root"]}, ), ) @@ -1169,6 +1169,7 @@ async def test_bootstrap_is_cancellation_safe( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test cancellation during async_setup_component does not cancel bootstrap.""" + mock_integration(hass, MockModule(domain="cancel_integration")) with patch.object( bootstrap, "async_setup_component", side_effect=asyncio.CancelledError ): @@ -1185,6 +1186,18 @@ async def test_bootstrap_empty_integrations(hass: HomeAssistant) -> None: await hass.async_block_till_done() +@pytest.mark.parametrize("load_registries", [False]) +async def test_bootstrap_log_already_setup_stage( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test logging when all integrations in a stage were already setup.""" + with patch.object(bootstrap, "STAGE_1_INTEGRATIONS", {"frontend"}): + await bootstrap._async_set_up_integrations(hass, {}) + await hass.async_block_till_done() + + assert "Already set up stage 1: {'frontend'}" in caplog.text + + @pytest.fixture(name="mock_mqtt_config_flow") def mock_mqtt_config_flow_fixture() -> Generator[None]: """Mock MQTT config flow.""" @@ -1528,3 +1541,73 @@ def test_should_rollover_is_always_false() -> None: ).shouldRollover(Mock()) is False ) + + +async def test_no_base_platforms_loaded_before_recorder(hass: HomeAssistant) -> None: + """Verify stage 0 not load base platforms before recorder. + + If a stage 0 integration implements base platforms or has a base + platform in its dependencies and it loads before the recorder, + because of platform-based YAML schema, it may inadvertently + load integrations that expect the recorder to already be loaded. + We need to ensure that doesn't happen. 
+ """ + IGNORE_BASE_PLATFORM_FILES = { + # config/scene.py is not a platform + "config": {"scene.py"}, + # websocket_api/sensor.py is using the platform YAML schema + # we must not migrate it to an integration key until + # we remove the platform YAML schema support for sensors + "websocket_api": {"sensor.py"}, + } + + integrations_before_recorder: set[str] = set() + for _, integrations, _ in bootstrap.STAGE_0_INTEGRATIONS: + integrations_before_recorder |= integrations + if "recorder" in integrations: + break + else: + pytest.fail("recorder not in stage 0") + + integrations_or_excs = await loader.async_get_integrations( + hass, integrations_before_recorder + ) + integrations: dict[str, Integration] = {} + for domain, integration in integrations_or_excs.items(): + assert not isinstance(integrations_or_excs, Exception) + integrations[domain] = integration + + integrations_all_dependencies = await loader.resolve_integrations_dependencies( + hass, integrations.values() + ) + all_integrations = integrations.copy() + all_integrations.update( + (domain, loader.async_get_loaded_integration(hass, domain)) + for domains in integrations_all_dependencies.values() + for domain in domains + ) + + problems: dict[str, set[str]] = {} + for domain in integrations: + domain_with_base_platforms_deps = ( + integrations_all_dependencies[domain] & BASE_PLATFORMS + ) + if domain_with_base_platforms_deps: + problems[domain] = domain_with_base_platforms_deps + assert not problems, ( + f"Integrations that are setup before recorder have base platforms in their dependencies: {problems}" + ) + + base_platform_py_files = {f"{base_platform}.py" for base_platform in BASE_PLATFORMS} + + for domain, integration in all_integrations.items(): + integration_base_platforms_files = ( + integration._top_level_files & base_platform_py_files + ) + if ignore := IGNORE_BASE_PLATFORM_FILES.get(domain): + integration_base_platforms_files -= ignore + if integration_base_platforms_files: + problems[domain] = integration_base_platforms_files + assert not problems, ( + f"Integrations that are setup before recorder implement base platforms: {problems}" + ) diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 7066417bfee..e3b80ecc03f 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -1421,83 +1421,6 @@ async def test_discovery_notification( assert "config_entry_discovery" not in notifications -async def test_reauth_notification(hass: HomeAssistant) -> None: - """Test that we create/dismiss a notification when source is reauth.""" - mock_integration(hass, MockModule("test")) - mock_platform(hass, "test.config_flow", None) - - entry = MockConfigEntry(title="test_title", domain="test") - entry.add_to_hass(hass) - - class TestFlow(config_entries.ConfigFlow): - """Test flow.""" - - VERSION = 5 - - async def async_step_user(self, user_input): - """Test user step.""" - return self.async_show_form(step_id="user_confirm") - - async def async_step_user_confirm(self, user_input): - """Test user confirm step.""" - return self.async_show_form(step_id="user_confirm") - - async def async_step_reauth(self, user_input): - """Test reauth step.""" - return self.async_show_form(step_id="reauth_confirm") - - async def async_step_reauth_confirm(self, user_input): - """Test reauth confirm step.""" - return self.async_abort(reason="test") - - with mock_config_flow("test", TestFlow): - # Start user flow to assert that reconfigure notification doesn't fire - await hass.config_entries.flow.async_init( - "test", 
context={"source": config_entries.SOURCE_USER} - ) - - await hass.async_block_till_done() - notifications = async_get_persistent_notifications(hass) - assert "config_entry_reconfigure" not in notifications - - # Start first reauth flow to assert that reconfigure notification fires - flow1 = await hass.config_entries.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) - - await hass.async_block_till_done() - notifications = async_get_persistent_notifications(hass) - assert "config_entry_reconfigure" in notifications - - # Start a second reauth flow so we can finish the first and assert that - # the reconfigure notification persists until the second one is complete - flow2 = await hass.config_entries.flow.async_init( - "test", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) - - flow1 = await hass.config_entries.flow.async_configure(flow1["flow_id"], {}) - assert flow1["type"] == data_entry_flow.FlowResultType.ABORT - - await hass.async_block_till_done() - notifications = async_get_persistent_notifications(hass) - assert "config_entry_reconfigure" in notifications - - flow2 = await hass.config_entries.flow.async_configure(flow2["flow_id"], {}) - assert flow2["type"] == data_entry_flow.FlowResultType.ABORT - - await hass.async_block_till_done() - notifications = async_get_persistent_notifications(hass) - assert "config_entry_reconfigure" not in notifications - - async def test_reauth_issue( hass: HomeAssistant, manager: config_entries.ConfigEntries, @@ -3566,37 +3489,97 @@ async def test_unique_id_not_update_existing_entry( assert len(async_reload.mock_calls) == 0 +ABORT_IN_PROGRESS = { + "type": data_entry_flow.FlowResultType.ABORT, + "reason": "already_in_progress", +} + + +@pytest.mark.parametrize( + ("existing_flow_source", "expected_result"), + # Test all sources except SOURCE_IGNORE + [ + (config_entries.SOURCE_BLUETOOTH, ABORT_IN_PROGRESS), + (config_entries.SOURCE_DHCP, ABORT_IN_PROGRESS), + (config_entries.SOURCE_DISCOVERY, ABORT_IN_PROGRESS), + (config_entries.SOURCE_HARDWARE, ABORT_IN_PROGRESS), + (config_entries.SOURCE_HASSIO, ABORT_IN_PROGRESS), + (config_entries.SOURCE_HOMEKIT, ABORT_IN_PROGRESS), + (config_entries.SOURCE_IMPORT, ABORT_IN_PROGRESS), + (config_entries.SOURCE_INTEGRATION_DISCOVERY, ABORT_IN_PROGRESS), + (config_entries.SOURCE_MQTT, ABORT_IN_PROGRESS), + (config_entries.SOURCE_REAUTH, {"type": data_entry_flow.FlowResultType.FORM}), + (config_entries.SOURCE_RECONFIGURE, ABORT_IN_PROGRESS), + (config_entries.SOURCE_SSDP, ABORT_IN_PROGRESS), + (config_entries.SOURCE_SYSTEM, ABORT_IN_PROGRESS), + (config_entries.SOURCE_USB, ABORT_IN_PROGRESS), + (config_entries.SOURCE_USER, ABORT_IN_PROGRESS), + (config_entries.SOURCE_ZEROCONF, ABORT_IN_PROGRESS), + ], +) async def test_unique_id_in_progress( - hass: HomeAssistant, manager: config_entries.ConfigEntries + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + existing_flow_source: str, + expected_result: dict, ) -> None: """Test that we abort if there is already a flow in progress with same unique id.""" mock_integration(hass, MockModule("comp")) mock_platform(hass, "comp.config_flow", None) + entry = MockConfigEntry(domain="comp") + entry.add_to_hass(hass) class TestFlow(config_entries.ConfigFlow): """Test flow.""" VERSION = 1 + async def _async_step_discovery_without_unique_id(self): + """Handle a flow initialized by discovery.""" + return await self._async_step() + + async def 
async_step_hardware(self, user_input=None):
+            """Test hardware step."""
+            return await self._async_step()
+
+        async def async_step_import(self, user_input=None):
+            """Test import step."""
+            return await self._async_step()
+
+        async def async_step_reauth(self, user_input=None):
+            """Test reauth step."""
+            return await self._async_step()
+
+        async def async_step_reconfigure(self, user_input=None):
+            """Test reconfigure step."""
+            return await self._async_step()
+
+        async def async_step_system(self, user_input=None):
+            """Test system step."""
+            return await self._async_step()
+
         async def async_step_user(self, user_input=None):
             """Test user step."""
+            return await self._async_step()
+
+        async def _async_step(self, user_input=None):
+            """Test step."""
             await self.async_set_unique_id("mock-unique-id")
             return self.async_show_form(step_id="discovery")
 
     with mock_config_flow("comp", TestFlow):
         # Create one to be in progress
         result = await manager.flow.async_init(
-            "comp", context={"source": config_entries.SOURCE_USER}
+            "comp", context={"source": existing_flow_source, "entry_id": entry.entry_id}
         )
         assert result["type"] == data_entry_flow.FlowResultType.FORM
 
-        # Will be canceled
         result2 = await manager.flow.async_init(
             "comp", context={"source": config_entries.SOURCE_USER}
         )
-        assert result2["type"] == data_entry_flow.FlowResultType.ABORT
-        assert result2["reason"] == "already_in_progress"
+        for k, v in expected_result.items():
+            assert result2[k] == v
 
 
 async def test_finish_flow_aborts_progress(
@@ -5763,7 +5746,6 @@ async def test_reauth_reconfigure_missing_entry(
 
 
 @pytest.mark.usefixtures("mock_integration_frame")
-@patch.object(frame, "_REPORTED_INTEGRATIONS", set())
 @pytest.mark.parametrize(
     "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE]
 )
@@ -5995,7 +5977,7 @@ async def test_async_wait_component_startup(hass: HomeAssistant) -> None:
     "integration_frame_path",
     ["homeassistant/components/my_integration", "homeassistant.core"],
 )
-@pytest.mark.usefixtures("mock_integration_frame")
+@pytest.mark.usefixtures("hass", "mock_integration_frame")
 async def test_options_flow_with_config_entry_core() -> None:
     """Test that OptionsFlowWithConfigEntry cannot be used in core."""
     entry = MockConfigEntry(
@@ -6009,8 +5991,7 @@ async def test_options_flow_with_config_entry_core() -> None:
 
 
 @pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"])
-@pytest.mark.usefixtures("mock_integration_frame")
-@patch.object(frame, "_REPORTED_INTEGRATIONS", set())
+@pytest.mark.usefixtures("hass", "mock_integration_frame")
 async def test_options_flow_with_config_entry(caplog: pytest.LogCaptureFixture) -> None:
     """Test that OptionsFlowWithConfigEntry doesn't mutate entry options."""
     entry = MockConfigEntry(
@@ -6508,7 +6489,7 @@ async def test_update_subentry_and_abort(
     class SubentryFlowHandler(config_entries.ConfigSubentryFlow):
         async def async_step_reconfigure(self, user_input=None):
             return self.async_update_and_abort(
-                self._get_reconfigure_entry(),
+                self._get_entry(),
                 self._get_reconfigure_subentry(),
                 **kwargs,
             )
@@ -8100,10 +8081,10 @@ async def test_get_reconfigure_entry(
     assert result["reason"] == "Source is user, expected reconfigure: -"
 
 
-async def test_subentry_get_reconfigure_entry(
+async def test_subentry_get_entry(
     hass: HomeAssistant, manager: config_entries.ConfigEntries
 ) -> None:
-    """Test subentry _get_reconfigure_entry and _get_reconfigure_subentry behavior."""
+    """Test subentry _get_entry and _get_reconfigure_subentry behavior."""
     subentry_id = "mock_subentry_id"
     entry = MockConfigEntry(
         data={},
@@ -8140,13 +8121,13 @@ async def test_subentry_get_reconfigure_entry(
         async def _async_step_confirm(self):
             """Confirm input."""
             try:
-                entry = self._get_reconfigure_entry()
+                entry = self._get_entry()
             except ValueError as err:
                 reason = str(err)
             else:
                 reason = f"Found entry {entry.title}"
             try:
-                entry_id = self._reconfigure_entry_id
+                entry_id = self._entry_id
             except ValueError:
                 reason = f"{reason}: -"
             else:
@@ -8175,7 +8156,7 @@ async def test_subentry_get_reconfigure_entry(
     ) -> dict[str, type[config_entries.ConfigSubentryFlow]]:
         return {"test": TestFlow.SubentryFlowHandler}
 
-    # A reconfigure flow finds the config entry
+    # A reconfigure flow finds the config entry and subentry
     with mock_config_flow("test", TestFlow):
         result = await entry.start_subentry_reconfigure_flow(hass, "test", subentry_id)
         assert (
@@ -8197,14 +8178,14 @@ async def test_subentry_get_reconfigure_entry(
             == "Found entry entry_title: mock_entry_id/Subentry not found: 01JRemoved"
         )
 
-    # A user flow does not have access to the config entry or subentry
+    # A user flow finds the config entry but not the subentry
     with mock_config_flow("test", TestFlow):
         result = await manager.subentries.async_init(
             (entry.entry_id, "test"), context={"source": config_entries.SOURCE_USER}
         )
         assert (
             result["reason"]
-            == "Source is user, expected reconfigure: -/Source is user, expected reconfigure: -"
+            == "Found entry entry_title: mock_entry_id/Source is user, expected reconfigure: -"
         )
 
 
@@ -8789,7 +8770,6 @@ async def test_options_flow_config_entry(
 
 @pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"])
 @pytest.mark.usefixtures("mock_integration_frame")
-@patch.object(frame, "_REPORTED_INTEGRATIONS", set())
 async def test_options_flow_deprecated_config_entry_setter(
     hass: HomeAssistant,
     manager: config_entries.ConfigEntries,
@@ -8849,7 +8829,8 @@ async def test_options_flow_deprecated_config_entry_setter(
         "config_entry explicitly, which is deprecated at "
         "custom_components/my_integration/light.py, line 23: "
         "self.light.is_on. This will stop working in Home Assistant 2025.12, please "
-        "create a bug report at " in caplog.text
+        "report it to the author of the 'my_integration' custom integration"
+        in caplog.text
     )
 
 
@@ -8899,3 +8880,63 @@ async def test_add_description_placeholder_automatically_not_overwrites(
     result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], None)
     assert result["type"] == FlowResultType.FORM
     assert result["description_placeholders"] == {"name": "Custom title"}
+
+
+@pytest.mark.parametrize(
+    ("domain", "expected_log"),
+    [
+        ("some_integration", True),
+        ("mobile_app", False),
+    ],
+)
+async def test_create_entry_existing_unique_id(
+    hass: HomeAssistant,
+    domain: str,
+    expected_log: bool,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test to highlight unexpected behavior on create_entry."""
+    entry = MockConfigEntry(
+        title="From config flow",
+        domain=domain,
+        entry_id="01J915Q6T9F6G5V0QJX6HBC94T",
+        data={"host": "any", "port": 123},
+        unique_id="mock-unique-id",
+    )
+    entry.add_to_hass(hass)
+
+    assert len(hass.config_entries.async_entries(domain)) == 1
+
+    mock_setup_entry = AsyncMock(return_value=True)
+
+    mock_integration(hass, MockModule(domain, async_setup_entry=mock_setup_entry))
+    mock_platform(hass, f"{domain}.config_flow", None)
+
+    class TestFlow(config_entries.ConfigFlow):
+        """Test flow."""
+
+        VERSION = 1
+
+        async def async_step_user(self, user_input=None):
+            """Test user step."""
+            await self.async_set_unique_id("mock-unique-id")
+            return self.async_create_entry(title="mock-title", data={})
+
+    with (
+        mock_config_flow(domain, TestFlow),
+        patch.object(frame, "_REPORTED_INTEGRATIONS", set()),
+    ):
+        result = await hass.config_entries.flow.async_init(
+            domain, context={"source": config_entries.SOURCE_USER}
+        )
+        await hass.async_block_till_done()
+        assert result["type"] is FlowResultType.CREATE_ENTRY
+
+    assert len(hass.config_entries.async_entries(domain)) == 1
+
+    log_text = (
+        f"Detected that integration '{domain}' creates a config entry "
+        "when another entry with the same unique ID exists. Please "
+        "create a bug report at https:"
+    )
+    assert (log_text in caplog.text) == expected_log
diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py
index 74a55cb4989..86ba5257001 100644
--- a/tests/test_data_entry_flow.py
+++ b/tests/test_data_entry_flow.py
@@ -133,6 +133,61 @@ async def test_show_form(manager: MockFlowManager) -> None:
     assert form["errors"] == {"username": "Should be unique."}
 
 
+async def test_form_shows_with_added_suggested_values(manager: MockFlowManager) -> None:
+    """Test that we can show a form with suggested values."""
+    schema = vol.Schema(
+        {
+            vol.Required("username"): str,
+            vol.Required("password"): str,
+            vol.Required("section_1"): data_entry_flow.section(
+                vol.Schema(
+                    {
+                        vol.Optional("full_name"): str,
+                    }
+                ),
+                {"collapsed": False},
+            ),
+        }
+    )
+
+    @manager.mock_reg_handler("test")
+    class TestFlow(data_entry_flow.FlowHandler):
+        async def async_step_init(self, user_input=None):
+            data_schema = self.add_suggested_values_to_schema(
+                schema,
+                {
+                    "username": "doej",
+                    "password": "verySecret1",
+                    "section_1": {"full_name": "John Doe"},
+                },
+            )
+            return self.async_show_form(
+                step_id="init",
+                data_schema=data_schema,
+            )
+
+    form = await manager.async_init("test")
+    assert form["type"] == data_entry_flow.FlowResultType.FORM
+    assert form["data_schema"].schema == schema.schema
+    markers = list(form["data_schema"].schema)
+    assert len(markers) == 3
+    assert markers[0] == "username"
+    assert markers[0].description == {"suggested_value": "doej"}
+    assert markers[1] == "password"
+    assert markers[1].description == {"suggested_value": "verySecret1"}
+    assert markers[2] == "section_1"
+    section_validator = form["data_schema"].schema["section_1"]
+    assert isinstance(section_validator, data_entry_flow.section)
+    # The section class was not replaced
+    assert section_validator is schema.schema["section_1"]
+    # The section schema was not replaced
+    assert section_validator.schema is schema.schema["section_1"].schema
+    section_markers = list(section_validator.schema.schema)
+    assert len(section_markers) == 1
+    assert section_markers[0] == "full_name"
+    assert section_markers[0].description == {"suggested_value": "John Doe"}
+
+
 async def test_abort_removes_instance(manager: MockFlowManager) -> None:
     """Test that abort removes the flow from progress."""
diff --git a/tests/test_loader.py b/tests/test_loader.py
index 8afe800144c..0b83ddee3ea 100644
--- a/tests/test_loader.py
+++ b/tests/test_loader.py
@@ -15,7 +15,6 @@ from homeassistant import loader
 from homeassistant.components import http, hue
 from homeassistant.components.hue import light as hue_light
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers import frame
 from homeassistant.helpers.json import json_dumps
 from homeassistant.util.json import json_loads
 
@@ -28,40 +27,91 @@ async def test_circular_component_dependencies(hass: HomeAssistant) -> None:
     mock_integration(hass, MockModule("mod2", dependencies=["mod1"]))
     mock_integration(hass, MockModule("mod3", dependencies=["mod1"]))
     mod_4 = mock_integration(hass, MockModule("mod4", dependencies=["mod2", "mod3"]))
+    all_domains = {"mod1", "mod2", "mod3", "mod4"}
 
-    deps = await loader._async_component_dependencies(hass, mod_4)
-    assert deps == {"mod1", "mod2", "mod3", "mod4"}
+    deps = await loader._do_resolve_dependencies(mod_4, cache={})
+    assert deps == {"mod1", "mod2", "mod3"}
 
     # Create a circular dependency
     mock_integration(hass, MockModule("mod1", dependencies=["mod4"]))
     with pytest.raises(loader.CircularDependency):
-        await loader._async_component_dependencies(hass, mod_4)
+        await loader._do_resolve_dependencies(mod_4, cache={})
 
     # Create a different circular dependency
     mock_integration(hass, MockModule("mod1", dependencies=["mod3"]))
     with pytest.raises(loader.CircularDependency):
-        await loader._async_component_dependencies(hass, mod_4)
+        await loader._do_resolve_dependencies(mod_4, cache={})
 
     # Create a circular after_dependency
     mock_integration(
         hass, MockModule("mod1", partial_manifest={"after_dependencies": ["mod4"]})
     )
     with pytest.raises(loader.CircularDependency):
-        await loader._async_component_dependencies(hass, mod_4)
+        await loader._do_resolve_dependencies(
+            mod_4,
+            cache={},
+            possible_after_dependencies=all_domains,
+        )
 
     # Create a different circular after_dependency
     mock_integration(
         hass, MockModule("mod1", partial_manifest={"after_dependencies": ["mod3"]})
     )
     with pytest.raises(loader.CircularDependency):
-        await loader._async_component_dependencies(hass, mod_4)
+        await loader._do_resolve_dependencies(
+            mod_4,
+            cache={},
+            possible_after_dependencies=all_domains,
+        )
+
+    # Create a circular after_dependency without a hard dependency
+    mock_integration(
+        hass, MockModule("mod1", partial_manifest={"after_dependencies": ["mod4"]})
+    )
+    mod_4 = mock_integration(
+        hass, MockModule("mod4", partial_manifest={"after_dependencies": ["mod2"]})
+    )
+    with pytest.raises(loader.CircularDependency):
+        await loader._do_resolve_dependencies(
+            mod_4,
+            cache={},
+            possible_after_dependencies=all_domains,
+        )
+
+    result = await loader.resolve_integrations_after_dependencies(hass, (mod_4,))
+    assert result == {}
+    result = await loader.resolve_integrations_after_dependencies(
+        hass, (mod_4,), ignore_exceptions=True
+    )
+    assert result["mod4"] == {"mod4", "mod2", "mod1"}
 
 
 async def test_nonexistent_component_dependencies(hass: HomeAssistant) -> None:
     """Test if we can detect nonexistent dependencies of components."""
     mod_1 = mock_integration(hass, MockModule("mod1", dependencies=["nonexistent"]))
+    mod_2 = mock_integration(hass, MockModule("mod2", dependencies=["mod1"]))
+
+    assert await mod_2.resolve_dependencies() is None
+    assert mod_2.all_dependencies_resolved
     with pytest.raises(loader.IntegrationNotFound):
-        await loader._async_component_dependencies(hass, mod_1)
+        mod_2.all_dependencies  # noqa: B018
+
+    assert mod_1.all_dependencies_resolved
+    assert await mod_1.resolve_dependencies() is None
+    with pytest.raises(loader.IntegrationNotFound):
+        mod_1.all_dependencies  # noqa: B018
+
+    result = await loader.resolve_integrations_dependencies(hass, (mod_2, mod_1))
+    assert result == {}
+
+    mod_1 = mock_integration(
+        hass,
+        MockModule("mod1", partial_manifest={"after_dependencies": ["non.existent"]}),
+    )
+    mod_2 = mock_integration(hass, MockModule("mod2", dependencies=["mod1"]))
+
+    result = await loader.resolve_integrations_after_dependencies(hass, (mod_2, mod_1))
+    assert result == {}
 
 
 def test_component_loader(hass: HomeAssistant) -> None:
@@ -1314,7 +1364,6 @@ async def test_config_folder_not_in_path() -> None:
     ],
 )
 @pytest.mark.usefixtures("mock_integration_frame")
-@patch.object(frame, "_REPORTED_INTEGRATIONS", set())
 async def test_hass_components_use_reported(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
@@ -2010,7 +2059,6 @@ async def test_has_services(hass: HomeAssistant) -> None:
     ],
 )
 @pytest.mark.usefixtures("mock_integration_frame")
-@patch.object(frame, "_REPORTED_INTEGRATIONS", set())
 async def test_hass_helpers_use_reported(
     hass: HomeAssistant,
     caplog: pytest.LogCaptureFixture,
diff --git a/tests/testing_config/blueprints/template/test_event_sensor.yaml b/tests/testing_config/blueprints/template/test_event_sensor.yaml
new file mode 100644
index 00000000000..8b615eb90ba
--- /dev/null
+++ b/tests/testing_config/blueprints/template/test_event_sensor.yaml
@@ -0,0 +1,27 @@
+blueprint:
+  name: Create Sensor from Event
+  description: Creates a timestamp sensor from an event
+  domain: template
+  source_url: https://github.com/home-assistant/core/blob/dev/homeassistant/components/template/blueprints/event_sensor.yaml
+  input:
+    event_type:
+      name: Name of the event_type
+      description: The event_type for the event trigger
+      selector:
+        text:
+    event_data:
+      name: The data for the event
+      description: The event_data for the event trigger
+      selector:
+        object:
+trigger:
+  - trigger: event
+    event_type: !input event_type
+    event_data: !input event_data
+variables:
+  event_data: "{{ trigger.event.data }}"
+sensor:
+  state: "{{ now() }}"
+  device_class: timestamp
+  attributes:
+    data: "{{ event_data }}"
diff --git a/tests/util/test_logging.py b/tests/util/test_logging.py
index e5b85f35693..ba473ee0c58 100644
--- a/tests/util/test_logging.py
+++ b/tests/util/test_logging.py
@@ -6,6 +6,7 @@ import logging
 import queue
 from unittest.mock import patch
 
+from freezegun.api import FrozenDateTimeFactory
 import pytest
 
 from homeassistant.core import (
@@ -17,6 +18,13 @@ from homeassistant.core import (
 
 from homeassistant.util import logging as logging_util
 
 
+async def empty_log_queue() -> None:
+    """Empty the log queue."""
+    log_queue: queue.SimpleQueue = logging.root.handlers[0].queue
+    while not log_queue.empty():
+        await asyncio.sleep(0)
+
+
 async def test_logging_with_queue_handler() -> None:
     """Test logging with HomeAssistantQueueHandler."""
@@ -149,3 +157,150 @@ async def test_catch_log_exception_catches_and_logs() -> None:
 
     func("failure sync passed")
     assert saved_args == [("failure sync passed",)]
+
+
+@patch("homeassistant.util.logging.HomeAssistantQueueListener.MAX_LOGS_COUNT", 5)
+@patch(
+    "homeassistant.util.logging.HomeAssistantQueueListener.EXCLUDED_LOG_COUNT_MODULES",
+    ["excluded"],
+)
+@pytest.mark.parametrize(
+    (
+        "logger1_count",
+        "logger1_expected_notices",
+        "logger2_count",
+        "logger2_expected_notices",
+    ),
+    [(4, 0, 0, 0), (5, 1, 1, 0), (11, 1, 5, 1), (20, 1, 20, 1)],
+)
+async def test_noisy_loggers(
+    hass: HomeAssistant,
+    caplog: pytest.LogCaptureFixture,
+    logger1_count: int,
+    logger1_expected_notices: int,
+    logger2_count: int,
+    logger2_expected_notices: int,
+) -> None:
+    """Test that noisy loggers are all logged as warnings."""
+
+    logging_util.async_activate_log_queue_handler(hass)
+    logger1 = logging.getLogger("noisy1")
+    logger2 = logging.getLogger("noisy2.module")
+    logger_excluded = logging.getLogger("excluded.module")
+
+    for _ in range(logger1_count):
+        logger1.info("This is a log")
+
+    for _ in range(logger2_count):
+        logger2.info("This is another log")
+
+    for _ in range(logging_util.HomeAssistantQueueListener.MAX_LOGS_COUNT + 1):
+        logger_excluded.info("This log should not trigger a warning")
+
+    await empty_log_queue()
+
+    assert (
+        caplog.text.count(
+            "Module noisy1 is logging too frequently. 5 messages since last count"
+        )
+        == logger1_expected_notices
+    )
+    assert (
+        caplog.text.count(
+            "Module noisy2.module is logging too frequently. 5 messages since last count"
+        )
+        )
+        == logger2_expected_notices
+    )
+    # Ensure that the excluded module did not trigger a warning
+    assert (
+        caplog.text.count("is logging too frequently")
+        == logger1_expected_notices + logger2_expected_notices
+    )
+
+    # close the handler so the queue thread stops
+    logging.root.handlers[0].close()
+
+
+@patch("homeassistant.util.logging.HomeAssistantQueueListener.MAX_LOGS_COUNT", 1)
+async def test_noisy_loggers_ignores_self(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test that the noisy-logger warning is not triggered for its own module."""
+
+    logging_util.async_activate_log_queue_handler(hass)
+    logger1 = logging.getLogger("noisy_module1")
+    logger2 = logging.getLogger("noisy_module2")
+    logger3 = logging.getLogger("noisy_module3")
+
+    logger1.info("This is a log")
+    logger2.info("This is a log")
+    logger3.info("This is a log")
+
+    await empty_log_queue()
+    assert caplog.text.count("logging too frequently") == 3
+
+    # close the handler so the queue thread stops
+    logging.root.handlers[0].close()
+
+
+@patch("homeassistant.util.logging.HomeAssistantQueueListener.MAX_LOGS_COUNT", 5)
+async def test_noisy_loggers_ignores_lower_than_info(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test that noisy loggers are all logged as warnings, except for levels lower than INFO."""
+
+    logging_util.async_activate_log_queue_handler(hass)
+    logger = logging.getLogger("noisy_module")
+
+    for _ in range(5):
+        logger.debug("This is a log")
+
+    await empty_log_queue()
+    expected_warning = "Module noisy_module is logging too frequently"
+    assert caplog.text.count(expected_warning) == 0
+
+    logger.info("This is a log")
+    logger.info("This is a log")
+    logger.warning("This is a log")
+    logger.error("This is a log")
+    logger.critical("This is a log")
+
+    await empty_log_queue()
+    assert caplog.text.count(expected_warning) == 1
+
+    # close the handler so the queue thread stops
+    logging.root.handlers[0].close()
+
+
+@patch("homeassistant.util.logging.HomeAssistantQueueListener.MAX_LOGS_COUNT", 3)
+async def test_noisy_loggers_counters_reset(
+    hass: HomeAssistant,
+    caplog: pytest.LogCaptureFixture,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test that noisy logger counters reset periodically."""
+
+    logging_util.async_activate_log_queue_handler(hass)
+    logger = logging.getLogger("noisy_module")
+
+    expected_warning = "Module noisy_module is logging too frequently"
+
+    # Do multiple iterations to ensure the reset is periodic
+    for _ in range(logging_util.HomeAssistantQueueListener.MAX_LOGS_COUNT * 2):
+        logger.info("This is log 0")
+        await empty_log_queue()
+
+        freezer.tick(
+            logging_util.HomeAssistantQueueListener.LOG_COUNTS_RESET_INTERVAL + 1
+        )
+
+        logger.info("This is log 1")
+        await empty_log_queue()
+        assert caplog.text.count(expected_warning) == 0
+
+    logger.info("This is log 2")
+    logger.info("This is log 3")
+    await empty_log_queue()
+    assert caplog.text.count(expected_warning) == 1
+    # close the handler so the queue thread stops
+    logging.root.handlers[0].close()
diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py
index aeea4ad9a5a..3f55ceef242 100644
--- a/tests/util/test_unit_conversion.py
+++ b/tests/util/test_unit_conversion.py
@@ -902,8 +902,8 @@ def test_convert_nonnumeric_value(
     ("converter", "from_unit", "to_unit", "expected"),
     [
         # Process all items in _GET_UNIT_RATIO
-        (converter, item[0], item[1], item[2])
-        for converter, item in _GET_UNIT_RATIO.items()
+        (converter, from_unit, to_unit, expected)
+        for converter, (from_unit, to_unit, expected) in _GET_UNIT_RATIO.items()
     ],
 )
 def test_get_unit_ratio(
@@ -915,13 +915,34 @@ def test_get_unit_ratio(
     assert converter.get_unit_ratio(to_unit, from_unit) == pytest.approx(1 / ratio)
 
 
+@pytest.mark.parametrize(
+    ("converter", "from_unit", "to_unit", "expected"),
+    [
+        # Process all items in _GET_UNIT_RATIO
+        (converter, from_unit, to_unit, expected)
+        for converter, (from_unit, to_unit, expected) in _GET_UNIT_RATIO.items()
+    ],
+)
+def test_get_unit_floored_log_ratio(
+    converter: type[BaseUnitConverter], from_unit: str, to_unit: str, expected: float
+) -> None:
+    """Test floored log unit ratio.
+
+    Should not use pytest.approx since we are checking that
+    these values are exact.
+    """
+    ratio = converter.get_unit_floored_log_ratio(from_unit, to_unit)
+    assert ratio == expected
+    assert converter.get_unit_floored_log_ratio(to_unit, from_unit) == 1 / ratio
+
+
 @pytest.mark.parametrize(
     ("converter", "value", "from_unit", "expected", "to_unit"),
     [
         # Process all items in _CONVERTED_VALUE
-        (converter, list_item[0], list_item[1], list_item[2], list_item[3])
+        (converter, value, from_unit, expected, to_unit)
         for converter, item in _CONVERTED_VALUE.items()
-        for list_item in item
+        for value, from_unit, expected, to_unit in item
     ],
 )
 def test_unit_conversion(
diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py
index 0346e21044f..dacbd2c1247 100644
--- a/tests/util/yaml/test_init.py
+++ b/tests/util/yaml/test_init.py
@@ -374,7 +374,7 @@ def test_include_dir_merge_named_recursive(mock_walk: Mock) -> None:
     }
 
 
-@patch("homeassistant.util.yaml.loader.open", create=True)
+@patch("annotatedyaml.loader.open", create=True)
 @pytest.mark.usefixtures("try_both_loaders")
 def test_load_yaml_encoding_error(mock_open: Mock) -> None:
     """Test raising a UnicodeDecodeError."""
@@ -598,7 +598,7 @@ def test_load_yaml_wrap_oserror(
 ) -> None:
     """Test load_yaml wraps OSError in HomeAssistantError."""
     with (
-        patch("homeassistant.util.yaml.loader.open", side_effect=open_exception),
+        patch("annotatedyaml.loader.open", side_effect=open_exception),
         pytest.raises(load_yaml_exception),
     ):
         yaml_loader.load_yaml("bla")