diff --git a/.core_files.yaml b/.core_files.yaml index e211b8ca5ec..6fd3a74df92 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -79,6 +79,7 @@ components: &components - homeassistant/components/group/** - homeassistant/components/hassio/** - homeassistant/components/homeassistant/** + - homeassistant/components/homeassistant_hardware/** - homeassistant/components/http/** - homeassistant/components/image/** - homeassistant/components/input_boolean/** diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index e359ed59cf0..c2fee9512fb 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -10,7 +10,7 @@ on: env: BUILD_TYPE: core - DEFAULT_PYTHON: "3.12" + DEFAULT_PYTHON: "3.13" PIP_TIMEOUT: 60 UV_HTTP_TIMEOUT: 60 UV_SYSTEM_PYTHON: "true" @@ -509,7 +509,7 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build Docker image - uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0 + uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0 with: context: . # So action will not pull the repository again file: ./script/hassfest/docker/Dockerfile @@ -522,7 +522,7 @@ jobs: - name: Push Docker image if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' id: push - uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0 + uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355 # v6.10.0 with: context: . # So action will not pull the repository again file: ./script/hassfest/docker/Dockerfile @@ -531,7 +531,7 @@ jobs: - name: Generate artifact attestation if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' - uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3 + uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4 with: subject-name: ${{ env.HASSFEST_IMAGE_NAME }} subject-digest: ${{ steps.push.outputs.digest }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 263f9ed5d6d..b9e5b91aff2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -40,9 +40,9 @@ env: CACHE_VERSION: 11 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 9 - HA_SHORT_VERSION: "2024.11" + HA_SHORT_VERSION: "2024.12" DEFAULT_PYTHON: "3.12" - ALL_PYTHON_VERSIONS: "['3.12']" + ALL_PYTHON_VERSIONS: "['3.12', '3.13']" # 10.3 is the oldest supported version # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022) # 10.6 is the current long-term-support @@ -622,13 +622,13 @@ jobs: steps: - name: Check out code from GitHub uses: actions/checkout@v4.2.2 - - name: Set up Python ${{ env.DEFAULT_PYTHON }} + - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5.3.0 with: - python-version: ${{ env.DEFAULT_PYTHON }} + python-version: ${{ matrix.python-version }} check-latest: true - - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment + - name: Restore full Python ${{ matrix.python-version }} virtual environment id: cache-venv uses: actions/cache/restore@v4.1.2 with: @@ -819,11 +819,7 @@ jobs: needs: - info - base - strategy: - fail-fast: false - matrix: - python-version: ${{ fromJson(needs.info.outputs.python_versions) }} - name: Split tests for full run Python ${{ matrix.python-version }} + name: Split tests for full run steps: - name: Install additional OS dependencies run: | @@ -836,11 +832,11 @@ jobs: libgammu-dev - name: Check out code from GitHub uses: 
actions/checkout@v4.2.2 - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5.3.0 with: - python-version: ${{ matrix.python-version }} + python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true - name: Restore base Python virtual environment id: cache-venv @@ -858,7 +854,7 @@ jobs: - name: Upload pytest_buckets uses: actions/upload-artifact@v4.4.3 with: - name: pytest_buckets-${{ matrix.python-version }} + name: pytest_buckets path: pytest_buckets.txt overwrite: true @@ -923,7 +919,7 @@ jobs: - name: Download pytest_buckets uses: actions/download-artifact@v4.1.8 with: - name: pytest_buckets-${{ matrix.python-version }} + name: pytest_buckets - name: Compile English translations run: | . venv/bin/activate @@ -949,6 +945,7 @@ jobs: --timeout=9 \ --durations=10 \ --numprocesses auto \ + --snapshot-details \ --dist=loadfile \ ${cov_params[@]} \ -o console_output_style=count \ @@ -1071,6 +1068,7 @@ jobs: -qq \ --timeout=20 \ --numprocesses 1 \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=10 \ @@ -1199,6 +1197,7 @@ jobs: -qq \ --timeout=9 \ --numprocesses 1 \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=0 \ @@ -1249,12 +1248,11 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'true' - uses: codecov/codecov-action@v4.6.0 + uses: codecov/codecov-action@v5.0.7 with: fail_ci_if_error: true flags: full-suite token: ${{ secrets.CODECOV_TOKEN }} - version: v0.6.0 pytest-partial: runs-on: ubuntu-24.04 @@ -1345,6 +1343,7 @@ jobs: -qq \ --timeout=9 \ --numprocesses auto \ + --snapshot-details \ ${cov_params[@]} \ -o console_output_style=count \ --durations=0 \ @@ -1387,8 +1386,7 @@ jobs: pattern: coverage-* - name: Upload coverage to Codecov if: needs.info.outputs.test_full_suite == 'false' - uses: codecov/codecov-action@v4.6.0 + uses: codecov/codecov-action@v5.0.7 with: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} - version: v0.6.0 diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 176e010c5b9..4977139f5dc 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.27.0 + uses: github/codeql-action/init@v3.27.5 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.27.0 + uses: github/codeql-action/analyze@v3.27.5 with: category: "/language:python" diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 0c8df57d5a2..749f95fa922 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -112,7 +112,7 @@ jobs: strategy: fail-fast: false matrix: - abi: ["cp312"] + abi: ["cp312", "cp313"] arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository @@ -135,15 +135,15 @@ jobs: sed -i "/uv/d" requirements_diff.txt - name: Build wheels - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "libffi-dev;openssl-dev;yaml-dev;nasm" - skip-binary: aiohttp;multidict;yarl + apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev" + skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy constraints: "homeassistant/package_constraints.txt" 
requirements-diff: "requirements_diff.txt" requirements: "requirements.txt" @@ -156,7 +156,7 @@ jobs: strategy: fail-fast: false matrix: - abi: ["cp312"] + abi: ["cp312", "cp313"] arch: ${{ fromJson(needs.init.outputs.architectures) }} steps: - name: Checkout the repository @@ -198,6 +198,7 @@ jobs: split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt - name: Create requirements for cython<3 + if: matrix.abi == 'cp312' run: | # Some dependencies still require 'cython<3' # and don't yet use isolated build environments. @@ -208,7 +209,8 @@ jobs: cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt - name: Build wheels (old cython) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 + if: matrix.abi == 'cp312' with: abi: ${{ matrix.abi }} tag: musllinux_1_2 @@ -223,43 +225,43 @@ jobs: pip: "'cython<3'" - name: Build wheels (part 1) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtaa" - name: Build wheels (part 2) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: 
"homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtab" - name: Build wheels (part 3) - uses: home-assistant/wheels@2024.07.1 + uses: home-assistant/wheels@2024.11.0 with: abi: ${{ matrix.abi }} tag: musllinux_1_2 arch: ${{ matrix.arch }} wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true - apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl + apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev" + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtac" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f7072e5c96e..3a20276c881 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.1 + rev: v0.8.0 hooks: - id: ruff args: @@ -18,7 +18,7 @@ repos: exclude_types: [csv, json, html] exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/ - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: check-executables-have-shebangs stages: [manual] @@ -83,7 +83,7 @@ repos: pass_filenames: false language: script types: [text] - files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$ + files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/.+/(quality_scale)\.yaml|homeassistant/brands/.*\.json|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py|requirements.+\.txt)$ - id: hassfest-metadata name: hassfest-metadata entry: script/run-in-env.sh python3 -m script.hassfest -p metadata,docker diff --git a/.strict-typing b/.strict-typing index 4bfacaa64f4..cb0cab984ee 100644 --- a/.strict-typing +++ b/.strict-typing @@ -324,11 +324,13 @@ homeassistant.components.moon.* homeassistant.components.mopeka.* homeassistant.components.motionmount.* homeassistant.components.mqtt.* +homeassistant.components.music_assistant.* homeassistant.components.my.* homeassistant.components.mysensors.* homeassistant.components.myuplink.* homeassistant.components.nam.* homeassistant.components.nanoleaf.* +homeassistant.components.nasweb.* homeassistant.components.neato.* homeassistant.components.nest.* homeassistant.components.netatmo.* @@ -338,6 +340,7 @@ homeassistant.components.nfandroidtv.* homeassistant.components.nightscout.* homeassistant.components.nissan_leaf.* homeassistant.components.no_ip.* +homeassistant.components.nordpool.* homeassistant.components.notify.* 
homeassistant.components.notion.* homeassistant.components.number.* @@ -382,6 +385,7 @@ homeassistant.components.recollect_waste.* homeassistant.components.recorder.* homeassistant.components.remote.* homeassistant.components.renault.* +homeassistant.components.reolink.* homeassistant.components.repairs.* homeassistant.components.rest.* homeassistant.components.rest_command.* @@ -434,6 +438,7 @@ homeassistant.components.starlink.* homeassistant.components.statistics.* homeassistant.components.steamist.* homeassistant.components.stookalert.* +homeassistant.components.stookwijzer.* homeassistant.components.stream.* homeassistant.components.streamlabswater.* homeassistant.components.stt.* diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 2495249af66..2b02916a73e 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -87,6 +87,22 @@ }, "problemMatcher": [] }, + { + "label": "Update syrupy snapshots", + "detail": "Update syrupy snapshots for a given integration.", + "type": "shell", + "command": "python3 -m pytest ./tests/components/${input:integrationName} --snapshot-update", + "dependsOn": ["Compile English translations"], + "group": { + "kind": "test", + "isDefault": true + }, + "presentation": { + "reveal": "always", + "panel": "new" + }, + "problemMatcher": [] + }, { "label": "Generate Requirements", "type": "shell", diff --git a/CODEOWNERS b/CODEOWNERS index 5cda5610f6c..ba233c0c141 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -40,6 +40,8 @@ build.json @home-assistant/supervisor # Integrations /homeassistant/components/abode/ @shred86 /tests/components/abode/ @shred86 +/homeassistant/components/acaia/ @zweckj +/tests/components/acaia/ @zweckj /homeassistant/components/accuweather/ @bieniu /tests/components/accuweather/ @bieniu /homeassistant/components/acmeda/ @atmurray @@ -496,8 +498,8 @@ build.json @home-assistant/supervisor /tests/components/freebox/ @hacf-fr @Quentame /homeassistant/components/freedompro/ @stefano055415 /tests/components/freedompro/ @stefano055415 -/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185 -/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185 +/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185 +/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185 /homeassistant/components/fritzbox/ @mib1185 @flabbamann /tests/components/fritzbox/ @mib1185 @flabbamann /homeassistant/components/fritzbox_callmonitor/ @cdce8p @@ -586,8 +588,8 @@ build.json @home-assistant/supervisor /tests/components/group/ @home-assistant/core /homeassistant/components/guardian/ @bachya /tests/components/guardian/ @bachya -/homeassistant/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r -/tests/components/habitica/ @ASMfreaK @leikoilja @tr4nt0r +/homeassistant/components/habitica/ @tr4nt0r +/tests/components/habitica/ @tr4nt0r /homeassistant/components/hardkernel/ @home-assistant/core /tests/components/hardkernel/ @home-assistant/core /homeassistant/components/hardware/ @home-assistant/core @@ -954,6 +956,8 @@ build.json @home-assistant/supervisor /homeassistant/components/msteams/ @peroyvind /homeassistant/components/mullvad/ @meichthys /tests/components/mullvad/ @meichthys +/homeassistant/components/music_assistant/ @music-assistant +/tests/components/music_assistant/ @music-assistant /homeassistant/components/mutesync/ @currentoor /tests/components/mutesync/ @currentoor /homeassistant/components/my/ @home-assistant/core @@ -968,8 +972,8 @@ build.json @home-assistant/supervisor 
/tests/components/nam/ @bieniu /homeassistant/components/nanoleaf/ @milanmeu @joostlek /tests/components/nanoleaf/ @milanmeu @joostlek -/homeassistant/components/neato/ @Santobert -/tests/components/neato/ @Santobert +/homeassistant/components/nasweb/ @nasWebio +/tests/components/nasweb/ @nasWebio /homeassistant/components/nederlandse_spoorwegen/ @YarmoM /homeassistant/components/ness_alarm/ @nickw444 /tests/components/ness_alarm/ @nickw444 @@ -1008,6 +1012,8 @@ build.json @home-assistant/supervisor /homeassistant/components/noaa_tides/ @jdelaney72 /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe /tests/components/nobo_hub/ @echoromeo @oyvindwe +/homeassistant/components/nordpool/ @gjohansson-ST +/tests/components/nordpool/ @gjohansson-ST /homeassistant/components/notify/ @home-assistant/core /tests/components/notify/ @home-assistant/core /homeassistant/components/notify_events/ @matrozov @papajojo @@ -1338,6 +1344,8 @@ build.json @home-assistant/supervisor /tests/components/siren/ @home-assistant/core @raman325 /homeassistant/components/sisyphus/ @jkeljo /homeassistant/components/sky_hub/ @rogerselwyn +/homeassistant/components/sky_remote/ @dunnmj @saty9 +/tests/components/sky_remote/ @dunnmj @saty9 /homeassistant/components/skybell/ @tkdrob /tests/components/skybell/ @tkdrob /homeassistant/components/slack/ @tkdrob @fletcherau @@ -1479,8 +1487,8 @@ build.json @home-assistant/supervisor /tests/components/tedee/ @patrickhilker @zweckj /homeassistant/components/tellduslive/ @fredrike /tests/components/tellduslive/ @fredrike -/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core -/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core +/homeassistant/components/template/ @PhracturedBlue @home-assistant/core +/tests/components/template/ @PhracturedBlue @home-assistant/core /homeassistant/components/tesla_fleet/ @Bre77 /tests/components/tesla_fleet/ @Bre77 /homeassistant/components/tesla_wall_connector/ @einarhauks @@ -1565,6 +1573,8 @@ build.json @home-assistant/supervisor /tests/components/unifi/ @Kane610 /homeassistant/components/unifi_direct/ @tofuSCHNITZEL /homeassistant/components/unifiled/ @florisvdk +/homeassistant/components/unifiprotect/ @RaHehl +/tests/components/unifiprotect/ @RaHehl /homeassistant/components/upb/ @gwww /tests/components/upb/ @gwww /homeassistant/components/upc_connect/ @pvizeli @fabaff diff --git a/Dockerfile b/Dockerfile index a023b346d59..61d64212b40 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ ENV \ ARG QEMU_CPU # Install uv -RUN pip3 install uv==0.4.28 +RUN pip3 install uv==0.5.4 WORKDIR /usr/src diff --git a/Dockerfile.dev b/Dockerfile.dev index d05c6df425c..48f582a1581 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -35,6 +35,9 @@ RUN \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* +# Add go2rtc binary +COPY --from=ghcr.io/alexxit/go2rtc:latest /usr/local/bin/go2rtc /bin/go2rtc + # Install uv RUN pip3 install uv diff --git a/build.yaml b/build.yaml index 13618740ab8..a8755bbbf5c 100644 --- a/build.yaml +++ b/build.yaml @@ -1,10 +1,10 @@ image: ghcr.io/home-assistant/{arch}-homeassistant build_from: - aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1 - armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1 - armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1 - amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1 - i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1 + aarch64: 
ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0 + armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0 + armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0 + amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0 + i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0 codenotary: signer: notary@home-assistant.io base_image: notary@home-assistant.io diff --git a/homeassistant/__main__.py b/homeassistant/__main__.py index 4c870e94b24..b9d98832705 100644 --- a/homeassistant/__main__.py +++ b/homeassistant/__main__.py @@ -9,6 +9,7 @@ import os import sys import threading +from .backup_restore import restore_backup from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__ FAULT_LOG_FILENAME = "home-assistant.log.fault" @@ -182,6 +183,9 @@ def main() -> int: return scripts.run(args.script) config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config)) + if restore_backup(config_dir): + return RESTART_EXIT_CODE + ensure_config_path(config_dir) # pylint: disable-next=import-outside-toplevel diff --git a/homeassistant/auth/jwt_wrapper.py b/homeassistant/auth/jwt_wrapper.py index 3aa3ac63764..464df006f5f 100644 --- a/homeassistant/auth/jwt_wrapper.py +++ b/homeassistant/auth/jwt_wrapper.py @@ -18,7 +18,7 @@ from homeassistant.util.json import json_loads JWT_TOKEN_CACHE_SIZE = 16 MAX_TOKEN_SIZE = 8192 -_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss") +_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss", "sub", "jti") _VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | { "require": [] diff --git a/homeassistant/auth/mfa_modules/totp.py b/homeassistant/auth/mfa_modules/totp.py index e9055b45f05..3306f76217f 100644 --- a/homeassistant/auth/mfa_modules/totp.py +++ b/homeassistant/auth/mfa_modules/totp.py @@ -177,17 +177,17 @@ class TotpAuthModule(MultiFactorAuthModule): class TotpSetupFlow(SetupFlow): """Handler for the setup flow.""" + _auth_module: TotpAuthModule + _ota_secret: str + _url: str + _image: str + def __init__( self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User ) -> None: """Initialize the setup flow.""" super().__init__(auth_module, setup_schema, user.id) - # to fix typing complaint - self._auth_module: TotpAuthModule = auth_module self._user = user - self._ota_secret: str = "" - self._url: str | None = None - self._image: str | None = None async def async_step_init( self, user_input: dict[str, str] | None = None @@ -214,12 +214,11 @@ class TotpSetupFlow(SetupFlow): errors["base"] = "invalid_code" else: - hass = self._auth_module.hass ( self._ota_secret, self._url, self._image, - ) = await hass.async_add_executor_job( + ) = await self._auth_module.hass.async_add_executor_job( _generate_secret_and_qr_code, str(self._user.name), ) diff --git a/homeassistant/backup_restore.py b/homeassistant/backup_restore.py new file mode 100644 index 00000000000..32991dfb2d3 --- /dev/null +++ b/homeassistant/backup_restore.py @@ -0,0 +1,126 @@ +"""Home Assistant module to handle restoring backups.""" + +from dataclasses import dataclass +import json +import logging +from pathlib import Path +import shutil +import sys +from tempfile import TemporaryDirectory + +from awesomeversion import AwesomeVersion +import securetar + +from .const import __version__ as HA_VERSION + +RESTORE_BACKUP_FILE = ".HA_RESTORE" +KEEP_PATHS = ("backups",) + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class RestoreBackupFileContent: + """Definition for restore backup 
file content.""" + + backup_file_path: Path + + +def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None: + """Return the contents of the restore backup file.""" + instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE) + try: + instruction_content = json.loads(instruction_path.read_text(encoding="utf-8")) + return RestoreBackupFileContent( + backup_file_path=Path(instruction_content["path"]) + ) + except (FileNotFoundError, json.JSONDecodeError): + return None + + +def _clear_configuration_directory(config_dir: Path) -> None: + """Delete all files and directories in the config directory except for the backups directory.""" + keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS] + config_contents = sorted( + [entry for entry in config_dir.iterdir() if entry not in keep_paths] + ) + + for entry in config_contents: + entrypath = config_dir.joinpath(entry) + + if entrypath.is_file(): + entrypath.unlink() + elif entrypath.is_dir(): + shutil.rmtree(entrypath) + + +def _extract_backup(config_dir: Path, backup_file_path: Path) -> None: + """Extract the backup file to the config directory.""" + with ( + TemporaryDirectory() as tempdir, + securetar.SecureTarFile( + backup_file_path, + gzip=False, + mode="r", + ) as ostf, + ): + ostf.extractall( + path=Path(tempdir, "extracted"), + members=securetar.secure_path(ostf), + filter="fully_trusted", + ) + backup_meta_file = Path(tempdir, "extracted", "backup.json") + backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8")) + + if ( + backup_meta_version := AwesomeVersion( + backup_meta["homeassistant"]["version"] + ) + ) > HA_VERSION: + raise ValueError( + f"You need at least Home Assistant version {backup_meta_version} to restore this backup" + ) + + with securetar.SecureTarFile( + Path( + tempdir, + "extracted", + f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}", + ), + gzip=backup_meta["compressed"], + mode="r", + ) as istf: + for member in istf.getmembers(): + if member.name == "data": + continue + member.name = member.name.replace("data/", "") + _clear_configuration_directory(config_dir) + istf.extractall( + path=config_dir, + members=[ + member + for member in securetar.secure_path(istf) + if member.name != "data" + ], + filter="fully_trusted", + ) + + +def restore_backup(config_dir_path: str) -> bool: + """Restore the backup file if any. + + Returns True if a restore backup file was found and restored, False otherwise. 
+ """ + config_dir = Path(config_dir_path) + if not (restore_content := restore_backup_file_content(config_dir)): + return False + + logging.basicConfig(stream=sys.stdout, level=logging.INFO) + backup_file_path = restore_content.backup_file_path + _LOGGER.info("Restoring %s", backup_file_path) + try: + _extract_backup(config_dir, backup_file_path) + except FileNotFoundError as err: + raise ValueError(f"Backup file {backup_file_path} does not exist") from err + _LOGGER.info("Restore complete, restarting") + return True diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index dcfb6685627..1034223051c 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -515,7 +515,7 @@ async def async_from_config_dict( issue_registry.async_create_issue( hass, core.DOMAIN, - "python_version", + f"python_version_{required_python_version}", is_fixable=False, severity=issue_registry.IssueSeverity.WARNING, breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE, diff --git a/homeassistant/brands/sky.json b/homeassistant/brands/sky.json new file mode 100644 index 00000000000..3ab0cbbe5bd --- /dev/null +++ b/homeassistant/brands/sky.json @@ -0,0 +1,5 @@ +{ + "domain": "sky", + "name": "Sky", + "integrations": ["sky_hub", "sky_remote"] +} diff --git a/homeassistant/components/abode/config_flow.py b/homeassistant/components/abode/config_flow.py index 1c0186e1003..01b6c7f568f 100644 --- a/homeassistant/components/abode/config_flow.py +++ b/homeassistant/components/abode/config_flow.py @@ -112,9 +112,6 @@ class AbodeFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - if user_input is None: return self.async_show_form( step_id="user", data_schema=vol.Schema(self.data_schema) diff --git a/homeassistant/components/abode/manifest.json b/homeassistant/components/abode/manifest.json index 9f5806d544a..c1ffb9f699b 100644 --- a/homeassistant/components/abode/manifest.json +++ b/homeassistant/components/abode/manifest.json @@ -9,5 +9,6 @@ }, "iot_class": "cloud_push", "loggers": ["jaraco.abode", "lomond"], - "requirements": ["jaraco.abode==6.2.1"] + "requirements": ["jaraco.abode==6.2.1"], + "single_config_entry": true } diff --git a/homeassistant/components/abode/strings.json b/homeassistant/components/abode/strings.json index 4b98b69eb19..b3d57042754 100644 --- a/homeassistant/components/abode/strings.json +++ b/homeassistant/components/abode/strings.json @@ -28,7 +28,6 @@ "invalid_mfa_code": "Invalid MFA code" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, diff --git a/homeassistant/components/acaia/__init__.py b/homeassistant/components/acaia/__init__.py new file mode 100644 index 00000000000..44f21533e98 --- /dev/null +++ b/homeassistant/components/acaia/__init__.py @@ -0,0 +1,31 @@ +"""Initialize the Acaia component.""" + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import AcaiaConfigEntry, AcaiaCoordinator + +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.SENSOR, +] + + +async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool: + """Set up acaia as config entry.""" + + coordinator = AcaiaCoordinator(hass, entry) + await 
coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/acaia/binary_sensor.py b/homeassistant/components/acaia/binary_sensor.py new file mode 100644 index 00000000000..9aa4b92e932 --- /dev/null +++ b/homeassistant/components/acaia/binary_sensor.py @@ -0,0 +1,58 @@ +"""Binary sensor platform for Acaia scales.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from aioacaia.acaiascale import AcaiaScale + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import AcaiaConfigEntry +from .entity import AcaiaEntity + + +@dataclass(kw_only=True, frozen=True) +class AcaiaBinarySensorEntityDescription(BinarySensorEntityDescription): + """Description for Acaia binary sensor entities.""" + + is_on_fn: Callable[[AcaiaScale], bool] + + +BINARY_SENSORS: tuple[AcaiaBinarySensorEntityDescription, ...] = ( + AcaiaBinarySensorEntityDescription( + key="timer_running", + translation_key="timer_running", + device_class=BinarySensorDeviceClass.RUNNING, + is_on_fn=lambda scale: scale.timer_running, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AcaiaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up binary sensors.""" + + coordinator = entry.runtime_data + async_add_entities( + AcaiaBinarySensor(coordinator, description) for description in BINARY_SENSORS + ) + + +class AcaiaBinarySensor(AcaiaEntity, BinarySensorEntity): + """Representation of an Acaia binary sensor.""" + + entity_description: AcaiaBinarySensorEntityDescription + + @property + def is_on(self) -> bool: + """Return true if the binary sensor is on.""" + return self.entity_description.is_on_fn(self._scale) diff --git a/homeassistant/components/acaia/button.py b/homeassistant/components/acaia/button.py new file mode 100644 index 00000000000..a41233bfc17 --- /dev/null +++ b/homeassistant/components/acaia/button.py @@ -0,0 +1,63 @@ +"""Button entities for Acaia scales.""" + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from typing import Any + +from aioacaia.acaiascale import AcaiaScale + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import AcaiaConfigEntry +from .entity import AcaiaEntity + +PARALLEL_UPDATES = 0 + + +@dataclass(kw_only=True, frozen=True) +class AcaiaButtonEntityDescription(ButtonEntityDescription): + """Description for acaia button entities.""" + + press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]] + + +BUTTONS: tuple[AcaiaButtonEntityDescription, ...] 
= ( + AcaiaButtonEntityDescription( + key="tare", + translation_key="tare", + press_fn=lambda scale: scale.tare(), + ), + AcaiaButtonEntityDescription( + key="reset_timer", + translation_key="reset_timer", + press_fn=lambda scale: scale.reset_timer(), + ), + AcaiaButtonEntityDescription( + key="start_stop", + translation_key="start_stop", + press_fn=lambda scale: scale.start_stop_timer(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AcaiaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up button entities and services.""" + + coordinator = entry.runtime_data + async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS) + + +class AcaiaButton(AcaiaEntity, ButtonEntity): + """Representation of an Acaia button.""" + + entity_description: AcaiaButtonEntityDescription + + async def async_press(self) -> None: + """Handle the button press.""" + await self.entity_description.press_fn(self._scale) diff --git a/homeassistant/components/acaia/config_flow.py b/homeassistant/components/acaia/config_flow.py new file mode 100644 index 00000000000..fb2639fc886 --- /dev/null +++ b/homeassistant/components/acaia/config_flow.py @@ -0,0 +1,149 @@ +"""Config flow for Acaia integration.""" + +import logging +from typing import Any + +from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice +from aioacaia.helpers import is_new_scale +import voluptuous as vol + +from homeassistant.components.bluetooth import ( + BluetoothServiceInfoBleak, + async_discovered_service_info, +) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ADDRESS, CONF_NAME +from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) + +from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for acaia.""" + + def __init__(self) -> None: + """Initialize the config flow.""" + self._discovered: dict[str, Any] = {} + self._discovered_devices: dict[str, str] = {} + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + + errors: dict[str, str] = {} + + if user_input is not None: + mac = user_input[CONF_ADDRESS] + try: + is_new_style_scale = await is_new_scale(mac) + except AcaiaDeviceNotFound: + errors["base"] = "device_not_found" + except AcaiaError: + _LOGGER.exception("Error occurred while connecting to the scale") + errors["base"] = "unknown" + except AcaiaUnknownDevice: + return self.async_abort(reason="unsupported_device") + else: + await self.async_set_unique_id(format_mac(mac)) + self._abort_if_unique_id_configured() + + if not errors: + return self.async_create_entry( + title=self._discovered_devices[mac], + data={ + CONF_ADDRESS: mac, + CONF_IS_NEW_STYLE_SCALE: is_new_style_scale, + }, + ) + + for device in async_discovered_service_info(self.hass): + self._discovered_devices[device.address] = device.name + + if not self._discovered_devices: + return self.async_abort(reason="no_devices_found") + + options = [ + SelectOptionDict( + value=device_mac, + label=f"{device_name} ({device_mac})", + ) + for device_mac, device_name in self._discovered_devices.items() + ] + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( 
+ { + vol.Required(CONF_ADDRESS): SelectSelector( + SelectSelectorConfig( + options=options, + mode=SelectSelectorMode.DROPDOWN, + ) + ) + } + ), + errors=errors, + ) + + async def async_step_bluetooth( + self, discovery_info: BluetoothServiceInfoBleak + ) -> ConfigFlowResult: + """Handle a discovered Bluetooth device.""" + + self._discovered[CONF_ADDRESS] = discovery_info.address + self._discovered[CONF_NAME] = discovery_info.name + + await self.async_set_unique_id(format_mac(discovery_info.address)) + self._abort_if_unique_id_configured() + + try: + self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale( + discovery_info.address + ) + except AcaiaDeviceNotFound: + _LOGGER.debug("Device not found during discovery") + return self.async_abort(reason="device_not_found") + except AcaiaError: + _LOGGER.debug( + "Error occurred while connecting to the scale during discovery", + exc_info=True, + ) + return self.async_abort(reason="unknown") + except AcaiaUnknownDevice: + _LOGGER.debug("Unsupported device during discovery") + return self.async_abort(reason="unsupported_device") + + return await self.async_step_bluetooth_confirm() + + async def async_step_bluetooth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle confirmation of Bluetooth discovery.""" + + if user_input is not None: + return self.async_create_entry( + title=self._discovered[CONF_NAME], + data={ + CONF_ADDRESS: self._discovered[CONF_ADDRESS], + CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE], + }, + ) + + self.context["title_placeholders"] = placeholders = { + CONF_NAME: self._discovered[CONF_NAME] + } + + self._set_confirm_only() + return self.async_show_form( + step_id="bluetooth_confirm", + description_placeholders=placeholders, + ) diff --git a/homeassistant/components/acaia/const.py b/homeassistant/components/acaia/const.py new file mode 100644 index 00000000000..c603578763d --- /dev/null +++ b/homeassistant/components/acaia/const.py @@ -0,0 +1,4 @@ +"""Constants for component.""" + +DOMAIN = "acaia" +CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale" diff --git a/homeassistant/components/acaia/coordinator.py b/homeassistant/components/acaia/coordinator.py new file mode 100644 index 00000000000..bd915b42408 --- /dev/null +++ b/homeassistant/components/acaia/coordinator.py @@ -0,0 +1,86 @@ +"""Coordinator for Acaia integration.""" + +from __future__ import annotations + +from datetime import timedelta +import logging + +from aioacaia.acaiascale import AcaiaScale +from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import CONF_IS_NEW_STYLE_SCALE + +SCAN_INTERVAL = timedelta(seconds=15) + +_LOGGER = logging.getLogger(__name__) + +type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator] + + +class AcaiaCoordinator(DataUpdateCoordinator[None]): + """Class to handle fetching data from the scale.""" + + config_entry: AcaiaConfigEntry + + def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + _LOGGER, + name="acaia coordinator", + update_interval=SCAN_INTERVAL, + config_entry=entry, + ) + + self._scale = AcaiaScale( + address_or_ble_device=entry.data[CONF_ADDRESS], + name=entry.title, + is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE], + 
notify_callback=self.async_update_listeners, + ) + + @property + def scale(self) -> AcaiaScale: + """Return the scale object.""" + return self._scale + + async def _async_update_data(self) -> None: + """Fetch data.""" + + # scale is already connected, return + if self._scale.connected: + return + + # scale is not connected, try to connect + try: + await self._scale.connect(setup_tasks=False) + except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex: + _LOGGER.debug( + "Could not connect to scale: %s, Error: %s", + self.config_entry.data[CONF_ADDRESS], + ex, + ) + self._scale.device_disconnected_handler(notify=False) + return + + # connected, set up background tasks + if not self._scale.heartbeat_task or self._scale.heartbeat_task.done(): + self._scale.heartbeat_task = self.config_entry.async_create_background_task( + hass=self.hass, + target=self._scale.send_heartbeats(), + name="acaia_heartbeat_task", + ) + + if not self._scale.process_queue_task or self._scale.process_queue_task.done(): + self._scale.process_queue_task = ( + self.config_entry.async_create_background_task( + hass=self.hass, + target=self._scale.process_queue(), + name="acaia_process_queue_task", + ) + ) diff --git a/homeassistant/components/acaia/diagnostics.py b/homeassistant/components/acaia/diagnostics.py new file mode 100644 index 00000000000..2d9f4511804 --- /dev/null +++ b/homeassistant/components/acaia/diagnostics.py @@ -0,0 +1,31 @@ +"""Diagnostics support for Acaia.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import AcaiaConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, + entry: AcaiaConfigEntry, +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + scale = coordinator.scale + + # collect all data sources + return { + "model": scale.model, + "device_state": ( + asdict(scale.device_state) if scale.device_state is not None else "" + ), + "mac": scale.mac, + "last_disconnect_time": scale.last_disconnect_time, + "timer": scale.timer, + "weight": scale.weight, + } diff --git a/homeassistant/components/acaia/entity.py b/homeassistant/components/acaia/entity.py new file mode 100644 index 00000000000..bef1ac313ca --- /dev/null +++ b/homeassistant/components/acaia/entity.py @@ -0,0 +1,46 @@ +"""Base class for Acaia entities.""" + +from dataclasses import dataclass + +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import AcaiaCoordinator + + +@dataclass +class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]): + """Common elements for all entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: AcaiaCoordinator, + entity_description: EntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._scale = coordinator.scale + formatted_mac = format_mac(self._scale.mac) + self._attr_unique_id = f"{formatted_mac}_{entity_description.key}" + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, formatted_mac)}, + manufacturer="Acaia", + model=self._scale.model, + suggested_area="Kitchen", + connections={(CONNECTION_BLUETOOTH, self._scale.mac)}, + ) + + @property + def 
available(self) -> bool: + """Returns whether entity is available.""" + return super().available and self._scale.connected diff --git a/homeassistant/components/acaia/icons.json b/homeassistant/components/acaia/icons.json new file mode 100644 index 00000000000..59b316a36ce --- /dev/null +++ b/homeassistant/components/acaia/icons.json @@ -0,0 +1,24 @@ +{ + "entity": { + "binary_sensor": { + "timer_running": { + "default": "mdi:timer", + "state": { + "on": "mdi:timer-play", + "off": "mdi:timer-off" + } + } + }, + "button": { + "tare": { + "default": "mdi:scale-balance" + }, + "reset_timer": { + "default": "mdi:timer-refresh" + }, + "start_stop": { + "default": "mdi:timer-play" + } + } + } +} diff --git a/homeassistant/components/acaia/manifest.json b/homeassistant/components/acaia/manifest.json new file mode 100644 index 00000000000..3f3e1c14d58 --- /dev/null +++ b/homeassistant/components/acaia/manifest.json @@ -0,0 +1,29 @@ +{ + "domain": "acaia", + "name": "Acaia", + "bluetooth": [ + { + "manufacturer_id": 16962 + }, + { + "local_name": "ACAIA*" + }, + { + "local_name": "PYXIS-*" + }, + { + "local_name": "LUNAR-*" + }, + { + "local_name": "PROCHBT001" + } + ], + "codeowners": ["@zweckj"], + "config_flow": true, + "dependencies": ["bluetooth_adapters"], + "documentation": "https://www.home-assistant.io/integrations/acaia", + "integration_type": "device", + "iot_class": "local_push", + "loggers": ["aioacaia"], + "requirements": ["aioacaia==0.1.10"] +} diff --git a/homeassistant/components/acaia/quality_scale.yaml b/homeassistant/components/acaia/quality_scale.yaml new file mode 100644 index 00000000000..9f9f8da8d5d --- /dev/null +++ b/homeassistant/components/acaia/quality_scale.yaml @@ -0,0 +1,106 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions are defined. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + No explicit event subscriptions. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: + status: exempt + comment: | + Device is expected to be offline most of the time, but needs to connect quickly once available. + unique-config-entry: done + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: done + comment: | + Handled by coordinator. + parallel-updates: done + reauthentication-flow: + status: exempt + comment: | + No authentication required. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + No IP discovery. + discovery: + status: done + comment: | + Bluetooth discovery. + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + Device type integration. 
+ entity-category: done + entity-device-class: done + entity-disabled-by-default: + status: exempt + comment: | + No noisy/non-essential entities. + entity-translations: done + exception-translations: + status: exempt + comment: | + No custom exceptions. + icon-translations: done + reconfiguration-flow: + status: exempt + comment: | + Only parameter that could be changed (MAC = unique_id) would force a new config entry. + repair-issues: + status: exempt + comment: | + No repairs/issues. + stale-devices: + status: exempt + comment: | + Device type integration. + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + Bluetooth connection. + strict-typing: done diff --git a/homeassistant/components/acaia/sensor.py b/homeassistant/components/acaia/sensor.py new file mode 100644 index 00000000000..6e6ce6afcb8 --- /dev/null +++ b/homeassistant/components/acaia/sensor.py @@ -0,0 +1,143 @@ +"""Sensor platform for Acaia.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from aioacaia.acaiascale import AcaiaDeviceState, AcaiaScale +from aioacaia.const import UnitMass as AcaiaUnitOfMass + +from homeassistant.components.sensor import ( + RestoreSensor, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorExtraStoredData, + SensorStateClass, +) +from homeassistant.const import PERCENTAGE, UnitOfMass, UnitOfVolumeFlowRate +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import AcaiaConfigEntry +from .entity import AcaiaEntity + + +@dataclass(kw_only=True, frozen=True) +class AcaiaSensorEntityDescription(SensorEntityDescription): + """Description for Acaia sensor entities.""" + + value_fn: Callable[[AcaiaScale], int | float | None] + + +@dataclass(kw_only=True, frozen=True) +class AcaiaDynamicUnitSensorEntityDescription(AcaiaSensorEntityDescription): + """Description for Acaia sensor entities with dynamic units.""" + + unit_fn: Callable[[AcaiaDeviceState], str] | None = None + + +SENSORS: tuple[AcaiaSensorEntityDescription, ...] = ( + AcaiaDynamicUnitSensorEntityDescription( + key="weight", + device_class=SensorDeviceClass.WEIGHT, + native_unit_of_measurement=UnitOfMass.GRAMS, + state_class=SensorStateClass.MEASUREMENT, + unit_fn=lambda data: ( + UnitOfMass.OUNCES + if data.units == AcaiaUnitOfMass.OUNCES + else UnitOfMass.GRAMS + ), + value_fn=lambda scale: scale.weight, + ), + AcaiaDynamicUnitSensorEntityDescription( + key="flow_rate", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + native_unit_of_measurement=UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND, + suggested_display_precision=1, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda scale: scale.flow_rate, + ), +) +RESTORE_SENSORS: tuple[AcaiaSensorEntityDescription, ...] 
= ( + AcaiaSensorEntityDescription( + key="battery", + device_class=SensorDeviceClass.BATTERY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda scale: ( + scale.device_state.battery_level if scale.device_state else None + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: AcaiaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up sensors.""" + + coordinator = entry.runtime_data + entities: list[SensorEntity] = [ + AcaiaSensor(coordinator, entity_description) for entity_description in SENSORS + ] + entities.extend( + AcaiaRestoreSensor(coordinator, entity_description) + for entity_description in RESTORE_SENSORS + ) + async_add_entities(entities) + + +class AcaiaSensor(AcaiaEntity, SensorEntity): + """Representation of an Acaia sensor.""" + + entity_description: AcaiaDynamicUnitSensorEntityDescription + + @property + def native_unit_of_measurement(self) -> str | None: + """Return the unit of measurement of this entity.""" + if ( + self._scale.device_state is not None + and self.entity_description.unit_fn is not None + ): + return self.entity_description.unit_fn(self._scale.device_state) + return self.entity_description.native_unit_of_measurement + + @property + def native_value(self) -> int | float | None: + """Return the state of the entity.""" + return self.entity_description.value_fn(self._scale) + + +class AcaiaRestoreSensor(AcaiaEntity, RestoreSensor): + """Representation of an Acaia sensor with restore capabilities.""" + + entity_description: AcaiaSensorEntityDescription + _restored_data: SensorExtraStoredData | None = None + + async def async_added_to_hass(self) -> None: + """Handle entity which will be added.""" + await super().async_added_to_hass() + + self._restored_data = await self.async_get_last_sensor_data() + if self._restored_data is not None: + self._attr_native_value = self._restored_data.native_value + self._attr_native_unit_of_measurement = ( + self._restored_data.native_unit_of_measurement + ) + + if self._scale.device_state is not None: + self._attr_native_value = self.entity_description.value_fn(self._scale) + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + if self._scale.device_state is not None: + self._attr_native_value = self.entity_description.value_fn(self._scale) + self._async_write_ha_state() + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return super().available or self._restored_data is not None diff --git a/homeassistant/components/acaia/strings.json b/homeassistant/components/acaia/strings.json new file mode 100644 index 00000000000..e0e97b7c2ff --- /dev/null +++ b/homeassistant/components/acaia/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "flow_title": "{name}", + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]", + "unsupported_device": "This device is not supported." 
+ }, + "error": { + "device_not_found": "Device could not be found.", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "step": { + "bluetooth_confirm": { + "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]" + }, + "user": { + "description": "[%key:component::bluetooth::config::step::user::description%]", + "data": { + "address": "[%key:common::config_flow::data::device%]" + }, + "data_description": { + "address": "Select Acaia scale you want to set up" + } + } + } + }, + "entity": { + "binary_sensor": { + "timer_running": { + "name": "Timer running" + } + }, + "button": { + "tare": { + "name": "Tare" + }, + "reset_timer": { + "name": "Reset timer" + }, + "start_stop": { + "name": "Start/stop timer" + } + } + } +} diff --git a/homeassistant/components/accuweather/manifest.json b/homeassistant/components/accuweather/manifest.json index 24a8180eef8..75f4a265b5f 100644 --- a/homeassistant/components/accuweather/manifest.json +++ b/homeassistant/components/accuweather/manifest.json @@ -7,7 +7,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["accuweather"], - "quality_scale": "platinum", - "requirements": ["accuweather==3.0.0"], + "requirements": ["accuweather==4.0.0"], "single_config_entry": true } diff --git a/homeassistant/components/acer_projector/manifest.json b/homeassistant/components/acer_projector/manifest.json index 58a2372e42a..026374bf53d 100644 --- a/homeassistant/components/acer_projector/manifest.json +++ b/homeassistant/components/acer_projector/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/acer_projector", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pyserial==3.5"] } diff --git a/homeassistant/components/actiontec/manifest.json b/homeassistant/components/actiontec/manifest.json index ff9cf85614f..e7aa33f1baf 100644 --- a/homeassistant/components/actiontec/manifest.json +++ b/homeassistant/components/actiontec/manifest.json @@ -3,5 +3,6 @@ "name": "Actiontec", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/actiontec", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/ads/cover.py b/homeassistant/components/ads/cover.py index 541f8bfc82c..c7b0f4f2f8a 100644 --- a/homeassistant/components/ads/cover.py +++ b/homeassistant/components/ads/cover.py @@ -37,7 +37,7 @@ STATE_KEY_POSITION = "position" PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend( { - vol.Optional(CONF_ADS_VAR): cv.string, + vol.Required(CONF_ADS_VAR): cv.string, vol.Optional(CONF_ADS_VAR_POSITION): cv.string, vol.Optional(CONF_ADS_VAR_SET_POS): cv.string, vol.Optional(CONF_ADS_VAR_CLOSE): cv.string, diff --git a/homeassistant/components/ads/manifest.json b/homeassistant/components/ads/manifest.json index 86fc54ea784..683c3cb619f 100644 --- a/homeassistant/components/ads/manifest.json +++ b/homeassistant/components/ads/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ads", "iot_class": "local_push", "loggers": ["pyads"], + "quality_scale": "legacy", "requirements": ["pyads==3.4.0"] } diff --git a/homeassistant/components/advantage_air/manifest.json b/homeassistant/components/advantage_air/manifest.json index a07d14896eb..553a641b603 100644 --- a/homeassistant/components/advantage_air/manifest.json +++ b/homeassistant/components/advantage_air/manifest.json @@ -6,6 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/advantage_air", "iot_class": "local_polling", "loggers": ["advantage_air"], - "quality_scale": "platinum", "requirements": ["advantage-air==0.4.4"] } diff --git a/homeassistant/components/aemet/__init__.py b/homeassistant/components/aemet/__init__.py index e242d62a580..9ec52faec00 100644 --- a/homeassistant/components/aemet/__init__.py +++ b/homeassistant/components/aemet/__init__.py @@ -1,10 +1,9 @@ """The AEMET OpenData component.""" -from dataclasses import dataclass import logging from aemet_opendata.exceptions import AemetError, TownNotFound -from aemet_opendata.interface import AEMET, ConnectionOptions +from aemet_opendata.interface import AEMET, ConnectionOptions, UpdateFeature from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME @@ -13,20 +12,10 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client from .const import CONF_STATION_UPDATES, PLATFORMS -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator _LOGGER = logging.getLogger(__name__) -type AemetConfigEntry = ConfigEntry[AemetData] - - -@dataclass -class AemetData: - """Aemet runtime data.""" - - name: str - coordinator: WeatherUpdateCoordinator - async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool: """Set up AEMET OpenData as config entry.""" @@ -34,9 +23,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo api_key = entry.data[CONF_API_KEY] latitude = entry.data[CONF_LATITUDE] longitude = entry.data[CONF_LONGITUDE] - station_updates = entry.options.get(CONF_STATION_UPDATES, True) + update_features: int = UpdateFeature.FORECAST + if entry.options.get(CONF_STATION_UPDATES, True): + update_features |= UpdateFeature.STATION - options = ConnectionOptions(api_key, station_updates) + options = ConnectionOptions(api_key, update_features) aemet = AEMET(aiohttp_client.async_get_clientsession(hass), options) try: await aemet.select_coordinates(latitude, longitude) @@ -46,7 +37,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo except AemetError as err: raise ConfigEntryNotReady(err) from err - weather_coordinator = WeatherUpdateCoordinator(hass, aemet) + weather_coordinator = WeatherUpdateCoordinator(hass, entry, aemet) await weather_coordinator.async_config_entry_first_refresh() entry.runtime_data = AemetData(name=name, coordinator=weather_coordinator) diff --git a/homeassistant/components/aemet/config_flow.py b/homeassistant/components/aemet/config_flow.py index 6b2eca3f5c9..e2b0b436c8c 100644 --- a/homeassistant/components/aemet/config_flow.py +++ b/homeassistant/components/aemet/config_flow.py @@ -45,7 +45,7 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(f"{latitude}-{longitude}") self._abort_if_unique_id_configured() - options = ConnectionOptions(user_input[CONF_API_KEY], False) + options = ConnectionOptions(user_input[CONF_API_KEY]) aemet = AEMET(aiohttp_client.async_get_clientsession(self.hass), options) try: await aemet.select_coordinates(latitude, longitude) diff --git a/homeassistant/components/aemet/coordinator.py b/homeassistant/components/aemet/coordinator.py index 8d179ccdb02..2e8534c7466 100644 --- a/homeassistant/components/aemet/coordinator.py +++ b/homeassistant/components/aemet/coordinator.py @@ -3,6 +3,7 @@ from __future__ 
import annotations from asyncio import timeout +from dataclasses import dataclass from datetime import timedelta import logging from typing import Any, Final, cast @@ -19,6 +20,7 @@ from aemet_opendata.helpers import dict_nested_value from aemet_opendata.interface import AEMET from homeassistant.components.weather import Forecast +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -29,6 +31,16 @@ _LOGGER = logging.getLogger(__name__) API_TIMEOUT: Final[int] = 120 WEATHER_UPDATE_INTERVAL = timedelta(minutes=10) +type AemetConfigEntry = ConfigEntry[AemetData] + + +@dataclass +class AemetData: + """Aemet runtime data.""" + + name: str + coordinator: WeatherUpdateCoordinator + class WeatherUpdateCoordinator(DataUpdateCoordinator): """Weather data update coordinator.""" @@ -36,6 +48,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): def __init__( self, hass: HomeAssistant, + entry: AemetConfigEntry, aemet: AEMET, ) -> None: """Initialize coordinator.""" @@ -44,6 +57,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): super().__init__( hass, _LOGGER, + config_entry=entry, name=DOMAIN, update_interval=WEATHER_UPDATE_INTERVAL, ) diff --git a/homeassistant/components/aemet/diagnostics.py b/homeassistant/components/aemet/diagnostics.py index 2379bd34bc0..bc366fc6d44 100644 --- a/homeassistant/components/aemet/diagnostics.py +++ b/homeassistant/components/aemet/diagnostics.py @@ -15,7 +15,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant -from . import AemetConfigEntry +from .coordinator import AemetConfigEntry TO_REDACT_CONFIG = [ CONF_API_KEY, diff --git a/homeassistant/components/aemet/manifest.json b/homeassistant/components/aemet/manifest.json index 3696e16b437..5c9d1ff7e5a 100644 --- a/homeassistant/components/aemet/manifest.json +++ b/homeassistant/components/aemet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aemet", "iot_class": "cloud_polling", "loggers": ["aemet_opendata"], - "requirements": ["AEMET-OpenData==0.5.4"] + "requirements": ["AEMET-OpenData==0.6.3"] } diff --git a/homeassistant/components/aemet/sensor.py b/homeassistant/components/aemet/sensor.py index e55344490aa..88eb34b6f84 100644 --- a/homeassistant/components/aemet/sensor.py +++ b/homeassistant/components/aemet/sensor.py @@ -55,7 +55,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . import AemetConfigEntry from .const import ( ATTR_API_CONDITION, ATTR_API_FORECAST_CONDITION, @@ -87,7 +86,7 @@ from .const import ( ATTR_API_WIND_SPEED, CONDITIONS_MAP, ) -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator from .entity import AemetEntity diff --git a/homeassistant/components/aemet/weather.py b/homeassistant/components/aemet/weather.py index 341b81d71c4..a156652eadd 100644 --- a/homeassistant/components/aemet/weather.py +++ b/homeassistant/components/aemet/weather.py @@ -27,9 +27,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
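Editorial note: several of the AEMET changes above hinge on the runtime-data pattern. AemetConfigEntry is a ConfigEntry parametrized with the small AemetData dataclass, and moving both into coordinator.py lets sensor.py, weather.py and diagnostics.py import them without going through the package __init__. A rough stand-alone sketch of the typing side, with FakeEntry standing in for Home Assistant's ConfigEntry:

from dataclasses import dataclass
from typing import Generic, TypeVar

RuntimeDataT = TypeVar("RuntimeDataT")


class FakeEntry(Generic[RuntimeDataT]):
    """Placeholder for homeassistant.config_entries.ConfigEntry."""

    runtime_data: RuntimeDataT


@dataclass
class AemetLikeData:
    name: str
    coordinator: object  # the WeatherUpdateCoordinator in the real code


type AemetLikeEntry = FakeEntry[AemetLikeData]  # mirrors AemetConfigEntry


def setup(entry: AemetLikeEntry, coordinator: object) -> None:
    # Everything the platforms need is stored on the entry itself.
    entry.runtime_data = AemetLikeData(name="home", coordinator=coordinator)


entry: AemetLikeEntry = FakeEntry()
setup(entry, coordinator=object())
assert entry.runtime_data.name == "home"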
import AemetConfigEntry from .const import CONDITIONS_MAP -from .coordinator import WeatherUpdateCoordinator +from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator from .entity import AemetEntity diff --git a/homeassistant/components/airgradient/quality_scale.yaml b/homeassistant/components/airgradient/quality_scale.yaml new file mode 100644 index 00000000000..8d62e8515fc --- /dev/null +++ b/homeassistant/components/airgradient/quality_scale.yaml @@ -0,0 +1,80 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: todo + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: todo + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration has a fixed single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: + status: exempt + comment: | + This integration has a fixed single device. 
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/airly/manifest.json b/homeassistant/components/airly/manifest.json index 233625ab04a..ccd37589e8c 100644 --- a/homeassistant/components/airly/manifest.json +++ b/homeassistant/components/airly/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["airly"], - "quality_scale": "platinum", "requirements": ["airly==1.1.0"] } diff --git a/homeassistant/components/airnow/config_flow.py b/homeassistant/components/airnow/config_flow.py index e839acdcb7b..d0ab16e9758 100644 --- a/homeassistant/components/airnow/config_flow.py +++ b/homeassistant/components/airnow/config_flow.py @@ -1,5 +1,7 @@ """Config flow for AirNow integration.""" +from __future__ import annotations + import logging from typing import Any @@ -12,7 +14,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS from homeassistant.core import HomeAssistant, callback @@ -120,12 +121,12 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> AirNowOptionsFlowHandler: """Return the options flow.""" - return AirNowOptionsFlowHandler(config_entry) + return AirNowOptionsFlowHandler() -class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AirNowOptionsFlowHandler(OptionsFlow): """Handle an options flow for AirNow.""" async def async_step_init( @@ -136,12 +137,7 @@ class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry): return self.async_create_entry(data=user_input) options_schema = vol.Schema( - { - vol.Optional(CONF_RADIUS): vol.All( - int, - vol.Range(min=5), - ), - } + {vol.Optional(CONF_RADIUS): vol.All(int, vol.Range(min=5))} ) return self.async_show_form( diff --git a/homeassistant/components/airtouch5/manifest.json b/homeassistant/components/airtouch5/manifest.json index 312a627d0e8..58ef8668ebe 100644 --- a/homeassistant/components/airtouch5/manifest.json +++ b/homeassistant/components/airtouch5/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airtouch5", "iot_class": "local_push", "loggers": ["airtouch5py"], - "requirements": ["airtouch5py==0.2.10"] + "requirements": ["airtouch5py==0.2.11"] } diff --git a/homeassistant/components/alarm_control_panel/__init__.py b/homeassistant/components/alarm_control_panel/__init__.py index a9e433a3650..4bcd2adb60f 100644 --- a/homeassistant/components/alarm_control_panel/__init__.py +++ b/homeassistant/components/alarm_control_panel/__init__.py @@ -35,6 +35,7 @@ from homeassistant.helpers.deprecation import ( from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.typing import ConfigType from homeassistant.util.hass_dict import HassKey @@ -163,7 +164,6 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A _alarm_control_panel_option_default_code: str | None = None __alarm_legacy_state: bool = False - __alarm_legacy_state_reported: bool = False def __init_subclass__(cls, **kwargs: Any) -> None: """Post initialisation processing.""" @@ -173,17 +173,15 @@ class 
AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A # setting the state directly. cls.__alarm_legacy_state = True - def __setattr__(self, __name: str, __value: Any) -> None: + def __setattr__(self, name: str, value: Any, /) -> None: """Set attribute. Deprecation warning if setting '_attr_state' directly unless already reported. """ - if __name == "_attr_state": - if self.__alarm_legacy_state_reported is not True: - self._report_deprecated_alarm_state_handling() - self.__alarm_legacy_state_reported = True - return super().__setattr__(__name, __value) + if name == "_attr_state": + self._report_deprecated_alarm_state_handling() + return super().__setattr__(name, value) @callback def add_to_platform_start( @@ -194,7 +192,7 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A ) -> None: """Start adding an entity to a platform.""" super().add_to_platform_start(hass, platform, parallel_updates) - if self.__alarm_legacy_state and not self.__alarm_legacy_state_reported: + if self.__alarm_legacy_state: self._report_deprecated_alarm_state_handling() @callback @@ -203,19 +201,16 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A Integrations should implement alarm_state instead of using state directly. """ - self.__alarm_legacy_state_reported = True - if "custom_components" in type(self).__module__: - # Do not report on core integrations as they have been fixed. - report_issue = "report it to the custom integration author." - _LOGGER.warning( - "Entity %s (%s) is setting state directly" - " which will stop working in HA Core 2025.11." - " Entities should implement the 'alarm_state' property and" - " return its state using the AlarmControlPanelState enum, please %s", - self.entity_id, - type(self), - report_issue, - ) + report_usage( + "is setting state directly." + f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'" + " property and return its state using the AlarmControlPanelState enum", + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.11", + integration_domain=self.platform.platform_name if self.platform else None, + exclude_integrations={DOMAIN}, + ) @final @property @@ -275,7 +270,6 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A """Check if arm code is required, raise if no code is given.""" if not (_code := self.code_or_default_code(code)) and self.code_arm_required: raise ServiceValidationError( - f"Arming requires a code but none was given for {self.entity_id}", translation_domain=DOMAIN, translation_key="code_arm_required", translation_placeholders={ diff --git a/homeassistant/components/alarm_control_panel/strings.json b/homeassistant/components/alarm_control_panel/strings.json index 6dac4d069a1..5f718280566 100644 --- a/homeassistant/components/alarm_control_panel/strings.json +++ b/homeassistant/components/alarm_control_panel/strings.json @@ -130,7 +130,7 @@ }, "alarm_trigger": { "name": "Trigger", - "description": "Enables an external alarm trigger.", + "description": "Trigger the alarm manually.", "fields": { "code": { "name": "[%key:component::alarm_control_panel::services::alarm_disarm::fields::code::name%]", @@ -138,5 +138,10 @@ } } } + }, + "exceptions": { + "code_arm_required": { + "message": "Arming requires a code but none was given for {entity_id}." 
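Editorial note: the alarm_control_panel change above drops the hard-coded English message from the ServiceValidationError and keeps only the translation metadata, paired with the new exceptions block in strings.json. A hedged sketch of that shape; it is not runnable outside Home Assistant and the helper function is illustrative.

from homeassistant.exceptions import ServiceValidationError

DOMAIN = "alarm_control_panel"


def check_arm_code(code: str | None, entity_id: str) -> None:
    """Raise a translatable error when arming without a required code."""
    if not code:
        # The user-facing text comes from strings.json:
        # exceptions.code_arm_required.message, with {entity_id} filled in.
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="code_arm_required",
            translation_placeholders={"entity_id": entity_id},
        )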
+ } } } diff --git a/homeassistant/components/alexa/capabilities.py b/homeassistant/components/alexa/capabilities.py index 09b461428ac..b2cda8ad76e 100644 --- a/homeassistant/components/alexa/capabilities.py +++ b/homeassistant/components/alexa/capabilities.py @@ -816,13 +816,19 @@ class AlexaPlaybackController(AlexaCapability): """ supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0) - operations = { - media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next", - media_player.MediaPlayerEntityFeature.PAUSE: "Pause", - media_player.MediaPlayerEntityFeature.PLAY: "Play", - media_player.MediaPlayerEntityFeature.PREVIOUS_TRACK: "Previous", - media_player.MediaPlayerEntityFeature.STOP: "Stop", - } + operations: dict[ + cover.CoverEntityFeature | media_player.MediaPlayerEntityFeature, str + ] + if self.entity.domain == cover.DOMAIN: + operations = {cover.CoverEntityFeature.STOP: "Stop"} + else: + operations = { + media_player.MediaPlayerEntityFeature.NEXT_TRACK: "Next", + media_player.MediaPlayerEntityFeature.PAUSE: "Pause", + media_player.MediaPlayerEntityFeature.PLAY: "Play", + media_player.MediaPlayerEntityFeature.PREVIOUS_TRACK: "Previous", + media_player.MediaPlayerEntityFeature.STOP: "Stop", + } return [ value diff --git a/homeassistant/components/alexa/entities.py b/homeassistant/components/alexa/entities.py index ca7b389a0f1..8c139d66369 100644 --- a/homeassistant/components/alexa/entities.py +++ b/homeassistant/components/alexa/entities.py @@ -559,6 +559,10 @@ class CoverCapabilities(AlexaEntity): ) if supported & cover.CoverEntityFeature.SET_TILT_POSITION: yield AlexaRangeController(self.entity, instance=f"{cover.DOMAIN}.tilt") + if supported & ( + cover.CoverEntityFeature.STOP | cover.CoverEntityFeature.STOP_TILT + ): + yield AlexaPlaybackController(self.entity, instance=f"{cover.DOMAIN}.stop") yield AlexaEndpointHealth(self.hass, self.entity) yield Alexa(self.entity) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 8ea61ddbceb..89e47673f07 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from collections.abc import Callable, Coroutine import logging import math @@ -764,9 +765,25 @@ async def async_api_stop( entity = directive.entity data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id} - await hass.services.async_call( - entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context - ) + if entity.domain == cover.DOMAIN: + supported: int = entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0) + feature_services: dict[int, str] = { + cover.CoverEntityFeature.STOP.value: cover.SERVICE_STOP_COVER, + cover.CoverEntityFeature.STOP_TILT.value: cover.SERVICE_STOP_COVER_TILT, + } + await asyncio.gather( + *( + hass.services.async_call( + entity.domain, service, data, blocking=False, context=context + ) + for feature, service in feature_services.items() + if feature & supported + ) + ) + else: + await hass.services.async_call( + entity.domain, SERVICE_MEDIA_STOP, data, blocking=False, context=context + ) return directive.response() diff --git a/homeassistant/components/alpha_vantage/manifest.json b/homeassistant/components/alpha_vantage/manifest.json index c94da6bf487..cdfa847d115 100644 --- a/homeassistant/components/alpha_vantage/manifest.json +++ b/homeassistant/components/alpha_vantage/manifest.json @@ -5,5 +5,6 @@ "documentation": 
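Editorial note: the Alexa handler change above maps cover feature bits to their stop services and issues the calls concurrently. A stand-alone sketch of that dispatch, with stub coroutines in place of hass.services.async_call; the two flag values mirror CoverEntityFeature.STOP and STOP_TILT.

import asyncio
from enum import IntFlag


class CoverFeature(IntFlag):
    STOP = 8
    STOP_TILT = 64


async def stop_cover() -> str:
    return "stop_cover called"


async def stop_cover_tilt() -> str:
    return "stop_cover_tilt called"


FEATURE_SERVICES = {
    CoverFeature.STOP: stop_cover,
    CoverFeature.STOP_TILT: stop_cover_tilt,
}


async def handle_stop(supported: int) -> list[str]:
    # Only call the services whose feature bit is set, all at once.
    return await asyncio.gather(
        *(
            service()
            for feature, service in FEATURE_SERVICES.items()
            if feature & supported
        )
    )


print(asyncio.run(handle_stop(CoverFeature.STOP | CoverFeature.STOP_TILT)))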
"https://www.home-assistant.io/integrations/alpha_vantage", "iot_class": "cloud_polling", "loggers": ["alpha_vantage"], + "quality_scale": "legacy", "requirements": ["alpha-vantage==2.3.1"] } diff --git a/homeassistant/components/amazon_polly/manifest.json b/homeassistant/components/amazon_polly/manifest.json index b057967d1e2..e7fbf8edc74 100644 --- a/homeassistant/components/amazon_polly/manifest.json +++ b/homeassistant/components/amazon_polly/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/amazon_polly", "iot_class": "cloud_push", "loggers": ["boto3", "botocore", "s3transfer"], + "quality_scale": "legacy", "requirements": ["boto3==1.34.131"] } diff --git a/homeassistant/components/amberelectric/__init__.py b/homeassistant/components/amberelectric/__init__.py index cd44886c9ef..29d8f166f2a 100644 --- a/homeassistant/components/amberelectric/__init__.py +++ b/homeassistant/components/amberelectric/__init__.py @@ -1,7 +1,6 @@ """Support for Amber Electric.""" -from amberelectric import Configuration -from amberelectric.api import amber_api +import amberelectric from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN @@ -15,8 +14,9 @@ type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool: """Set up Amber Electric from a config entry.""" - configuration = Configuration(access_token=entry.data[CONF_API_TOKEN]) - api_instance = amber_api.AmberApi.create(configuration) + configuration = amberelectric.Configuration(access_token=entry.data[CONF_API_TOKEN]) + api_client = amberelectric.ApiClient(configuration) + api_instance = amberelectric.AmberApi(api_client) site_id = entry.data[CONF_SITE_ID] coordinator = AmberUpdateCoordinator(hass, api_instance, site_id) diff --git a/homeassistant/components/amberelectric/config_flow.py b/homeassistant/components/amberelectric/config_flow.py index a94700c27d1..c25258e2e33 100644 --- a/homeassistant/components/amberelectric/config_flow.py +++ b/homeassistant/components/amberelectric/config_flow.py @@ -3,8 +3,8 @@ from __future__ import annotations import amberelectric -from amberelectric.api import amber_api -from amberelectric.model.site import Site, SiteStatus +from amberelectric.models.site import Site +from amberelectric.models.site_status import SiteStatus import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -23,11 +23,15 @@ API_URL = "https://app.amber.com.au/developers" def generate_site_selector_name(site: Site) -> str: """Generate the name to show in the site drop down in the configuration flow.""" + # For some reason the generated API key returns this as any, not a string. 
Thanks pydantic + nmi = str(site.nmi) if site.status == SiteStatus.CLOSED: - return site.nmi + " (Closed: " + site.closed_on.isoformat() + ")" # type: ignore[no-any-return] + if site.closed_on is None: + return f"{nmi} (Closed)" + return f"{nmi} (Closed: {site.closed_on.isoformat()})" if site.status == SiteStatus.PENDING: - return site.nmi + " (Pending)" # type: ignore[no-any-return] - return site.nmi # type: ignore[no-any-return] + return f"{nmi} (Pending)" + return nmi def filter_sites(sites: list[Site]) -> list[Site]: @@ -35,7 +39,7 @@ def filter_sites(sites: list[Site]) -> list[Site]: filtered: list[Site] = [] filtered_nmi: set[str] = set() - for site in sorted(sites, key=lambda site: site.status.value): + for site in sorted(sites, key=lambda site: site.status): if site.status == SiteStatus.ACTIVE or site.nmi not in filtered_nmi: filtered.append(site) filtered_nmi.add(site.nmi) @@ -56,7 +60,8 @@ class AmberElectricConfigFlow(ConfigFlow, domain=DOMAIN): def _fetch_sites(self, token: str) -> list[Site] | None: configuration = amberelectric.Configuration(access_token=token) - api: amber_api.AmberApi = amber_api.AmberApi.create(configuration) + api_client = amberelectric.ApiClient(configuration) + api = amberelectric.AmberApi(api_client) try: sites: list[Site] = filter_sites(api.get_sites()) diff --git a/homeassistant/components/amberelectric/coordinator.py b/homeassistant/components/amberelectric/coordinator.py index a95aa3fa529..57028e07d21 100644 --- a/homeassistant/components/amberelectric/coordinator.py +++ b/homeassistant/components/amberelectric/coordinator.py @@ -5,13 +5,13 @@ from __future__ import annotations from datetime import timedelta from typing import Any -from amberelectric import ApiException -from amberelectric.api import amber_api -from amberelectric.model.actual_interval import ActualInterval -from amberelectric.model.channel import ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.forecast_interval import ForecastInterval -from amberelectric.model.interval import Descriptor +import amberelectric +from amberelectric.models.actual_interval import ActualInterval +from amberelectric.models.channel import ChannelType +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.forecast_interval import ForecastInterval +from amberelectric.models.price_descriptor import PriceDescriptor +from amberelectric.rest import ApiException from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -31,22 +31,22 @@ def is_forecast(interval: ActualInterval | CurrentInterval | ForecastInterval) - def is_general(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool: """Return true if the supplied interval is on the general channel.""" - return interval.channel_type == ChannelType.GENERAL # type: ignore[no-any-return] + return interval.channel_type == ChannelType.GENERAL def is_controlled_load( interval: ActualInterval | CurrentInterval | ForecastInterval, ) -> bool: """Return true if the supplied interval is on the controlled load channel.""" - return interval.channel_type == ChannelType.CONTROLLED_LOAD # type: ignore[no-any-return] + return interval.channel_type == ChannelType.CONTROLLEDLOAD def is_feed_in(interval: ActualInterval | CurrentInterval | ForecastInterval) -> bool: """Return true if the supplied interval is on the feed in channel.""" - return interval.channel_type == ChannelType.FEED_IN # type: 
ignore[no-any-return] + return interval.channel_type == ChannelType.FEEDIN -def normalize_descriptor(descriptor: Descriptor) -> str | None: +def normalize_descriptor(descriptor: PriceDescriptor | None) -> str | None: """Return the snake case versions of descriptor names. Returns None if the name is not recognized.""" if descriptor is None: return None @@ -71,7 +71,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator): """AmberUpdateCoordinator - In charge of downloading the data for a site, which all the sensors read.""" def __init__( - self, hass: HomeAssistant, api: amber_api.AmberApi, site_id: str + self, hass: HomeAssistant, api: amberelectric.AmberApi, site_id: str ) -> None: """Initialise the data service.""" super().__init__( @@ -93,12 +93,13 @@ class AmberUpdateCoordinator(DataUpdateCoordinator): "grid": {}, } try: - data = self._api.get_current_price(self.site_id, next=48) + data = self._api.get_current_prices(self.site_id, next=48) + intervals = [interval.actual_instance for interval in data] except ApiException as api_exception: raise UpdateFailed("Missing price data, skipping update") from api_exception - current = [interval for interval in data if is_current(interval)] - forecasts = [interval for interval in data if is_forecast(interval)] + current = [interval for interval in intervals if is_current(interval)] + forecasts = [interval for interval in intervals if is_forecast(interval)] general = [interval for interval in current if is_general(interval)] if len(general) == 0: @@ -137,7 +138,7 @@ class AmberUpdateCoordinator(DataUpdateCoordinator): interval for interval in forecasts if is_feed_in(interval) ] - LOGGER.debug("Fetched new Amber data: %s", data) + LOGGER.debug("Fetched new Amber data: %s", intervals) return result async def _async_update_data(self) -> dict[str, Any]: diff --git a/homeassistant/components/amberelectric/manifest.json b/homeassistant/components/amberelectric/manifest.json index 51be42cfa68..401eb1629a1 100644 --- a/homeassistant/components/amberelectric/manifest.json +++ b/homeassistant/components/amberelectric/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/amberelectric", "iot_class": "cloud_polling", "loggers": ["amberelectric"], - "requirements": ["amberelectric==1.1.1"] + "requirements": ["amberelectric==2.0.12"] } diff --git a/homeassistant/components/amberelectric/sensor.py b/homeassistant/components/amberelectric/sensor.py index 52c0c42e7bc..cdf40e5804d 100644 --- a/homeassistant/components/amberelectric/sensor.py +++ b/homeassistant/components/amberelectric/sensor.py @@ -8,9 +8,9 @@ from __future__ import annotations from typing import Any -from amberelectric.model.channel import ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.forecast_interval import ForecastInterval +from amberelectric.models.channel import ChannelType +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.forecast_interval import ForecastInterval from homeassistant.components.sensor import ( SensorEntity, @@ -52,7 +52,7 @@ class AmberSensor(CoordinatorEntity[AmberUpdateCoordinator], SensorEntity): self, coordinator: AmberUpdateCoordinator, description: SensorEntityDescription, - channel_type: ChannelType, + channel_type: str, ) -> None: """Initialize the Sensor.""" super().__init__(coordinator) @@ -73,7 +73,7 @@ class AmberPriceSensor(AmberSensor): """Return the current price in $/kWh.""" interval = 
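Editorial note: for the amberelectric 2.x migration above, the client is now built from Configuration, ApiClient and AmberApi, and get_current_prices returns wrapper objects whose usable interval sits on .actual_instance. A short sketch of those calls; it needs the amberelectric package plus a real token and site ID, so treat it as illustrative only.

import amberelectric

configuration = amberelectric.Configuration(access_token="YOUR_TOKEN")  # placeholder
api = amberelectric.AmberApi(amberelectric.ApiClient(configuration))

# The generated 2.x client wraps each interval; unwrap before filtering,
# mirroring the coordinator change above.
intervals = [
    wrapped.actual_instance
    for wrapped in api.get_current_prices("SITE_ID", next=48)  # placeholder site
]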
self.coordinator.data[self.entity_description.key][self.channel_type] - if interval.channel_type == ChannelType.FEED_IN: + if interval.channel_type == ChannelType.FEEDIN: return format_cents_to_dollars(interval.per_kwh) * -1 return format_cents_to_dollars(interval.per_kwh) @@ -87,9 +87,9 @@ class AmberPriceSensor(AmberSensor): return data data["duration"] = interval.duration - data["date"] = interval.date.isoformat() + data["date"] = interval.var_date.isoformat() data["per_kwh"] = format_cents_to_dollars(interval.per_kwh) - if interval.channel_type == ChannelType.FEED_IN: + if interval.channel_type == ChannelType.FEEDIN: data["per_kwh"] = data["per_kwh"] * -1 data["nem_date"] = interval.nem_time.isoformat() data["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh) @@ -120,7 +120,7 @@ class AmberForecastSensor(AmberSensor): return None interval = intervals[0] - if interval.channel_type == ChannelType.FEED_IN: + if interval.channel_type == ChannelType.FEEDIN: return format_cents_to_dollars(interval.per_kwh) * -1 return format_cents_to_dollars(interval.per_kwh) @@ -142,10 +142,10 @@ class AmberForecastSensor(AmberSensor): for interval in intervals: datum = {} datum["duration"] = interval.duration - datum["date"] = interval.date.isoformat() + datum["date"] = interval.var_date.isoformat() datum["nem_date"] = interval.nem_time.isoformat() datum["per_kwh"] = format_cents_to_dollars(interval.per_kwh) - if interval.channel_type == ChannelType.FEED_IN: + if interval.channel_type == ChannelType.FEEDIN: datum["per_kwh"] = datum["per_kwh"] * -1 datum["spot_per_kwh"] = format_cents_to_dollars(interval.spot_per_kwh) datum["start_time"] = interval.start_time.isoformat() diff --git a/homeassistant/components/amcrest/manifest.json b/homeassistant/components/amcrest/manifest.json index 8b8d87092c4..7d8f8f9e6c8 100644 --- a/homeassistant/components/amcrest/manifest.json +++ b/homeassistant/components/amcrest/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/amcrest", "iot_class": "local_polling", "loggers": ["amcrest"], + "quality_scale": "legacy", "requirements": ["amcrest==1.9.8"] } diff --git a/homeassistant/components/ampio/manifest.json b/homeassistant/components/ampio/manifest.json index bc9c09d817a..17fc3eb3d96 100644 --- a/homeassistant/components/ampio/manifest.json +++ b/homeassistant/components/ampio/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ampio", "iot_class": "cloud_polling", "loggers": ["asmog"], + "quality_scale": "legacy", "requirements": ["asmog==0.0.6"] } diff --git a/homeassistant/components/analytics_insights/config_flow.py b/homeassistant/components/analytics_insights/config_flow.py index baf0190967d..c36755f5403 100644 --- a/homeassistant/components/analytics_insights/config_flow.py +++ b/homeassistant/components/analytics_insights/config_flow.py @@ -16,7 +16,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -46,9 +45,11 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> HomeassistantAnalyticsOptionsFlowHandler: """Get the options flow for this handler.""" - return HomeassistantAnalyticsOptionsFlowHandler(config_entry) + 
return HomeassistantAnalyticsOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -132,7 +133,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN): ) -class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry): +class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow): """Handle Homeassistant Analytics options.""" async def async_step_init( @@ -211,6 +212,6 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry): ), }, ), - self.options, + self.config_entry.options, ), ) diff --git a/homeassistant/components/androidtv/config_flow.py b/homeassistant/components/androidtv/config_flow.py index af6f1d14dcd..afaba5175da 100644 --- a/homeassistant/components/androidtv/config_flow.py +++ b/homeassistant/components/androidtv/config_flow.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_DEVICE_CLASS, CONF_HOST, CONF_PORT from homeassistant.core import callback @@ -186,16 +186,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN): return OptionsFlowHandler(config_entry) -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class OptionsFlowHandler(OptionsFlow): """Handle an option flow for Android Debug Bridge.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - - self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {}) - self._state_det_rules: dict[str, Any] = self.options.setdefault( - CONF_STATE_DETECTION_RULES, {} + self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {})) + self._state_det_rules: dict[str, Any] = dict( + config_entry.options.get(CONF_STATE_DETECTION_RULES, {}) ) self._conf_app_id: str | None = None self._conf_rule_id: str | None = None @@ -237,7 +235,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry): SelectOptionDict(value=k, label=v) for k, v in apps_list.items() ] rules = [RULES_NEW_ID, *self._state_det_rules] - options = self.options + options = self.config_entry.options data_schema = vol.Schema( { diff --git a/homeassistant/components/androidtv/manifest.json b/homeassistant/components/androidtv/manifest.json index 2d0b062c750..fe8e36f0c2f 100644 --- a/homeassistant/components/androidtv/manifest.json +++ b/homeassistant/components/androidtv/manifest.json @@ -9,7 +9,7 @@ "loggers": ["adb_shell", "androidtv", "pure_python_adb"], "requirements": [ "adb-shell[async]==0.4.4", - "androidtv[async]==0.0.73", + "androidtv[async]==0.0.75", "pure-python-adb[async]==0.3.0.dev0" ] } diff --git a/homeassistant/components/androidtv_remote/config_flow.py b/homeassistant/components/androidtv_remote/config_flow.py index 3512dd5ea65..3500e4ff47b 100644 --- a/homeassistant/components/androidtv_remote/config_flow.py +++ b/homeassistant/components/androidtv_remote/config_flow.py @@ -20,7 +20,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME from homeassistant.core import callback @@ -221,13 +221,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN): return AndroidTVRemoteOptionsFlowHandler(config_entry) -class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AndroidTVRemoteOptionsFlowHandler(OptionsFlow): """Android TV Remote options flow.""" def __init__(self, 
config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {}) + self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {})) self._conf_app_id: str | None = None @callback diff --git a/homeassistant/components/androidtv_remote/manifest.json b/homeassistant/components/androidtv_remote/manifest.json index a06152fa570..d9c2dd05c44 100644 --- a/homeassistant/components/androidtv_remote/manifest.json +++ b/homeassistant/components/androidtv_remote/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["androidtvremote2"], - "quality_scale": "platinum", "requirements": ["androidtvremote2==0.1.2"], "zeroconf": ["_androidtvremote2._tcp.local."] } diff --git a/homeassistant/components/anel_pwrctrl/manifest.json b/homeassistant/components/anel_pwrctrl/manifest.json index 48cc3b96ec0..67c881a3db2 100644 --- a/homeassistant/components/anel_pwrctrl/manifest.json +++ b/homeassistant/components/anel_pwrctrl/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/anel_pwrctrl", "iot_class": "local_polling", "loggers": ["anel_pwrctrl"], + "quality_scale": "legacy", "requirements": ["anel-pwrctrl-homeassistant==0.0.1.dev2"] } diff --git a/homeassistant/components/anthropic/config_flow.py b/homeassistant/components/anthropic/config_flow.py index 5ea167090c6..fa43a3c4bcc 100644 --- a/homeassistant/components/anthropic/config_flow.py +++ b/homeassistant/components/anthropic/config_flow.py @@ -121,7 +121,6 @@ class AnthropicOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) diff --git a/homeassistant/components/aosmith/manifest.json b/homeassistant/components/aosmith/manifest.json index 4cd1eb32cd1..eae7981d5b9 100644 --- a/homeassistant/components/aosmith/manifest.json +++ b/homeassistant/components/aosmith/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/aosmith", "iot_class": "cloud_polling", - "requirements": ["py-aosmith==1.0.10"] + "requirements": ["py-aosmith==1.0.11"] } diff --git a/homeassistant/components/apache_kafka/manifest.json b/homeassistant/components/apache_kafka/manifest.json index f6593631bc0..05baaac32a2 100644 --- a/homeassistant/components/apache_kafka/manifest.json +++ b/homeassistant/components/apache_kafka/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/apache_kafka", "iot_class": "local_push", "loggers": ["aiokafka", "kafka_python"], + "quality_scale": "legacy", "requirements": ["aiokafka==0.10.0"] } diff --git a/homeassistant/components/apcupsd/manifest.json b/homeassistant/components/apcupsd/manifest.json index b20e0c8aacf..3713b74fff7 100644 --- a/homeassistant/components/apcupsd/manifest.json +++ b/homeassistant/components/apcupsd/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/apcupsd", "iot_class": "local_polling", "loggers": ["apcaccess"], - "quality_scale": "silver", "requirements": ["aioapcaccess==0.4.2"] } diff --git a/homeassistant/components/apple_tv/manifest.json b/homeassistant/components/apple_tv/manifest.json index b4e1b354878..b10a14af32b 100644 --- a/homeassistant/components/apple_tv/manifest.json +++ 
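Editorial note: the Android TV, Android TV Remote, Analytics Insights and Anthropic changes above all follow the same migration. The options flow subclasses plain OptionsFlow, async_get_options_flow no longer passes the entry to the constructor, and stored options are read from self.config_entry.options, which the base class provides. A condensed, hedged sketch of that shape; the domain, option key and schema are illustrative.

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
)
from homeassistant.core import callback


class ExampleOptionsFlow(OptionsFlow):
    """Options flow that no longer stores the entry itself."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            return self.async_create_entry(data=user_input)
        # Read existing options from the entry the base class exposes.
        current = self.config_entry.options.get("radius", 25)
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema({vol.Optional("radius", default=current): int}),
        )


class ExampleConfigFlow(ConfigFlow, domain="example"):
    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> ExampleOptionsFlow:
        return ExampleOptionsFlow()  # the entry is no longer passed in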
b/homeassistant/components/apple_tv/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/apple_tv", "iot_class": "local_push", "loggers": ["pyatv", "srptools"], - "requirements": ["pyatv==0.15.1"], + "requirements": ["pyatv==0.16.0"], "zeroconf": [ "_mediaremotetv._tcp.local.", "_companion-link._tcp.local.", diff --git a/homeassistant/components/apprise/manifest.json b/homeassistant/components/apprise/manifest.json index 838611e4798..4f3c4d7ef4e 100644 --- a/homeassistant/components/apprise/manifest.json +++ b/homeassistant/components/apprise/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/apprise", "iot_class": "cloud_push", "loggers": ["apprise"], + "quality_scale": "legacy", "requirements": ["apprise==1.9.0"] } diff --git a/homeassistant/components/aprs/manifest.json b/homeassistant/components/aprs/manifest.json index 63826f5a385..7518405f1ec 100644 --- a/homeassistant/components/aprs/manifest.json +++ b/homeassistant/components/aprs/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aprs", "iot_class": "cloud_push", "loggers": ["aprslib", "geographiclib", "geopy"], + "quality_scale": "legacy", "requirements": ["aprslib==0.7.2", "geopy==2.3.0"] } diff --git a/homeassistant/components/apsystems/coordinator.py b/homeassistant/components/apsystems/coordinator.py index b6e951343f7..e56cb826840 100644 --- a/homeassistant/components/apsystems/coordinator.py +++ b/homeassistant/components/apsystems/coordinator.py @@ -5,12 +5,17 @@ from __future__ import annotations from dataclasses import dataclass from datetime import timedelta -from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData +from APsystemsEZ1 import ( + APsystemsEZ1M, + InverterReturnedError, + ReturnAlarmInfo, + ReturnOutputData, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import LOGGER +from .const import DOMAIN, LOGGER @dataclass @@ -43,6 +48,11 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): self.api.min_power = device_info.minPower async def _async_update_data(self) -> ApSystemsSensorData: - output_data = await self.api.get_output_data() - alarm_info = await self.api.get_alarm_info() + try: + output_data = await self.api.get_output_data() + alarm_info = await self.api.get_alarm_info() + except InverterReturnedError: + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="inverter_error" + ) from None return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info) diff --git a/homeassistant/components/apsystems/strings.json b/homeassistant/components/apsystems/strings.json index e02f86c2730..b3a10ca49a7 100644 --- a/homeassistant/components/apsystems/strings.json +++ b/homeassistant/components/apsystems/strings.json @@ -72,5 +72,10 @@ "name": "Inverter status" } } + }, + "exceptions": { + "inverter_error": { + "message": "Inverter returned an error" + } } } diff --git a/homeassistant/components/aqualogic/manifest.json b/homeassistant/components/aqualogic/manifest.json index 783e4c8c204..cc807e4bb19 100644 --- a/homeassistant/components/aqualogic/manifest.json +++ b/homeassistant/components/aqualogic/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aqualogic", "iot_class": "local_push", "loggers": ["aqualogic"], + "quality_scale": "legacy", "requirements": ["aqualogic==2.6"] } diff --git 
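Editorial note: the APsystems coordinator above converts a library error into UpdateFailed carrying a translation key, so the message shown in the UI comes from the new exceptions entry in strings.json rather than a hard-coded string. The general shape, with a stub in place of the real API call; not runnable outside Home Assistant.

from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

DOMAIN = "apsystems"


class ExampleCoordinator(DataUpdateCoordinator):
    async def _async_update_data(self):
        try:
            return await self._fetch()  # stand-in for api.get_output_data()
        except RuntimeError:  # stand-in for InverterReturnedError
            raise UpdateFailed(
                translation_domain=DOMAIN, translation_key="inverter_error"
            ) from None  # "from None" hides the library traceback in the log

    async def _fetch(self):
        raise RuntimeError("inverter busy")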
a/homeassistant/components/aquostv/manifest.json b/homeassistant/components/aquostv/manifest.json index 1bac2bdfb5f..6fc1092d33c 100644 --- a/homeassistant/components/aquostv/manifest.json +++ b/homeassistant/components/aquostv/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aquostv", "iot_class": "local_polling", "loggers": ["sharp_aquos_rc"], + "quality_scale": "legacy", "requirements": ["sharp_aquos_rc==0.3.2"] } diff --git a/homeassistant/components/arest/manifest.json b/homeassistant/components/arest/manifest.json index 53732d15064..be43b3aafc9 100644 --- a/homeassistant/components/arest/manifest.json +++ b/homeassistant/components/arest/manifest.json @@ -3,5 +3,6 @@ "name": "aREST", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/arest", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/arris_tg2492lg/manifest.json b/homeassistant/components/arris_tg2492lg/manifest.json index c36423d287a..98778de5f2a 100644 --- a/homeassistant/components/arris_tg2492lg/manifest.json +++ b/homeassistant/components/arris_tg2492lg/manifest.json @@ -6,5 +6,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["arris_tg2492lg"], + "quality_scale": "legacy", "requirements": ["arris-tg2492lg==2.2.0"] } diff --git a/homeassistant/components/aruba/manifest.json b/homeassistant/components/aruba/manifest.json index 0d1fabf51b8..c98dda754cd 100644 --- a/homeassistant/components/aruba/manifest.json +++ b/homeassistant/components/aruba/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aruba", "iot_class": "local_polling", "loggers": ["pexpect", "ptyprocess"], + "quality_scale": "legacy", "requirements": ["pexpect==4.6.0"] } diff --git a/homeassistant/components/arwn/manifest.json b/homeassistant/components/arwn/manifest.json index 15eb656e974..8cabb045b64 100644 --- a/homeassistant/components/arwn/manifest.json +++ b/homeassistant/components/arwn/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/arwn", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index ff2b122187a..1fabc7790e7 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -22,8 +22,8 @@ class EnhancedAudioChunk: timestamp_ms: int """Timestamp relative to start of audio stream (milliseconds)""" - is_speech: bool | None - """True if audio chunk likely contains speech, False if not, None if unknown""" + speech_probability: float | None + """Probability that audio chunk contains speech (0-1), None if unknown""" class AudioEnhancer(ABC): @@ -70,27 +70,27 @@ class MicroVadSpeexEnhancer(AudioEnhancer): ) self.vad: MicroVad | None = None - self.threshold = 0.5 if self.is_vad_enabled: self.vad = MicroVad() - _LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold) + _LOGGER.debug("Initialized microVAD") def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" - is_speech: bool | None = None + speech_probability: float | None = None assert len(audio) == BYTES_PER_CHUNK if self.vad is not None: # Run 
VAD - speech_prob = self.vad.Process10ms(audio) - is_speech = speech_prob > self.threshold + speech_probability = self.vad.Process10ms(audio) if self.audio_processor is not None: # Run noise suppression and auto gain audio = self.audio_processor.Process10ms(audio).audio return EnhancedAudioChunk( - audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech + audio=audio, + timestamp_ms=timestamp_ms, + speech_probability=speech_probability, ) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index a4255e37756..9e9e84fb5d6 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -31,6 +31,7 @@ from homeassistant.components.tts import ( ) from homeassistant.core import Context, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import intent from homeassistant.helpers.collection import ( CHANGE_UPDATED, CollectionError, @@ -109,6 +110,7 @@ PIPELINE_FIELDS: VolDictType = { vol.Required("tts_voice"): vol.Any(str, None), vol.Required("wake_word_entity"): vol.Any(str, None), vol.Required("wake_word_id"): vol.Any(str, None), + vol.Optional("prefer_local_intents"): bool, } STORED_PIPELINE_RUNS = 10 @@ -322,6 +324,7 @@ async def async_update_pipeline( tts_voice: str | None | UndefinedType = UNDEFINED, wake_word_entity: str | None | UndefinedType = UNDEFINED, wake_word_id: str | None | UndefinedType = UNDEFINED, + prefer_local_intents: bool | UndefinedType = UNDEFINED, ) -> None: """Update a pipeline.""" pipeline_data: PipelineData = hass.data[DOMAIN] @@ -345,6 +348,7 @@ async def async_update_pipeline( ("tts_voice", tts_voice), ("wake_word_entity", wake_word_entity), ("wake_word_id", wake_word_id), + ("prefer_local_intents", prefer_local_intents), ) if val is not UNDEFINED } @@ -398,6 +402,7 @@ class Pipeline: tts_voice: str | None wake_word_entity: str | None wake_word_id: str | None + prefer_local_intents: bool = False id: str = field(default_factory=ulid_util.ulid_now) @@ -421,6 +426,7 @@ class Pipeline: tts_voice=data["tts_voice"], wake_word_entity=data["wake_word_entity"], wake_word_id=data["wake_word_id"], + prefer_local_intents=data.get("prefer_local_intents", False), ) def to_json(self) -> dict[str, Any]: @@ -438,6 +444,7 @@ class Pipeline: "tts_voice": self.tts_voice, "wake_word_entity": self.wake_word_entity, "wake_word_id": self.wake_word_id, + "prefer_local_intents": self.prefer_local_intents, } @@ -780,7 +787,9 @@ class PipelineRun: # speaking the voice command. 
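Editorial note: stored pipelines written before this release have no prefer_local_intents key, which is why Pipeline.from_json above falls back to a default instead of indexing the dict directly. A small stand-alone illustration of that backward-compatible load:

from dataclasses import dataclass
from typing import Any


@dataclass
class StoredPipeline:
    name: str
    prefer_local_intents: bool = False  # new field, defaulted for old data

    @classmethod
    def from_json(cls, data: dict[str, Any]) -> "StoredPipeline":
        return cls(
            name=data["name"],
            prefer_local_intents=data.get("prefer_local_intents", False),
        )


old_record = {"name": "Assist"}  # written by an older version, no new key
new_record = {"name": "Assist", "prefer_local_intents": True}
assert StoredPipeline.from_json(old_record).prefer_local_intents is False
assert StoredPipeline.from_json(new_record).prefer_local_intents is True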
audio_chunks_for_stt.extend( EnhancedAudioChunk( - audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False + audio=chunk_ts[0], + timestamp_ms=chunk_ts[1], + speech_probability=None, ) for chunk_ts in result.queued_audio ) @@ -827,7 +836,7 @@ class PipelineRun: if wake_word_vad is not None: chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate - if not wake_word_vad.process(chunk_seconds, chunk.is_speech): + if not wake_word_vad.process(chunk_seconds, chunk.speech_probability): raise WakeWordTimeoutError( code="wake-word-timeout", message="Wake word was not detected" ) @@ -955,7 +964,7 @@ class PipelineRun: if stt_vad is not None: chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate - if not stt_vad.process(chunk_seconds, chunk.is_speech): + if not stt_vad.process(chunk_seconds, chunk.speech_probability): # Silence detected at the end of voice command self.process_event( PipelineEvent( @@ -1009,20 +1018,64 @@ class PipelineRun: "intent_input": intent_input, "conversation_id": conversation_id, "device_id": device_id, + "prefer_local_intents": self.pipeline.prefer_local_intents, }, ) ) try: - conversation_result = await conversation.async_converse( - hass=self.hass, + user_input = conversation.ConversationInput( text=intent_input, + context=self.context, conversation_id=conversation_id, device_id=device_id, - context=self.context, - language=self.pipeline.conversation_language, + language=self.pipeline.language, agent_id=self.intent_agent, ) + processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT + + conversation_result: conversation.ConversationResult | None = None + if user_input.agent_id != conversation.HOME_ASSISTANT_AGENT: + # Sentence triggers override conversation agent + if ( + trigger_response_text + := await conversation.async_handle_sentence_triggers( + self.hass, user_input + ) + ) is not None: + # Sentence trigger matched + trigger_response = intent.IntentResponse( + self.pipeline.conversation_language + ) + trigger_response.async_set_speech(trigger_response_text) + conversation_result = conversation.ConversationResult( + response=trigger_response, + conversation_id=user_input.conversation_id, + ) + # Try local intents first, if preferred. 
+ elif self.pipeline.prefer_local_intents and ( + intent_response := await conversation.async_handle_intents( + self.hass, user_input + ) + ): + # Local intent matched + conversation_result = conversation.ConversationResult( + response=intent_response, + conversation_id=user_input.conversation_id, + ) + processed_locally = True + + if conversation_result is None: + # Fall back to pipeline conversation agent + conversation_result = await conversation.async_converse( + hass=self.hass, + text=user_input.text, + conversation_id=user_input.conversation_id, + device_id=user_input.device_id, + context=user_input.context, + language=user_input.language, + agent_id=user_input.agent_id, + ) except Exception as src_error: _LOGGER.exception("Unexpected error during intent recognition") raise IntentRecognitionError( @@ -1035,7 +1088,10 @@ class PipelineRun: self.process_event( PipelineEvent( PipelineEventType.INTENT_END, - {"intent_output": conversation_result.as_dict()}, + { + "processed_locally": processed_locally, + "intent_output": conversation_result.as_dict(), + }, ) ) @@ -1221,7 +1277,7 @@ class PipelineRun: yield EnhancedAudioChunk( audio=sub_chunk, timestamp_ms=timestamp_ms, - is_speech=None, # no VAD + speech_probability=None, # no VAD ) timestamp_ms += MS_PER_CHUNK diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index 4782d14dee4..deae5b9b7b3 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -75,7 +75,7 @@ class AudioBuffer: class VoiceCommandSegmenter: """Segments an audio stream into voice commands.""" - speech_seconds: float = 0.3 + speech_seconds: float = 0.1 """Seconds of speech before voice command has started.""" command_seconds: float = 1.0 @@ -96,6 +96,12 @@ class VoiceCommandSegmenter: timed_out: bool = False """True a timeout occurred during voice command.""" + before_command_speech_threshold: float = 0.2 + """Probability threshold for speech before voice command.""" + + in_command_speech_threshold: float = 0.5 + """Probability threshold for speech during voice command.""" + _speech_seconds_left: float = 0.0 """Seconds left before considering voice command as started.""" @@ -124,7 +130,7 @@ class VoiceCommandSegmenter: self._reset_seconds_left = self.reset_seconds self.in_command = False - def process(self, chunk_seconds: float, is_speech: bool | None) -> bool: + def process(self, chunk_seconds: float, speech_probability: float | None) -> bool: """Process samples using external VAD. Returns False when command is done. @@ -142,7 +148,12 @@ class VoiceCommandSegmenter: self.timed_out = True return False + if speech_probability is None: + speech_probability = 0.0 + if not self.in_command: + # Before command + is_speech = speech_probability > self.before_command_speech_threshold if is_speech: self._reset_seconds_left = self.reset_seconds self._speech_seconds_left -= chunk_seconds @@ -160,24 +171,29 @@ class VoiceCommandSegmenter: if self._reset_seconds_left <= 0: self._speech_seconds_left = self.speech_seconds self._reset_seconds_left = self.reset_seconds - elif not is_speech: - # Silence in command - self._reset_seconds_left = self.reset_seconds - self._silence_seconds_left -= chunk_seconds - self._command_seconds_left -= chunk_seconds - if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0): - # Command finished successfully - self.reset() - _LOGGER.debug("Voice command finished") - return False else: - # Speech in command. 
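Editorial note: the intent-recognition rewrite above introduces an ordered fallback. Sentence triggers are checked first, local intent matching runs next when the pipeline prefers it, and only then is the text sent to the configured conversation agent. A simplified stand-alone sketch of that ordering, with stub handlers; the agent-selection and processed_locally bookkeeping of the real code are omitted.

import asyncio


async def handle_sentence_triggers(text: str) -> str | None:
    """Stub for conversation.async_handle_sentence_triggers."""
    return "Light turned on" if text == "turn on the light" else None


async def handle_local_intents(text: str) -> str | None:
    """Stub for conversation.async_handle_intents."""
    return "Done" if text.startswith("turn") else None


async def converse_with_agent(text: str) -> str:
    """Stub for conversation.async_converse (e.g. an LLM agent)."""
    return f"agent answer for: {text!r}"


async def recognize(text: str, prefer_local_intents: bool) -> tuple[str, str]:
    """Return (response, which stage produced it)."""
    if (response := await handle_sentence_triggers(text)) is not None:
        return response, "sentence trigger"
    if prefer_local_intents and (
        response := await handle_local_intents(text)
    ) is not None:
        return response, "local intent"
    return await converse_with_agent(text), "conversation agent"


print(asyncio.run(recognize("turn on the light", prefer_local_intents=True)))
print(asyncio.run(recognize("tell me a joke", prefer_local_intents=True)))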
- # Reset silence counter if enough speech. - self._reset_seconds_left -= chunk_seconds - self._command_seconds_left -= chunk_seconds - if self._reset_seconds_left <= 0: - self._silence_seconds_left = self.silence_seconds + # In command + is_speech = speech_probability > self.in_command_speech_threshold + if not is_speech: + # Silence in command self._reset_seconds_left = self.reset_seconds + self._silence_seconds_left -= chunk_seconds + self._command_seconds_left -= chunk_seconds + if (self._silence_seconds_left <= 0) and ( + self._command_seconds_left <= 0 + ): + # Command finished successfully + self.reset() + _LOGGER.debug("Voice command finished") + return False + else: + # Speech in command. + # Reset silence counter if enough speech. + self._reset_seconds_left -= chunk_seconds + self._command_seconds_left -= chunk_seconds + if self._reset_seconds_left <= 0: + self._silence_seconds_left = self.silence_seconds + self._reset_seconds_left = self.reset_seconds return True @@ -226,6 +242,9 @@ class VoiceActivityTimeout: reset_seconds: float = 0.5 """Seconds of speech before resetting timeout.""" + speech_threshold: float = 0.5 + """Threshold for speech.""" + _silence_seconds_left: float = 0.0 """Seconds left before considering voice command as stopped.""" @@ -241,12 +260,15 @@ class VoiceActivityTimeout: self._silence_seconds_left = self.silence_seconds self._reset_seconds_left = self.reset_seconds - def process(self, chunk_seconds: float, is_speech: bool | None) -> bool: + def process(self, chunk_seconds: float, speech_probability: float | None) -> bool: """Process samples using external VAD. Returns False when timeout is reached. """ - if is_speech: + if speech_probability is None: + speech_probability = 0.0 + + if speech_probability > self.speech_threshold: # Speech self._reset_seconds_left -= chunk_seconds if self._reset_seconds_left <= 0: diff --git a/homeassistant/components/aten_pe/manifest.json b/homeassistant/components/aten_pe/manifest.json index 3b4ade637cb..1e2c74f2636 100644 --- a/homeassistant/components/aten_pe/manifest.json +++ b/homeassistant/components/aten_pe/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@mtdcr"], "documentation": "https://www.home-assistant.io/integrations/aten_pe", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["atenpdu==0.3.2"] } diff --git a/homeassistant/components/atome/manifest.json b/homeassistant/components/atome/manifest.json index cafe24e2e13..f00dd5ea757 100644 --- a/homeassistant/components/atome/manifest.json +++ b/homeassistant/components/atome/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/atome", "iot_class": "cloud_polling", "loggers": ["pyatome"], + "quality_scale": "legacy", "requirements": ["pyAtome==0.1.1"] } diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 4bc7e77d2d8..96ed982e4ec 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.0"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"] } diff --git a/homeassistant/components/aussie_broadband/config_flow.py b/homeassistant/components/aussie_broadband/config_flow.py index 5bc6ed1aa5c..72ff0b3b2b2 100644 --- a/homeassistant/components/aussie_broadband/config_flow.py +++ 
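Editorial note: the VoiceCommandSegmenter changes above replace a single boolean with a probability and two thresholds, a lenient one to decide that a command has started and a stricter one while the command is running. A runnable toy version of that state machine; it is simplified, leaving out the reset and minimum-command timers of the real class.

BEFORE_THRESHOLD = 0.2   # before_command_speech_threshold
IN_THRESHOLD = 0.5       # in_command_speech_threshold
SPEECH_SECONDS = 0.1     # speech needed to start a command
SILENCE_SECONDS = 0.7    # silence needed to end it
CHUNK_SECONDS = 0.01     # one probability value per 10 ms chunk


def segment(probabilities: list[float]) -> float | None:
    """Return the time (s) at which the command ends, or None if it never does."""
    in_command = False
    speech_left = SPEECH_SECONDS
    silence_left = SILENCE_SECONDS
    for i, prob in enumerate(probabilities):
        if not in_command:
            # Lenient threshold: start the command on faint speech.
            if prob > BEFORE_THRESHOLD:
                speech_left -= CHUNK_SECONDS
                if speech_left <= 0:
                    in_command = True
            else:
                speech_left = SPEECH_SECONDS
        elif prob <= IN_THRESHOLD:
            # Stricter threshold inside the command: count this as silence.
            silence_left -= CHUNK_SECONDS
            if silence_left <= 0:
                return (i + 1) * CHUNK_SECONDS
        else:
            silence_left = SILENCE_SECONDS
    return None


# 0.5 s of speech followed by silence: the command ends roughly 0.7 s later.
stream = [0.9] * 50 + [0.0] * 100
print(segment(stream))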
b/homeassistant/components/aussie_broadband/config_flow.py
@@ -22,13 +22,14 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN):
     VERSION = 1
+    _reauth_username: str
+
     def __init__(self) -> None:
         """Initialize the config flow."""
         self.data: dict = {}
         self.options: dict = {CONF_SERVICES: []}
         self.services: list[dict[str, Any]] = []
         self.client: AussieBB | None = None
-        self._reauth_username: str | None = None
     async def async_auth(self, user_input: dict[str, str]) -> dict[str, str] | None:
         """Reusable Auth Helper."""
@@ -92,7 +93,7 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN):
         errors: dict[str, str] | None = None
-        if user_input and self._reauth_username:
+        if user_input:
             data = {
                 CONF_USERNAME: self._reauth_username,
                 CONF_PASSWORD: user_input[CONF_PASSWORD],
diff --git a/homeassistant/components/autarco/quality_scale.yaml b/homeassistant/components/autarco/quality_scale.yaml
new file mode 100644
index 00000000000..f0eb4771447
--- /dev/null
+++ b/homeassistant/components/autarco/quality_scale.yaml
@@ -0,0 +1,99 @@
+rules:
+  # Bronze
+  action-setup:
+    status: exempt
+    comment: |
+      This integration does not provide additional actions.
+  appropriate-polling: done
+  brands: done
+  common-modules:
+    status: todo
+    comment: |
+      The entity.py file is not used in this integration.
+  config-flow-test-coverage: done
+  config-flow: done
+  dependency-transparency: done
+  docs-actions:
+    status: exempt
+    comment: |
+      This integration does not provide additional actions.
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
+  entity-event-setup:
+    status: exempt
+    comment: |
+      Entities of this integration do not explicitly subscribe to events.
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  action-exceptions:
+    status: exempt
+    comment: |
+      This integration does not provide additional actions.
+  config-entry-unloading: done
+  docs-configuration-parameters:
+    status: exempt
+    comment: |
+      This integration does not have an options flow.
+  docs-installation-parameters: done
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates:
+    status: exempt
+    comment: |
+      This integration only polls data using a coordinator.
+      Since the integration is read-only and poll-only (it only provides
+      sensor data), there is no need to implement parallel updates.
+  reauthentication-flow: todo
+  test-coverage: done
+
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info:
+    status: exempt
+    comment: |
+      This integration cannot be discovered; it connects to a service
+      provider, which uses the user's home address to get the data.
+  discovery:
+    status: exempt
+    comment: |
+      This integration cannot be discovered; it connects to a service
+      provider, which uses the user's home address to get the data.
+  docs-data-update: done
+  docs-examples: todo
+  docs-known-limitations: todo
+  docs-supported-devices:
+    status: exempt
+    comment: |
+      This is a service, which doesn't integrate with any devices.
+  docs-supported-functions: done
+  docs-troubleshooting: todo
+  docs-use-cases: done
+  dynamic-devices: todo
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default:
+    status: exempt
+    comment: |
+      This integration does not have any entities that should be disabled by default.
+ entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + stale-devices: todo + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/avea/manifest.json b/homeassistant/components/avea/manifest.json index 43c46c96e66..7e6c080481e 100644 --- a/homeassistant/components/avea/manifest.json +++ b/homeassistant/components/avea/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/avea", "iot_class": "local_polling", "loggers": ["avea"], + "quality_scale": "legacy", "requirements": ["avea==1.5.1"] } diff --git a/homeassistant/components/avion/manifest.json b/homeassistant/components/avion/manifest.json index 505dca870a7..8488e949af3 100644 --- a/homeassistant/components/avion/manifest.json +++ b/homeassistant/components/avion/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/avion", "iot_class": "assumed_state", + "quality_scale": "legacy", "requirements": ["avion==0.10"] } diff --git a/homeassistant/components/aws/config_flow.py b/homeassistant/components/aws/config_flow.py index 3175e6bc56c..090d9747a64 100644 --- a/homeassistant/components/aws/config_flow.py +++ b/homeassistant/components/aws/config_flow.py @@ -14,7 +14,4 @@ class AWSFlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="configuration.yaml", data=import_data) diff --git a/homeassistant/components/aws/manifest.json b/homeassistant/components/aws/manifest.json index 6238bffce36..12149e4388a 100644 --- a/homeassistant/components/aws/manifest.json +++ b/homeassistant/components/aws/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/aws", "iot_class": "cloud_push", "loggers": ["aiobotocore", "botocore"], + "quality_scale": "legacy", "requirements": ["aiobotocore==2.13.1", "botocore==1.34.131"] } diff --git a/homeassistant/components/axis/config_flow.py b/homeassistant/components/axis/config_flow.py index 84d9880b7f8..592b1e2d41f 100644 --- a/homeassistant/components/axis/config_flow.py +++ b/homeassistant/components/axis/config_flow.py @@ -18,7 +18,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_HOST, @@ -59,9 +59,11 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> AxisOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> AxisOptionsFlowHandler: """Get the options flow for this handler.""" - return AxisOptionsFlowHandler(config_entry) + return AxisOptionsFlowHandler() def __init__(self) -> None: """Initialize the Axis config flow.""" @@ -264,7 +266,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN): return await self.async_step_user() -class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry): +class AxisOptionsFlowHandler(OptionsFlow): """Handle Axis device options.""" config_entry: AxisConfigEntry @@ -282,8 +284,7 @@ class 
AxisOptionsFlowHandler(OptionsFlowWithConfigEntry): ) -> ConfigFlowResult: """Manage the Axis device stream options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) schema = {} diff --git a/homeassistant/components/axis/manifest.json b/homeassistant/components/axis/manifest.json index d2265307d47..7163437361a 100644 --- a/homeassistant/components/axis/manifest.json +++ b/homeassistant/components/axis/manifest.json @@ -29,7 +29,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["axis"], - "quality_scale": "platinum", "requirements": ["axis==63"], "ssdp": [ { diff --git a/homeassistant/components/azure_event_hub/config_flow.py b/homeassistant/components/azure_event_hub/config_flow.py index 60ac9bff8cd..baed866042e 100644 --- a/homeassistant/components/azure_event_hub/config_flow.py +++ b/homeassistant/components/azure_event_hub/config_flow.py @@ -102,8 +102,6 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial user step.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") if user_input is None: return self.async_show_form(step_id=STEP_USER, data_schema=BASE_SCHEMA) @@ -160,8 +158,6 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from configuration.yaml.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") if CONF_SEND_INTERVAL in import_data: self._options[CONF_SEND_INTERVAL] = import_data.pop(CONF_SEND_INTERVAL) if CONF_MAX_DELAY in import_data: diff --git a/homeassistant/components/azure_event_hub/manifest.json b/homeassistant/components/azure_event_hub/manifest.json index c6d5835fd1d..45fbf8c4a56 100644 --- a/homeassistant/components/azure_event_hub/manifest.json +++ b/homeassistant/components/azure_event_hub/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/azure_event_hub", "iot_class": "cloud_push", "loggers": ["azure"], - "requirements": ["azure-eventhub==5.11.1"] + "requirements": ["azure-eventhub==5.11.1"], + "single_config_entry": true } diff --git a/homeassistant/components/azure_event_hub/strings.json b/homeassistant/components/azure_event_hub/strings.json index 3319a29a154..d17c4a385c0 100644 --- a/homeassistant/components/azure_event_hub/strings.json +++ b/homeassistant/components/azure_event_hub/strings.json @@ -31,7 +31,6 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "cannot_connect": "Connecting with the credentials from the configuration.yaml failed, please remove from yaml and use the config flow.", "unknown": "Connecting with the credentials from the configuration.yaml failed with an unknown error, please remove from yaml and use the config flow." 
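The Axis change above (and the BMW change later in this diff) drops OptionsFlowWithConfigEntry in favour of a plain OptionsFlow: the handler is constructed without arguments and reads the entry from self.config_entry, which core attaches when the flow is created. A minimal sketch of the resulting pattern, using a made-up `example` domain and option key:

```python
# Illustrative pattern only; the "example" domain, step id and option key are invented.
from __future__ import annotations

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
)
from homeassistant.core import callback


class ExampleOptionsFlowHandler(OptionsFlow):
    """Options flow that reads the entry from self.config_entry."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        if user_input is not None:
            # Merge the new input over the stored options instead of mutating
            # handler state, mirroring the Axis change above.
            return self.async_create_entry(
                data=self.config_entry.options | user_input
            )
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema({vol.Optional("stream_profile"): str}),
        )


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Config flow for the made-up example integration."""

    @staticmethod
    @callback
    def async_get_options_flow(
        config_entry: ConfigEntry,
    ) -> ExampleOptionsFlowHandler:
        # No constructor argument any more; core attaches the entry itself.
        return ExampleOptionsFlowHandler()
```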
} diff --git a/homeassistant/components/azure_service_bus/manifest.json b/homeassistant/components/azure_service_bus/manifest.json index 059f6300aec..31c1edac686 100644 --- a/homeassistant/components/azure_service_bus/manifest.json +++ b/homeassistant/components/azure_service_bus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/azure_service_bus", "iot_class": "cloud_push", "loggers": ["azure"], + "quality_scale": "legacy", "requirements": ["azure-servicebus==7.10.0"] } diff --git a/homeassistant/components/backup/const.py b/homeassistant/components/backup/const.py index 90faa33fc7f..f613f7cc352 100644 --- a/homeassistant/components/backup/const.py +++ b/homeassistant/components/backup/const.py @@ -17,6 +17,7 @@ LOGGER = getLogger(__package__) EXCLUDE_FROM_BACKUP = [ "__pycache__/*", ".DS_Store", + ".HA_RESTORE", "*.db-shm", "*.log.*", "*.log", diff --git a/homeassistant/components/backup/http.py b/homeassistant/components/backup/http.py index 4cc4e61c9e4..42693035bd3 100644 --- a/homeassistant/components/backup/http.py +++ b/homeassistant/components/backup/http.py @@ -2,23 +2,26 @@ from __future__ import annotations +import asyncio from http import HTTPStatus +from typing import cast +from aiohttp import BodyPartReader from aiohttp.hdrs import CONTENT_DISPOSITION from aiohttp.web import FileResponse, Request, Response -from homeassistant.components.http import KEY_HASS, HomeAssistantView +from homeassistant.components.http import KEY_HASS, HomeAssistantView, require_admin from homeassistant.core import HomeAssistant, callback from homeassistant.util import slugify -from .const import DOMAIN -from .manager import BaseBackupManager +from .const import DATA_MANAGER @callback def async_register_http_views(hass: HomeAssistant) -> None: """Register the http views.""" hass.http.register_view(DownloadBackupView) + hass.http.register_view(UploadBackupView) class DownloadBackupView(HomeAssistantView): @@ -36,7 +39,7 @@ class DownloadBackupView(HomeAssistantView): if not request["hass_user"].is_admin: return Response(status=HTTPStatus.UNAUTHORIZED) - manager: BaseBackupManager = request.app[KEY_HASS].data[DOMAIN] + manager = request.app[KEY_HASS].data[DATA_MANAGER] backup = await manager.async_get_backup(slug=slug) if backup is None or not backup.path.exists(): @@ -48,3 +51,29 @@ class DownloadBackupView(HomeAssistantView): CONTENT_DISPOSITION: f"attachment; filename={slugify(backup.name)}.tar" }, ) + + +class UploadBackupView(HomeAssistantView): + """Generate backup view.""" + + url = "/api/backup/upload" + name = "api:backup:upload" + + @require_admin + async def post(self, request: Request) -> Response: + """Upload a backup file.""" + manager = request.app[KEY_HASS].data[DATA_MANAGER] + reader = await request.multipart() + contents = cast(BodyPartReader, await reader.next()) + + try: + await manager.async_receive_backup(contents=contents) + except OSError as err: + return Response( + body=f"Can't write backup file {err}", + status=HTTPStatus.INTERNAL_SERVER_ERROR, + ) + except asyncio.CancelledError: + return Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) + + return Response(status=HTTPStatus.CREATED) diff --git a/homeassistant/components/backup/manager.py b/homeassistant/components/backup/manager.py index 701174e1b8d..4300f75eed0 100644 --- a/homeassistant/components/backup/manager.py +++ b/homeassistant/components/backup/manager.py @@ -9,13 +9,18 @@ import hashlib import io import json from pathlib import Path +from queue import SimpleQueue +import 
shutil import tarfile from tarfile import TarError +from tempfile import TemporaryDirectory import time from typing import Any, Protocol, cast +import aiohttp from securetar import SecureTarFile, atomic_contents_add +from homeassistant.backup_restore import RESTORE_BACKUP_FILE from homeassistant.const import __version__ as HAVERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -123,6 +128,10 @@ class BaseBackupManager(abc.ABC): LOGGER.debug("Loaded %s platforms", len(self.platforms)) self.loaded_platforms = True + @abc.abstractmethod + async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: + """Restore a backup.""" + @abc.abstractmethod async def async_create_backup(self, **kwargs: Any) -> Backup: """Generate a backup.""" @@ -142,6 +151,15 @@ class BaseBackupManager(abc.ABC): async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None: """Remove a backup.""" + @abc.abstractmethod + async def async_receive_backup( + self, + *, + contents: aiohttp.BodyPartReader, + **kwargs: Any, + ) -> None: + """Receive and store a backup file from upload.""" + class BackupManager(BaseBackupManager): """Backup manager for the Backup integration.""" @@ -217,6 +235,63 @@ class BackupManager(BaseBackupManager): LOGGER.debug("Removed backup located at %s", backup.path) self.backups.pop(slug) + async def async_receive_backup( + self, + *, + contents: aiohttp.BodyPartReader, + **kwargs: Any, + ) -> None: + """Receive and store a backup file from upload.""" + queue: SimpleQueue[tuple[bytes, asyncio.Future[None] | None] | None] = ( + SimpleQueue() + ) + temp_dir_handler = await self.hass.async_add_executor_job(TemporaryDirectory) + target_temp_file = Path( + temp_dir_handler.name, contents.filename or "backup.tar" + ) + + def _sync_queue_consumer() -> None: + with target_temp_file.open("wb") as file_handle: + while True: + if (_chunk_future := queue.get()) is None: + break + _chunk, _future = _chunk_future + if _future is not None: + self.hass.loop.call_soon_threadsafe(_future.set_result, None) + file_handle.write(_chunk) + + fut: asyncio.Future[None] | None = None + try: + fut = self.hass.async_add_executor_job(_sync_queue_consumer) + megabytes_sending = 0 + while chunk := await contents.read_chunk(BUF_SIZE): + megabytes_sending += 1 + if megabytes_sending % 5 != 0: + queue.put_nowait((chunk, None)) + continue + + chunk_future = self.hass.loop.create_future() + queue.put_nowait((chunk, chunk_future)) + await asyncio.wait( + (fut, chunk_future), + return_when=asyncio.FIRST_COMPLETED, + ) + if fut.done(): + # The executor job failed + break + + queue.put_nowait(None) # terminate queue consumer + finally: + if fut is not None: + await fut + + def _move_and_cleanup() -> None: + shutil.move(target_temp_file, self.backup_dir / target_temp_file.name) + temp_dir_handler.cleanup() + + await self.hass.async_add_executor_job(_move_and_cleanup) + await self.load_backups() + async def async_create_backup(self, **kwargs: Any) -> Backup: """Generate a backup.""" if self.backing_up: @@ -291,6 +366,25 @@ class BackupManager(BaseBackupManager): return tar_file_path.stat().st_size + async def async_restore_backup(self, slug: str, **kwargs: Any) -> None: + """Restore a backup. + + This will write the restore information to .HA_RESTORE which + will be handled during startup by the restore_backup module. 
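UploadBackupView above exposes an admin-only multipart POST endpoint at /api/backup/upload, and async_receive_backup streams the uploaded part to a temporary file through the queue-fed executor job before moving it into the backup directory. A rough client-side sketch; the host, access token and file name are placeholders:

```python
# Illustrative client only; host, token and file name are placeholders.
import asyncio

import aiohttp


async def upload_backup() -> None:
    headers = {"Authorization": "Bearer <long-lived-access-token>"}
    form = aiohttp.FormData()
    with open("backup_slug.tar", "rb") as backup_file:
        # The view reads the first multipart part, so the field name is not significant.
        form.add_field(
            "file",
            backup_file,
            filename="backup_slug.tar",
            content_type="application/x-tar",
        )
        async with aiohttp.ClientSession(headers=headers) as session:
            async with session.post(
                "http://homeassistant.local:8123/api/backup/upload", data=form
            ) as resp:
                # UploadBackupView answers 201 Created on success and 500 if
                # the file could not be written.
                print(resp.status)


asyncio.run(upload_backup())
```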
+ """ + if (backup := await self.async_get_backup(slug=slug)) is None: + raise HomeAssistantError(f"Backup {slug} not found") + + def _write_restore_file() -> None: + """Write the restore file.""" + Path(self.hass.config.path(RESTORE_BACKUP_FILE)).write_text( + json.dumps({"path": backup.path.as_posix()}), + encoding="utf-8", + ) + + await self.hass.async_add_executor_job(_write_restore_file) + await self.hass.services.async_call("homeassistant", "restart", {}) + def _generate_slug(date: str, name: str) -> str: """Generate a backup slug.""" diff --git a/homeassistant/components/backup/manifest.json b/homeassistant/components/backup/manifest.json index 1ec9b748cda..0a906bb6dfa 100644 --- a/homeassistant/components/backup/manifest.json +++ b/homeassistant/components/backup/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["securetar==2024.2.1"] + "requirements": ["securetar==2024.11.0"] } diff --git a/homeassistant/components/backup/websocket.py b/homeassistant/components/backup/websocket.py index 7daaaad1ec7..3ac8a7ace3e 100644 --- a/homeassistant/components/backup/websocket.py +++ b/homeassistant/components/backup/websocket.py @@ -22,6 +22,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> websocket_api.async_register_command(hass, handle_info) websocket_api.async_register_command(hass, handle_create) websocket_api.async_register_command(hass, handle_remove) + websocket_api.async_register_command(hass, handle_restore) @websocket_api.require_admin @@ -85,6 +86,24 @@ async def handle_remove( connection.send_result(msg["id"]) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "backup/restore", + vol.Required("slug"): str, + } +) +@websocket_api.async_response +async def handle_restore( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Restore a backup.""" + await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"]) + connection.send_result(msg["id"]) + + @websocket_api.require_admin @websocket_api.websocket_command({vol.Required("type"): "backup/generate"}) @websocket_api.async_response diff --git a/homeassistant/components/baidu/manifest.json b/homeassistant/components/baidu/manifest.json index 8213b7cbe5e..32f14100b81 100644 --- a/homeassistant/components/baidu/manifest.json +++ b/homeassistant/components/baidu/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/baidu", "iot_class": "cloud_push", "loggers": ["aip"], + "quality_scale": "legacy", "requirements": ["baidu-aip==1.6.6"] } diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 1e06f153cdb..209311d3e8a 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -17,62 +17,9 @@ from homeassistant.components.media_player import ( class BangOlufsenSource: """Class used for associating device source ids with friendly names. 
May not include all sources.""" - URI_STREAMER: Final[Source] = Source( - name="Audio Streamer", - id="uriStreamer", - is_seekable=False, - is_enabled=True, - is_playable=True, - ) - BLUETOOTH: Final[Source] = Source( - name="Bluetooth", - id="bluetooth", - is_seekable=False, - is_enabled=True, - is_playable=True, - ) - CHROMECAST: Final[Source] = Source( - name="Chromecast built-in", - id="chromeCast", - is_seekable=False, - is_enabled=True, - is_playable=True, - ) - LINE_IN: Final[Source] = Source( - name="Line-In", - id="lineIn", - is_seekable=False, - is_enabled=True, - is_playable=True, - ) - SPDIF: Final[Source] = Source( - name="Optical", - id="spdif", - is_seekable=False, - is_enabled=True, - is_playable=True, - ) - NET_RADIO: Final[Source] = Source( - name="B&O Radio", - id="netRadio", - is_seekable=False, - is_enabled=True, - is_playable=True, - ) - DEEZER: Final[Source] = Source( - name="Deezer", - id="deezer", - is_seekable=True, - is_enabled=True, - is_playable=True, - ) - TIDAL: Final[Source] = Source( - name="Tidal", - id="tidal", - is_seekable=True, - is_enabled=True, - is_playable=True, - ) + LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn") + SPDIF: Final[Source] = Source(name="Optical", id="spdif") + URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer") BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = { diff --git a/homeassistant/components/bang_olufsen/diagnostics.py b/homeassistant/components/bang_olufsen/diagnostics.py new file mode 100644 index 00000000000..cab7eae5e25 --- /dev/null +++ b/homeassistant/components/bang_olufsen/diagnostics.py @@ -0,0 +1,40 @@ +"""Support for Bang & Olufsen diagnostics.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . 
import BangOlufsenConfigEntry +from .const import DOMAIN + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: BangOlufsenConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + data: dict = { + "config_entry": config_entry.as_dict(), + "websocket_connected": config_entry.runtime_data.client.websocket_connected, + } + + if TYPE_CHECKING: + assert config_entry.unique_id + + # Add media_player entity's state + entity_registry = er.async_get(hass) + if entity_id := entity_registry.async_get_entity_id( + MEDIA_PLAYER_DOMAIN, DOMAIN, config_entry.unique_id + ): + if state := hass.states.get(entity_id): + state_dict = dict(state.as_dict()) + + # Remove context as it is not relevant + state_dict.pop("context") + data["media_player"] = state_dict + + return data diff --git a/homeassistant/components/bang_olufsen/icons.json b/homeassistant/components/bang_olufsen/icons.json new file mode 100644 index 00000000000..fec0bf20937 --- /dev/null +++ b/homeassistant/components/bang_olufsen/icons.json @@ -0,0 +1,9 @@ +{ + "services": { + "beolink_join": { "service": "mdi:location-enter" }, + "beolink_expand": { "service": "mdi:location-enter" }, + "beolink_unexpand": { "service": "mdi:location-exit" }, + "beolink_leave": { "service": "mdi:close-circle-outline" }, + "beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" } + } +} diff --git a/homeassistant/components/bang_olufsen/manifest.json b/homeassistant/components/bang_olufsen/manifest.json index b4a92d4da25..1565c98e979 100644 --- a/homeassistant/components/bang_olufsen/manifest.json +++ b/homeassistant/components/bang_olufsen/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/bang_olufsen", "integration_type": "device", "iot_class": "local_push", - "requirements": ["mozart-api==4.1.1.116.0"], + "requirements": ["mozart-api==4.1.1.116.3"], "zeroconf": ["_bangolufsen._tcp.local."] } diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index e8108ee2cf7..96e7cca0175 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -11,7 +11,7 @@ from typing import TYPE_CHECKING, Any, cast from aiohttp import ClientConnectorError from mozart_api import __version__ as MOZART_API_VERSION -from mozart_api.exceptions import ApiException +from mozart_api.exceptions import ApiException, NotFoundException from mozart_api.models import ( Action, Art, @@ -38,6 +38,7 @@ from mozart_api.models import ( VolumeState, ) from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork +import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( @@ -55,10 +56,17 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MODEL, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import ( + 
AddEntitiesCallback, + async_get_current_platform, +) from homeassistant.util.dt import utcnow from . import BangOlufsenConfigEntry @@ -78,6 +86,8 @@ from .const import ( from .entity import BangOlufsenEntity from .util import get_serial_number_from_jid +PARALLEL_UPDATES = 0 + SCAN_INTERVAL = timedelta(seconds=30) _LOGGER = logging.getLogger(__name__) @@ -116,11 +126,62 @@ async def async_setup_entry( ] ) + # Register actions. + platform = async_get_current_platform() + + jid_regex = vol.Match( + r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$" + ) + + platform.async_register_entity_service( + name="beolink_join", + schema={vol.Optional("beolink_jid"): jid_regex}, + func="async_beolink_join", + ) + + platform.async_register_entity_service( + name="beolink_expand", + schema={ + vol.Exclusive("all_discovered", "devices", ""): cv.boolean, + vol.Exclusive( + "beolink_jids", + "devices", + "Define either specific Beolink JIDs or all discovered", + ): vol.All( + cv.ensure_list, + [jid_regex], + ), + }, + func="async_beolink_expand", + ) + + platform.async_register_entity_service( + name="beolink_unexpand", + schema={ + vol.Required("beolink_jids"): vol.All( + cv.ensure_list, + [jid_regex], + ), + }, + func="async_beolink_unexpand", + ) + + platform.async_register_entity_service( + name="beolink_leave", + schema=None, + func="async_beolink_leave", + ) + + platform.async_register_entity_service( + name="beolink_allstandby", + schema=None, + func="async_beolink_allstandby", + ) + class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): """Representation of a media player.""" - _attr_icon = "mdi:speaker-wireless" _attr_name = None _attr_device_class = MediaPlayerDeviceClass.SPEAKER @@ -156,6 +217,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Beolink compatible sources self._beolink_sources: dict[str, bool] = {} self._remote_leader: BeolinkLeader | None = None + # Extra state attributes for showing Beolink: peer(s), listener(s), leader and self + self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {} async def async_added_to_hass(self) -> None: """Turn on the dispatchers.""" @@ -165,6 +228,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): CONNECTION_STATUS: self._async_update_connection_state, WebsocketNotification.ACTIVE_LISTENING_MODE: self._async_update_sound_modes, WebsocketNotification.BEOLINK: self._async_update_beolink, + WebsocketNotification.CONFIGURATION: self._async_update_name_and_beolink, WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error, WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata_and_beolink, WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress, @@ -230,6 +294,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): await self._async_update_sound_modes() + # Update beolink attributes and device name. + await self._async_update_name_and_beolink() + async def async_update(self) -> None: """Update queue settings.""" # The WebSocket event listener is the main handler for connection state. 
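The platform setup above registers the Beolink helpers as entity services under the bang_olufsen domain, so they can be targeted at a media player entity like any other action. A hedged sketch of calling them from Python; the entity ID is a placeholder and the JIDs reuse the examples from services.yaml further down:

```python
# Illustrative calls only; entity_id is a placeholder, JIDs are the documented examples.
from homeassistant.core import HomeAssistant


async def demo_beolink_actions(hass: HomeAssistant) -> None:
    """Sketch: call the newly registered bang_olufsen entity services."""
    # Expand the current Beolink experience to two specific peers.
    await hass.services.async_call(
        "bang_olufsen",
        "beolink_expand",
        {
            "entity_id": "media_player.beosound_balance",
            "beolink_jids": [
                "1111.2222222.33333333@products.bang-olufsen.com",
                "4444.5555555.66666666@products.bang-olufsen.com",
            ],
        },
        blocking=True,
    )

    # Or expand to every discovered peer; the vol.Exclusive schema above makes
    # all_discovered and beolink_jids mutually exclusive.
    await hass.services.async_call(
        "bang_olufsen",
        "beolink_expand",
        {"entity_id": "media_player.beosound_balance", "all_discovered": True},
        blocking=True,
    )

    # Put every connected Beolink device into network standby.
    await hass.services.async_call(
        "bang_olufsen",
        "beolink_allstandby",
        {"entity_id": "media_player.beosound_balance"},
        blocking=True,
    )
```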
@@ -372,9 +439,44 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): self.async_write_ha_state() + async def _async_update_name_and_beolink(self) -> None: + """Update the device friendly name.""" + beolink_self = await self._client.get_beolink_self() + + # Update device name + device_registry = dr.async_get(self.hass) + assert self.device_entry is not None + + device_registry.async_update_device( + device_id=self.device_entry.id, + name=beolink_self.friendly_name, + ) + + await self._async_update_beolink() + async def _async_update_beolink(self) -> None: """Update the current Beolink leader, listeners, peers and self.""" + self._beolink_attributes = {} + + assert self.device_entry is not None + assert self.device_entry.name is not None + + # Add Beolink self + self._beolink_attributes = { + "beolink": {"self": {self.device_entry.name: self._beolink_jid}} + } + + # Add Beolink peers + peers = await self._client.get_beolink_peers() + + if len(peers) > 0: + self._beolink_attributes["beolink"]["peers"] = {} + for peer in peers: + self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = ( + peer.jid + ) + # Add Beolink listeners / leader self._remote_leader = self._playback_metadata.remote_leader @@ -394,9 +496,14 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Add self group_members.append(self.entity_id) + self._beolink_attributes["beolink"]["leader"] = { + self._remote_leader.friendly_name: self._remote_leader.jid, + } + # If not listener, check if leader. else: beolink_listeners = await self._client.get_beolink_listeners() + beolink_listeners_attribute = {} # Check if the device is a leader. if len(beolink_listeners) > 0: @@ -417,6 +524,18 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): for beolink_listener in beolink_listeners ] ) + # Update Beolink attributes + for beolink_listener in beolink_listeners: + for peer in peers: + if peer.jid == beolink_listener.jid: + # Get the friendly names for the listeners from the peers + beolink_listeners_attribute[peer.friendly_name] = ( + beolink_listener.jid + ) + break + self._beolink_attributes["beolink"]["listeners"] = ( + beolink_listeners_attribute + ) self._attr_group_members = group_members @@ -570,38 +689,19 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): @property def source(self) -> str | None: """Return the current audio source.""" - - # Try to fix some of the source_change chromecast weirdness. - if hasattr(self._playback_metadata, "title"): - # source_change is chromecast but line in is selected. - if self._playback_metadata.title == BangOlufsenSource.LINE_IN.name: - return BangOlufsenSource.LINE_IN.name - - # source_change is chromecast but bluetooth is selected. - if self._playback_metadata.title == BangOlufsenSource.BLUETOOTH.name: - return BangOlufsenSource.BLUETOOTH.name - - # source_change is line in, bluetooth or optical but stale metadata is sent through the WebSocket, - # And the source has not changed. - if self._source_change.id in ( - BangOlufsenSource.BLUETOOTH.id, - BangOlufsenSource.LINE_IN.id, - BangOlufsenSource.SPDIF.id, - ): - return BangOlufsenSource.CHROMECAST.name - - # source_change is chromecast and there is metadata but no artwork. 
Bluetooth does support metadata but not artwork - # So i assume that it is bluetooth and not chromecast - if ( - hasattr(self._playback_metadata, "art") - and self._playback_metadata.art is not None - and len(self._playback_metadata.art) == 0 - and self._source_change.id == BangOlufsenSource.CHROMECAST.id - ): - return BangOlufsenSource.BLUETOOTH.name - return self._source_change.name + @property + def extra_state_attributes(self) -> dict[str, Any] | None: + """Return information that is not returned anywhere else.""" + attributes: dict[str, Any] = {} + + # Add Beolink attributes + if self._beolink_attributes: + attributes.update(self._beolink_attributes) + + return attributes + async def async_turn_off(self) -> None: """Set the device to "networkStandby".""" await self._client.post_standby() @@ -873,23 +973,30 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): # Beolink compatible B&O device. # Repeated presses / calls will cycle between compatible playing devices. if len(group_members) == 0: - await self._async_beolink_join() + await self.async_beolink_join() return # Get JID for each group member jids = [self._get_beolink_jid(group_member) for group_member in group_members] - await self._async_beolink_expand(jids) + await self.async_beolink_expand(jids) async def async_unjoin_player(self) -> None: """Unjoin Beolink session. End session if leader.""" - await self._async_beolink_leave() + await self.async_beolink_leave() - async def _async_beolink_join(self) -> None: + # Custom actions: + async def async_beolink_join(self, beolink_jid: str | None = None) -> None: """Join a Beolink multi-room experience.""" - await self._client.join_latest_beolink_experience() + if beolink_jid is None: + await self._client.join_latest_beolink_experience() + else: + await self._client.join_beolink_peer(jid=beolink_jid) - async def _async_beolink_expand(self, beolink_jids: list[str]) -> None: + async def async_beolink_expand( + self, beolink_jids: list[str] | None = None, all_discovered: bool = False + ) -> None: """Expand a Beolink multi-room experience with a device or devices.""" + # Ensure that the current source is expandable if not self._beolink_sources[cast(str, self._source_change.id)]: raise ServiceValidationError( @@ -901,10 +1008,37 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): }, ) - # Try to expand to all defined devices - for beolink_jid in beolink_jids: - await self._client.post_beolink_expand(jid=beolink_jid) + # Expand to all discovered devices + if all_discovered: + peers = await self._client.get_beolink_peers() - async def _async_beolink_leave(self) -> None: + for peer in peers: + try: + await self._client.post_beolink_expand(jid=peer.jid) + except NotFoundException: + _LOGGER.warning("Unable to expand to %s", peer.jid) + + # Try to expand to all defined devices + elif beolink_jids: + for beolink_jid in beolink_jids: + try: + await self._client.post_beolink_expand(jid=beolink_jid) + except NotFoundException: + _LOGGER.warning( + "Unable to expand to %s. 
Is the device available on the network?", + beolink_jid, + ) + + async def async_beolink_unexpand(self, beolink_jids: list[str]) -> None: + """Unexpand a Beolink multi-room experience with a device or devices.""" + # Unexpand all defined devices + for beolink_jid in beolink_jids: + await self._client.post_beolink_unexpand(jid=beolink_jid) + + async def async_beolink_leave(self) -> None: """Leave the current Beolink experience.""" await self._client.post_beolink_leave() + + async def async_beolink_allstandby(self) -> None: + """Set all connected Beolink devices to standby.""" + await self._client.post_beolink_allstandby() diff --git a/homeassistant/components/bang_olufsen/services.yaml b/homeassistant/components/bang_olufsen/services.yaml new file mode 100644 index 00000000000..e5d61420dff --- /dev/null +++ b/homeassistant/components/bang_olufsen/services.yaml @@ -0,0 +1,79 @@ +beolink_allstandby: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + +beolink_expand: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + all_discovered: + required: false + example: false + selector: + boolean: + jid_options: + collapsed: false + fields: + beolink_jids: + required: false + example: >- + [ + 1111.2222222.33333333@products.bang-olufsen.com, + 4444.5555555.66666666@products.bang-olufsen.com + ] + selector: + object: + +beolink_join: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + jid_options: + collapsed: false + fields: + beolink_jid: + required: false + example: 1111.2222222.33333333@products.bang-olufsen.com + selector: + text: + +beolink_leave: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + +beolink_unexpand: + target: + entity: + integration: bang_olufsen + domain: media_player + device: + integration: bang_olufsen + fields: + jid_options: + collapsed: false + fields: + beolink_jids: + required: true + example: >- + [ + 1111.2222222.33333333@products.bang-olufsen.com, + 4444.5555555.66666666@products.bang-olufsen.com + ] + selector: + object: diff --git a/homeassistant/components/bang_olufsen/strings.json b/homeassistant/components/bang_olufsen/strings.json index 3e336f7d2d8..6e75d2f26c8 100644 --- a/homeassistant/components/bang_olufsen/strings.json +++ b/homeassistant/components/bang_olufsen/strings.json @@ -1,4 +1,8 @@ { + "common": { + "jid_options_name": "JID options", + "jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity." + }, "config": { "error": { "api_exception": "[%key:common::config_flow::error::cannot_connect%]", @@ -7,7 +11,7 @@ "invalid_ip": "Invalid IPv4 address" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::single_instance_allowed%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" }, "flow_title": "{name}", @@ -25,6 +29,68 @@ } } }, + "services": { + "beolink_allstandby": { + "name": "Beolink all standby", + "description": "Set all Connected Beolink devices to standby." 
+ }, + "beolink_expand": { + "name": "Beolink expand", + "description": "Expand current Beolink experience.", + "fields": { + "all_discovered": { + "name": "All discovered", + "description": "Expand Beolink experience to all discovered devices." + }, + "beolink_jids": { + "name": "Beolink JIDs", + "description": "Specify which Beolink JIDs will join current Beolink experience." + } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + }, + "beolink_join": { + "name": "Beolink join", + "description": "Join a Beolink experience.", + "fields": { + "beolink_jid": { + "name": "Beolink JID", + "description": "Manually specify Beolink JID to join." + } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + }, + "beolink_leave": { + "name": "Beolink leave", + "description": "Leave a Beolink experience." + }, + "beolink_unexpand": { + "name": "Beolink unexpand", + "description": "Unexpand from current Beolink experience.", + "fields": { + "beolink_jids": { + "name": "Beolink JIDs", + "description": "Specify which Beolink JIDs will leave from current Beolink experience." + } + }, + "sections": { + "jid_options": { + "name": "[%key:component::bang_olufsen::common::jid_options_name%]", + "description": "[%key:component::bang_olufsen::common::jid_options_description%]" + } + } + } + }, "exceptions": { "m3u_invalid_format": { "message": "Media sources with the .m3u extension are not supported." diff --git a/homeassistant/components/bang_olufsen/websocket.py b/homeassistant/components/bang_olufsen/websocket.py index 94b84189ccc..bc817226b61 100644 --- a/homeassistant/components/bang_olufsen/websocket.py +++ b/homeassistant/components/bang_olufsen/websocket.py @@ -15,7 +15,7 @@ from mozart_api.models import ( VolumeState, WebsocketNotificationTag, ) -from mozart_api.mozart_client import MozartClient +from mozart_api.mozart_client import BaseWebSocketResponse, MozartClient from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -120,6 +120,11 @@ class BangOlufsenWebsocket(BangOlufsenBase): self.hass, f"{self._unique_id}_{WebsocketNotification.BEOLINK}", ) + elif notification_type is WebsocketNotification.CONFIGURATION: + async_dispatcher_send( + self.hass, + f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}", + ) elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED: async_dispatcher_send( self.hass, @@ -197,12 +202,13 @@ class BangOlufsenWebsocket(BangOlufsenBase): sw_version=software_status.software_version, ) - def on_all_notifications_raw(self, notification: dict) -> None: + def on_all_notifications_raw(self, notification: BaseWebSocketResponse) -> None: """Receive all notifications.""" + debug_notification = { + "device_id": self._device.id, + "serial_number": int(self._unique_id), + **notification, + } - # Add the device_id and serial_number to the notification - notification["device_id"] = self._device.id - notification["serial_number"] = int(self._unique_id) - - _LOGGER.debug("%s", notification) - self.hass.bus.async_fire(BANG_OLUFSEN_WEBSOCKET_EVENT, notification) + _LOGGER.debug("%s", debug_notification) + self.hass.bus.async_fire(BANG_OLUFSEN_WEBSOCKET_EVENT, debug_notification) diff --git a/homeassistant/components/bbox/manifest.json 
b/homeassistant/components/bbox/manifest.json index 9035bea74bc..67e54ae2359 100644 --- a/homeassistant/components/bbox/manifest.json +++ b/homeassistant/components/bbox/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bbox", "iot_class": "local_polling", "loggers": ["pybbox"], + "quality_scale": "legacy", "requirements": ["pybbox==0.0.5-alpha"] } diff --git a/homeassistant/components/beewi_smartclim/manifest.json b/homeassistant/components/beewi_smartclim/manifest.json index 3555f9181bb..baf41be4345 100644 --- a/homeassistant/components/beewi_smartclim/manifest.json +++ b/homeassistant/components/beewi_smartclim/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/beewi_smartclim", "iot_class": "local_polling", "loggers": ["beewi_smartclim"], + "quality_scale": "legacy", "requirements": ["beewi-smartclim==0.0.10"] } diff --git a/homeassistant/components/bitcoin/manifest.json b/homeassistant/components/bitcoin/manifest.json index 6f5fd678009..b208e904cab 100644 --- a/homeassistant/components/bitcoin/manifest.json +++ b/homeassistant/components/bitcoin/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bitcoin", "iot_class": "cloud_polling", "loggers": ["blockchain"], + "quality_scale": "legacy", "requirements": ["blockchain==1.4.4"] } diff --git a/homeassistant/components/bizkaibus/manifest.json b/homeassistant/components/bizkaibus/manifest.json index b47df75bbe5..5a333546401 100644 --- a/homeassistant/components/bizkaibus/manifest.json +++ b/homeassistant/components/bizkaibus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bizkaibus", "iot_class": "cloud_polling", "loggers": ["bizkaibus"], + "quality_scale": "legacy", "requirements": ["bizkaibus==0.1.1"] } diff --git a/homeassistant/components/blackbird/manifest.json b/homeassistant/components/blackbird/manifest.json index d75b69dfaf8..a0f4b0c383c 100644 --- a/homeassistant/components/blackbird/manifest.json +++ b/homeassistant/components/blackbird/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/blackbird", "iot_class": "local_polling", "loggers": ["pyblackbird"], + "quality_scale": "legacy", "requirements": ["pyblackbird==0.6"] } diff --git a/homeassistant/components/blink/config_flow.py b/homeassistant/components/blink/config_flow.py index 62f15bd6e10..e37df26aaa8 100644 --- a/homeassistant/components/blink/config_flow.py +++ b/homeassistant/components/blink/config_flow.py @@ -10,7 +10,7 @@ from blinkpy.auth import Auth, LoginError, TokenRefreshFailed from blinkpy.blinkpy import Blink, BlinkSetupError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_PIN, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -61,6 +61,8 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN): session=async_get_clientsession(self.hass), ) await self.async_set_unique_id(user_input[CONF_USERNAME]) + if self.source != SOURCE_REAUTH: + self._abort_if_unique_id_configured() try: await validate_input(self.auth) diff --git a/homeassistant/components/blink/sensor.py b/homeassistant/components/blink/sensor.py index f20f8188b42..e0b5989cc80 100644 --- a/homeassistant/components/blink/sensor.py +++ 
b/homeassistant/components/blink/sensor.py @@ -10,7 +10,11 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import EntityCategory, UnitOfTemperature +from homeassistant.const import ( + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -32,6 +36,8 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key=TYPE_WIFI_STRENGTH, translation_key="wifi_strength", + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, ), diff --git a/homeassistant/components/blinksticklight/manifest.json b/homeassistant/components/blinksticklight/manifest.json index 70fac896ff2..d3592b6af6e 100644 --- a/homeassistant/components/blinksticklight/manifest.json +++ b/homeassistant/components/blinksticklight/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/blinksticklight", "iot_class": "local_polling", "loggers": ["blinkstick"], + "quality_scale": "legacy", "requirements": ["BlinkStick==1.2.0"] } diff --git a/homeassistant/components/blockchain/manifest.json b/homeassistant/components/blockchain/manifest.json index 2e58dc5aa03..6c9182ee0c4 100644 --- a/homeassistant/components/blockchain/manifest.json +++ b/homeassistant/components/blockchain/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/blockchain", "iot_class": "cloud_polling", "loggers": ["pyblockchain"], + "quality_scale": "legacy", "requirements": ["python-blockchain-api==0.0.2"] } diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 97985a74300..38ef78fad3a 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -292,14 +292,6 @@ class BluesoundPlayer(MediaPlayerEntity): self._last_status_update = dt_util.utcnow() self._status = status - group_name = status.group_name - if group_name != self._group_name: - _LOGGER.debug("Group name change detected on device: %s", self.id) - self._group_name = group_name - - # rebuild ordered list of entity_ids that are in the group, master is first - self._group_list = self.rebuild_bluesound_group() - self.async_write_ha_state() except PlayerUnreachableError: self._attr_available = False @@ -323,6 +315,8 @@ class BluesoundPlayer(MediaPlayerEntity): self._sync_status = sync_status + self._group_list = self.rebuild_bluesound_group() + if sync_status.master is not None: self._is_master = False master_id = f"{sync_status.master.ip}:{sync_status.master.port}" @@ -619,21 +613,32 @@ class BluesoundPlayer(MediaPlayerEntity): def rebuild_bluesound_group(self) -> list[str]: """Rebuild the list of entities in speaker group.""" - if self._group_name is None: + if self.sync_status.master is None and self.sync_status.slaves is None: return [] - device_group = self._group_name.split("+") + player_entities: list[BluesoundPlayer] = self.hass.data[DATA_BLUESOUND] - sorted_entities: list[BluesoundPlayer] = sorted( - self.hass.data[DATA_BLUESOUND], - key=lambda entity: entity.is_master, - reverse=True, - ) - return [ - entity.sync_status.name - for entity in sorted_entities - 
if entity.bluesound_device_name in device_group + leader_sync_status: SyncStatus | None = None + if self.sync_status.master is None: + leader_sync_status = self.sync_status + else: + required_id = f"{self.sync_status.master.ip}:{self.sync_status.master.port}" + for x in player_entities: + if x.sync_status.id == required_id: + leader_sync_status = x.sync_status + break + + if leader_sync_status is None or leader_sync_status.slaves is None: + return [] + + follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.slaves] + follower_names = [ + x.sync_status.name + for x in player_entities + if x.sync_status.id in follower_ids ] + follower_names.insert(0, leader_sync_status.name) + return follower_names async def async_unjoin(self) -> None: """Unjoin the player from a group.""" diff --git a/homeassistant/components/bluetooth_le_tracker/manifest.json b/homeassistant/components/bluetooth_le_tracker/manifest.json index 79f885cad18..4abf5f7607e 100644 --- a/homeassistant/components/bluetooth_le_tracker/manifest.json +++ b/homeassistant/components/bluetooth_le_tracker/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/bluetooth_le_tracker", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/bluetooth_tracker/manifest.json b/homeassistant/components/bluetooth_tracker/manifest.json index 0a0356e6669..8fb35b311c9 100644 --- a/homeassistant/components/bluetooth_tracker/manifest.json +++ b/homeassistant/components/bluetooth_tracker/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bluetooth_tracker", "iot_class": "local_polling", "loggers": ["bluetooth", "bt_proximity"], + "quality_scale": "legacy", "requirements": ["bt-proximity==0.2.1", "PyBluez==0.22"] } diff --git a/homeassistant/components/bmw_connected_drive/config_flow.py b/homeassistant/components/bmw_connected_drive/config_flow.py index 6803bbac600..8831895c71e 100644 --- a/homeassistant/components/bmw_connected_drive/config_flow.py +++ b/homeassistant/components/bmw_connected_drive/config_flow.py @@ -21,15 +21,24 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_SOURCE, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig +from homeassistant.util.ssl import get_default_context from . 
import DOMAIN -from .const import CONF_ALLOWED_REGIONS, CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN +from .const import ( + CONF_ALLOWED_REGIONS, + CONF_CAPTCHA_REGIONS, + CONF_CAPTCHA_TOKEN, + CONF_CAPTCHA_URL, + CONF_GCID, + CONF_READ_ONLY, + CONF_REFRESH_TOKEN, +) DATA_SCHEMA = vol.Schema( { @@ -41,7 +50,14 @@ DATA_SCHEMA = vol.Schema( translation_key="regions", ) ), - } + }, + extra=vol.REMOVE_EXTRA, +) +CAPTCHA_SCHEMA = vol.Schema( + { + vol.Required(CONF_CAPTCHA_TOKEN): str, + }, + extra=vol.REMOVE_EXTRA, ) @@ -54,6 +70,8 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, data[CONF_USERNAME], data[CONF_PASSWORD], get_region_from_name(data[CONF_REGION]), + hcaptcha_token=data.get(CONF_CAPTCHA_TOKEN), + verify=get_default_context(), ) try: @@ -79,15 +97,17 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + data: dict[str, Any] = {} + _existing_entry_data: Mapping[str, Any] | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - errors: dict[str, str] = {} + errors: dict[str, str] = self.data.pop("errors", {}) - if user_input is not None: + if user_input is not None and not errors: unique_id = f"{user_input[CONF_REGION]}-{user_input[CONF_USERNAME]}" await self.async_set_unique_id(unique_id) @@ -96,22 +116,35 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): else: self._abort_if_unique_id_configured() + # Store user input for later use + self.data.update(user_input) + + # North America and Rest of World require captcha token + if ( + self.data.get(CONF_REGION) in CONF_CAPTCHA_REGIONS + and CONF_CAPTCHA_TOKEN not in self.data + ): + return await self.async_step_captcha() + info = None try: - info = await validate_input(self.hass, user_input) - entry_data = { - **user_input, - CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN), - CONF_GCID: info.get(CONF_GCID), - } + info = await validate_input(self.hass, self.data) except MissingCaptcha: errors["base"] = "missing_captcha" except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" + finally: + self.data.pop(CONF_CAPTCHA_TOKEN, None) if info: + entry_data = { + **self.data, + CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN), + CONF_GCID: info.get(CONF_GCID), + } + if self.source == SOURCE_REAUTH: return self.async_update_reload_and_abort( self._get_reauth_entry(), data=entry_data @@ -128,7 +161,7 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): schema = self.add_suggested_values_to_schema( DATA_SCHEMA, - self._existing_entry_data, + self._existing_entry_data or self.data, ) return self.async_show_form(step_id="user", data_schema=schema, errors=errors) @@ -147,16 +180,32 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN): self._existing_entry_data = self._get_reconfigure_entry().data return await self.async_step_user() + async def async_step_captcha( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Show captcha form.""" + if user_input and user_input.get(CONF_CAPTCHA_TOKEN): + self.data[CONF_CAPTCHA_TOKEN] = user_input[CONF_CAPTCHA_TOKEN].strip() + return await self.async_step_user(self.data) + + return self.async_show_form( + step_id="captcha", + data_schema=CAPTCHA_SCHEMA, + description_placeholders={ + "captcha_url": CONF_CAPTCHA_URL.format(region=self.data[CONF_REGION]) + }, + ) + @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, ) -> BMWOptionsFlow: """Return a MyBMW option flow.""" - return 
BMWOptionsFlow(config_entry) + return BMWOptionsFlow() -class BMWOptionsFlow(OptionsFlowWithConfigEntry): +class BMWOptionsFlow(OptionsFlow): """Handle a option flow for MyBMW.""" async def async_step_init( diff --git a/homeassistant/components/bmw_connected_drive/const.py b/homeassistant/components/bmw_connected_drive/const.py index 98d4acbfc91..750289e9d0a 100644 --- a/homeassistant/components/bmw_connected_drive/const.py +++ b/homeassistant/components/bmw_connected_drive/const.py @@ -8,10 +8,15 @@ ATTR_DIRECTION = "direction" ATTR_VIN = "vin" CONF_ALLOWED_REGIONS = ["china", "north_america", "rest_of_world"] +CONF_CAPTCHA_REGIONS = ["north_america", "rest_of_world"] CONF_READ_ONLY = "read_only" CONF_ACCOUNT = "account" CONF_REFRESH_TOKEN = "refresh_token" CONF_GCID = "gcid" +CONF_CAPTCHA_TOKEN = "captcha_token" +CONF_CAPTCHA_URL = ( + "https://bimmer-connected.readthedocs.io/en/stable/captcha/{region}.html" +) DATA_HASS_CONFIG = "hass_config" diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index d38b7ffacc2..4f560d16f9c 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -84,11 +84,6 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): if self.account.refresh_token != old_refresh_token: self._update_config_entry_refresh_token(self.account.refresh_token) - _LOGGER.debug( - "bimmer_connected: refresh token %s > %s", - old_refresh_token, - self.account.refresh_token, - ) def _update_config_entry_refresh_token(self, refresh_token: str | None) -> None: """Update or delete the refresh_token in the Config Entry.""" diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index 584eb1eebb5..81928a59a52 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive", "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], - "quality_scale": "platinum", - "requirements": ["bimmer-connected[china]==0.16.4"] + "requirements": ["bimmer-connected[china]==0.17.2"] } diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 0e7a4a32ef4..8078971acd1 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -7,6 +7,16 @@ "password": "[%key:common::config_flow::data::password%]", "region": "ConnectedDrive Region" } + }, + "captcha": { + "title": "Are you a robot?", + "description": "A captcha is required for BMW login. Visit the external website to complete the challenge and submit the form. Copy the resulting token into the field below.\n\n{captcha_url}\n\nNo data will be exposed outside of your Home Assistant instance.", + "data": { + "captcha_token": "Captcha token" + }, + "data_description": { + "captcha_token": "One-time token retrieved from the captcha challenge." 
+ } } }, "error": { diff --git a/homeassistant/components/bond/manifest.json b/homeassistant/components/bond/manifest.json index 08e4fb007b7..1d4c110f4fd 100644 --- a/homeassistant/components/bond/manifest.json +++ b/homeassistant/components/bond/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/bond", "iot_class": "local_push", "loggers": ["bond_async"], - "quality_scale": "platinum", "requirements": ["bond-async==0.2.1"], "zeroconf": ["_bond._tcp.local."] } diff --git a/homeassistant/components/brother/manifest.json b/homeassistant/components/brother/manifest.json index 4e773a6cff2..fa70f3a5dc5 100644 --- a/homeassistant/components/brother/manifest.json +++ b/homeassistant/components/brother/manifest.json @@ -8,7 +8,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], - "quality_scale": "platinum", "requirements": ["brother==4.3.1"], "zeroconf": [ { diff --git a/homeassistant/components/brother/sensor.py b/homeassistant/components/brother/sensor.py index e86eb59d6bc..d49ebdf07ca 100644 --- a/homeassistant/components/brother/sensor.py +++ b/homeassistant/components/brother/sensor.py @@ -30,8 +30,6 @@ from .const import DOMAIN ATTR_COUNTER = "counter" ATTR_REMAINING_PAGES = "remaining_pages" -UNIT_PAGES = "p" - _LOGGER = logging.getLogger(__name__) @@ -52,7 +50,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="page_counter", translation_key="page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.page_counter, @@ -60,7 +57,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="bw_counter", translation_key="bw_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.bw_counter, @@ -68,7 +64,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="color_counter", translation_key="color_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.color_counter, @@ -76,7 +71,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="duplex_unit_pages_counter", translation_key="duplex_unit_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.duplex_unit_pages_counter, @@ -92,7 +86,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="drum_remaining_pages", translation_key="drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.drum_remaining_pages, @@ -100,7 +93,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="drum_counter", translation_key="drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.drum_counter, @@ -116,7 +108,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] 
= ( BrotherSensorEntityDescription( key="black_drum_remaining_pages", translation_key="black_drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.black_drum_remaining_pages, @@ -124,7 +115,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="black_drum_counter", translation_key="black_drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.black_drum_counter, @@ -140,7 +130,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="cyan_drum_remaining_pages", translation_key="cyan_drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.cyan_drum_remaining_pages, @@ -148,7 +137,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="cyan_drum_counter", translation_key="cyan_drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.cyan_drum_counter, @@ -164,7 +152,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="magenta_drum_remaining_pages", translation_key="magenta_drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.magenta_drum_remaining_pages, @@ -172,7 +159,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="magenta_drum_counter", translation_key="magenta_drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.magenta_drum_counter, @@ -188,7 +174,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] = ( BrotherSensorEntityDescription( key="yellow_drum_remaining_pages", translation_key="yellow_drum_remaining_pages", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.yellow_drum_remaining_pages, @@ -196,7 +181,6 @@ SENSOR_TYPES: tuple[BrotherSensorEntityDescription, ...] 
= ( BrotherSensorEntityDescription( key="yellow_drum_counter", translation_key="yellow_drum_page_counter", - native_unit_of_measurement=UNIT_PAGES, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda data: data.yellow_drum_counter, diff --git a/homeassistant/components/brother/strings.json b/homeassistant/components/brother/strings.json index 3b5b38ce9a0..b502ed7e3b9 100644 --- a/homeassistant/components/brother/strings.json +++ b/homeassistant/components/brother/strings.json @@ -46,61 +46,75 @@ "name": "Status" }, "page_counter": { - "name": "Page counter" + "name": "Page counter", + "unit_of_measurement": "pages" }, "bw_pages": { - "name": "B/W pages" + "name": "B/W pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "color_pages": { - "name": "Color pages" + "name": "Color pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "duplex_unit_page_counter": { - "name": "Duplex unit page counter" + "name": "Duplex unit page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "drum_remaining_life": { "name": "Drum remaining lifetime" }, "drum_remaining_pages": { - "name": "Drum remaining pages" + "name": "Drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "drum_page_counter": { - "name": "Drum page counter" + "name": "Drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "black_drum_remaining_life": { "name": "Black drum remaining lifetime" }, "black_drum_remaining_pages": { - "name": "Black drum remaining pages" + "name": "Black drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "black_drum_page_counter": { - "name": "Black drum page counter" + "name": "Black drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "cyan_drum_remaining_life": { "name": "Cyan drum remaining lifetime" }, "cyan_drum_remaining_pages": { - "name": "Cyan drum remaining pages" + "name": "Cyan drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "cyan_drum_page_counter": { - "name": "Cyan drum page counter" + "name": "Cyan drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "magenta_drum_remaining_life": { "name": "Magenta drum remaining lifetime" }, "magenta_drum_remaining_pages": { - "name": "Magenta drum remaining pages" + "name": "Magenta drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "magenta_drum_page_counter": { - "name": "Magenta drum page counter" + "name": "Magenta drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "yellow_drum_remaining_life": { "name": "Yellow drum remaining lifetime" }, "yellow_drum_remaining_pages": { - "name": "Yellow drum remaining pages" + "name": "Yellow drum remaining pages", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "yellow_drum_page_counter": { - "name": "Yellow drum page 
counter" + "name": "Yellow drum page counter", + "unit_of_measurement": "[%key:component::brother::entity::sensor::page_counter::unit_of_measurement%]" }, "belt_unit_remaining_life": { "name": "Belt unit remaining lifetime" diff --git a/homeassistant/components/bsblan/__init__.py b/homeassistant/components/bsblan/__init__.py index 4d3c6ee2073..623bfbfef56 100644 --- a/homeassistant/components/bsblan/__init__.py +++ b/homeassistant/components/bsblan/__init__.py @@ -18,7 +18,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_PASSKEY from .coordinator import BSBLanUpdateCoordinator -PLATFORMS = [Platform.CLIMATE, Platform.SENSOR] +PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER] type BSBLanConfigEntry = ConfigEntry[BSBLanData] diff --git a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index fcbe88f2fac..6d992da395a 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -15,7 +15,7 @@ from homeassistant.components.climate import ( ClimateEntityFeature, HVACMode, ) -from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.const import ATTR_TEMPERATURE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.device_registry import format_mac @@ -75,26 +75,19 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity): super().__init__(data.coordinator, data) self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate" - self._attr_min_temp = float(data.static.min_temp.value) - self._attr_max_temp = float(data.static.max_temp.value) - if data.static.min_temp.unit in ("°C", "°C"): - self._attr_temperature_unit = UnitOfTemperature.CELSIUS - else: - self._attr_temperature_unit = UnitOfTemperature.FAHRENHEIT + self._attr_min_temp = data.static.min_temp.value + self._attr_max_temp = data.static.max_temp.value + self._attr_temperature_unit = data.coordinator.client.get_temperature_unit @property def current_temperature(self) -> float | None: """Return the current temperature.""" - if self.coordinator.data.state.current_temperature.value == "---": - # device returns no current temperature - return None - - return float(self.coordinator.data.state.current_temperature.value) + return self.coordinator.data.state.current_temperature.value @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - return float(self.coordinator.data.state.target_temperature.value) + return self.coordinator.data.state.target_temperature.value @property def hvac_mode(self) -> HVACMode | None: diff --git a/homeassistant/components/bsblan/coordinator.py b/homeassistant/components/bsblan/coordinator.py index 1a4299fe72f..be9030d95b0 100644 --- a/homeassistant/components/bsblan/coordinator.py +++ b/homeassistant/components/bsblan/coordinator.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from datetime import timedelta from random import randint -from bsblan import BSBLAN, BSBLANConnectionError, Sensor, State +from bsblan import BSBLAN, BSBLANConnectionError, HotWaterState, Sensor, State from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -20,6 +20,7 @@ class BSBLanCoordinatorData: state: State sensor: Sensor + dhw: HotWaterState class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]): @@ -59,6 +60,7 @@ class 
BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]): state = await self.client.state() sensor = await self.client.sensor() + dhw = await self.client.hot_water_state() except BSBLANConnectionError as err: host = self.config_entry.data[CONF_HOST] if self.config_entry else "unknown" raise UpdateFailed( @@ -66,4 +68,4 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]): ) from err self.update_interval = self._get_update_interval() - return BSBLanCoordinatorData(state=state, sensor=sensor) + return BSBLanCoordinatorData(state=state, sensor=sensor, dhw=dhw) diff --git a/homeassistant/components/bsblan/manifest.json b/homeassistant/components/bsblan/manifest.json index 3f100aef04f..aa9c03abf4a 100644 --- a/homeassistant/components/bsblan/manifest.json +++ b/homeassistant/components/bsblan/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["bsblan"], - "requirements": ["python-bsblan==0.6.4"] + "requirements": ["python-bsblan==1.2.1"] } diff --git a/homeassistant/components/bsblan/sensor.py b/homeassistant/components/bsblan/sensor.py index eab03d7a50c..c13b4ad7650 100644 --- a/homeassistant/components/bsblan/sensor.py +++ b/homeassistant/components/bsblan/sensor.py @@ -72,11 +72,9 @@ class BSBLanSensor(BSBLanEntity, SensorEntity): super().__init__(data.coordinator, data) self.entity_description = description self._attr_unique_id = f"{data.device.MAC}-{description.key}" + self._attr_temperature_unit = data.coordinator.client.get_temperature_unit @property def native_value(self) -> StateType: """Return the state of the sensor.""" - value = self.entity_description.value_fn(self.coordinator.data) - if value == "---": - return None - return value + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/bsblan/strings.json b/homeassistant/components/bsblan/strings.json index 4fb374fee75..a73a89ca1cc 100644 --- a/homeassistant/components/bsblan/strings.json +++ b/homeassistant/components/bsblan/strings.json @@ -31,6 +31,12 @@ }, "set_data_error": { "message": "An error occurred while sending the data to the BSBLAN device" + }, + "set_temperature_error": { + "message": "An error occurred while setting the temperature" + }, + "set_operation_mode_error": { + "message": "An error occurred while setting the operation mode" } }, "entity": { diff --git a/homeassistant/components/bsblan/water_heater.py b/homeassistant/components/bsblan/water_heater.py new file mode 100644 index 00000000000..318408a9124 --- /dev/null +++ b/homeassistant/components/bsblan/water_heater.py @@ -0,0 +1,107 @@ +"""BSBLAN platform to control a compatible Water Heater Device.""" + +from __future__ import annotations + +from typing import Any + +from bsblan import BSBLANError + +from homeassistant.components.water_heater import ( + STATE_ECO, + STATE_OFF, + WaterHeaterEntity, + WaterHeaterEntityFeature, +) +from homeassistant.const import ATTR_TEMPERATURE, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import BSBLanConfigEntry, BSBLanData +from .const import DOMAIN +from .entity import BSBLanEntity + +PARALLEL_UPDATES = 1 + +# Mapping between BSBLan and HA operation modes +OPERATION_MODES = { + "Eco": STATE_ECO, # Energy saving mode + "Off": STATE_OFF, # Protection mode + "On": STATE_ON, # Continuous comfort mode +} + +OPERATION_MODES_REVERSE = {v: k for k, v in OPERATION_MODES.items()} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: BSBLanConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up BSBLAN water heater based on a config entry.""" + data = entry.runtime_data + async_add_entities([BSBLANWaterHeater(data)]) + + +class BSBLANWaterHeater(BSBLanEntity, WaterHeaterEntity): + """Defines a BSBLAN water heater entity.""" + + _attr_name = None + _attr_supported_features = ( + WaterHeaterEntityFeature.TARGET_TEMPERATURE + | WaterHeaterEntityFeature.OPERATION_MODE + ) + + def __init__(self, data: BSBLanData) -> None: + """Initialize BSBLAN water heater.""" + super().__init__(data.coordinator, data) + self._attr_unique_id = format_mac(data.device.MAC) + self._attr_operation_list = list(OPERATION_MODES_REVERSE.keys()) + + # Set temperature limits based on device capabilities + self._attr_temperature_unit = data.coordinator.client.get_temperature_unit + self._attr_min_temp = data.coordinator.data.dhw.reduced_setpoint.value + self._attr_max_temp = data.coordinator.data.dhw.nominal_setpoint_max.value + + @property + def current_operation(self) -> str | None: + """Return current operation.""" + current_mode = self.coordinator.data.dhw.operating_mode.desc + return OPERATION_MODES.get(current_mode) + + @property + def current_temperature(self) -> float | None: + """Return the current temperature.""" + return self.coordinator.data.dhw.dhw_actual_value_top_temperature.value + + @property + def target_temperature(self) -> float | None: + """Return the temperature we try to reach.""" + return self.coordinator.data.dhw.nominal_setpoint.value + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + temperature = kwargs.get(ATTR_TEMPERATURE) + try: + await self.coordinator.client.set_hot_water(nominal_setpoint=temperature) + except BSBLANError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_temperature_error", + ) from err + + await self.coordinator.async_request_refresh() + + async def async_set_operation_mode(self, operation_mode: str) -> None: + """Set new operation mode.""" + bsblan_mode = OPERATION_MODES_REVERSE.get(operation_mode) + try: + await self.coordinator.client.set_hot_water(operating_mode=bsblan_mode) + except BSBLANError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="set_operation_mode_error", + ) from err + + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/bt_home_hub_5/manifest.json b/homeassistant/components/bt_home_hub_5/manifest.json index c2d708d9a02..e260d443dc7 100644 --- a/homeassistant/components/bt_home_hub_5/manifest.json +++ b/homeassistant/components/bt_home_hub_5/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bt_home_hub_5", "iot_class": "local_polling", "loggers": ["bthomehub5_devicelist"], + "quality_scale": "legacy", "requirements": ["bthomehub5-devicelist==0.1.1"] } diff --git a/homeassistant/components/bt_smarthub/manifest.json b/homeassistant/components/bt_smarthub/manifest.json index 8f2dc631e80..31dd99a493f 100644 --- 
a/homeassistant/components/bt_smarthub/manifest.json +++ b/homeassistant/components/bt_smarthub/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/bt_smarthub", "iot_class": "local_polling", "loggers": ["btsmarthub_devicelist"], + "quality_scale": "legacy", "requirements": ["btsmarthub-devicelist==0.2.3"] } diff --git a/homeassistant/components/buienradar/sensor.py b/homeassistant/components/buienradar/sensor.py index afce293402e..712f765237e 100644 --- a/homeassistant/components/buienradar/sensor.py +++ b/homeassistant/components/buienradar/sensor.py @@ -742,6 +742,7 @@ class BrSensor(SensorEntity): ) -> None: """Initialize the sensor.""" self.entity_description = description + self._data: BrData | None = None self._measured = None self._attr_unique_id = ( f"{coordinates[CONF_LATITUDE]:2.6f}{coordinates[CONF_LONGITUDE]:2.6f}" @@ -756,17 +757,29 @@ class BrSensor(SensorEntity): if description.key.startswith(PRECIPITATION_FORECAST): self._timeframe = None + async def async_added_to_hass(self) -> None: + """Handle entity being added to hass.""" + if self._data is None: + return + self._update() + @callback def data_updated(self, data: BrData): - """Update data.""" - if self._load_data(data.data) and self.hass: + """Handle data update.""" + self._data = data + if not self.hass: + return + self._update() + + def _update(self): + """Update sensor data.""" + _LOGGER.debug("Updating sensor %s", self.entity_id) + if self._load_data(self._data.data): self.async_write_ha_state() @callback def _load_data(self, data): # noqa: C901 """Load the sensor with relevant data.""" - # Find sensor - # Check if we have a new measurement, # otherwise we do not have to update the sensor if self._measured == data.get(MEASURED): diff --git a/homeassistant/components/caldav/calendar.py b/homeassistant/components/caldav/calendar.py index d9ebe8e73fd..fb53947a723 100644 --- a/homeassistant/components/caldav/calendar.py +++ b/homeassistant/components/caldav/calendar.py @@ -109,6 +109,7 @@ async def async_setup_platform( entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass) coordinator = CalDavUpdateCoordinator( hass, + None, calendar=calendar, days=days, include_all_day=True, @@ -126,6 +127,7 @@ async def async_setup_platform( entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass) coordinator = CalDavUpdateCoordinator( hass, + None, calendar=calendar, days=days, include_all_day=False, @@ -152,6 +154,7 @@ async def async_setup_entry( async_generate_entity_id(ENTITY_ID_FORMAT, calendar.name, hass=hass), CalDavUpdateCoordinator( hass, + entry, calendar=calendar, days=CONFIG_ENTRY_DEFAULT_DAYS, include_all_day=True, @@ -204,7 +207,8 @@ class WebDavCalendarEntity(CoordinatorEntity[CalDavUpdateCoordinator], CalendarE if self._supports_offset: self._attr_extra_state_attributes = { "offset_reached": is_offset_reached( - self._event.start_datetime_local, self.coordinator.offset + self._event.start_datetime_local, + self.coordinator.offset, # type: ignore[arg-type] ) if self._event else False diff --git a/homeassistant/components/caldav/coordinator.py b/homeassistant/components/caldav/coordinator.py index 3a10b567167..eb09e3f5452 100644 --- a/homeassistant/components/caldav/coordinator.py +++ b/homeassistant/components/caldav/coordinator.py @@ -6,6 +6,9 @@ from datetime import date, datetime, time, timedelta from functools import partial import logging import re +from typing import TYPE_CHECKING + +import caldav from 
homeassistant.components.calendar import CalendarEvent, extract_offset from homeassistant.core import HomeAssistant @@ -14,6 +17,9 @@ from homeassistant.util import dt as dt_util from .api import get_attr_value +if TYPE_CHECKING: + from . import CalDavConfigEntry + _LOGGER = logging.getLogger(__name__) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15) @@ -23,11 +29,20 @@ OFFSET = "!!" class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]): """Class to utilize the calendar dav client object to get next event.""" - def __init__(self, hass, calendar, days, include_all_day, search): + def __init__( + self, + hass: HomeAssistant, + entry: CalDavConfigEntry | None, + calendar: caldav.Calendar, + days: int, + include_all_day: bool, + search: str | None, + ) -> None: """Set up how we are going to search the WebDav calendar.""" super().__init__( hass, _LOGGER, + config_entry=entry, name=f"CalDAV {calendar.name}", update_interval=MIN_TIME_BETWEEN_UPDATES, ) @@ -35,7 +50,7 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]): self.days = days self.include_all_day = include_all_day self.search = search - self.offset = None + self.offset: timedelta | None = None async def async_get_events( self, hass: HomeAssistant, start_date: datetime, end_date: datetime @@ -109,7 +124,7 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]): _start_of_tomorrow = start_of_tomorrow if _start_of_today <= start_dt < _start_of_tomorrow: new_event = event.copy() - new_vevent = new_event.instance.vevent + new_vevent = new_event.instance.vevent # type: ignore[attr-defined] if hasattr(new_vevent, "dtend"): dur = new_vevent.dtend.value - new_vevent.dtstart.value new_vevent.dtend.value = start_dt + dur diff --git a/homeassistant/components/calendar/strings.json b/homeassistant/components/calendar/strings.json index 76e6c42b666..c0127c20d05 100644 --- a/homeassistant/components/calendar/strings.json +++ b/homeassistant/components/calendar/strings.json @@ -82,11 +82,11 @@ }, "end_date_time": { "name": "End time", - "description": "Returns active events before this time (exclusive). Cannot be used with 'duration'." + "description": "Returns active events before this time (exclusive). Cannot be used with Duration." }, "duration": { "name": "Duration", - "description": "Returns active events from start_date_time until the specified duration." + "description": "Returns active events from Start time for the specified duration." 
} } } diff --git a/homeassistant/components/cambridge_audio/__init__.py b/homeassistant/components/cambridge_audio/__init__.py index a584f0db6c1..8b910bb81bb 100644 --- a/homeassistant/components/cambridge_audio/__init__.py +++ b/homeassistant/components/cambridge_audio/__init__.py @@ -12,6 +12,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONNECT_TIMEOUT, DOMAIN, STREAM_MAGIC_EXCEPTIONS @@ -27,7 +28,7 @@ async def async_setup_entry( ) -> bool: """Set up Cambridge Audio integration from a config entry.""" - client = StreamMagicClient(entry.data[CONF_HOST]) + client = StreamMagicClient(entry.data[CONF_HOST], async_get_clientsession(hass)) async def _connection_update_callback( _client: StreamMagicClient, _callback_type: CallbackType diff --git a/homeassistant/components/cambridge_audio/config_flow.py b/homeassistant/components/cambridge_audio/config_flow.py index 201e531608d..ca587ee9a48 100644 --- a/homeassistant/components/cambridge_audio/config_flow.py +++ b/homeassistant/components/cambridge_audio/config_flow.py @@ -9,6 +9,7 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONNECT_TIMEOUT, DOMAIN, STREAM_MAGIC_EXCEPTIONS @@ -30,7 +31,7 @@ class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(discovery_info.properties["serial"]) self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - client = StreamMagicClient(host) + client = StreamMagicClient(host, async_get_clientsession(self.hass)) try: async with asyncio.timeout(CONNECT_TIMEOUT): await client.connect() @@ -69,7 +70,9 @@ class CambridgeAudioConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors: dict[str, str] = {} if user_input: - client = StreamMagicClient(user_input[CONF_HOST]) + client = StreamMagicClient( + user_input[CONF_HOST], async_get_clientsession(self.hass) + ) try: async with asyncio.timeout(CONNECT_TIMEOUT): await client.connect() diff --git a/homeassistant/components/cambridge_audio/manifest.json b/homeassistant/components/cambridge_audio/manifest.json index c359ca14a21..7b7e341e3c6 100644 --- a/homeassistant/components/cambridge_audio/manifest.json +++ b/homeassistant/components/cambridge_audio/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aiostreammagic"], - "requirements": ["aiostreammagic==2.8.5"], + "requirements": ["aiostreammagic==2.10.0"], "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."] } diff --git a/homeassistant/components/cambridge_audio/media_player.py b/homeassistant/components/cambridge_audio/media_player.py index 5e340cdd21e..805cf8ec7f6 100644 --- a/homeassistant/components/cambridge_audio/media_player.py +++ b/homeassistant/components/cambridge_audio/media_player.py @@ -57,6 +57,8 @@ TRANSPORT_FEATURES: dict[TransportControl, MediaPlayerEntityFeature] = { TransportControl.STOP: MediaPlayerEntityFeature.STOP, } +PARALLEL_UPDATES = 0 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/cambridge_audio/select.py 
b/homeassistant/components/cambridge_audio/select.py index c99abc853e5..b1bc0f9e4df 100644 --- a/homeassistant/components/cambridge_audio/select.py +++ b/homeassistant/components/cambridge_audio/select.py @@ -12,7 +12,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import CambridgeAudioEntity +from .entity import CambridgeAudioEntity, command + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -116,6 +118,7 @@ class CambridgeAudioSelect(CambridgeAudioEntity, SelectEntity): """Return the state of the select.""" return self.entity_description.value_fn(self.client) + @command async def async_select_option(self, option: str) -> None: """Change the selected option.""" await self.entity_description.set_value_fn(self.client, option) diff --git a/homeassistant/components/cambridge_audio/switch.py b/homeassistant/components/cambridge_audio/switch.py index 3209b275d46..72aa0d3cbea 100644 --- a/homeassistant/components/cambridge_audio/switch.py +++ b/homeassistant/components/cambridge_audio/switch.py @@ -12,7 +12,9 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import CambridgeAudioEntity +from .entity import CambridgeAudioEntity, command + +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -73,10 +75,12 @@ class CambridgeAudioSwitch(CambridgeAudioEntity, SwitchEntity): """Return the state of the switch.""" return self.entity_description.value_fn(self.client) + @command async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" await self.entity_description.set_value_fn(self.client, True) + @command async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" await self.entity_description.set_value_fn(self.client, False) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index 6d65ea255c7..781388f12d6 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -20,7 +20,7 @@ from aiohttp import hdrs, web import attr from propcache import cached_property, under_cached_property import voluptuous as vol -from webrtc_models import RTCIceCandidate, RTCIceServer +from webrtc_models import RTCIceCandidateInit, RTCIceServer from homeassistant.components import websocket_api from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView @@ -55,11 +55,13 @@ from homeassistant.helpers.deprecation import ( DeprecatedConstantEnum, all_with_deprecated_constants, check_if_deprecated_constant, + deprecated_function, dir_with_deprecated_constants, ) from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.event import async_track_time_interval +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.helpers.network import get_url from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolDictType @@ -421,8 +423,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if hass.config.webrtc.ice_servers: return hass.config.webrtc.ice_servers return [ - RTCIceServer(urls="stun:stun.home-assistant.io:80"), - RTCIceServer(urls="stun:stun.home-assistant.io:3478"), + RTCIceServer( 
+ urls=[ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + ), ] async_register_ice_servers(hass, get_ice_servers) @@ -462,6 +468,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): # Entity Properties _attr_brand: str | None = None _attr_frame_interval: float = MIN_STREAM_INTERVAL + # Deprecated in 2024.12. Remove in 2025.6 _attr_frontend_stream_type: StreamType | None _attr_is_on: bool = True _attr_is_recording: bool = False @@ -493,6 +500,16 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): type(self).async_handle_async_webrtc_offer != Camera.async_handle_async_webrtc_offer ) + self._deprecate_attr_frontend_stream_type_logged = False + if type(self).frontend_stream_type != Camera.frontend_stream_type: + report_usage( + ( + f"is overwriting the 'frontend_stream_type' property in the {type(self).__name__} class," + " which is deprecated and will be removed in Home Assistant 2025.6, " + ), + core_integration_behavior=ReportBehavior.ERROR, + exclude_integrations={DOMAIN}, + ) @cached_property def entity_picture(self) -> str: @@ -562,11 +579,29 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): frontend which camera attributes and player to use. The default type is to use HLS, and components can override to change the type. """ + # Deprecated in 2024.12. Remove in 2025.6 + # Use the camera_capabilities instead if hasattr(self, "_attr_frontend_stream_type"): + if not self._deprecate_attr_frontend_stream_type_logged: + report_usage( + ( + f"is setting the '_attr_frontend_stream_type' attribute in the {type(self).__name__} class," + " which is deprecated and will be removed in Home Assistant 2025.6, " + ), + core_integration_behavior=ReportBehavior.ERROR, + exclude_integrations={DOMAIN}, + ) + + self._deprecate_attr_frontend_stream_type_logged = True return self._attr_frontend_stream_type if CameraEntityFeature.STREAM not in self.supported_features_compat: return None - if self._webrtc_provider or self._legacy_webrtc_provider: + if ( + self._webrtc_provider + or self._legacy_webrtc_provider + or self._supports_native_sync_webrtc + or self._supports_native_async_webrtc + ): return StreamType.WEB_RTC return StreamType.HLS @@ -624,14 +659,17 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Async means that it could take some time to process the offer and responses/message will be sent with the send_message callback. - This method is used by cameras with CameraEntityFeature.STREAM and StreamType.WEB_RTC. + This method is used by cameras with CameraEntityFeature.STREAM. An integration overriding this method must also implement async_on_webrtc_candidate. Integrations can override with a native WebRTC implementation. 
""" if self._supports_native_sync_webrtc: try: - answer = await self.async_handle_web_rtc_offer(offer_sdp) + answer = await deprecated_function( + "async_handle_async_webrtc_offer", + breaks_in_ha_version="2025.6", + )(self.async_handle_web_rtc_offer)(offer_sdp) except ValueError as ex: _LOGGER.error("Error handling WebRTC offer: %s", ex) send_message( @@ -861,7 +899,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): return config async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Handle a WebRTC candidate.""" if self._webrtc_provider: @@ -892,7 +930,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): else: frontend_stream_types.add(StreamType.HLS) - if self._webrtc_provider: + if self._webrtc_provider or self._legacy_webrtc_provider: frontend_stream_types.add(StreamType.WEB_RTC) return CameraCapabilities(frontend_stream_types) diff --git a/homeassistant/components/camera/media_source.py b/homeassistant/components/camera/media_source.py index ea30dafb09e..701457afc3e 100644 --- a/homeassistant/components/camera/media_source.py +++ b/homeassistant/components/camera/media_source.py @@ -64,7 +64,7 @@ class CameraMediaSource(MediaSource): if not camera: raise Unresolvable(f"Could not resolve media item: {item.identifier}") - if (stream_type := camera.frontend_stream_type) is None: + if not (stream_types := camera.camera_capabilities.frontend_stream_types): return PlayMedia( f"/api/camera_proxy_stream/{camera.entity_id}", camera.content_type ) @@ -76,7 +76,7 @@ class CameraMediaSource(MediaSource): url = await _async_stream_endpoint_url(self.hass, camera, HLS_PROVIDER) except HomeAssistantError as err: # Handle known error - if stream_type != StreamType.HLS: + if StreamType.HLS not in stream_types: raise Unresolvable( "Camera does not support MJPEG or HLS streaming." 
) from err @@ -95,14 +95,16 @@ class CameraMediaSource(MediaSource): can_stream_hls = "stream" in self.hass.config.components async def _filter_browsable_camera(camera: Camera) -> BrowseMediaSource | None: - stream_type = camera.frontend_stream_type - if stream_type is None: + stream_types = camera.camera_capabilities.frontend_stream_types + if not stream_types: return _media_source_for_camera(self.hass, camera, camera.content_type) if not can_stream_hls: return None content_type = FORMAT_CONTENT_TYPE[HLS_PROVIDER] - if stream_type != StreamType.HLS and not (await camera.stream_source()): + if StreamType.HLS not in stream_types and not ( + await camera.stream_source() + ): return None return _media_source_for_camera(self.hass, camera, content_type) diff --git a/homeassistant/components/camera/webrtc.py b/homeassistant/components/camera/webrtc.py index 0612c96e40c..3630acf1cfe 100644 --- a/homeassistant/components/camera/webrtc.py +++ b/homeassistant/components/camera/webrtc.py @@ -6,17 +6,24 @@ from abc import ABC, abstractmethod import asyncio from collections.abc import Awaitable, Callable, Iterable from dataclasses import asdict, dataclass, field -from functools import cache, partial +from functools import cache, partial, wraps import logging from typing import TYPE_CHECKING, Any, Protocol +from mashumaro import MissingField import voluptuous as vol -from webrtc_models import RTCConfiguration, RTCIceCandidate, RTCIceServer +from webrtc_models import ( + RTCConfiguration, + RTCIceCandidate, + RTCIceCandidateInit, + RTCIceServer, +) from homeassistant.components import websocket_api from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, issue_registry as ir +from homeassistant.helpers.deprecation import deprecated_function from homeassistant.util.hass_dict import HassKey from homeassistant.util.ulid import ulid @@ -78,13 +85,13 @@ class WebRTCAnswer(WebRTCMessage): class WebRTCCandidate(WebRTCMessage): """WebRTC candidate.""" - candidate: RTCIceCandidate + candidate: RTCIceCandidate | RTCIceCandidateInit def as_dict(self) -> dict[str, Any]: """Return a dict representation of the message.""" return { "type": self._get_type(), - "candidate": self.candidate.candidate, + "candidate": self.candidate.to_dict(), } @@ -146,7 +153,7 @@ class CameraWebRTCProvider(ABC): @abstractmethod async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Handle the WebRTC candidate.""" @@ -205,6 +212,51 @@ async def _async_refresh_providers(hass: HomeAssistant) -> None: ) +type WsCommandWithCamera = Callable[ + [websocket_api.ActiveConnection, dict[str, Any], Camera], + Awaitable[None], +] + + +def require_webrtc_support( + error_code: str, +) -> Callable[[WsCommandWithCamera], websocket_api.AsyncWebSocketCommandHandler]: + """Validate that the camera supports WebRTC.""" + + def decorate( + func: WsCommandWithCamera, + ) -> websocket_api.AsyncWebSocketCommandHandler: + """Decorate func.""" + + @wraps(func) + async def validate( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: + """Validate that the camera supports WebRTC.""" + entity_id = msg["entity_id"] + camera = get_camera_from_entity_id(hass, entity_id) + if StreamType.WEB_RTC not in ( + stream_types := camera.camera_capabilities.frontend_stream_types + ): + connection.send_error( + 
msg["id"], + error_code, + ( + "Camera does not support WebRTC," + f" frontend_stream_types={stream_types}" + ), + ) + return + + await func(connection, msg, camera) + + return validate + + return decorate + + @websocket_api.websocket_command( { vol.Required("type"): "camera/webrtc/offer", @@ -213,8 +265,9 @@ async def _async_refresh_providers(hass: HomeAssistant) -> None: } ) @websocket_api.async_response +@require_webrtc_support("webrtc_offer_failed") async def ws_webrtc_offer( - hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] + connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera ) -> None: """Handle the signal path for a WebRTC stream. @@ -226,20 +279,7 @@ async def ws_webrtc_offer( Async friendly. """ - entity_id = msg["entity_id"] offer = msg["offer"] - camera = get_camera_from_entity_id(hass, entity_id) - if camera.frontend_stream_type != StreamType.WEB_RTC: - connection.send_error( - msg["id"], - "webrtc_offer_failed", - ( - "Camera does not support WebRTC," - f" frontend_stream_type={camera.frontend_stream_type}" - ), - ) - return - session_id = ulid() connection.subscriptions[msg["id"]] = partial( camera.close_webrtc_session, session_id @@ -278,23 +318,11 @@ async def ws_webrtc_offer( } ) @websocket_api.async_response +@require_webrtc_support("webrtc_get_client_config_failed") async def ws_get_client_config( - hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] + connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera ) -> None: """Handle get WebRTC client config websocket command.""" - entity_id = msg["entity_id"] - camera = get_camera_from_entity_id(hass, entity_id) - if camera.frontend_stream_type != StreamType.WEB_RTC: - connection.send_error( - msg["id"], - "webrtc_get_client_config_failed", - ( - "Camera does not support WebRTC," - f" frontend_stream_type={camera.frontend_stream_type}" - ), - ) - return - config = camera.async_get_webrtc_client_configuration().to_frontend_dict() connection.send_result( msg["id"], @@ -302,35 +330,29 @@ async def ws_get_client_config( ) +def _parse_webrtc_candidate_init(value: Any) -> RTCIceCandidateInit: + """Validate and parse a WebRTCCandidateInit dict.""" + try: + return RTCIceCandidateInit.from_dict(value) + except (MissingField, ValueError) as ex: + raise vol.Invalid(str(ex)) from ex + + @websocket_api.websocket_command( { vol.Required("type"): "camera/webrtc/candidate", vol.Required("entity_id"): cv.entity_id, vol.Required("session_id"): str, - vol.Required("candidate"): str, + vol.Required("candidate"): _parse_webrtc_candidate_init, } ) @websocket_api.async_response +@require_webrtc_support("webrtc_candidate_failed") async def ws_candidate( - hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any] + connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera ) -> None: """Handle WebRTC candidate websocket command.""" - entity_id = msg["entity_id"] - camera = get_camera_from_entity_id(hass, entity_id) - if camera.frontend_stream_type != StreamType.WEB_RTC: - connection.send_error( - msg["id"], - "webrtc_candidate_failed", - ( - "Camera does not support WebRTC," - f" frontend_stream_type={camera.frontend_stream_type}" - ), - ) - return - - await camera.async_on_webrtc_candidate( - msg["session_id"], RTCIceCandidate(msg["candidate"]) - ) + await camera.async_on_webrtc_candidate(msg["session_id"], msg["candidate"]) 
connection.send_message(websocket_api.result_message(msg["id"])) @@ -424,6 +446,7 @@ class _CameraRtspToWebRTCProvider(CameraWebRTCLegacyProvider): return await self._fn(stream_source, offer_sdp, camera.entity_id) +@deprecated_function("async_register_webrtc_provider", breaks_in_ha_version="2025.6") def async_register_rtsp_to_web_rtc_provider( hass: HomeAssistant, domain: str, diff --git a/homeassistant/components/canary/config_flow.py b/homeassistant/components/canary/config_flow.py index 5af7142af8f..17e660e96ac 100644 --- a/homeassistant/components/canary/config_flow.py +++ b/homeassistant/components/canary/config_flow.py @@ -52,7 +52,7 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return CanaryOptionsFlowHandler(config_entry) + return CanaryOptionsFlowHandler() async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initiated by configuration file.""" @@ -62,9 +62,6 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} default_username = "" @@ -104,10 +101,6 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN): class CanaryOptionsFlowHandler(OptionsFlow): """Handle Canary client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/canary/manifest.json b/homeassistant/components/canary/manifest.json index 4d5adf4a32b..9383bc91556 100644 --- a/homeassistant/components/canary/manifest.json +++ b/homeassistant/components/canary/manifest.json @@ -7,5 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/canary", "iot_class": "cloud_polling", "loggers": ["canary"], - "requirements": ["py-canary==0.5.4"] + "requirements": ["py-canary==0.5.4"], + "single_config_entry": true } diff --git a/homeassistant/components/canary/strings.json b/homeassistant/components/canary/strings.json index 9555756deff..699e8b25e11 100644 --- a/homeassistant/components/canary/strings.json +++ b/homeassistant/components/canary/strings.json @@ -14,7 +14,6 @@ "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { - "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "unknown": "[%key:common::config_flow::error::unknown%]" } }, diff --git a/homeassistant/components/cast/config_flow.py b/homeassistant/components/cast/config_flow.py index 0ebfa553f62..03a3f2ea1f8 100644 --- a/homeassistant/components/cast/config_flow.py +++ b/homeassistant/components/cast/config_flow.py @@ -41,7 +41,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> CastOptionsFlowHandler: """Get the options flow for this handler.""" - return CastOptionsFlowHandler(config_entry) + return CastOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -109,9 +109,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): class CastOptionsFlowHandler(OptionsFlow): """Handle Google Cast options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize Google 
Cast options flow.""" - self.config_entry = config_entry self.updated_config: dict[str, Any] = {} async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: diff --git a/homeassistant/components/channels/manifest.json b/homeassistant/components/channels/manifest.json index 0455ca2e8ad..9476e006eda 100644 --- a/homeassistant/components/channels/manifest.json +++ b/homeassistant/components/channels/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/channels", "iot_class": "local_polling", "loggers": ["pychannels"], + "quality_scale": "legacy", "requirements": ["pychannels==1.2.3"] } diff --git a/homeassistant/components/cisco_ios/manifest.json b/homeassistant/components/cisco_ios/manifest.json index dd0d4213973..ba0678c167f 100644 --- a/homeassistant/components/cisco_ios/manifest.json +++ b/homeassistant/components/cisco_ios/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/cisco_ios", "iot_class": "local_polling", "loggers": ["pexpect", "ptyprocess"], + "quality_scale": "legacy", "requirements": ["pexpect==4.6.0"] } diff --git a/homeassistant/components/cisco_mobility_express/manifest.json b/homeassistant/components/cisco_mobility_express/manifest.json index 02786e80cd8..f9ee1c92ed1 100644 --- a/homeassistant/components/cisco_mobility_express/manifest.json +++ b/homeassistant/components/cisco_mobility_express/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/cisco_mobility_express", "iot_class": "local_polling", "loggers": ["ciscomobilityexpress"], + "quality_scale": "legacy", "requirements": ["ciscomobilityexpress==0.3.9"] } diff --git a/homeassistant/components/cisco_webex_teams/manifest.json b/homeassistant/components/cisco_webex_teams/manifest.json index 3da31a0b453..85cfeb7eddf 100644 --- a/homeassistant/components/cisco_webex_teams/manifest.json +++ b/homeassistant/components/cisco_webex_teams/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/cisco_webex_teams", "iot_class": "cloud_push", "loggers": ["webexpythonsdk"], + "quality_scale": "legacy", "requirements": ["webexpythonsdk==2.0.1"] } diff --git a/homeassistant/components/citybikes/manifest.json b/homeassistant/components/citybikes/manifest.json index e163b85ec08..8dac7def832 100644 --- a/homeassistant/components/citybikes/manifest.json +++ b/homeassistant/components/citybikes/manifest.json @@ -3,5 +3,6 @@ "name": "CityBikes", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/citybikes", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/clementine/manifest.json b/homeassistant/components/clementine/manifest.json index 88e7f35f49a..42fe81d0e9b 100644 --- a/homeassistant/components/clementine/manifest.json +++ b/homeassistant/components/clementine/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/clementine", "iot_class": "local_polling", "loggers": ["clementineremote"], + "quality_scale": "legacy", "requirements": ["python-clementine-remote==1.0.1"] } diff --git a/homeassistant/components/clickatell/manifest.json b/homeassistant/components/clickatell/manifest.json index 31456b25c64..3c5ee8b0053 100644 --- a/homeassistant/components/clickatell/manifest.json +++ b/homeassistant/components/clickatell/manifest.json @@ -3,5 +3,6 @@ "name": "Clickatell", "codeowners": [], "documentation": 
"https://www.home-assistant.io/integrations/clickatell", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/clicksend/manifest.json b/homeassistant/components/clicksend/manifest.json index 41bd10108f4..8a43428026b 100644 --- a/homeassistant/components/clicksend/manifest.json +++ b/homeassistant/components/clicksend/manifest.json @@ -3,5 +3,6 @@ "name": "ClickSend SMS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/clicksend", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/clicksend_tts/manifest.json b/homeassistant/components/clicksend_tts/manifest.json index ffa35fd070f..eb884e41203 100644 --- a/homeassistant/components/clicksend_tts/manifest.json +++ b/homeassistant/components/clicksend_tts/manifest.json @@ -3,5 +3,6 @@ "name": "ClickSend TTS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/clicksend_tts", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index 94db8008aa1..de9c90c81b8 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -70,6 +70,8 @@ from .const import ( # noqa: F401 ATTR_MIN_TEMP, ATTR_PRESET_MODE, ATTR_PRESET_MODES, + ATTR_SWING_HORIZONTAL_MODE, + ATTR_SWING_HORIZONTAL_MODES, ATTR_SWING_MODE, ATTR_SWING_MODES, ATTR_TARGET_TEMP_HIGH, @@ -101,6 +103,7 @@ from .const import ( # noqa: F401 SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_HORIZONTAL_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, SWING_BOTH, @@ -219,6 +222,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: "async_handle_set_swing_mode_service", [ClimateEntityFeature.SWING_MODE], ) + component.async_register_entity_service( + SERVICE_SET_SWING_HORIZONTAL_MODE, + {vol.Required(ATTR_SWING_HORIZONTAL_MODE): cv.string}, + "async_handle_set_swing_horizontal_mode_service", + [ClimateEntityFeature.SWING_HORIZONTAL_MODE], + ) return True @@ -256,6 +265,8 @@ CACHED_PROPERTIES_WITH_ATTR_ = { "fan_modes", "swing_mode", "swing_modes", + "swing_horizontal_mode", + "swing_horizontal_modes", "supported_features", "min_temp", "max_temp", @@ -300,6 +311,8 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): _attr_supported_features: ClimateEntityFeature = ClimateEntityFeature(0) _attr_swing_mode: str | None _attr_swing_modes: list[str] | None + _attr_swing_horizontal_mode: str | None + _attr_swing_horizontal_modes: list[str] | None _attr_target_humidity: float | None = None _attr_target_temperature_high: float | None _attr_target_temperature_low: float | None @@ -314,14 +327,14 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): # once migrated and set the feature flags TURN_ON/TURN_OFF as needed. _enable_turn_on_off_backwards_compatibility: bool = True - def __getattribute__(self, __name: str) -> Any: + def __getattribute__(self, name: str, /) -> Any: """Get attribute. Modify return of `supported_features` to include `_mod_supported_features` if attribute is set. """ - if __name != "supported_features": - return super().__getattribute__(__name) + if name != "supported_features": + return super().__getattribute__(name) # Convert the supported features to ClimateEntityFeature. 
# Remove this compatibility shim in 2025.1 or later. @@ -513,6 +526,9 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if ClimateEntityFeature.SWING_MODE in supported_features: data[ATTR_SWING_MODES] = self.swing_modes + if ClimateEntityFeature.SWING_HORIZONTAL_MODE in supported_features: + data[ATTR_SWING_HORIZONTAL_MODES] = self.swing_horizontal_modes + return data @final @@ -564,6 +580,9 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): if ClimateEntityFeature.SWING_MODE in supported_features: data[ATTR_SWING_MODE] = self.swing_mode + if ClimateEntityFeature.SWING_HORIZONTAL_MODE in supported_features: + data[ATTR_SWING_HORIZONTAL_MODE] = self.swing_horizontal_mode + if ClimateEntityFeature.AUX_HEAT in supported_features: data[ATTR_AUX_HEAT] = STATE_ON if self.is_aux_heat else STATE_OFF if ( @@ -691,11 +710,27 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """ return self._attr_swing_modes + @cached_property + def swing_horizontal_mode(self) -> str | None: + """Return the horizontal swing setting. + + Requires ClimateEntityFeature.SWING_HORIZONTAL_MODE. + """ + return self._attr_swing_horizontal_mode + + @cached_property + def swing_horizontal_modes(self) -> list[str] | None: + """Return the list of available horizontal swing modes. + + Requires ClimateEntityFeature.SWING_HORIZONTAL_MODE. + """ + return self._attr_swing_horizontal_modes + @final @callback def _valid_mode_or_raise( self, - mode_type: Literal["preset", "swing", "fan", "hvac"], + mode_type: Literal["preset", "horizontal_swing", "swing", "fan", "hvac"], mode: str | HVACMode, modes: list[str] | list[HVACMode] | None, ) -> None: @@ -793,6 +828,26 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Set new target swing operation.""" await self.hass.async_add_executor_job(self.set_swing_mode, swing_mode) + @final + async def async_handle_set_swing_horizontal_mode_service( + self, swing_horizontal_mode: str + ) -> None: + """Validate and set new horizontal swing mode.""" + self._valid_mode_or_raise( + "horizontal_swing", swing_horizontal_mode, self.swing_horizontal_modes + ) + await self.async_set_swing_horizontal_mode(swing_horizontal_mode) + + def set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None: + """Set new target horizontal swing operation.""" + raise NotImplementedError + + async def async_set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None: + """Set new target horizontal swing operation.""" + await self.hass.async_add_executor_job( + self.set_swing_horizontal_mode, swing_horizontal_mode + ) + @final async def async_handle_set_preset_mode_service(self, preset_mode: str) -> None: """Validate and set new preset mode.""" diff --git a/homeassistant/components/climate/const.py b/homeassistant/components/climate/const.py index a84a2f3c628..b22d5df93ba 100644 --- a/homeassistant/components/climate/const.py +++ b/homeassistant/components/climate/const.py @@ -92,6 +92,10 @@ SWING_BOTH = "both" SWING_VERTICAL = "vertical" SWING_HORIZONTAL = "horizontal" +# Possible horizontal swing state +SWING_HORIZONTAL_ON = "on" +SWING_HORIZONTAL_OFF = "off" + class HVACAction(StrEnum): """HVAC action for climate devices.""" @@ -134,6 +138,8 @@ ATTR_HVAC_MODES = "hvac_modes" ATTR_HVAC_MODE = "hvac_mode" ATTR_SWING_MODES = "swing_modes" ATTR_SWING_MODE = "swing_mode" +ATTR_SWING_HORIZONTAL_MODE = "swing_horizontal_mode" +ATTR_SWING_HORIZONTAL_MODES = "swing_horizontal_modes" 
ATTR_TARGET_TEMP_HIGH = "target_temp_high" ATTR_TARGET_TEMP_LOW = "target_temp_low" ATTR_TARGET_TEMP_STEP = "target_temp_step" @@ -153,6 +159,7 @@ SERVICE_SET_PRESET_MODE = "set_preset_mode" SERVICE_SET_HUMIDITY = "set_humidity" SERVICE_SET_HVAC_MODE = "set_hvac_mode" SERVICE_SET_SWING_MODE = "set_swing_mode" +SERVICE_SET_SWING_HORIZONTAL_MODE = "set_swing_horizontal_mode" SERVICE_SET_TEMPERATURE = "set_temperature" @@ -168,6 +175,7 @@ class ClimateEntityFeature(IntFlag): AUX_HEAT = 64 TURN_OFF = 128 TURN_ON = 256 + SWING_HORIZONTAL_MODE = 512 # These SUPPORT_* constants are deprecated as of Home Assistant 2022.5. diff --git a/homeassistant/components/climate/icons.json b/homeassistant/components/climate/icons.json index c9a8d12d01b..8f4ffa6b19f 100644 --- a/homeassistant/components/climate/icons.json +++ b/homeassistant/components/climate/icons.json @@ -51,6 +51,13 @@ "on": "mdi:arrow-oscillating", "vertical": "mdi:arrow-up-down" } + }, + "swing_horizontal_mode": { + "default": "mdi:circle-medium", + "state": { + "off": "mdi:arrow-oscillating-off", + "on": "mdi:arrow-expand-horizontal" + } } } } @@ -65,6 +72,9 @@ "set_swing_mode": { "service": "mdi:arrow-oscillating" }, + "set_swing_horizontal_mode": { + "service": "mdi:arrow-expand-horizontal" + }, "set_temperature": { "service": "mdi:thermometer" }, diff --git a/homeassistant/components/climate/reproduce_state.py b/homeassistant/components/climate/reproduce_state.py index 99357777fba..d38e243cb62 100644 --- a/homeassistant/components/climate/reproduce_state.py +++ b/homeassistant/components/climate/reproduce_state.py @@ -14,6 +14,7 @@ from .const import ( ATTR_HUMIDITY, ATTR_HVAC_MODE, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -23,6 +24,7 @@ from .const import ( SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_HORIZONTAL_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, ) @@ -76,6 +78,14 @@ async def _async_reproduce_states( ): await call_service(SERVICE_SET_SWING_MODE, [ATTR_SWING_MODE]) + if ( + ATTR_SWING_HORIZONTAL_MODE in state.attributes + and state.attributes[ATTR_SWING_HORIZONTAL_MODE] is not None + ): + await call_service( + SERVICE_SET_SWING_HORIZONTAL_MODE, [ATTR_SWING_HORIZONTAL_MODE] + ) + if ( ATTR_FAN_MODE in state.attributes and state.attributes[ATTR_FAN_MODE] is not None diff --git a/homeassistant/components/climate/services.yaml b/homeassistant/components/climate/services.yaml index 12a8e6f001f..68421bf2386 100644 --- a/homeassistant/components/climate/services.yaml +++ b/homeassistant/components/climate/services.yaml @@ -131,7 +131,20 @@ set_swing_mode: fields: swing_mode: required: true - example: "horizontal" + example: "on" + selector: + text: + +set_swing_horizontal_mode: + target: + entity: + domain: climate + supported_features: + - climate.ClimateEntityFeature.SWING_HORIZONTAL_MODE + fields: + swing_horizontal_mode: + required: true + example: "on" selector: text: diff --git a/homeassistant/components/climate/significant_change.py b/homeassistant/components/climate/significant_change.py index 0c4cdd4ac6a..2b7e2c5d8b1 100644 --- a/homeassistant/components/climate/significant_change.py +++ b/homeassistant/components/climate/significant_change.py @@ -19,6 +19,7 @@ from . 
import ( ATTR_HUMIDITY, ATTR_HVAC_ACTION, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -34,6 +35,7 @@ SIGNIFICANT_ATTRIBUTES: set[str] = { ATTR_HVAC_ACTION, ATTR_PRESET_MODE, ATTR_SWING_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, ATTR_TEMPERATURE, @@ -70,6 +72,7 @@ def async_check_significant_change( ATTR_HVAC_ACTION, ATTR_PRESET_MODE, ATTR_SWING_MODE, + ATTR_SWING_HORIZONTAL_MODE, ]: return True diff --git a/homeassistant/components/climate/strings.json b/homeassistant/components/climate/strings.json index 26a06821d84..6d8b2c5449d 100644 --- a/homeassistant/components/climate/strings.json +++ b/homeassistant/components/climate/strings.json @@ -123,6 +123,16 @@ "swing_modes": { "name": "Swing modes" }, + "swing_horizontal_mode": { + "name": "Horizontal swing mode", + "state": { + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" + } + }, + "swing_horizontal_modes": { + "name": "Horizontal swing modes" + }, "target_temp_high": { "name": "Upper target temperature" }, @@ -161,19 +171,19 @@ }, "set_temperature": { "name": "Set target temperature", - "description": "Sets target temperature.", + "description": "Sets the temperature setpoint.", "fields": { "temperature": { - "name": "Temperature", - "description": "Target temperature." + "name": "Target temperature", + "description": "The temperature setpoint." }, "target_temp_high": { - "name": "Target temperature high", - "description": "High target temperature." + "name": "Upper target temperature", + "description": "The max temperature setpoint." }, "target_temp_low": { - "name": "Target temperature low", - "description": "Low target temperature." + "name": "Lower target temperature", + "description": "The min temperature setpoint." }, "hvac_mode": { "name": "HVAC mode", @@ -221,6 +231,16 @@ } } }, + "set_swing_horizontal_mode": { + "name": "Set horizontal swing mode", + "description": "Sets horizontal swing operation mode.", + "fields": { + "swing_horizontal_mode": { + "name": "Horizontal swing mode", + "description": "Horizontal swing operation mode." + } + } + }, "turn_on": { "name": "[%key:common::action::turn_on%]", "description": "Turns climate device on." @@ -264,6 +284,9 @@ "not_valid_swing_mode": { "message": "Swing mode {mode} is not valid. Valid swing modes are: {modes}." }, + "not_valid_horizontal_swing_mode": { + "message": "Horizontal swing mode {mode} is not valid. Valid horizontal swing modes are: {modes}." + }, "not_valid_fan_mode": { "message": "Fan mode {mode} is not valid. Valid fan modes are: {modes}." 
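To illustrate the entity-facing side of the change, here is a minimal, hypothetical integration sketch (not part of this diff): it declares ClimateEntityFeature.SWING_HORIZONTAL_MODE, reports the new swing_horizontal_mode / swing_horizontal_modes attributes, and overrides set_swing_horizontal_mode, which the new climate.set_swing_horizontal_mode service ends up invoking. Everything except the climate constants is invented.

from homeassistant.components.climate import ClimateEntity, ClimateEntityFeature
from homeassistant.components.climate.const import (
    SWING_HORIZONTAL_OFF,
    SWING_HORIZONTAL_ON,
)


class ExampleClimate(ClimateEntity):
    """Hypothetical climate entity adopting horizontal swing support."""

    # A real entity would also declare HVAC/temperature related features.
    _attr_supported_features = ClimateEntityFeature.SWING_HORIZONTAL_MODE
    _attr_swing_horizontal_modes = [SWING_HORIZONTAL_ON, SWING_HORIZONTAL_OFF]
    _attr_swing_horizontal_mode = SWING_HORIZONTAL_OFF

    def set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None:
        """Forward the new horizontal swing mode to the (imaginary) device."""
        # A real integration would send the command to its device here.
        self._attr_swing_horizontal_mode = swing_horizontal_mode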
}, diff --git a/homeassistant/components/cloud/assist_pipeline.py b/homeassistant/components/cloud/assist_pipeline.py index f3a591d6eda..c97e5bdc0a2 100644 --- a/homeassistant/components/cloud/assist_pipeline.py +++ b/homeassistant/components/cloud/assist_pipeline.py @@ -1,6 +1,7 @@ """Handle Cloud assist pipelines.""" import asyncio +from typing import Any from homeassistant.components.assist_pipeline import ( async_create_default_pipeline, @@ -98,7 +99,7 @@ async def async_migrate_cloud_pipeline_engine( # is an after dependency of cloud await async_setup_pipeline_store(hass) - kwargs: dict[str, str] = {pipeline_attribute: engine_id} + kwargs: dict[str, Any] = {pipeline_attribute: engine_id} pipelines = async_get_pipelines(hass) for pipeline in pipelines: if getattr(pipeline, pipeline_attribute) == DOMAIN: diff --git a/homeassistant/components/cloud/http_api.py b/homeassistant/components/cloud/http_api.py index 844f0e9f11d..4f2ad0ddcf7 100644 --- a/homeassistant/components/cloud/http_api.py +++ b/homeassistant/components/cloud/http_api.py @@ -440,16 +440,16 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]: @websocket_api.websocket_command( { vol.Required("type"): "cloud/update_prefs", - vol.Optional(PREF_ENABLE_GOOGLE): bool, - vol.Optional(PREF_ENABLE_ALEXA): bool, vol.Optional(PREF_ALEXA_REPORT_STATE): bool, + vol.Optional(PREF_ENABLE_ALEXA): bool, + vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool, + vol.Optional(PREF_ENABLE_GOOGLE): bool, vol.Optional(PREF_GOOGLE_REPORT_STATE): bool, vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str), + vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, vol.Optional(PREF_TTS_DEFAULT_VOICE): vol.All( vol.Coerce(tuple), validate_language_voice ), - vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool, - vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool, } ) @websocket_api.async_response diff --git a/homeassistant/components/cloud/manifest.json b/homeassistant/components/cloud/manifest.json index 8d2b40ff8ba..60b105b401e 100644 --- a/homeassistant/components/cloud/manifest.json +++ b/homeassistant/components/cloud/manifest.json @@ -8,6 +8,6 @@ "integration_type": "system", "iot_class": "cloud_push", "loggers": ["hass_nabucasa"], - "requirements": ["hass-nabucasa==0.83.0"], + "requirements": ["hass-nabucasa==0.85.0"], "single_config_entry": true } diff --git a/homeassistant/components/cloud/prefs.py b/homeassistant/components/cloud/prefs.py index a0811393097..ae4b2794e1b 100644 --- a/homeassistant/components/cloud/prefs.py +++ b/homeassistant/components/cloud/prefs.py @@ -163,21 +163,21 @@ class CloudPreferences: async def async_update( self, *, - google_enabled: bool | UndefinedType = UNDEFINED, alexa_enabled: bool | UndefinedType = UNDEFINED, - remote_enabled: bool | UndefinedType = UNDEFINED, - google_secure_devices_pin: str | None | UndefinedType = UNDEFINED, - cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED, - cloud_user: str | UndefinedType = UNDEFINED, alexa_report_state: bool | UndefinedType = UNDEFINED, - google_report_state: bool | UndefinedType = UNDEFINED, - tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED, - remote_domain: str | None | UndefinedType = UNDEFINED, alexa_settings_version: int | UndefinedType = UNDEFINED, - google_settings_version: int | UndefinedType = UNDEFINED, - google_connected: bool | UndefinedType = UNDEFINED, - remote_allow_remote_enable: bool | UndefinedType = UNDEFINED, cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED, + 
cloud_user: str | UndefinedType = UNDEFINED, + cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED, + google_connected: bool | UndefinedType = UNDEFINED, + google_enabled: bool | UndefinedType = UNDEFINED, + google_report_state: bool | UndefinedType = UNDEFINED, + google_secure_devices_pin: str | None | UndefinedType = UNDEFINED, + google_settings_version: int | UndefinedType = UNDEFINED, + remote_allow_remote_enable: bool | UndefinedType = UNDEFINED, + remote_domain: str | None | UndefinedType = UNDEFINED, + remote_enabled: bool | UndefinedType = UNDEFINED, + tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED, ) -> None: """Update user preferences.""" prefs = {**self._prefs} @@ -186,21 +186,21 @@ class CloudPreferences: { key: value for key, value in ( - (PREF_ENABLE_GOOGLE, google_enabled), - (PREF_ENABLE_ALEXA, alexa_enabled), - (PREF_ENABLE_REMOTE, remote_enabled), - (PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin), - (PREF_CLOUDHOOKS, cloudhooks), - (PREF_CLOUD_USER, cloud_user), (PREF_ALEXA_REPORT_STATE, alexa_report_state), - (PREF_GOOGLE_REPORT_STATE, google_report_state), (PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version), - (PREF_GOOGLE_SETTINGS_VERSION, google_settings_version), - (PREF_TTS_DEFAULT_VOICE, tts_default_voice), - (PREF_REMOTE_DOMAIN, remote_domain), - (PREF_GOOGLE_CONNECTED, google_connected), - (PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable), + (PREF_CLOUD_USER, cloud_user), + (PREF_CLOUDHOOKS, cloudhooks), + (PREF_ENABLE_ALEXA, alexa_enabled), (PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled), + (PREF_ENABLE_GOOGLE, google_enabled), + (PREF_ENABLE_REMOTE, remote_enabled), + (PREF_GOOGLE_CONNECTED, google_connected), + (PREF_GOOGLE_REPORT_STATE, google_report_state), + (PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin), + (PREF_GOOGLE_SETTINGS_VERSION, google_settings_version), + (PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable), + (PREF_REMOTE_DOMAIN, remote_domain), + (PREF_TTS_DEFAULT_VOICE, tts_default_voice), ) if value is not UNDEFINED } @@ -242,6 +242,7 @@ class CloudPreferences: PREF_ALEXA_REPORT_STATE: self.alexa_report_state, PREF_CLOUDHOOKS: self.cloudhooks, PREF_ENABLE_ALEXA: self.alexa_enabled, + PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled, PREF_ENABLE_GOOGLE: self.google_enabled, PREF_ENABLE_REMOTE: self.remote_enabled, PREF_GOOGLE_DEFAULT_EXPOSE: self.google_default_expose, @@ -249,7 +250,6 @@ class CloudPreferences: PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin, PREF_REMOTE_ALLOW_REMOTE_ENABLE: self.remote_allow_remote_enable, PREF_TTS_DEFAULT_VOICE: self.tts_default_voice, - PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled, } @property diff --git a/homeassistant/components/cmus/manifest.json b/homeassistant/components/cmus/manifest.json index f7591599022..9678dc52a68 100644 --- a/homeassistant/components/cmus/manifest.json +++ b/homeassistant/components/cmus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/cmus", "iot_class": "local_polling", "loggers": ["pbr", "pycmus"], + "quality_scale": "legacy", "requirements": ["pycmus==0.1.1"] } diff --git a/homeassistant/components/coinbase/config_flow.py b/homeassistant/components/coinbase/config_flow.py index 616fdaf8f7a..8b7b4b9e313 100644 --- a/homeassistant/components/coinbase/config_flow.py +++ b/homeassistant/components/coinbase/config_flow.py @@ -158,16 +158,12 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN): 
config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Coinbase.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/comed_hourly_pricing/manifest.json b/homeassistant/components/comed_hourly_pricing/manifest.json index 791a824af8f..a3a29903ac7 100644 --- a/homeassistant/components/comed_hourly_pricing/manifest.json +++ b/homeassistant/components/comed_hourly_pricing/manifest.json @@ -3,5 +3,6 @@ "name": "ComEd Hourly Pricing", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/comed_hourly_pricing", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/comelit/manifest.json b/homeassistant/components/comelit/manifest.json index d25d5c1d7d5..d7417ad4aad 100644 --- a/homeassistant/components/comelit/manifest.json +++ b/homeassistant/components/comelit/manifest.json @@ -7,6 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiocomelit"], - "quality_scale": "silver", "requirements": ["aiocomelit==0.9.1"] } diff --git a/homeassistant/components/comfoconnect/manifest.json b/homeassistant/components/comfoconnect/manifest.json index ae9a092f5d9..4157cb6c311 100644 --- a/homeassistant/components/comfoconnect/manifest.json +++ b/homeassistant/components/comfoconnect/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/comfoconnect", "iot_class": "local_push", "loggers": ["pycomfoconnect"], + "quality_scale": "legacy", "requirements": ["pycomfoconnect==0.5.1"] } diff --git a/homeassistant/components/command_line/sensor.py b/homeassistant/components/command_line/sensor.py index 7c31af165f9..e4c1370d5f7 100644 --- a/homeassistant/components/command_line/sensor.py +++ b/homeassistant/components/command_line/sensor.py @@ -187,13 +187,11 @@ class CommandSensor(ManualTriggerSensorEntity): SensorDeviceClass.TIMESTAMP, }: self._attr_native_value = value - self._process_manual_data(value) - return - - if value is not None: + elif value is not None: self._attr_native_value = async_parse_date_datetime( value, self.entity_id, self.device_class ) + self._process_manual_data(value) self.async_write_ha_state() diff --git a/homeassistant/components/compensation/manifest.json b/homeassistant/components/compensation/manifest.json index caae9190bca..5b3cc5ac2ac 100644 --- a/homeassistant/components/compensation/manifest.json +++ b/homeassistant/components/compensation/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@Petro31"], "documentation": "https://www.home-assistant.io/integrations/compensation", "iot_class": "calculated", - "requirements": ["numpy==1.26.4"] + "quality_scale": "legacy", + "requirements": ["numpy==2.1.3"] } diff --git a/homeassistant/components/concord232/manifest.json b/homeassistant/components/concord232/manifest.json index e0aea5d64d9..ebd1d68064b 100644 --- a/homeassistant/components/concord232/manifest.json +++ b/homeassistant/components/concord232/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/concord232", "iot_class": "local_polling", "loggers": ["concord232", "stevedore"], + "quality_scale": 
"legacy", "requirements": ["concord232==0.15.1"] } diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index 77ae2c98c7d..19fae1ef7ca 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -154,16 +154,12 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Control4.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/conversation/__init__.py b/homeassistant/components/conversation/__init__.py index 17f3b6f5ccc..898b7b2cf4f 100644 --- a/homeassistant/components/conversation/__init__.py +++ b/homeassistant/components/conversation/__init__.py @@ -44,7 +44,7 @@ from .const import ( SERVICE_RELOAD, ConversationEntityFeature, ) -from .default_agent import async_setup_default_agent +from .default_agent import DefaultAgent, async_setup_default_agent from .entity import ConversationEntity from .http import async_setup as async_setup_conversation_http from .models import AbstractConversationAgent, ConversationInput, ConversationResult @@ -207,6 +207,32 @@ async def async_prepare_agent( await agent.async_prepare(language) +async def async_handle_sentence_triggers( + hass: HomeAssistant, user_input: ConversationInput +) -> str | None: + """Try to match input against sentence triggers and return response text. + + Returns None if no match occurred. + """ + default_agent = async_get_agent(hass) + assert isinstance(default_agent, DefaultAgent) + + return await default_agent.async_handle_sentence_triggers(user_input) + + +async def async_handle_intents( + hass: HomeAssistant, user_input: ConversationInput +) -> intent.IntentResponse | None: + """Try to match input against registered intents and return response. + + Returns None if no match occurred. 
+ """ + default_agent = async_get_agent(hass) + assert isinstance(default_agent, DefaultAgent) + + return await default_agent.async_handle_intents(user_input) + + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Register the process service.""" entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index a7110c35795..1194091fd46 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -3,8 +3,10 @@ from __future__ import annotations import asyncio +from collections import OrderedDict from collections.abc import Awaitable, Callable, Iterable from dataclasses import dataclass +from enum import Enum, auto import functools import logging from pathlib import Path @@ -12,15 +14,22 @@ import re import time from typing import IO, Any, cast -from hassil.expression import Expression, ListReference, Sequence -from hassil.intents import Intents, SlotList, TextSlotList, WildcardSlotList +from hassil.expression import Expression, ListReference, Sequence, TextChunk +from hassil.intents import ( + Intents, + SlotList, + TextSlotList, + TextSlotValue, + WildcardSlotList, +) from hassil.recognize import ( MISSING_ENTITY, - MatchEntity, RecognizeResult, - UnmatchedTextEntity, recognize_all, + recognize_best, ) +from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity +from hassil.trie import Trie from hassil.util import merge_dict from home_assistant_intents import ErrorKey, get_intents, get_languages import yaml @@ -102,6 +111,77 @@ class SentenceTriggerResult: matched_triggers: dict[int, RecognizeResult] +class IntentMatchingStage(Enum): + """Stages of intent matching.""" + + EXPOSED_ENTITIES_ONLY = auto() + """Match against exposed entities only.""" + + UNEXPOSED_ENTITIES = auto() + """Match against unexposed entities in Home Assistant.""" + + FUZZY = auto() + """Capture names that are not known to Home Assistant.""" + + +@dataclass(frozen=True) +class IntentCacheKey: + """Key for IntentCache.""" + + text: str + """User input text.""" + + language: str + """Language of text.""" + + device_id: str | None + """Device id from user input.""" + + +@dataclass(frozen=True) +class IntentCacheValue: + """Value for IntentCache.""" + + result: RecognizeResult | None + """Result of intent recognition.""" + + stage: IntentMatchingStage + """Stage where result was found.""" + + +class IntentCache: + """LRU cache for intent recognition results.""" + + def __init__(self, capacity: int) -> None: + """Initialize cache.""" + self.cache: OrderedDict[IntentCacheKey, IntentCacheValue] = OrderedDict() + self.capacity = capacity + + def get(self, key: IntentCacheKey) -> IntentCacheValue | None: + """Get value for cache or None.""" + if key not in self.cache: + return None + + # Move the key to the end to show it was recently used + self.cache.move_to_end(key) + return self.cache[key] + + def put(self, key: IntentCacheKey, value: IntentCacheValue) -> None: + """Put a value in the cache, evicting the least recently used item if necessary.""" + if key in self.cache: + # Update value and mark as recently used + self.cache.move_to_end(key) + elif len(self.cache) >= self.capacity: + # Evict the oldest item + self.cache.popitem(last=False) + + self.cache[key] = value + + def clear(self) -> None: + """Clear the cache.""" + self.cache.clear() + + def 
_get_language_variations(language: str) -> Iterable[str]: """Generate language codes with and without region.""" yield language @@ -161,12 +241,19 @@ class DefaultAgent(ConversationEntity): self._config_intents: dict[str, Any] = config_intents self._slot_lists: dict[str, SlotList] | None = None + # Used to filter slot lists before intent matching + self._exposed_names_trie: Trie | None = None + self._unexposed_names_trie: Trie | None = None + # Sentences that will trigger a callback (skipping intent recognition) self._trigger_sentences: list[TriggerData] = [] self._trigger_intents: Intents | None = None self._unsub_clear_slot_list: list[Callable[[], None]] | None = None self._load_intents_lock = asyncio.Lock() + # LRU cache to avoid unnecessary intent matching + self._intent_cache = IntentCache(capacity=128) + @property def supported_languages(self) -> list[str]: """Return a list of supported languages.""" @@ -213,13 +300,10 @@ class DefaultAgent(ConversationEntity): async_listen_entity_updates(self.hass, DOMAIN, self._async_clear_slot_list), ] - async def async_recognize( - self, user_input: ConversationInput - ) -> RecognizeResult | SentenceTriggerResult | None: + async def async_recognize_intent( + self, user_input: ConversationInput, strict_intents_only: bool = False + ) -> RecognizeResult | None: """Recognize intent from user input.""" - if trigger_result := await self._match_triggers(user_input.text): - return trigger_result - language = user_input.language or self.hass.config.language lang_intents = await self.async_get_or_load_intents(language) @@ -231,6 +315,16 @@ class DefaultAgent(ConversationEntity): slot_lists = self._make_slot_lists() intent_context = self._make_intent_context(user_input) + if self._exposed_names_trie is not None: + # Filter by input string + text_lower = user_input.text.strip().lower() + slot_lists["name"] = TextSlotList( + name="name", + values=[ + result[2] for result in self._exposed_names_trie.find(text_lower) + ], + ) + start = time.monotonic() result = await self.hass.async_add_executor_job( @@ -240,6 +334,7 @@ class DefaultAgent(ConversationEntity): slot_lists, intent_context, language, + strict_intents_only, ) _LOGGER.debug( @@ -251,56 +346,36 @@ class DefaultAgent(ConversationEntity): async def async_process(self, user_input: ConversationInput) -> ConversationResult: """Process a sentence.""" - language = user_input.language or self.hass.config.language - conversation_id = None # Not supported - - result = await self.async_recognize(user_input) # Check if a trigger matched - if isinstance(result, SentenceTriggerResult): - # Gather callback responses in parallel - trigger_callbacks = [ - self._trigger_sentences[trigger_id].callback( - result.sentence, trigger_result, user_input.device_id - ) - for trigger_id, trigger_result in result.matched_triggers.items() - ] - - # Use first non-empty result as response. - # - # There may be multiple copies of a trigger running when editing in - # the UI, so it's critical that we filter out empty responses here. 
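As a usage illustration for the new module-level helpers added to conversation/__init__.py above (async_handle_sentence_triggers and async_handle_intents), a hedged sketch; the sentence and the wrapper function are invented.

from homeassistant.components import conversation
from homeassistant.components.conversation import ConversationInput
from homeassistant.core import Context, HomeAssistant


async def demo_handle_text(hass: HomeAssistant) -> None:
    user_input = ConversationInput(
        text="turn on the kitchen light",  # made-up sentence
        context=Context(),
        conversation_id=None,
        device_id=None,
        language=hass.config.language,
        agent_id=None,
    )
    # Sentence triggers are checked first; a non-None return means one matched.
    response_text = await conversation.async_handle_sentence_triggers(hass, user_input)
    if response_text is not None:
        return
    # Otherwise try strict intent matching (exposed entities, exact wording only).
    await conversation.async_handle_intents(hass, user_input)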
- response_text: str | None = None - response_set_by_trigger = False - for trigger_future in asyncio.as_completed(trigger_callbacks): - trigger_response = await trigger_future - if trigger_response is None: - continue - - response_text = trigger_response - response_set_by_trigger = True - break + if trigger_result := await self.async_recognize_sentence_trigger(user_input): + # Process callbacks and get response + response_text = await self._handle_trigger_result( + trigger_result, user_input + ) # Convert to conversation result - response = intent.IntentResponse(language=language) + response = intent.IntentResponse( + language=user_input.language or self.hass.config.language + ) response.response_type = intent.IntentResponseType.ACTION_DONE - - if response_set_by_trigger: - # Response was explicitly set to empty - response_text = response_text or "" - elif not response_text: - # Use translated acknowledgment for pipeline language - translations = await translation.async_get_translations( - self.hass, language, DOMAIN, [DOMAIN] - ) - response_text = translations.get( - f"component.{DOMAIN}.conversation.agent.done", "Done" - ) - response.async_set_speech(response_text) return ConversationResult(response=response) + # Match intents + intent_result = await self.async_recognize_intent(user_input) + return await self._async_process_intent_result(intent_result, user_input) + + async def _async_process_intent_result( + self, + result: RecognizeResult | None, + user_input: ConversationInput, + ) -> ConversationResult: + """Process user input with intents.""" + language = user_input.language or self.hass.config.language + conversation_id = None # Not supported + # Intent match or failure lang_intents = await self.async_get_or_load_intents(language) @@ -436,21 +511,235 @@ class DefaultAgent(ConversationEntity): slot_lists: dict[str, SlotList], intent_context: dict[str, Any] | None, language: str, + strict_intents_only: bool, ) -> RecognizeResult | None: """Search intents for a match to user input.""" - strict_result = self._recognize_strict( - user_input, lang_intents, slot_lists, intent_context, language - ) + skip_exposed_match = False - if strict_result is not None: - # Successful strict match - return strict_result + # Try cache first + cache_key = IntentCacheKey( + text=user_input.text, language=language, device_id=user_input.device_id + ) + cache_value = self._intent_cache.get(cache_key) + if cache_value is not None: + if (cache_value.result is not None) and ( + cache_value.stage == IntentMatchingStage.EXPOSED_ENTITIES_ONLY + ): + _LOGGER.debug("Got cached result for exposed entities") + return cache_value.result + + # Continue with matching, but we know we won't succeed for exposed + # entities only. 
+ skip_exposed_match = True + + if not skip_exposed_match: + start_time = time.monotonic() + strict_result = self._recognize_strict( + user_input, lang_intents, slot_lists, intent_context, language + ) + _LOGGER.debug( + "Checked exposed entities in %s second(s)", + time.monotonic() - start_time, + ) + + # Update cache + self._intent_cache.put( + cache_key, + IntentCacheValue( + result=strict_result, + stage=IntentMatchingStage.EXPOSED_ENTITIES_ONLY, + ), + ) + + if strict_result is not None: + # Successful strict match with exposed entities + return strict_result + + if strict_intents_only: + # Don't try matching against all entities or doing a fuzzy match + return None # Try again with all entities (including unexposed) + skip_unexposed_entities_match = False + if cache_value is not None: + if (cache_value.result is not None) and ( + cache_value.stage == IntentMatchingStage.UNEXPOSED_ENTITIES + ): + _LOGGER.debug("Got cached result for all entities") + return cache_value.result + + # Continue with matching, but we know we won't succeed for all + # entities. + skip_unexposed_entities_match = True + + if not skip_unexposed_entities_match: + unexposed_entities_slot_lists = { + **slot_lists, + "name": self._get_unexposed_entity_names(user_input.text), + } + + start_time = time.monotonic() + strict_result = self._recognize_strict( + user_input, + lang_intents, + unexposed_entities_slot_lists, + intent_context, + language, + ) + + _LOGGER.debug( + "Checked all entities in %s second(s)", time.monotonic() - start_time + ) + + # Update cache + self._intent_cache.put( + cache_key, + IntentCacheValue( + result=strict_result, stage=IntentMatchingStage.UNEXPOSED_ENTITIES + ), + ) + + if strict_result is not None: + # Not a successful match, but useful for an error message. + # This should fail the intent handling phase (async_match_targets). + return strict_result + + # Try again with missing entities enabled + skip_fuzzy_match = False + if cache_value is not None: + if (cache_value.result is not None) and ( + cache_value.stage == IntentMatchingStage.FUZZY + ): + _LOGGER.debug("Got cached result for fuzzy match") + return cache_value.result + + # We know we won't succeed for fuzzy matching. 
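The caching used above comes from the small LRU class introduced earlier in this file; a quick, self-contained illustration of how entries are promoted and evicted (the sentences are invented).

from homeassistant.components.conversation.default_agent import (
    IntentCache,
    IntentCacheKey,
    IntentCacheValue,
    IntentMatchingStage,
)

cache = IntentCache(capacity=2)
no_match = IntentCacheValue(result=None, stage=IntentMatchingStage.EXPOSED_ENTITIES_ONLY)

key_light = IntentCacheKey(text="turn on the light", language="en", device_id=None)
key_time = IntentCacheKey(text="what time is it", language="en", device_id=None)
key_blinds = IntentCacheKey(text="open the blinds", language="en", device_id=None)

cache.put(key_light, no_match)
cache.put(key_time, no_match)
cache.get(key_light)             # promotes key_light to most recently used
cache.put(key_blinds, no_match)  # over capacity: evicts key_time (least recently used)

assert cache.get(key_time) is None
assert cache.get(key_light) is not None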
+ skip_fuzzy_match = True + + maybe_result: RecognizeResult | None = None + if not skip_fuzzy_match: + start_time = time.monotonic() + best_num_matched_entities = 0 + best_num_unmatched_entities = 0 + best_num_unmatched_ranges = 0 + for result in recognize_all( + user_input.text, + lang_intents.intents, + slot_lists=slot_lists, + intent_context=intent_context, + allow_unmatched_entities=True, + ): + if result.text_chunks_matched < 1: + # Skip results that don't match any literal text + continue + + # Don't count missing entities that couldn't be filled from context + num_matched_entities = 0 + for matched_entity in result.entities_list: + if matched_entity.name not in result.unmatched_entities: + num_matched_entities += 1 + + num_unmatched_entities = 0 + num_unmatched_ranges = 0 + for unmatched_entity in result.unmatched_entities_list: + if isinstance(unmatched_entity, UnmatchedTextEntity): + if unmatched_entity.text != MISSING_ENTITY: + num_unmatched_entities += 1 + elif isinstance(unmatched_entity, UnmatchedRangeEntity): + num_unmatched_ranges += 1 + num_unmatched_entities += 1 + else: + num_unmatched_entities += 1 + + if ( + (maybe_result is None) # first result + or (num_matched_entities > best_num_matched_entities) + or ( + # Fewer unmatched entities + (num_matched_entities == best_num_matched_entities) + and (num_unmatched_entities < best_num_unmatched_entities) + ) + or ( + # Prefer unmatched ranges + (num_matched_entities == best_num_matched_entities) + and (num_unmatched_entities == best_num_unmatched_entities) + and (num_unmatched_ranges > best_num_unmatched_ranges) + ) + or ( + # More literal text matched + (num_matched_entities == best_num_matched_entities) + and (num_unmatched_entities == best_num_unmatched_entities) + and (num_unmatched_ranges == best_num_unmatched_ranges) + and ( + result.text_chunks_matched + > maybe_result.text_chunks_matched + ) + ) + or ( + # Prefer match failures with entities + (result.text_chunks_matched == maybe_result.text_chunks_matched) + and (num_unmatched_entities == best_num_unmatched_entities) + and (num_unmatched_ranges == best_num_unmatched_ranges) + and ( + ("name" in result.entities) + or ("name" in result.unmatched_entities) + ) + ) + ): + maybe_result = result + best_num_matched_entities = num_matched_entities + best_num_unmatched_entities = num_unmatched_entities + best_num_unmatched_ranges = num_unmatched_ranges + + # Update cache + self._intent_cache.put( + cache_key, + IntentCacheValue(result=maybe_result, stage=IntentMatchingStage.FUZZY), + ) + + _LOGGER.debug( + "Did fuzzy match in %s second(s)", time.monotonic() - start_time + ) + + return maybe_result + + def _get_unexposed_entity_names(self, text: str) -> TextSlotList: + """Get filtered slot list with unexposed entity names in Home Assistant.""" + if self._unexposed_names_trie is None: + # Build trie + self._unexposed_names_trie = Trie() + for name_tuple in self._get_entity_name_tuples(exposed=False): + self._unexposed_names_trie.insert( + name_tuple[0].lower(), + TextSlotValue.from_tuple(name_tuple, allow_template=False), + ) + + # Build filtered slot list + text_lower = text.strip().lower() + return TextSlotList( + name="name", + values=[ + result[2] for result in self._unexposed_names_trie.find(text_lower) + ], + ) + + def _get_entity_name_tuples( + self, exposed: bool + ) -> Iterable[tuple[str, str, dict[str, Any]]]: + """Yield (input name, output name, context) tuples for entities.""" entity_registry = er.async_get(self.hass) - all_entity_names: list[tuple[str, str, 
dict[str, Any]]] = [] for state in self.hass.states.async_all(): + entity_exposed = async_should_expose(self.hass, DOMAIN, state.entity_id) + if exposed and (not entity_exposed): + # Required exposed, entity is not + continue + + if (not exposed) and entity_exposed: + # Required not exposed, entity is + continue + + # Checked against "requires_context" and "excludes_context" in hassil context = {"domain": state.domain} if state.attributes: # Include some attributes @@ -459,99 +748,18 @@ class DefaultAgent(ConversationEntity): continue context[attr] = state.attributes[attr] - if entity := entity_registry.async_get(state.entity_id): - # Skip config/hidden entities - if (entity.entity_category is not None) or ( - entity.hidden_by is not None - ): - continue + if ( + entity := entity_registry.async_get(state.entity_id) + ) and entity.aliases: + for alias in entity.aliases: + alias = alias.strip() + if not alias: + continue - if entity.aliases: - # Also add aliases - for alias in entity.aliases: - if not alias.strip(): - continue - - all_entity_names.append((alias, alias, context)) + yield (alias, alias, context) # Default name - all_entity_names.append((state.name, state.name, context)) - - slot_lists = { - **slot_lists, - "name": TextSlotList.from_tuples(all_entity_names, allow_template=False), - } - - strict_result = self._recognize_strict( - user_input, - lang_intents, - slot_lists, - intent_context, - language, - ) - - if strict_result is not None: - # Not a successful match, but useful for an error message. - # This should fail the intent handling phase (async_match_targets). - return strict_result - - # Try again with missing entities enabled - maybe_result: RecognizeResult | None = None - best_num_matched_entities = 0 - best_num_unmatched_entities = 0 - for result in recognize_all( - user_input.text, - lang_intents.intents, - slot_lists=slot_lists, - intent_context=intent_context, - allow_unmatched_entities=True, - ): - if result.text_chunks_matched < 1: - # Skip results that don't match any literal text - continue - - # Don't count missing entities that couldn't be filled from context - num_matched_entities = 0 - for matched_entity in result.entities_list: - if matched_entity.name not in result.unmatched_entities: - num_matched_entities += 1 - - num_unmatched_entities = 0 - for unmatched_entity in result.unmatched_entities_list: - if isinstance(unmatched_entity, UnmatchedTextEntity): - if unmatched_entity.text != MISSING_ENTITY: - num_unmatched_entities += 1 - else: - num_unmatched_entities += 1 - - if ( - (maybe_result is None) # first result - or (num_matched_entities > best_num_matched_entities) - or ( - # Fewer unmatched entities - (num_matched_entities == best_num_matched_entities) - and (num_unmatched_entities < best_num_unmatched_entities) - ) - or ( - # More literal text matched - (num_matched_entities == best_num_matched_entities) - and (num_unmatched_entities == best_num_unmatched_entities) - and (result.text_chunks_matched > maybe_result.text_chunks_matched) - ) - or ( - # Prefer match failures with entities - (result.text_chunks_matched == maybe_result.text_chunks_matched) - and ( - ("name" in result.entities) - or ("name" in result.unmatched_entities) - ) - ) - ): - maybe_result = result - best_num_matched_entities = num_matched_entities - best_num_unmatched_entities = num_unmatched_entities - - return maybe_result + yield (state.name, state.name, context) def _recognize_strict( self, @@ -562,76 +770,15 @@ class DefaultAgent(ConversationEntity): language: str, ) -> 
RecognizeResult | None: """Search intents for a strict match to user input.""" - custom_found = False - name_found = False - best_results: list[RecognizeResult] = [] - best_name_quality: int | None = None - best_text_chunks_matched: int | None = None - for result in recognize_all( + return recognize_best( user_input.text, lang_intents.intents, slot_lists=slot_lists, intent_context=intent_context, language=language, - ): - # Prioritize user intents - is_custom = ( - result.intent_metadata is not None - and result.intent_metadata.get(METADATA_CUSTOM_SENTENCE) - ) - - if custom_found and not is_custom: - continue - - if not custom_found and is_custom: - custom_found = True - # Clear builtin results - name_found = False - best_results = [] - best_name_quality = None - best_text_chunks_matched = None - - # Prioritize results with a "name" slot - name = result.entities.get("name") - is_name = name and not name.is_wildcard - - if name_found and not is_name: - continue - - if not name_found and is_name: - name_found = True - # Clear non-name results - best_results = [] - best_text_chunks_matched = None - - if is_name: - # Prioritize results with a better "name" slot - name_quality = len(cast(MatchEntity, name).value.split()) - if (best_name_quality is None) or (name_quality > best_name_quality): - best_name_quality = name_quality - # Clear worse name results - best_results = [] - best_text_chunks_matched = None - elif name_quality < best_name_quality: - continue - - # Prioritize results with more literal text - # This causes wildcards to match last. - if (best_text_chunks_matched is None) or ( - result.text_chunks_matched > best_text_chunks_matched - ): - best_results = [result] - best_text_chunks_matched = result.text_chunks_matched - elif result.text_chunks_matched == best_text_chunks_matched: - # Accumulate results with the same number of literal text matched. - # We will resolve the ambiguity below. - best_results.append(result) - - if best_results: - # Successful strict match - return best_results[0] - - return None + best_metadata_key=METADATA_CUSTOM_SENTENCE, + best_slot_name="name", + ) async def _build_speech( self, @@ -717,6 +864,9 @@ class DefaultAgent(ConversationEntity): self._lang_intents.pop(language, None) _LOGGER.debug("Cleared intents for language: %s", language) + # Intents have changed, so we must clear the cache + self._intent_cache.clear() + async def async_prepare(self, language: str | None = None) -> None: """Load intents for a language.""" if language is None: @@ -901,10 +1051,15 @@ class DefaultAgent(ConversationEntity): if self._unsub_clear_slot_list is None: return self._slot_lists = None + self._exposed_names_trie = None + self._unexposed_names_trie = None for unsub in self._unsub_clear_slot_list: unsub() self._unsub_clear_slot_list = None + # Slot lists have changed, so we must clear the cache + self._intent_cache.clear() + @core.callback def _make_slot_lists(self) -> dict[str, SlotList]: """Create slot lists with areas and entity names/aliases.""" @@ -913,8 +1068,6 @@ class DefaultAgent(ConversationEntity): start = time.monotonic() - entity_registry = er.async_get(self.hass) - # Gather entity names, keeping track of exposed names. # We try intent recognition with only exposed names first, then all names. # @@ -922,35 +1075,7 @@ class DefaultAgent(ConversationEntity): # have the same name. The intent matcher doesn't gather all matching # values for a list, just the first. So we will need to match by name no # matter what. 
- exposed_entity_names = [] - for state in self.hass.states.async_all(): - is_exposed = async_should_expose(self.hass, DOMAIN, state.entity_id) - - # Checked against "requires_context" and "excludes_context" in hassil - context = {"domain": state.domain} - if state.attributes: - # Include some attributes - for attr in DEFAULT_EXPOSED_ATTRIBUTES: - if attr not in state.attributes: - continue - context[attr] = state.attributes[attr] - - if ( - entity := entity_registry.async_get(state.entity_id) - ) and entity.aliases: - for alias in entity.aliases: - if not alias.strip(): - continue - - name_tuple = (alias, alias, context) - if is_exposed: - exposed_entity_names.append(name_tuple) - - # Default name - name_tuple = (state.name, state.name, context) - if is_exposed: - exposed_entity_names.append(name_tuple) - + exposed_entity_names = list(self._get_entity_name_tuples(exposed=True)) _LOGGER.debug("Exposed entities: %s", exposed_entity_names) # Expose all areas. @@ -983,11 +1108,17 @@ class DefaultAgent(ConversationEntity): floor_names.append((alias, floor.name)) + # Build trie + self._exposed_names_trie = Trie() + name_list = TextSlotList.from_tuples(exposed_entity_names, allow_template=False) + for name_value in name_list.values: + assert isinstance(name_value.text_in, TextChunk) + name_text = name_value.text_in.text.strip().lower() + self._exposed_names_trie.insert(name_text, name_value) + self._slot_lists = { "area": TextSlotList.from_tuples(area_names, allow_template=False), - "name": TextSlotList.from_tuples( - exposed_entity_names, allow_template=False - ), + "name": name_list, "floor": TextSlotList.from_tuples(floor_names, allow_template=False), } @@ -1102,7 +1233,9 @@ class DefaultAgent(ConversationEntity): # Force rebuild on next use self._trigger_intents = None - async def _match_triggers(self, sentence: str) -> SentenceTriggerResult | None: + async def async_recognize_sentence_trigger( + self, user_input: ConversationInput + ) -> SentenceTriggerResult | None: """Try to match sentence against registered trigger sentences. Calls the registered callbacks if there's a match and returns a sentence @@ -1120,7 +1253,7 @@ class DefaultAgent(ConversationEntity): matched_triggers: dict[int, RecognizeResult] = {} matched_template: str | None = None - for result in recognize_all(sentence, self._trigger_intents): + for result in recognize_all(user_input.text, self._trigger_intents): if result.intent_sentence is not None: matched_template = result.intent_sentence.text @@ -1137,12 +1270,88 @@ class DefaultAgent(ConversationEntity): _LOGGER.debug( "'%s' matched %s trigger(s): %s", - sentence, + user_input.text, len(matched_triggers), list(matched_triggers), ) - return SentenceTriggerResult(sentence, matched_template, matched_triggers) + return SentenceTriggerResult( + user_input.text, matched_template, matched_triggers + ) + + async def _handle_trigger_result( + self, result: SentenceTriggerResult, user_input: ConversationInput + ) -> str: + """Run sentence trigger callbacks and return response text.""" + + # Gather callback responses in parallel + trigger_callbacks = [ + self._trigger_sentences[trigger_id].callback( + user_input.text, trigger_result, user_input.device_id + ) + for trigger_id, trigger_result in result.matched_triggers.items() + ] + + # Use first non-empty result as response. + # + # There may be multiple copies of a trigger running when editing in + # the UI, so it's critical that we filter out empty responses here. 
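The trie built above is what lets the agent shrink the "name" slot list before matching. Below is a rough sketch of the assumed behaviour, inferred only from how hassil.trie.Trie is used in this diff (insert(key, value) plus find(text) returning tuples whose third element is the stored value); the names and values are invented, and the real code stores TextSlotValue objects rather than plain strings.

from hassil.trie import Trie

trie = Trie()
trie.insert("kitchen light", "light.kitchen")
trie.insert("porch light", "light.porch")

text = "turn on the kitchen light please".strip().lower()
# Keep only names that actually occur in the user's sentence.
candidates = [result[2] for result in trie.find(text)]
# Expected under the assumption above: ["light.kitchen"]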
+ response_text = "" + response_set_by_trigger = False + for trigger_future in asyncio.as_completed(trigger_callbacks): + trigger_response = await trigger_future + if trigger_response is None: + continue + + response_text = trigger_response + response_set_by_trigger = True + break + + if response_set_by_trigger: + # Response was explicitly set to empty + response_text = response_text or "" + elif not response_text: + # Use translated acknowledgment for pipeline language + language = user_input.language or self.hass.config.language + translations = await translation.async_get_translations( + self.hass, language, DOMAIN, [DOMAIN] + ) + response_text = translations.get( + f"component.{DOMAIN}.conversation.agent.done", "Done" + ) + + return response_text + + async def async_handle_sentence_triggers( + self, user_input: ConversationInput + ) -> str | None: + """Try to input sentence against sentence triggers and return response text. + + Returns None if no match occurred. + """ + if trigger_result := await self.async_recognize_sentence_trigger(user_input): + return await self._handle_trigger_result(trigger_result, user_input) + + return None + + async def async_handle_intents( + self, + user_input: ConversationInput, + ) -> intent.IntentResponse | None: + """Try to match sentence against registered intents and return response. + + Only performs strict matching with exposed entities and exact wording. + Returns None if no match occurred. + """ + result = await self.async_recognize_intent(user_input, strict_intents_only=True) + if not isinstance(result, RecognizeResult): + # No error message on failed match + return None + + conversation_result = await self._async_process_intent_result( + result, user_input + ) + return conversation_result.response def _make_error_result( @@ -1154,7 +1363,6 @@ def _make_error_result( """Create conversation result with error code and text.""" response = intent.IntentResponse(language=language) response.async_set_error(error_code, response_text) - return ConversationResult(response, conversation_id) diff --git a/homeassistant/components/conversation/http.py b/homeassistant/components/conversation/http.py index df1ffc7f74f..ebc5d70f1ef 100644 --- a/homeassistant/components/conversation/http.py +++ b/homeassistant/components/conversation/http.py @@ -6,12 +6,8 @@ from collections.abc import Iterable from typing import Any from aiohttp import web -from hassil.recognize import ( - MISSING_ENTITY, - RecognizeResult, - UnmatchedRangeEntity, - UnmatchedTextEntity, -) +from hassil.recognize import MISSING_ENTITY, RecognizeResult +from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity import voluptuous as vol from homeassistant.components import http, websocket_api @@ -28,11 +24,7 @@ from .agent_manager import ( get_agent_manager, ) from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY -from .default_agent import ( - METADATA_CUSTOM_FILE, - METADATA_CUSTOM_SENTENCE, - SentenceTriggerResult, -) +from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE, DefaultAgent from .entity import ConversationEntity from .models import ConversationInput @@ -171,44 +163,42 @@ async def websocket_hass_agent_debug( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Return intents that would be matched by the default agent for a list of sentences.""" - results = [ - await hass.data[DATA_DEFAULT_ENTITY].async_recognize( - ConversationInput( - text=sentence, - context=connection.context(msg), - conversation_id=None, - 
device_id=msg.get("device_id"), - language=msg.get("language", hass.config.language), - agent_id=None, - ) - ) - for sentence in msg["sentences"] - ] + agent = hass.data.get(DATA_DEFAULT_ENTITY) + assert isinstance(agent, DefaultAgent) # Return results for each sentence in the same order as the input. result_dicts: list[dict[str, Any] | None] = [] - for result in results: + for sentence in msg["sentences"]: + user_input = ConversationInput( + text=sentence, + context=connection.context(msg), + conversation_id=None, + device_id=msg.get("device_id"), + language=msg.get("language", hass.config.language), + agent_id=None, + ) result_dict: dict[str, Any] | None = None - if isinstance(result, SentenceTriggerResult): + + if trigger_result := await agent.async_recognize_sentence_trigger(user_input): result_dict = { # Matched a user-defined sentence trigger. # We can't provide the response here without executing the # trigger. "match": True, "source": "trigger", - "sentence_template": result.sentence_template or "", + "sentence_template": trigger_result.sentence_template or "", } - elif isinstance(result, RecognizeResult): - successful_match = not result.unmatched_entities + elif intent_result := await agent.async_recognize_intent(user_input): + successful_match = not intent_result.unmatched_entities result_dict = { # Name of the matching intent (or the closest) "intent": { - "name": result.intent.name, + "name": intent_result.intent.name, }, # Slot values that would be received by the intent "slots": { # direct access to values entity_key: entity.text or entity.value - for entity_key, entity in result.entities.items() + for entity_key, entity in intent_result.entities.items() }, # Extra slot details, such as the originally matched text "details": { @@ -217,7 +207,7 @@ async def websocket_hass_agent_debug( "value": entity.value, "text": entity.text, } - for entity_key, entity in result.entities.items() + for entity_key, entity in intent_result.entities.items() }, # Entities/areas/etc. 
that would be targeted "targets": {}, @@ -226,24 +216,26 @@ async def websocket_hass_agent_debug( # Text of the sentence template that matched (or was closest) "sentence_template": "", # When match is incomplete, this will contain the best slot guesses - "unmatched_slots": _get_unmatched_slots(result), + "unmatched_slots": _get_unmatched_slots(intent_result), } if successful_match: result_dict["targets"] = { state.entity_id: {"matched": is_matched} - for state, is_matched in _get_debug_targets(hass, result) + for state, is_matched in _get_debug_targets(hass, intent_result) } - if result.intent_sentence is not None: - result_dict["sentence_template"] = result.intent_sentence.text + if intent_result.intent_sentence is not None: + result_dict["sentence_template"] = intent_result.intent_sentence.text # Inspect metadata to determine if this matched a custom sentence - if result.intent_metadata and result.intent_metadata.get( + if intent_result.intent_metadata and intent_result.intent_metadata.get( METADATA_CUSTOM_SENTENCE ): result_dict["source"] = "custom" - result_dict["file"] = result.intent_metadata.get(METADATA_CUSTOM_FILE) + result_dict["file"] = intent_result.intent_metadata.get( + METADATA_CUSTOM_FILE + ) else: result_dict["source"] = "builtin" diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 8b5c6ef173f..72e1cebf462 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.6"] + "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.4"] } diff --git a/homeassistant/components/conversation/trigger.py b/homeassistant/components/conversation/trigger.py index ec7ecc76da0..a4f64ffbad9 100644 --- a/homeassistant/components/conversation/trigger.py +++ b/homeassistant/components/conversation/trigger.py @@ -4,7 +4,8 @@ from __future__ import annotations from typing import Any -from hassil.recognize import PUNCTUATION, RecognizeResult +from hassil.recognize import RecognizeResult +from hassil.util import PUNCTUATION_ALL import voluptuous as vol from homeassistant.const import CONF_COMMAND, CONF_PLATFORM @@ -20,7 +21,7 @@ from .const import DATA_DEFAULT_ENTITY, DOMAIN def has_no_punctuation(value: list[str]) -> list[str]: """Validate result does not contain punctuation.""" for sentence in value: - if PUNCTUATION.search(sentence): + if PUNCTUATION_ALL.search(sentence): raise vol.Invalid("sentence should not contain punctuation") return value diff --git a/homeassistant/components/cppm_tracker/manifest.json b/homeassistant/components/cppm_tracker/manifest.json index d8c387cdbf4..ca2fdf71a45 100644 --- a/homeassistant/components/cppm_tracker/manifest.json +++ b/homeassistant/components/cppm_tracker/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/cppm_tracker", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["clearpasspy==1.0.2"] } diff --git a/homeassistant/components/cpuspeed/config_flow.py b/homeassistant/components/cpuspeed/config_flow.py index ac35cc0fc4f..21dc577b5bf 100644 --- a/homeassistant/components/cpuspeed/config_flow.py +++ b/homeassistant/components/cpuspeed/config_flow.py @@ -23,7 +23,6 @@ class CPUSpeedFlowHandler(ConfigFlow, 
domain=DOMAIN): ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" await self.async_set_unique_id(DOMAIN) - self._abort_if_unique_id_configured() if user_input is None: return self.async_show_form(step_id="user") diff --git a/homeassistant/components/cpuspeed/manifest.json b/homeassistant/components/cpuspeed/manifest.json index ff3a41d9c09..0c7f549a1b9 100644 --- a/homeassistant/components/cpuspeed/manifest.json +++ b/homeassistant/components/cpuspeed/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/cpuspeed", "integration_type": "device", "iot_class": "local_push", - "requirements": ["py-cpuinfo==9.0.0"] + "requirements": ["py-cpuinfo==9.0.0"], + "single_config_entry": true } diff --git a/homeassistant/components/cpuspeed/strings.json b/homeassistant/components/cpuspeed/strings.json index e82c6a0db12..6f4b3133b1b 100644 --- a/homeassistant/components/cpuspeed/strings.json +++ b/homeassistant/components/cpuspeed/strings.json @@ -8,7 +8,6 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::single_instance_allowed%]", "not_compatible": "Unable to get CPU information, this integration is not compatible with your system" } } diff --git a/homeassistant/components/crownstone/config_flow.py b/homeassistant/components/crownstone/config_flow.py index 7d86fbbd7fb..bf6e9204714 100644 --- a/homeassistant/components/crownstone/config_flow.py +++ b/homeassistant/components/crownstone/config_flow.py @@ -213,18 +213,19 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Crownstone options.""" super().__init__(OPTIONS_FLOW, self.async_create_new_entry) - self.entry = config_entry - self.updated_options = config_entry.options.copy() + self.options = config_entry.options.copy() async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage Crownstone options.""" - self.cloud: CrownstoneCloud = self.hass.data[DOMAIN][self.entry.entry_id].cloud + self.cloud: CrownstoneCloud = self.hass.data[DOMAIN][ + self.config_entry.entry_id + ].cloud spheres = {sphere.name: sphere.cloud_id for sphere in self.cloud.cloud_data} - usb_path = self.entry.options.get(CONF_USB_PATH) - usb_sphere = self.entry.options.get(CONF_USB_SPHERE) + usb_path = self.config_entry.options.get(CONF_USB_PATH) + usb_sphere = self.config_entry.options.get(CONF_USB_SPHERE) options_schema = vol.Schema( {vol.Optional(CONF_USE_USB_OPTION, default=usb_path is not None): bool} @@ -243,14 +244,14 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow): if user_input[CONF_USE_USB_OPTION] and usb_path is None: return await self.async_step_usb_config() if not user_input[CONF_USE_USB_OPTION] and usb_path is not None: - self.updated_options[CONF_USB_PATH] = None - self.updated_options[CONF_USB_SPHERE] = None + self.options[CONF_USB_PATH] = None + self.options[CONF_USB_SPHERE] = None elif ( CONF_USB_SPHERE_OPTION in user_input and spheres[user_input[CONF_USB_SPHERE_OPTION]] != usb_sphere ): sphere_id = spheres[user_input[CONF_USB_SPHERE_OPTION]] - self.updated_options[CONF_USB_SPHERE] = sphere_id + self.options[CONF_USB_SPHERE] = sphere_id return self.async_create_new_entry() @@ -260,7 +261,7 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow): """Create a new entry.""" # these attributes will only change when a usb was configured if self.usb_path is not None and self.usb_sphere_id is not 
None: - self.updated_options[CONF_USB_PATH] = self.usb_path - self.updated_options[CONF_USB_SPHERE] = self.usb_sphere_id + self.options[CONF_USB_PATH] = self.usb_path + self.options[CONF_USB_SPHERE] = self.usb_sphere_id - return super().async_create_entry(title="", data=self.updated_options) + return super().async_create_entry(title="", data=self.options) diff --git a/homeassistant/components/cups/manifest.json b/homeassistant/components/cups/manifest.json index 3e5b46770fb..c4aa596f01e 100644 --- a/homeassistant/components/cups/manifest.json +++ b/homeassistant/components/cups/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/cups", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pycups==1.9.73"] } diff --git a/homeassistant/components/currencylayer/manifest.json b/homeassistant/components/currencylayer/manifest.json index d66331c4ab0..82d9d4050d4 100644 --- a/homeassistant/components/currencylayer/manifest.json +++ b/homeassistant/components/currencylayer/manifest.json @@ -3,5 +3,6 @@ "name": "currencylayer", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/currencylayer", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/danfoss_air/manifest.json b/homeassistant/components/danfoss_air/manifest.json index 9eea3221bbe..57cb1aa7218 100644 --- a/homeassistant/components/danfoss_air/manifest.json +++ b/homeassistant/components/danfoss_air/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/danfoss_air", "iot_class": "local_polling", "loggers": ["pydanfossair"], + "quality_scale": "legacy", "requirements": ["pydanfossair==0.1.0"] } diff --git a/homeassistant/components/datadog/manifest.json b/homeassistant/components/datadog/manifest.json index 4ae24a80c6c..ca9681effca 100644 --- a/homeassistant/components/datadog/manifest.json +++ b/homeassistant/components/datadog/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/datadog", "iot_class": "local_push", "loggers": ["datadog"], + "quality_scale": "legacy", "requirements": ["datadog==0.15.0"] } diff --git a/homeassistant/components/ddwrt/manifest.json b/homeassistant/components/ddwrt/manifest.json index 98ea17b0659..9a2b2470131 100644 --- a/homeassistant/components/ddwrt/manifest.json +++ b/homeassistant/components/ddwrt/manifest.json @@ -3,5 +3,6 @@ "name": "DD-WRT", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ddwrt", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/deconz/config_flow.py b/homeassistant/components/deconz/config_flow.py index 3fb025b4d99..ed54701f656 100644 --- a/homeassistant/components/deconz/config_flow.py +++ b/homeassistant/components/deconz/config_flow.py @@ -74,9 +74,11 @@ class DeconzFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> DeconzOptionsFlowHandler: """Get the options flow for this handler.""" - return DeconzOptionsFlowHandler(config_entry) + return DeconzOptionsFlowHandler() def __init__(self) -> None: """Initialize the deCONZ config flow.""" @@ -299,11 +301,6 @@ class DeconzOptionsFlowHandler(OptionsFlow): gateway: DeconzHub - def __init__(self, config_entry: 
ConfigEntry) -> None: - """Initialize deCONZ options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -315,8 +312,7 @@ class DeconzOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage the deconz devices options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) schema_options = {} for option, default in ( diff --git a/homeassistant/components/deconz/manifest.json b/homeassistant/components/deconz/manifest.json index 04aaa6bc324..93ae8e392c8 100644 --- a/homeassistant/components/deconz/manifest.json +++ b/homeassistant/components/deconz/manifest.json @@ -7,7 +7,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pydeconz"], - "quality_scale": "platinum", "requirements": ["pydeconz==118"], "ssdp": [ { diff --git a/homeassistant/components/decora/manifest.json b/homeassistant/components/decora/manifest.json index bef42f8b4ab..64dc01d09a1 100644 --- a/homeassistant/components/decora/manifest.json +++ b/homeassistant/components/decora/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/decora", "iot_class": "local_polling", "loggers": ["bluepy", "decora"], + "quality_scale": "legacy", "requirements": ["bluepy==1.3.0", "decora==0.6"] } diff --git a/homeassistant/components/decora_wifi/manifest.json b/homeassistant/components/decora_wifi/manifest.json index 0bead527e78..25892dc3e64 100644 --- a/homeassistant/components/decora_wifi/manifest.json +++ b/homeassistant/components/decora_wifi/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/decora_wifi", "iot_class": "cloud_polling", "loggers": ["decora_wifi"], + "quality_scale": "legacy", "requirements": ["decora-wifi==1.4"] } diff --git a/homeassistant/components/delijn/manifest.json b/homeassistant/components/delijn/manifest.json index d25dab4234e..b87242d6e94 100644 --- a/homeassistant/components/delijn/manifest.json +++ b/homeassistant/components/delijn/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/delijn", "iot_class": "cloud_polling", "loggers": ["pydelijn"], + "quality_scale": "legacy", "requirements": ["pydelijn==1.1.0"] } diff --git a/homeassistant/components/demo/climate.py b/homeassistant/components/demo/climate.py index ff0ed5746ca..5424591f021 100644 --- a/homeassistant/components/demo/climate.py +++ b/homeassistant/components/demo/climate.py @@ -43,6 +43,7 @@ async def async_setup_entry( target_humidity=None, current_humidity=None, swing_mode=None, + swing_horizontal_mode=None, hvac_mode=HVACMode.HEAT, hvac_action=HVACAction.HEATING, target_temp_high=None, @@ -60,6 +61,7 @@ async def async_setup_entry( target_humidity=67.4, current_humidity=54.2, swing_mode="off", + swing_horizontal_mode="auto", hvac_mode=HVACMode.COOL, hvac_action=HVACAction.COOLING, target_temp_high=None, @@ -78,6 +80,7 @@ async def async_setup_entry( target_humidity=None, current_humidity=None, swing_mode="auto", + swing_horizontal_mode=None, hvac_mode=HVACMode.HEAT_COOL, hvac_action=None, target_temp_high=24, @@ -109,6 +112,7 @@ class DemoClimate(ClimateEntity): target_humidity: float | None, current_humidity: float | None, swing_mode: str | None, + swing_horizontal_mode: str | None, hvac_mode: HVACMode, hvac_action: 
HVACAction | None,
         target_temp_high: float | None,
@@ -129,6 +133,8 @@ class DemoClimate(ClimateEntity):
             self._attr_supported_features |= ClimateEntityFeature.TARGET_HUMIDITY
         if swing_mode is not None:
             self._attr_supported_features |= ClimateEntityFeature.SWING_MODE
+        if swing_horizontal_mode is not None:
+            self._attr_supported_features |= ClimateEntityFeature.SWING_HORIZONTAL_MODE
         if HVACMode.HEAT_COOL in hvac_modes or HVACMode.AUTO in hvac_modes:
             self._attr_supported_features |= (
                 ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
@@ -147,9 +153,11 @@ class DemoClimate(ClimateEntity):
         self._hvac_action = hvac_action
         self._hvac_mode = hvac_mode
         self._current_swing_mode = swing_mode
+        self._current_swing_horizontal_mode = swing_horizontal_mode
         self._fan_modes = ["on_low", "on_high", "auto_low", "auto_high", "off"]
         self._hvac_modes = hvac_modes
         self._swing_modes = ["auto", "1", "2", "3", "off"]
+        self._swing_horizontal_modes = ["auto", "rangefull", "off"]
         self._target_temperature_high = target_temp_high
         self._target_temperature_low = target_temp_low
         self._attr_device_info = DeviceInfo(
@@ -242,6 +250,16 @@ class DemoClimate(ClimateEntity):
         """List of available swing modes."""
         return self._swing_modes
 
+    @property
+    def swing_horizontal_mode(self) -> str | None:
+        """Return the horizontal swing setting."""
+        return self._current_swing_horizontal_mode
+
+    @property
+    def swing_horizontal_modes(self) -> list[str]:
+        """List of available horizontal swing modes."""
+        return self._swing_horizontal_modes
+
     async def async_set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperatures."""
         if kwargs.get(ATTR_TEMPERATURE) is not None:
@@ -266,6 +284,11 @@ class DemoClimate(ClimateEntity):
         self._current_swing_mode = swing_mode
         self.async_write_ha_state()
 
+    async def async_set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None:
+        """Set new horizontal swing mode."""
+        self._current_swing_horizontal_mode = swing_horizontal_mode
+        self.async_write_ha_state()
+
     async def async_set_fan_mode(self, fan_mode: str) -> None:
         """Set new fan mode."""
         self._current_fan_mode = fan_mode
diff --git a/homeassistant/components/demo/config_flow.py b/homeassistant/components/demo/config_flow.py
index 241f62bed69..53c1678aa81 100644
--- a/homeassistant/components/demo/config_flow.py
+++ b/homeassistant/components/demo/config_flow.py
@@ -47,7 +47,6 @@ class OptionsFlowHandler(OptionsFlow):
 
     def __init__(self, config_entry: ConfigEntry) -> None:
         """Initialize options flow."""
-        self.config_entry = config_entry
         self.options = dict(config_entry.options)
 
     async def async_step_init(
diff --git a/homeassistant/components/demo/icons.json b/homeassistant/components/demo/icons.json
index 17425a6d119..eafcbb9161a 100644
--- a/homeassistant/components/demo/icons.json
+++ b/homeassistant/components/demo/icons.json
@@ -19,6 +19,13 @@
         "auto": "mdi:arrow-oscillating",
         "off": "mdi:arrow-oscillating-off"
       }
+    },
+    "swing_horizontal_mode": {
+      "state": {
+        "rangefull": "mdi:pan-horizontal",
+        "auto": "mdi:compare-horizontal",
+        "off": "mdi:arrow-oscillating-off"
+      }
     }
   }
 }
diff --git a/homeassistant/components/demo/strings.json b/homeassistant/components/demo/strings.json
index aa5554e9fcc..da72b33d3ca 100644
--- a/homeassistant/components/demo/strings.json
+++ b/homeassistant/components/demo/strings.json
@@ -42,6 +42,13 @@
         "auto": "Auto",
         "off": "[%key:common::state::off%]"
       }
+    },
+    "swing_horizontal_mode": {
+      "state": {
+        "rangefull": "Full range",
+        "auto": "Auto",
+        "off": "[%key:common::state::off%]"
+      }
     }
   }
 }
diff --git
a/homeassistant/components/denon/manifest.json b/homeassistant/components/denon/manifest.json index d94e8a264e3..9e840b43fcf 100644 --- a/homeassistant/components/denon/manifest.json +++ b/homeassistant/components/denon/manifest.json @@ -3,5 +3,6 @@ "name": "Denon Network Receivers", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/denon", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/denonavr/config_flow.py b/homeassistant/components/denonavr/config_flow.py index 9a7d2a30438..9ff05411588 100644 --- a/homeassistant/components/denonavr/config_flow.py +++ b/homeassistant/components/denonavr/config_flow.py @@ -52,10 +52,6 @@ CONFIG_SCHEMA = vol.Schema({vol.Optional(CONF_HOST): str}) class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -119,7 +115,7 @@ class DenonAvrFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/denonavr/manifest.json b/homeassistant/components/denonavr/manifest.json index eff70b94a18..328ab504bd1 100644 --- a/homeassistant/components/denonavr/manifest.json +++ b/homeassistant/components/denonavr/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/denonavr", "iot_class": "local_push", "loggers": ["denonavr"], - "requirements": ["denonavr==1.0.0"], + "requirements": ["denonavr==1.0.1"], "ssdp": [ { "manufacturer": "Denon", diff --git a/homeassistant/components/derivative/strings.json b/homeassistant/components/derivative/strings.json index 4b66c893d57..bfdf861a019 100644 --- a/homeassistant/components/derivative/strings.json +++ b/homeassistant/components/derivative/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Derivative sensor", + "title": "Create Derivative sensor", "description": "Create a sensor that estimates the derivative of a sensor.", "data": { "name": "[%key:common::config_flow::data::name%]", diff --git a/homeassistant/components/device_tracker/strings.json b/homeassistant/components/device_tracker/strings.json index d6e36d92300..294333a5d80 100644 --- a/homeassistant/components/device_tracker/strings.json +++ b/homeassistant/components/device_tracker/strings.json @@ -48,7 +48,7 @@ "services": { "see": { "name": "See", - "description": "Records a seen tracked device.", + "description": "Manually update the records of a seen legacy device tracker in the known_devices.yaml file.", "fields": { "mac": { "name": "MAC address", diff --git a/homeassistant/components/devolo_home_control/manifest.json b/homeassistant/components/devolo_home_control/manifest.json index eb85e827551..a9715fffa84 100644 --- a/homeassistant/components/devolo_home_control/manifest.json +++ b/homeassistant/components/devolo_home_control/manifest.json @@ -8,7 +8,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["devolo_home_control_api"], - "quality_scale": "gold", "requirements": ["devolo-home-control-api==0.18.3"], "zeroconf": ["_dvl-deviceapi._tcp.local."] } diff --git 
a/homeassistant/components/devolo_home_network/__init__.py b/homeassistant/components/devolo_home_network/__init__.py index 70a94531431..7f6784f2404 100644 --- a/homeassistant/components/devolo_home_network/__init__.py +++ b/homeassistant/components/devolo_home_network/__init__.py @@ -83,7 +83,6 @@ async def async_setup_entry( ) except DeviceNotFound as err: raise ConfigEntryNotReady( - f"Unable to connect to {entry.data[CONF_IP_ADDRESS]}", translation_domain=DOMAIN, translation_key="connection_failed", translation_placeholders={"ip_address": entry.data[CONF_IP_ADDRESS]}, @@ -98,7 +97,11 @@ async def async_setup_entry( try: return await device.device.async_check_firmware_available() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def async_update_connected_plc_devices() -> LogicalNetwork: """Fetch data from API endpoint.""" @@ -107,7 +110,11 @@ async def async_setup_entry( try: return await device.plcnet.async_get_network_overview() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def async_update_guest_wifi_status() -> WifiGuestAccessGet: """Fetch data from API endpoint.""" @@ -116,10 +123,14 @@ async def async_setup_entry( try: return await device.device.async_get_wifi_guest_access() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err except DevicePasswordProtected as err: raise ConfigEntryAuthFailed( - err, translation_domain=DOMAIN, translation_key="password_wrong" + translation_domain=DOMAIN, translation_key="password_wrong" ) from err async def async_update_led_status() -> bool: @@ -129,7 +140,11 @@ async def async_setup_entry( try: return await device.device.async_get_led_setting() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def async_update_last_restart() -> int: """Fetch data from API endpoint.""" @@ -138,10 +153,14 @@ async def async_setup_entry( try: return await device.device.async_uptime() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err except DevicePasswordProtected as err: raise ConfigEntryAuthFailed( - err, translation_domain=DOMAIN, translation_key="password_wrong" + translation_domain=DOMAIN, translation_key="password_wrong" ) from err async def async_update_wifi_connected_station() -> list[ConnectedStationInfo]: @@ -151,7 +170,11 @@ async def async_setup_entry( try: return await device.device.async_get_wifi_connected_station() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def async_update_wifi_neighbor_access_points() -> list[NeighborAPInfo]: """Fetch data from API endpoint.""" @@ -160,7 +183,11 @@ async def async_setup_entry( try: return await 
device.device.async_get_wifi_neighbor_access_points() except DeviceUnavailable as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(err)}, + ) from err async def disconnect(event: Event) -> None: """Disconnect from device.""" diff --git a/homeassistant/components/devolo_home_network/manifest.json b/homeassistant/components/devolo_home_network/manifest.json index 27fd08898c0..d10e14f9081 100644 --- a/homeassistant/components/devolo_home_network/manifest.json +++ b/homeassistant/components/devolo_home_network/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["devolo_plc_api"], - "quality_scale": "platinum", "requirements": ["devolo-plc-api==1.4.1"], "zeroconf": [ { diff --git a/homeassistant/components/devolo_home_network/strings.json b/homeassistant/components/devolo_home_network/strings.json index 0799bb14172..4b683b5d2fa 100644 --- a/homeassistant/components/devolo_home_network/strings.json +++ b/homeassistant/components/devolo_home_network/strings.json @@ -6,11 +6,17 @@ "description": "[%key:common::config_flow::description::confirm_setup%]", "data": { "ip_address": "[%key:common::config_flow::data::ip%]" + }, + "data_description": { + "ip_address": "IP address of your devolo Home Network device. This can be found in the devolo Home Network App on the device dashboard." } }, "reauth_confirm": { "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Password you protected the device with." } }, "zeroconf_confirm": { @@ -94,6 +100,9 @@ }, "password_wrong": { "message": "The used password is wrong" + }, + "update_failed": { + "message": "Error while updating the data: {error}" } } } diff --git a/homeassistant/components/dexcom/__init__.py b/homeassistant/components/dexcom/__init__.py index b9a3bdba12d..e93e8e66358 100644 --- a/homeassistant/components/dexcom/__init__.py +++ b/homeassistant/components/dexcom/__init__.py @@ -6,12 +6,12 @@ import logging from pydexcom import AccountError, Dexcom, GlucoseReading, SessionError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import CONF_SERVER, DOMAIN, MG_DL, PLATFORMS, SERVER_OUS +from .const import CONF_SERVER, DOMAIN, PLATFORMS, SERVER_OUS _LOGGER = logging.getLogger(__name__) @@ -32,11 +32,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except SessionError as error: raise ConfigEntryNotReady from error - if not entry.options: - hass.config_entries.async_update_entry( - entry, options={CONF_UNIT_OF_MEASUREMENT: MG_DL} - ) - async def async_update_data(): try: return await hass.async_add_executor_job(dexcom.get_current_glucose_reading) @@ -55,8 +50,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator - entry.async_on_unload(entry.add_update_listener(update_listener)) - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -67,8 +60,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: 
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): hass.data[DOMAIN].pop(entry.entry_id) return unload_ok - - -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: - """Handle options update.""" - await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index c3ed43c8e9a..90917e0ce2c 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -7,16 +7,10 @@ from typing import Any from pydexcom import AccountError, Dexcom, SessionError import voluptuous as vol -from homeassistant.config_entries import ( - ConfigEntry, - ConfigFlow, - ConfigFlowResult, - OptionsFlow, -) -from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME -from homeassistant.core import callback +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME -from .const import CONF_SERVER, DOMAIN, MG_DL, MMOL_L, SERVER_OUS, SERVER_US +from .const import CONF_SERVER, DOMAIN, SERVER_OUS, SERVER_US DATA_SCHEMA = vol.Schema( { @@ -62,38 +56,3 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - - @staticmethod - @callback - def async_get_options_flow( - config_entry: ConfigEntry, - ) -> DexcomOptionsFlowHandler: - """Get the options flow for this handler.""" - return DexcomOptionsFlowHandler(config_entry) - - -class DexcomOptionsFlowHandler(OptionsFlow): - """Handle a option flow for Dexcom.""" - - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - - async def async_step_init( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Handle options flow.""" - if user_input is not None: - return self.async_create_entry(title="", data=user_input) - - data_schema = vol.Schema( - { - vol.Optional( - CONF_UNIT_OF_MEASUREMENT, - default=self.config_entry.options.get( - CONF_UNIT_OF_MEASUREMENT, MG_DL - ), - ): vol.In({MG_DL, MMOL_L}), - } - ) - return self.async_show_form(step_id="init", data_schema=data_schema) diff --git a/homeassistant/components/dexcom/const.py b/homeassistant/components/dexcom/const.py index 487a844eb2b..66999e51e4b 100644 --- a/homeassistant/components/dexcom/const.py +++ b/homeassistant/components/dexcom/const.py @@ -5,9 +5,6 @@ from homeassistant.const import Platform DOMAIN = "dexcom" PLATFORMS = [Platform.SENSOR] -MMOL_L = "mmol/L" -MG_DL = "mg/dL" - CONF_SERVER = "server" SERVER_OUS = "EU" diff --git a/homeassistant/components/dexcom/sensor.py b/homeassistant/components/dexcom/sensor.py index 10b30f39fcb..850678e7ac9 100644 --- a/homeassistant/components/dexcom/sensor.py +++ b/homeassistant/components/dexcom/sensor.py @@ -6,7 +6,7 @@ from pydexcom import GlucoseReading from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME +from homeassistant.const import CONF_USERNAME, UnitOfBloodGlucoseConcentration from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -15,7 +15,7 @@ from homeassistant.helpers.update_coordinator import ( 
DataUpdateCoordinator, ) -from .const import DOMAIN, MG_DL +from .const import DOMAIN TRENDS = { 1: "rising_quickly", @@ -36,13 +36,10 @@ async def async_setup_entry( """Set up the Dexcom sensors.""" coordinator = hass.data[DOMAIN][config_entry.entry_id] username = config_entry.data[CONF_USERNAME] - unit_of_measurement = config_entry.options[CONF_UNIT_OF_MEASUREMENT] async_add_entities( [ DexcomGlucoseTrendSensor(coordinator, username, config_entry.entry_id), - DexcomGlucoseValueSensor( - coordinator, username, config_entry.entry_id, unit_of_measurement - ), + DexcomGlucoseValueSensor(coordinator, username, config_entry.entry_id), ], ) @@ -73,6 +70,10 @@ class DexcomSensorEntity( class DexcomGlucoseValueSensor(DexcomSensorEntity): """Representation of a Dexcom glucose value sensor.""" + _attr_device_class = SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION + _attr_native_unit_of_measurement = ( + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER + ) _attr_translation_key = "glucose_value" def __init__( @@ -80,18 +81,15 @@ class DexcomGlucoseValueSensor(DexcomSensorEntity): coordinator: DataUpdateCoordinator, username: str, entry_id: str, - unit_of_measurement: str, ) -> None: """Initialize the sensor.""" super().__init__(coordinator, username, entry_id, "value") - self._attr_native_unit_of_measurement = unit_of_measurement - self._key = "mg_dl" if unit_of_measurement == MG_DL else "mmol_l" @property def native_value(self): """Return the state of the sensor.""" if self.coordinator.data: - return getattr(self.coordinator.data, self._key) + return self.coordinator.data.mg_dl return None diff --git a/homeassistant/components/digital_ocean/manifest.json b/homeassistant/components/digital_ocean/manifest.json index 7fee8ca5b2b..819a557491a 100644 --- a/homeassistant/components/digital_ocean/manifest.json +++ b/homeassistant/components/digital_ocean/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/digital_ocean", "iot_class": "local_polling", "loggers": ["digitalocean"], + "quality_scale": "legacy", "requirements": ["python-digitalocean==1.13.2"] } diff --git a/homeassistant/components/directv/manifest.json b/homeassistant/components/directv/manifest.json index 957bbff0acc..bee2c297635 100644 --- a/homeassistant/components/directv/manifest.json +++ b/homeassistant/components/directv/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/directv", "iot_class": "local_polling", "loggers": ["directv"], - "quality_scale": "silver", "requirements": ["directv==0.4.0"], "ssdp": [ { diff --git a/homeassistant/components/discogs/manifest.json b/homeassistant/components/discogs/manifest.json index fceb214aded..f724b4bc6fd 100644 --- a/homeassistant/components/discogs/manifest.json +++ b/homeassistant/components/discogs/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/discogs", "iot_class": "cloud_polling", "loggers": ["discogs_client"], + "quality_scale": "legacy", "requirements": ["discogs-client==2.3.0"] } diff --git a/homeassistant/components/discovergy/config_flow.py b/homeassistant/components/discovergy/config_flow.py index 05ed90bf354..f24fdd1e43d 100644 --- a/homeassistant/components/discovergy/config_flow.py +++ b/homeassistant/components/discovergy/config_flow.py @@ -11,12 +11,7 @@ from pydiscovergy.authentication import BasicAuth import pydiscovergy.error as discovergyError import voluptuous as vol -from homeassistant.config_entries import ( - SOURCE_REAUTH, - ConfigEntry, - 
ConfigFlow,
-    ConfigFlowResult,
-)
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
 from homeassistant.helpers.httpx_client import get_async_client
 from homeassistant.helpers.selector import (
@@ -57,35 +52,14 @@ class DiscovergyConfigFlow(ConfigFlow, domain=DOMAIN):
 
     VERSION = 1
 
-    _existing_entry: ConfigEntry
-
-    async def async_step_user(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle the initial step."""
-        if user_input is None:
-            return self.async_show_form(
-                step_id="user",
-                data_schema=CONFIG_SCHEMA,
-            )
-
-        return await self._validate_and_save(user_input)
-
     async def async_step_reauth(
         self, entry_data: Mapping[str, Any]
     ) -> ConfigFlowResult:
         """Handle the initial step."""
-        self._existing_entry = self._get_reauth_entry()
-        return await self.async_step_reauth_confirm()
+        return await self.async_step_user()
 
-    async def async_step_reauth_confirm(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Handle the reauth step."""
-        return await self._validate_and_save(user_input, step_id="reauth_confirm")
-
-    async def _validate_and_save(
-        self, user_input: Mapping[str, Any] | None = None, step_id: str = "user"
+    async def async_step_user(
+        self, user_input: Mapping[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Validate user input and create config entry."""
         errors = {}
@@ -106,17 +80,17 @@ class DiscovergyConfigFlow(ConfigFlow, domain=DOMAIN):
                 _LOGGER.exception("Unexpected error occurred while getting meters")
                 errors["base"] = "unknown"
             else:
+                await self.async_set_unique_id(user_input[CONF_EMAIL].lower())
+                if self.source == SOURCE_REAUTH:
+                    self._abort_if_unique_id_mismatch(reason="account_mismatch")
                     return self.async_update_reload_and_abort(
-                        entry=self._existing_entry,
-                        data={
-                            CONF_EMAIL: user_input[CONF_EMAIL],
+                        entry=self._get_reauth_entry(),
+                        data_updates={
                             CONF_PASSWORD: user_input[CONF_PASSWORD],
                         },
                     )
-                # set unique id to title which is the account email
-                await self.async_set_unique_id(user_input[CONF_EMAIL].lower())
                 self._abort_if_unique_id_configured()
 
                 return self.async_create_entry(
@@ -124,10 +98,10 @@ class DiscovergyConfigFlow(ConfigFlow, domain=DOMAIN):
 
         return self.async_show_form(
-            step_id=step_id,
+            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                 CONFIG_SCHEMA,
-                self._existing_entry.data
+                self._get_reauth_entry().data
                 if self.source == SOURCE_REAUTH
                 else user_input,
             ),
diff --git a/homeassistant/components/discovergy/quality_scale.yaml b/homeassistant/components/discovergy/quality_scale.yaml
new file mode 100644
index 00000000000..3caeaa6bbe0
--- /dev/null
+++ b/homeassistant/components/discovergy/quality_scale.yaml
@@ -0,0 +1,96 @@
+rules:
+  # Bronze
+  action-setup:
+    status: exempt
+    comment: |
+      The integration does not provide any additional actions.
+  appropriate-polling: done
+  brands: done
+  common-modules: done
+  config-flow-test-coverage: done
+  config-flow:
+    status: todo
+    comment: |
+      The data_descriptions are missing.
+  dependency-transparency: done
+  docs-actions:
+    status: exempt
+    comment: |
+      The integration does not provide any additional actions.
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: todo
+  entity-event-setup:
+    status: exempt
+    comment: |
+      Entities of this integration do not explicitly subscribe to events.
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+
+  # Silver
+  action-exceptions:
+    status: exempt
+    comment: |
+      The integration does not provide any additional actions.
+  config-entry-unloading: done
+  docs-configuration-parameters:
+    status: exempt
+    comment: |
+      The integration does not provide any additional options.
+  docs-installation-parameters: todo
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates: todo
+  reauthentication-flow: done
+  test-coverage: done
+
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info:
+    status: exempt
+    comment: |
+      This integration cannot be discovered; it connects to a cloud service.
+  discovery:
+    status: exempt
+    comment: |
+      This integration cannot be discovered; it connects to a cloud service.
+  docs-data-update: todo
+  docs-examples: todo
+  docs-known-limitations: todo
+  docs-supported-devices: todo
+  docs-supported-functions: todo
+  docs-troubleshooting: todo
+  docs-use-cases: todo
+  dynamic-devices:
+    status: exempt
+    comment: |
+      The integration connects to a single device per configuration entry.
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default: done
+  entity-translations: done
+  exception-translations: todo
+  icon-translations:
+    status: exempt
+    comment: |
+      The integration does not provide any additional icons.
+  reconfiguration-flow: todo
+  repair-issues:
+    status: exempt
+    comment: |
+      This integration does not raise any repairable issues.
+  stale-devices:
+    status: exempt
+    comment: |
+      This integration connects to a single device per configuration entry.
+
+  # Platinum
+  async-dependency: done
+  inject-websession: done
+  strict-typing: done
diff --git a/homeassistant/components/discovergy/strings.json b/homeassistant/components/discovergy/strings.json
index 9a91fa92dc4..b626a11ea1e 100644
--- a/homeassistant/components/discovergy/strings.json
+++ b/homeassistant/components/discovergy/strings.json
@@ -6,12 +6,6 @@
           "email": "[%key:common::config_flow::data::email%]",
           "password": "[%key:common::config_flow::data::password%]"
         }
-      },
-      "reauth_confirm": {
-        "data": {
-          "email": "[%key:common::config_flow::data::email%]",
-          "password": "[%key:common::config_flow::data::password%]"
-        }
       }
     },
     "error": {
@@ -21,6 +15,7 @@
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
+      "account_mismatch": "The inexogy account used for authentication does not match the account that needs re-authentication.",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   },
diff --git a/homeassistant/components/dlib_face_detect/manifest.json b/homeassistant/components/dlib_face_detect/manifest.json
index e395a84f206..e8476583081 100644
--- a/homeassistant/components/dlib_face_detect/manifest.json
+++ b/homeassistant/components/dlib_face_detect/manifest.json
@@ -5,5 +5,6 @@
   "documentation": "https://www.home-assistant.io/integrations/dlib_face_detect",
   "iot_class": "local_push",
   "loggers": ["face_recognition"],
+  "quality_scale": "legacy",
   "requirements": ["face-recognition==1.2.3"]
 }
diff --git a/homeassistant/components/dlib_face_identify/manifest.json b/homeassistant/components/dlib_face_identify/manifest.json
index 60c0ef3c766..2a764e4a3e8 100644
--- a/homeassistant/components/dlib_face_identify/manifest.json
+++ b/homeassistant/components/dlib_face_identify/manifest.json
@@ -5,5 +5,6
@@ "documentation": "https://www.home-assistant.io/integrations/dlib_face_identify", "iot_class": "local_push", "loggers": ["face_recognition"], + "quality_scale": "legacy", "requirements": ["face-recognition==1.2.3"] } diff --git a/homeassistant/components/dlna_dmr/config_flow.py b/homeassistant/components/dlna_dmr/config_flow.py index 06ac935e8d9..75f50192500 100644 --- a/homeassistant/components/dlna_dmr/config_flow.py +++ b/homeassistant/components/dlna_dmr/config_flow.py @@ -74,7 +74,7 @@ class DlnaDmrFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Define the config flow to handle options.""" - return DlnaDmrOptionsFlowHandler(config_entry) + return DlnaDmrOptionsFlowHandler() async def async_step_user(self, user_input: FlowInput = None) -> ConfigFlowResult: """Handle a flow initialized by the user. @@ -327,10 +327,6 @@ class DlnaDmrOptionsFlowHandler(OptionsFlow): Configures the single instance and updates the existing config entry. """ - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/dlna_dms/manifest.json b/homeassistant/components/dlna_dms/manifest.json index 091e083ceda..1913bb9d5d7 100644 --- a/homeassistant/components/dlna_dms/manifest.json +++ b/homeassistant/components/dlna_dms/manifest.json @@ -7,7 +7,6 @@ "dependencies": ["ssdp"], "documentation": "https://www.home-assistant.io/integrations/dlna_dms", "iot_class": "local_polling", - "quality_scale": "platinum", "requirements": ["async-upnp-client==0.41.0"], "ssdp": [ { diff --git a/homeassistant/components/dnsip/config_flow.py b/homeassistant/components/dnsip/config_flow.py index 6dda0c03910..8c2cfa5e556 100644 --- a/homeassistant/components/dnsip/config_flow.py +++ b/homeassistant/components/dnsip/config_flow.py @@ -14,7 +14,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_NAME, CONF_PORT from homeassistant.core import callback @@ -101,7 +101,7 @@ class DnsIPConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> DnsIPOptionsFlowHandler: """Return Option handler.""" - return DnsIPOptionsFlowHandler(config_entry) + return DnsIPOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -165,7 +165,7 @@ class DnsIPConfigFlow(ConfigFlow, domain=DOMAIN): ) -class DnsIPOptionsFlowHandler(OptionsFlowWithConfigEntry): +class DnsIPOptionsFlowHandler(OptionsFlow): """Handle a option config flow for dnsip integration.""" async def async_step_init( diff --git a/homeassistant/components/dominos/manifest.json b/homeassistant/components/dominos/manifest.json index 442f433db7c..5618c6f0d87 100644 --- a/homeassistant/components/dominos/manifest.json +++ b/homeassistant/components/dominos/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/dominos", "iot_class": "cloud_polling", "loggers": ["pizzapi"], + "quality_scale": "legacy", "requirements": ["pizzapi==0.0.6"] } diff --git a/homeassistant/components/doods/manifest.json b/homeassistant/components/doods/manifest.json index fabb2c30190..ae307bb4962 100644 --- a/homeassistant/components/doods/manifest.json +++ b/homeassistant/components/doods/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/doods", 
"iot_class": "local_polling", "loggers": ["pydoods"], - "requirements": ["pydoods==1.0.2", "Pillow==10.4.0"] + "quality_scale": "legacy", + "requirements": ["pydoods==1.0.2", "Pillow==11.0.0"] } diff --git a/homeassistant/components/doorbird/config_flow.py b/homeassistant/components/doorbird/config_flow.py index 650ddb8811d..ebb1d6fc126 100644 --- a/homeassistant/components/doorbird/config_flow.py +++ b/homeassistant/components/doorbird/config_flow.py @@ -213,16 +213,12 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for doorbird.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/dovado/manifest.json b/homeassistant/components/dovado/manifest.json index 9a0fc46ad16..78b1e0c6719 100644 --- a/homeassistant/components/dovado/manifest.json +++ b/homeassistant/components/dovado/manifest.json @@ -5,5 +5,6 @@ "disabled": "This integration is disabled because it uses non-open source code to operate.", "documentation": "https://www.home-assistant.io/integrations/dovado", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["dovado==0.4.1"] } diff --git a/homeassistant/components/dsmr/config_flow.py b/homeassistant/components/dsmr/config_flow.py index 49e1818edcc..7d6a641b006 100644 --- a/homeassistant/components/dsmr/config_flow.py +++ b/homeassistant/components/dsmr/config_flow.py @@ -171,9 +171,11 @@ class DSMRFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> DSMROptionFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> DSMROptionFlowHandler: """Get the options flow for this handler.""" - return DSMROptionFlowHandler(config_entry) + return DSMROptionFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -311,10 +313,6 @@ class DSMRFlowHandler(ConfigFlow, domain=DOMAIN): class DSMROptionFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -328,7 +326,7 @@ class DSMROptionFlowHandler(OptionsFlow): { vol.Optional( CONF_TIME_BETWEEN_UPDATE, - default=self.entry.options.get( + default=self.config_entry.options.get( CONF_TIME_BETWEEN_UPDATE, DEFAULT_TIME_BETWEEN_UPDATE ), ): vol.All(vol.Coerce(int), vol.Range(min=0)), diff --git a/homeassistant/components/dsmr_reader/manifest.json b/homeassistant/components/dsmr_reader/manifest.json index 7adb664fbd8..9c0e6da2c46 100644 --- a/homeassistant/components/dsmr_reader/manifest.json +++ b/homeassistant/components/dsmr_reader/manifest.json @@ -6,6 +6,5 @@ "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/dsmr_reader", "iot_class": "local_push", - "mqtt": ["dsmr/#"], - "quality_scale": "gold" + "mqtt": ["dsmr/#"] } diff --git a/homeassistant/components/dte_energy_bridge/manifest.json b/homeassistant/components/dte_energy_bridge/manifest.json index f5b57d82869..8285469a745 100644 --- 
a/homeassistant/components/dte_energy_bridge/manifest.json +++ b/homeassistant/components/dte_energy_bridge/manifest.json @@ -3,5 +3,6 @@ "name": "DTE Energy Bridge", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/dte_energy_bridge", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/dublin_bus_transport/manifest.json b/homeassistant/components/dublin_bus_transport/manifest.json index 1866da8ed8d..3df22b0da00 100644 --- a/homeassistant/components/dublin_bus_transport/manifest.json +++ b/homeassistant/components/dublin_bus_transport/manifest.json @@ -3,5 +3,6 @@ "name": "Dublin Bus", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/dublin_bus_transport", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/duckdns/manifest.json b/homeassistant/components/duckdns/manifest.json index b14da053450..b48ed0b2394 100644 --- a/homeassistant/components/duckdns/manifest.json +++ b/homeassistant/components/duckdns/manifest.json @@ -3,5 +3,6 @@ "name": "Duck DNS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/duckdns", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/duotecno/manifest.json b/homeassistant/components/duotecno/manifest.json index 2a427e36e84..7a79902eae3 100644 --- a/homeassistant/components/duotecno/manifest.json +++ b/homeassistant/components/duotecno/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/duotecno", "iot_class": "local_push", "loggers": ["pyduotecno", "pyduotecno-node", "pyduotecno-unit"], - "quality_scale": "silver", "requirements": ["pyDuotecno==2024.10.1"], "single_config_entry": true } diff --git a/homeassistant/components/dweet/manifest.json b/homeassistant/components/dweet/manifest.json index 4badf76f2e9..b4efd0744fb 100644 --- a/homeassistant/components/dweet/manifest.json +++ b/homeassistant/components/dweet/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/dweet", "iot_class": "cloud_polling", "loggers": ["dweepy"], + "quality_scale": "legacy", "requirements": ["dweepy==0.3.0"] } diff --git a/homeassistant/components/dynalite/__init__.py b/homeassistant/components/dynalite/__init__.py index 59b8e464bb0..7388c43cb89 100644 --- a/homeassistant/components/dynalite/__init__.py +++ b/homeassistant/components/dynalite/__init__.py @@ -4,21 +4,17 @@ from __future__ import annotations import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType -# Loading the config flow file will register the flow from .bridge import DynaliteBridge from .const import ( ATTR_AREA, ATTR_CHANNEL, ATTR_HOST, - CONF_BRIDGES, DOMAIN, LOGGER, PLATFORMS, @@ -27,41 +23,14 @@ from .const import ( ) from .convert_config import convert_config from .panel import async_register_dynalite_frontend -from .schema import BRIDGE_SCHEMA -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - {vol.Optional(CONF_BRIDGES): vol.All(cv.ensure_list, 
[BRIDGE_SCHEMA])} - ), - }, - ), - extra=vol.ALLOW_EXTRA, -) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Dynalite platform.""" - conf = config.get(DOMAIN, {}) - LOGGER.debug("Setting up dynalite component config = %s", conf) hass.data[DOMAIN] = {} - bridges = conf.get(CONF_BRIDGES, []) - - for bridge_conf in bridges: - host = bridge_conf[CONF_HOST] - LOGGER.debug("Starting config entry flow host=%s conf=%s", host, bridge_conf) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=bridge_conf, - ) - ) - async def dynalite_service(service_call: ServiceCall) -> None: data = service_call.data host = data.get(ATTR_HOST, "") diff --git a/homeassistant/components/dynalite/config_flow.py b/homeassistant/components/dynalite/config_flow.py index 928f7043a49..4b111c25cc9 100644 --- a/homeassistant/components/dynalite/config_flow.py +++ b/homeassistant/components/dynalite/config_flow.py @@ -8,9 +8,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .bridge import DynaliteBridge from .const import DEFAULT_PORT, DOMAIN, LOGGER @@ -26,38 +24,6 @@ class DynaliteFlowHandler(ConfigFlow, domain=DOMAIN): """Initialize the Dynalite flow.""" self.host = None - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import a new bridge as a config entry.""" - LOGGER.debug("Starting async_step_import (deprecated) - %s", import_data) - # Raise an issue that this is deprecated and has been imported - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2023.12.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Dynalite", - }, - ) - - host = import_data[CONF_HOST] - # Check if host already exists - for entry in self._async_current_entries(): - if entry.data[CONF_HOST] == host: - self.hass.config_entries.async_update_entry( - entry, data=dict(import_data) - ) - return self.async_abort(reason="already_configured") - - # New entry - return await self._try_create(import_data) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/dynalite/const.py b/homeassistant/components/dynalite/const.py index c1cb1a0fb1b..4712b14bea3 100644 --- a/homeassistant/components/dynalite/const.py +++ b/homeassistant/components/dynalite/const.py @@ -16,7 +16,6 @@ ACTIVE_OFF = "off" ACTIVE_ON = "on" CONF_AREA = "area" CONF_AUTO_DISCOVER = "autodiscover" -CONF_BRIDGES = "bridges" CONF_CHANNEL = "channel" CONF_CHANNEL_COVER = "channel_cover" CONF_CLOSE_PRESET = "close" diff --git a/homeassistant/components/easyenergy/manifest.json b/homeassistant/components/easyenergy/manifest.json index 4d45dc2d399..25432196169 100644 --- a/homeassistant/components/easyenergy/manifest.json +++ b/homeassistant/components/easyenergy/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": 
"https://www.home-assistant.io/integrations/easyenergy", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["easyenergy==2.1.2"] } diff --git a/homeassistant/components/ebox/manifest.json b/homeassistant/components/ebox/manifest.json index 952f9dc133d..d87c85b6612 100644 --- a/homeassistant/components/ebox/manifest.json +++ b/homeassistant/components/ebox/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ebox", "iot_class": "cloud_polling", "loggers": ["pyebox"], + "quality_scale": "legacy", "requirements": ["pyebox==1.1.4"] } diff --git a/homeassistant/components/ebusd/manifest.json b/homeassistant/components/ebusd/manifest.json index 3ce18d6e8d3..b82e8f1b910 100644 --- a/homeassistant/components/ebusd/manifest.json +++ b/homeassistant/components/ebusd/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ebusd", "iot_class": "local_polling", "loggers": ["ebusdpy"], + "quality_scale": "legacy", "requirements": ["ebusdpy==0.0.17"] } diff --git a/homeassistant/components/ecoal_boiler/manifest.json b/homeassistant/components/ecoal_boiler/manifest.json index 75dc95ae121..4d8202f8fde 100644 --- a/homeassistant/components/ecoal_boiler/manifest.json +++ b/homeassistant/components/ecoal_boiler/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ecoal_boiler", "iot_class": "local_polling", "loggers": ["ecoaliface"], + "quality_scale": "legacy", "requirements": ["ecoaliface==0.4.0"] } diff --git a/homeassistant/components/ecobee/number.py b/homeassistant/components/ecobee/number.py index ab09407903d..ed3744bf11e 100644 --- a/homeassistant/components/ecobee/number.py +++ b/homeassistant/components/ecobee/number.py @@ -6,9 +6,14 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass import logging -from homeassistant.components.number import NumberEntity, NumberEntityDescription +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfTime +from homeassistant.const import UnitOfTemperature, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -54,21 +59,30 @@ async def async_setup_entry( ) -> None: """Set up the ecobee thermostat number entity.""" data: EcobeeData = hass.data[DOMAIN] - _LOGGER.debug("Adding min time ventilators numbers (if present)") - async_add_entities( + assert data is not None + + entities: list[NumberEntity] = [ + EcobeeVentilatorMinTime(data, index, numbers) + for index, thermostat in enumerate(data.ecobee.thermostats) + if thermostat["settings"]["ventilatorType"] != "none" + for numbers in VENTILATOR_NUMBERS + ] + + _LOGGER.debug("Adding compressor min temp number (if present)") + entities.extend( ( - EcobeeVentilatorMinTime(data, index, numbers) + EcobeeCompressorMinTemp(data, index) for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["ventilatorType"] != "none" - for numbers in VENTILATOR_NUMBERS - ), - True, + if thermostat["settings"]["hasHeatPump"] + ) ) + async_add_entities(entities, True) + class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity): - """A number class, representing min time for an ecobee thermostat with ventilator attached.""" + """A number class, representing min time for an ecobee thermostat with ventilator 
attached.""" entity_description: EcobeeNumberEntityDescription @@ -105,3 +119,53 @@ class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity): """Set new ventilator Min On Time value.""" self.entity_description.set_fn(self.data, self.thermostat_index, int(value)) self.update_without_throttle = True + + +class EcobeeCompressorMinTemp(EcobeeBaseEntity, NumberEntity): + """Minimum outdoor temperature at which the compressor will operate. + + This applies more to air source heat pumps than geothermal. This serves as a safety + feature (compressors have a minimum operating temperature) as well as + providing the ability to choose fuel in a dual-fuel system (i.e. choose between + electrical heat pump and fossil auxiliary heat depending on Time of Use, Solar, + etc.). + Note that python-ecobee-api refers to this as Aux Cutover Threshold, but Ecobee + uses Compressor Protection Min Temp. + """ + + _attr_device_class = NumberDeviceClass.TEMPERATURE + _attr_has_entity_name = True + _attr_icon = "mdi:thermometer-off" + _attr_mode = NumberMode.BOX + _attr_native_min_value = -25 + _attr_native_max_value = 66 + _attr_native_step = 5 + _attr_native_unit_of_measurement = UnitOfTemperature.FAHRENHEIT + _attr_translation_key = "compressor_protection_min_temp" + + def __init__( + self, + data: EcobeeData, + thermostat_index: int, + ) -> None: + """Initialize ecobee compressor min temperature.""" + super().__init__(data, thermostat_index) + self._attr_unique_id = f"{self.base_unique_id}_compressor_protection_min_temp" + self.update_without_throttle = False + + async def async_update(self) -> None: + """Get the latest state from the thermostat.""" + if self.update_without_throttle: + await self.data.update(no_throttle=True) + self.update_without_throttle = False + else: + await self.data.update() + + self._attr_native_value = ( + (self.thermostat["settings"]["compressorProtectionMinTemp"]) / 10 + ) + + def set_native_value(self, value: float) -> None: + """Set new compressor minimum temperature.""" + self.data.ecobee.set_aux_cutover_threshold(self.thermostat_index, value) + self.update_without_throttle = True diff --git a/homeassistant/components/ecobee/strings.json b/homeassistant/components/ecobee/strings.json index 18929cb45de..8c636bd9b04 100644 --- a/homeassistant/components/ecobee/strings.json +++ b/homeassistant/components/ecobee/strings.json @@ -33,15 +33,18 @@ }, "number": { "ventilator_min_type_home": { - "name": "Ventilator min time home" + "name": "Ventilator minimum time home" }, "ventilator_min_type_away": { - "name": "Ventilator min time away" + "name": "Ventilator minimum time away" + }, + "compressor_protection_min_temp": { + "name": "Compressor minimum temperature" } }, "switch": { "aux_heat_only": { - "name": "Aux heat only" + "name": "Auxiliary heat only" } } }, diff --git a/homeassistant/components/ecobee/switch.py b/homeassistant/components/ecobee/switch.py index 67be78fb21d..89ee433c072 100644 --- a/homeassistant/components/ecobee/switch.py +++ b/homeassistant/components/ecobee/switch.py @@ -31,25 +31,26 @@ async def async_setup_entry( """Set up the ecobee thermostat switch entity.""" data: EcobeeData = hass.data[DOMAIN] - async_add_entities( - [ - EcobeeVentilator20MinSwitch( - data, - index, - (await dt_util.async_get_time_zone(thermostat["location"]["timeZone"])) - or dt_util.get_default_time_zone(), - ) + entities: list[SwitchEntity] = [ + EcobeeVentilator20MinSwitch( + data, + index, + (await dt_util.async_get_time_zone(thermostat["location"]["timeZone"])) + or 
dt_util.get_default_time_zone(), + ) + for index, thermostat in enumerate(data.ecobee.thermostats) + if thermostat["settings"]["ventilatorType"] != "none" + ] + + entities.extend( + ( + EcobeeSwitchAuxHeatOnly(data, index) for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["ventilatorType"] != "none" - ], - update_before_add=True, + if thermostat["settings"]["hasHeatPump"] + ) ) - async_add_entities( - EcobeeSwitchAuxHeatOnly(data, index) - for index, thermostat in enumerate(data.ecobee.thermostats) - if thermostat["settings"]["hasHeatPump"] - ) + async_add_entities(entities, update_before_add=True) class EcobeeVentilator20MinSwitch(EcobeeBaseEntity, SwitchEntity): diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index ec67845cf9f..3a70ab2af5b 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -13,7 +13,6 @@ from deebot_client.authentication import Authenticator, create_rest_config from deebot_client.const import UNDEFINED, UndefinedType from deebot_client.device import Device from deebot_client.exceptions import DeebotError, InvalidAuthenticationError -from deebot_client.models import DeviceInfo from deebot_client.mqtt_client import MqttClient, create_mqtt_config from deebot_client.util import md5 from deebot_client.util.continents import get_continent @@ -81,25 +80,32 @@ class EcovacsController: try: devices = await self._api_client.get_devices() credentials = await self._authenticator.authenticate() - for device_config in devices: - if isinstance(device_config, DeviceInfo): - # MQTT device - device = Device(device_config, self._authenticator) - mqtt = await self._get_mqtt_client() - await device.initialize(mqtt) - self._devices.append(device) - else: - # Legacy device - bot = VacBot( - credentials.user_id, - EcoVacsAPI.REALM, - self._device_id[0:8], - credentials.token, - device_config, - self._continent, - monitor=True, - ) - self._legacy_devices.append(bot) + for device_info in devices.mqtt: + device = Device(device_info, self._authenticator) + mqtt = await self._get_mqtt_client() + await device.initialize(mqtt) + self._devices.append(device) + for device_config in devices.xmpp: + bot = VacBot( + credentials.user_id, + EcoVacsAPI.REALM, + self._device_id[0:8], + credentials.token, + device_config, + self._continent, + monitor=True, + ) + self._legacy_devices.append(bot) + for device_config in devices.not_supported: + _LOGGER.warning( + ( + 'Device "%s" not supported. 
Please add support for it to ' + "https://github.com/DeebotUniverse/client.py: %s" + ), + device_config["deviceName"], + device_config, + ) + except InvalidAuthenticationError as ex: raise ConfigEntryError("Invalid credentials") from ex except DeebotError as ex: diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 33977b3b0de..546aba01d90 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.4.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==9.1.0"] } diff --git a/homeassistant/components/ecovacs/sensor.py b/homeassistant/components/ecovacs/sensor.py index 28c4efbd0c6..7c190d27775 100644 --- a/homeassistant/components/ecovacs/sensor.py +++ b/homeassistant/components/ecovacs/sensor.py @@ -26,11 +26,11 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( - AREA_SQUARE_METERS, ATTR_BATTERY_LEVEL, CONF_DESCRIPTION, PERCENTAGE, EntityCategory, + UnitOfArea, UnitOfTime, ) from homeassistant.core import HomeAssistant @@ -67,7 +67,7 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = ( capability_fn=lambda caps: caps.stats.clean, value_fn=lambda e: e.area, translation_key="stats_area", - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, ), EcovacsSensorEntityDescription[StatsEvent]( key="stats_time", @@ -84,7 +84,7 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSensorEntityDescription, ...] = ( value_fn=lambda e: e.area, key="total_stats_area", translation_key="total_stats_area", - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, state_class=SensorStateClass.TOTAL_INCREASING, ), EcovacsSensorEntityDescription[TotalStatsEvent]( diff --git a/homeassistant/components/ecowitt/strings.json b/homeassistant/components/ecowitt/strings.json index 95fcc3c3bb0..aaacb5e03dd 100644 --- a/homeassistant/components/ecowitt/strings.json +++ b/homeassistant/components/ecowitt/strings.json @@ -6,7 +6,7 @@ } }, "create_entry": { - "default": "To finish setting up the integration, use the Ecowitt App (on your phone) or access the Ecowitt WebUI in a browser at the station IP address.\n\nPick your station -> Menu Others -> DIY Upload Servers. Hit next and select 'Customized'\n\n- Server IP: `{server}`\n- Path: `{path}`\n- Port: `{port}`\n\nSelect **Save**." + "default": "To finish setting up the integration, you need to tell the Ecowitt station to send data to Home Assistant at the following address:\n\n- Server IP / Host Name: `{server}`\n- Path: `{path}`\n- Port: `{port}`\n\nYou can access the Ecowitt configuration in one of two ways:\n\n1. Use the Ecowitt App (on your phone):\n - Select the Menu Icon (☰) on the upper left, then **My Devices** → **Pick your station**\n - Select the Ellipsis Icon (⋯) → **Others**\n - Select **DIY Upload Servers** → **Customized**\n - Make sure to choose 'Protocol Type Same As: Ecowitt'\n - Enter the Server IP / Host Name, Path, and Port (printed above). _Note: The path has to match! Remove the first forward slash from the path, as the app will prepend one._\n - Save\n1. 
Navigate to the Ecowitt web UI in a browser at the station IP address:\n - Select **Weather Services** then scroll down to 'Customized'\n - Make sure to select 'Customized: 🔘 Enable' and 'Protocol Type Same As: 🔘 Ecowitt'\n - Enter the Server IP / Host Name, Path, and Port (printed above).\n - Save" } } } diff --git a/homeassistant/components/eddystone_temperature/manifest.json b/homeassistant/components/eddystone_temperature/manifest.json index b15a88d099f..18e67f55667 100644 --- a/homeassistant/components/eddystone_temperature/manifest.json +++ b/homeassistant/components/eddystone_temperature/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/eddystone_temperature", "iot_class": "local_polling", "loggers": ["beacontools"], + "quality_scale": "legacy", "requirements": ["beacontools[scan]==2.1.0"] } diff --git a/homeassistant/components/edimax/manifest.json b/homeassistant/components/edimax/manifest.json index f104ec40e64..a226ef3bbe8 100644 --- a/homeassistant/components/edimax/manifest.json +++ b/homeassistant/components/edimax/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/edimax", "iot_class": "local_polling", "loggers": ["pyedimax"], + "quality_scale": "legacy", "requirements": ["pyedimax==0.2.1"] } diff --git a/homeassistant/components/egardia/manifest.json b/homeassistant/components/egardia/manifest.json index 99f39c99cbc..08eb82df0e7 100644 --- a/homeassistant/components/egardia/manifest.json +++ b/homeassistant/components/egardia/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/egardia", "iot_class": "local_polling", "loggers": ["pythonegardia"], + "quality_scale": "legacy", "requirements": ["pythonegardia==1.0.52"] } diff --git a/homeassistant/components/eight_sleep/manifest.json b/homeassistant/components/eight_sleep/manifest.json index a4f7482c920..59de546824f 100644 --- a/homeassistant/components/eight_sleep/manifest.json +++ b/homeassistant/components/eight_sleep/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/eight_sleep", "integration_type": "system", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": [] } diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index b596ec05b00..227150a0f4e 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -14,7 +14,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant @@ -103,13 +102,12 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): return ElevenLabsOptionsFlow(config_entry) -class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): +class ElevenLabsOptionsFlow(OptionsFlow): """ElevenLabs options flow.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - self.api_key: str = self.config_entry.data[CONF_API_KEY] + self.api_key: str = config_entry.data[CONF_API_KEY] # id -> name self.voices: dict[str, str] = {} self.models: dict[str, str] = {} @@ -170,7 +168,7 @@ class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): vol.Required(CONF_CONFIGURE_VOICE, default=False): bool, } ), - self.options, + self.config_entry.options, ) async def async_step_voice_settings( diff --git 
a/homeassistant/components/elgato/manifest.json b/homeassistant/components/elgato/manifest.json index c68902560b9..734ad5ec930 100644 --- a/homeassistant/components/elgato/manifest.json +++ b/homeassistant/components/elgato/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/elgato", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "platinum", "requirements": ["elgato==5.1.2"], "zeroconf": ["_elg._tcp.local."] } diff --git a/homeassistant/components/elgato/quality_scale.yaml b/homeassistant/components/elgato/quality_scale.yaml new file mode 100644 index 00000000000..2910bdb4473 --- /dev/null +++ b/homeassistant/components/elgato/quality_scale.yaml @@ -0,0 +1,85 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: + status: todo + comment: | + The data_description for port is missing. + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: todo + comment: | + The integration doesn't update the device info based on DHCP discovery + of known existing devices. + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: + status: todo + comment: | + Devices are documented, but some are missing. For example, their Pro + strip is supported as well. + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device.
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/eliqonline/manifest.json b/homeassistant/components/eliqonline/manifest.json index 78fd62fbd33..70f2cd8a675 100644 --- a/homeassistant/components/eliqonline/manifest.json +++ b/homeassistant/components/eliqonline/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/eliqonline", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["eliqonline==1.2.2"] } diff --git a/homeassistant/components/elv/manifest.json b/homeassistant/components/elv/manifest.json index 9b71595e58f..5757aeb5e52 100644 --- a/homeassistant/components/elv/manifest.json +++ b/homeassistant/components/elv/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/elv", "iot_class": "local_polling", "loggers": ["pypca"], + "quality_scale": "legacy", "requirements": ["pypca==0.0.7"] } diff --git a/homeassistant/components/emby/manifest.json b/homeassistant/components/emby/manifest.json index 3f57f62eb0b..856cdaf189f 100644 --- a/homeassistant/components/emby/manifest.json +++ b/homeassistant/components/emby/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/emby", "iot_class": "local_push", "loggers": ["pyemby"], + "quality_scale": "legacy", "requirements": ["pyEmby==1.10"] } diff --git a/homeassistant/components/emoncms/__init__.py b/homeassistant/components/emoncms/__init__.py index 98ed6328578..0cd686b5b56 100644 --- a/homeassistant/components/emoncms/__init__.py +++ b/homeassistant/components/emoncms/__init__.py @@ -5,8 +5,11 @@ from pyemoncms import EmoncmsClient from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_URL, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from .const import DOMAIN, EMONCMS_UUID_DOC_URL, LOGGER from .coordinator import EmoncmsCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] @@ -14,6 +17,49 @@ PLATFORMS: list[Platform] = [Platform.SENSOR] type EmonCMSConfigEntry = ConfigEntry[EmoncmsCoordinator] +def _migrate_unique_id( + hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_unique_id: str +) -> None: + """Migrate to emoncms unique id if needed.""" + ent_reg = er.async_get(hass) + entry_entities = ent_reg.entities.get_entries_for_config_entry_id(entry.entry_id) + for entity in entry_entities: + if entity.unique_id.split("-")[0] == entry.entry_id: + feed_id = entity.unique_id.split("-")[-1] + LOGGER.debug(f"moving feed {feed_id} to hardware uuid") + ent_reg.async_update_entity( + entity.entity_id, new_unique_id=f"{emoncms_unique_id}-{feed_id}" + ) + hass.config_entries.async_update_entry( + entry, + unique_id=emoncms_unique_id, + ) + + +async def _check_unique_id_migration( + hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_client: EmoncmsClient +) -> None: + """Check if we can migrate to the emoncms uuid.""" + emoncms_unique_id = await emoncms_client.async_get_uuid() + if emoncms_unique_id: + if entry.unique_id != emoncms_unique_id: + _migrate_unique_id(hass, entry, emoncms_unique_id) + else: + async_create_issue( + hass, + DOMAIN, + "migrate database", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + 
translation_key="migrate_database", + translation_placeholders={ + "url": entry.data[CONF_URL], + "doc_url": EMONCMS_UUID_DOC_URL, + }, + ) + + async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> bool: """Load a config entry.""" emoncms_client = EmoncmsClient( @@ -21,6 +67,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> b entry.data[CONF_API_KEY], session=async_get_clientsession(hass), ) + await _check_unique_id_migration(hass, entry, emoncms_client) coordinator = EmoncmsCoordinator(hass, emoncms_client) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/emoncms/config_flow.py b/homeassistant/components/emoncms/config_flow.py index fdd5d29788e..e0d4d0d03e9 100644 --- a/homeassistant/components/emoncms/config_flow.py +++ b/homeassistant/components/emoncms/config_flow.py @@ -1,5 +1,7 @@ """Configflow for the emoncms integration.""" +from __future__ import annotations + from typing import Any from pyemoncms import EmoncmsClient @@ -9,10 +11,10 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_KEY, CONF_URL -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import selector from homeassistant.helpers.typing import ConfigType @@ -46,13 +48,10 @@ def sensor_name(url: str) -> str: return f"emoncms@{sensorip}" -async def get_feed_list(hass: HomeAssistant, url: str, api_key: str) -> dict[str, Any]: +async def get_feed_list( + emoncms_client: EmoncmsClient, +) -> dict[str, Any]: """Check connection to emoncms and return feed list if successful.""" - emoncms_client = EmoncmsClient( - url, - api_key, - session=async_get_clientsession(hass), - ) return await emoncms_client.async_request("/feed/list.json") @@ -68,7 +67,7 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlowWithConfigEntry: + ) -> EmoncmsOptionsFlow: """Get the options flow for this handler.""" return EmoncmsOptionsFlow(config_entry) @@ -77,23 +76,28 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Initiate a flow via the UI.""" errors: dict[str, str] = {} + description_placeholders = {} if user_input is not None: + self.url = user_input[CONF_URL] + self.api_key = user_input[CONF_API_KEY] self._async_abort_entries_match( { - CONF_API_KEY: user_input[CONF_API_KEY], - CONF_URL: user_input[CONF_URL], + CONF_API_KEY: self.api_key, + CONF_URL: self.url, } ) - result = await get_feed_list( - self.hass, user_input[CONF_URL], user_input[CONF_API_KEY] + emoncms_client = EmoncmsClient( + self.url, self.api_key, session=async_get_clientsession(self.hass) ) + result = await get_feed_list(emoncms_client) if not result[CONF_SUCCESS]: - errors["base"] = result[CONF_MESSAGE] + errors["base"] = "api_error" + description_placeholders = {"details": result[CONF_MESSAGE]} else: self.include_only_feeds = user_input.get(CONF_ONLY_INCLUDE_FEEDID) - self.url = user_input[CONF_URL] - self.api_key = user_input[CONF_API_KEY] + await self.async_set_unique_id(await emoncms_client.async_get_uuid()) + self._abort_if_unique_id_configured() options = get_options(result[CONF_MESSAGE]) self.dropdown = { "options": options, @@ -113,6 +117,7 @@ class 
EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN): user_input, ), errors=errors, + description_placeholders=description_placeholders, ) async def async_step_choose_feeds( @@ -167,32 +172,41 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN): return result -class EmoncmsOptionsFlow(OptionsFlowWithConfigEntry): +class EmoncmsOptionsFlow(OptionsFlow): """Emoncms Options flow handler.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize emoncms options flow.""" + self._url = config_entry.data[CONF_URL] + self._api_key = config_entry.data[CONF_API_KEY] + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage the options.""" errors: dict[str, str] = {} - data = self.options if self.options else self._config_entry.data - url = data[CONF_URL] - api_key = data[CONF_API_KEY] - include_only_feeds = data.get(CONF_ONLY_INCLUDE_FEEDID, []) + description_placeholders = {} + include_only_feeds = self.config_entry.options.get( + CONF_ONLY_INCLUDE_FEEDID, + self.config_entry.data.get(CONF_ONLY_INCLUDE_FEEDID, []), + ) options: list = include_only_feeds - result = await get_feed_list(self.hass, url, api_key) + emoncms_client = EmoncmsClient( + self._url, + self._api_key, + session=async_get_clientsession(self.hass), + ) + result = await get_feed_list(emoncms_client) if not result[CONF_SUCCESS]: - errors["base"] = result[CONF_MESSAGE] + errors["base"] = "api_error" + description_placeholders = {"details": result[CONF_MESSAGE]} else: options = get_options(result[CONF_MESSAGE]) dropdown = {"options": options, "mode": "dropdown", "multiple": True} if user_input: include_only_feeds = user_input[CONF_ONLY_INCLUDE_FEEDID] return self.async_create_entry( - title=sensor_name(url), data={ - CONF_URL: url, - CONF_API_KEY: api_key, CONF_ONLY_INCLUDE_FEEDID: include_only_feeds, }, ) @@ -207,4 +221,5 @@ class EmoncmsOptionsFlow(OptionsFlowWithConfigEntry): } ), errors=errors, + description_placeholders=description_placeholders, ) diff --git a/homeassistant/components/emoncms/const.py b/homeassistant/components/emoncms/const.py index 256db5726bb..c53f7cc8a9f 100644 --- a/homeassistant/components/emoncms/const.py +++ b/homeassistant/components/emoncms/const.py @@ -7,6 +7,10 @@ CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id" CONF_MESSAGE = "message" CONF_SUCCESS = "success" DOMAIN = "emoncms" +EMONCMS_UUID_DOC_URL = ( + "https://docs.openenergymonitor.org/emoncms/update.html" + "#upgrading-to-a-version-producing-a-unique-identifier" +) FEED_ID = "id" FEED_NAME = "name" FEED_TAG = "tag" diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index 4add7c9625d..9273c24c7dc 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -10,16 +10,31 @@ from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, + SensorEntityDescription, SensorStateClass, ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( + CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + CONCENTRATION_PARTS_PER_MILLION, CONF_API_KEY, CONF_ID, CONF_UNIT_OF_MEASUREMENT, CONF_URL, CONF_VALUE_TEMPLATE, + PERCENTAGE, + UnitOfApparentPower, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfFrequency, UnitOfPower, + UnitOfPressure, + UnitOfSoundPressure, + UnitOfSpeed, + UnitOfTemperature, + UnitOfVolume, + UnitOfVolumeFlowRate, ) from homeassistant.core import DOMAIN as 
HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType @@ -41,6 +56,146 @@ from .const import ( ) from .coordinator import EmoncmsCoordinator +SENSORS: dict[str | None, SensorEntityDescription] = { + "kWh": SensorEntityDescription( + key="energy|kWh", + translation_key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + "Wh": SensorEntityDescription( + key="energy|Wh", + translation_key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + "kW": SensorEntityDescription( + key="power|kW", + translation_key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + state_class=SensorStateClass.MEASUREMENT, + ), + "W": SensorEntityDescription( + key="power|W", + translation_key="power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + ), + "V": SensorEntityDescription( + key="voltage", + translation_key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + ), + "A": SensorEntityDescription( + key="current", + translation_key="current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + ), + "VA": SensorEntityDescription( + key="apparent_power", + translation_key="apparent_power", + device_class=SensorDeviceClass.APPARENT_POWER, + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + state_class=SensorStateClass.MEASUREMENT, + ), + "°C": SensorEntityDescription( + key="temperature|celsius", + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + ), + "°F": SensorEntityDescription( + key="temperature|fahrenheit", + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT, + state_class=SensorStateClass.MEASUREMENT, + ), + "K": SensorEntityDescription( + key="temperature|kelvin", + translation_key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.KELVIN, + state_class=SensorStateClass.MEASUREMENT, + ), + "Hz": SensorEntityDescription( + key="frequency", + translation_key="frequency", + device_class=SensorDeviceClass.FREQUENCY, + native_unit_of_measurement=UnitOfFrequency.HERTZ, + state_class=SensorStateClass.MEASUREMENT, + ), + "hPa": SensorEntityDescription( + key="pressure", + translation_key="pressure", + device_class=SensorDeviceClass.PRESSURE, + native_unit_of_measurement=UnitOfPressure.HPA, + state_class=SensorStateClass.MEASUREMENT, + ), + "dB": SensorEntityDescription( + key="decibel", + translation_key="decibel", + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + native_unit_of_measurement=UnitOfSoundPressure.DECIBEL, + state_class=SensorStateClass.MEASUREMENT, + ), + "m³": SensorEntityDescription( + key="volume|cubic_meter", + translation_key="volume", + device_class=SensorDeviceClass.VOLUME_STORAGE, + native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, + state_class=SensorStateClass.MEASUREMENT, + ), + "m³/h": SensorEntityDescription( 
+ key="flow|cubic_meters_per_hour", + translation_key="flow", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + state_class=SensorStateClass.MEASUREMENT, + ), + "l/m": SensorEntityDescription( + key="flow|liters_per_minute", + translation_key="flow", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + state_class=SensorStateClass.MEASUREMENT, + ), + "m/s": SensorEntityDescription( + key="speed|meters_per_second", + translation_key="speed", + device_class=SensorDeviceClass.SPEED, + native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, + state_class=SensorStateClass.MEASUREMENT, + ), + "µg/m³": SensorEntityDescription( + key="concentration|microgram_per_cubic_meter", + translation_key="concentration", + native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, + state_class=SensorStateClass.MEASUREMENT, + ), + "ppm": SensorEntityDescription( + key="concentration|microgram_parts_per_million", + translation_key="concentration", + native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, + state_class=SensorStateClass.MEASUREMENT, + ), + "%": SensorEntityDescription( + key="percent", + translation_key="percent", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), +} + ATTR_FEEDID = "FeedId" ATTR_FEEDNAME = "FeedName" ATTR_LASTUPDATETIME = "LastUpdated" @@ -138,29 +293,30 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the emoncms sensors.""" - config = entry.options if entry.options else entry.data - name = sensor_name(config[CONF_URL]) - exclude_feeds = config.get(CONF_EXCLUDE_FEEDID) - include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID) + name = sensor_name(entry.data[CONF_URL]) + exclude_feeds = entry.data.get(CONF_EXCLUDE_FEEDID) + include_only_feeds = entry.options.get( + CONF_ONLY_INCLUDE_FEEDID, entry.data.get(CONF_ONLY_INCLUDE_FEEDID) + ) if exclude_feeds is None and include_only_feeds is None: return coordinator = entry.runtime_data + # uuid was added in emoncms database 11.5.7 + unique_id = entry.unique_id if entry.unique_id else entry.entry_id elems = coordinator.data if not elems: return - sensors: list[EmonCmsSensor] = [] for idx, elem in enumerate(elems): if include_only_feeds is not None and elem[FEED_ID] not in include_only_feeds: continue - sensors.append( EmonCmsSensor( coordinator, - entry.entry_id, + unique_id, elem["unit"], name, idx, @@ -172,10 +328,12 @@ async def async_setup_entry( class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): """Implementation of an Emoncms sensor.""" + _attr_has_entity_name = True + def __init__( self, coordinator: EmoncmsCoordinator, - entry_id: str, + unique_id: str, unit_of_measurement: str | None, name: str, idx: int, @@ -186,33 +344,15 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): elem = {} if self.coordinator.data: elem = self.coordinator.data[self.idx] - self._attr_name = f"{name} {elem[FEED_NAME]}" - self._attr_native_unit_of_measurement = unit_of_measurement - self._attr_unique_id = f"{entry_id}-{elem[FEED_ID]}" - if unit_of_measurement in ("kWh", "Wh"): - self._attr_device_class = SensorDeviceClass.ENERGY - self._attr_state_class = SensorStateClass.TOTAL_INCREASING - elif unit_of_measurement == "W": - self._attr_device_class = SensorDeviceClass.POWER - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == 
"V": - self._attr_device_class = SensorDeviceClass.VOLTAGE - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "A": - self._attr_device_class = SensorDeviceClass.CURRENT - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "VA": - self._attr_device_class = SensorDeviceClass.APPARENT_POWER - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement in ("°C", "°F", "K"): - self._attr_device_class = SensorDeviceClass.TEMPERATURE - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "Hz": - self._attr_device_class = SensorDeviceClass.FREQUENCY - self._attr_state_class = SensorStateClass.MEASUREMENT - elif unit_of_measurement == "hPa": - self._attr_device_class = SensorDeviceClass.PRESSURE - self._attr_state_class = SensorStateClass.MEASUREMENT + self._attr_translation_placeholders = { + "emoncms_details": f"{elem[FEED_TAG]} {elem[FEED_NAME]}", + } + self._attr_unique_id = f"{unique_id}-{elem[FEED_ID]}" + description = SENSORS.get(unit_of_measurement) + if description is not None: + self.entity_description = description + else: + self._attr_native_unit_of_measurement = unit_of_measurement self._update_attributes(elem) def _update_attributes(self, elem: dict[str, Any]) -> None: diff --git a/homeassistant/components/emoncms/strings.json b/homeassistant/components/emoncms/strings.json index 4a700cc8981..5769e825944 100644 --- a/homeassistant/components/emoncms/strings.json +++ b/homeassistant/components/emoncms/strings.json @@ -1,5 +1,8 @@ { "config": { + "error": { + "api_error": "An error occured in the pyemoncms API : {details}" + }, "step": { "user": { "data": { @@ -16,9 +19,61 @@ "include_only_feed_id": "Choose feeds to include" } } + }, + "abort": { + "already_configured": "This server is already configured" + } + }, + "entity": { + "sensor": { + "energy": { + "name": "Energy {emoncms_details}" + }, + "power": { + "name": "Power {emoncms_details}" + }, + "percent": { + "name": "Percentage {emoncms_details}" + }, + "voltage": { + "name": "Voltage {emoncms_details}" + }, + "current": { + "name": "Current {emoncms_details}" + }, + "apparent_power": { + "name": "Apparent power {emoncms_details}" + }, + "temperature": { + "name": "Temperature {emoncms_details}" + }, + "frequency": { + "name": "Frequency {emoncms_details}" + }, + "pressure": { + "name": "Pressure {emoncms_details}" + }, + "decibel": { + "name": "Decibel {emoncms_details}" + }, + "volume": { + "name": "Volume {emoncms_details}" + }, + "flow": { + "name": "Flow rate {emoncms_details}" + }, + "speed": { + "name": "Speed {emoncms_details}" + }, + "concentration": { + "name": "Concentration {emoncms_details}" + } } }, "options": { + "error": { + "api_error": "[%key:component::emoncms::config::error::api_error%]" + }, "step": { "init": { "data": { @@ -35,6 +90,10 @@ "missing_include_only_feed_id": { "title": "No feed synchronized with the {domain} sensor", "description": "Configuring {domain} using YAML is being removed.\n\nPlease add manually the feeds you want to synchronize with the `configure` button of the integration." 
+ }, + "migrate_database": { + "title": "Upgrade your emoncms version", + "description": "Your [emoncms]({url}) does not ship a unique identifier.\n\n Please upgrade to at least version 11.5.7 and migrate your emoncms database.\n\n More info on [emoncms documentation]({doc_url})" } } } diff --git a/homeassistant/components/emoncms_history/manifest.json b/homeassistant/components/emoncms_history/manifest.json index faa91e64017..e73f76f7528 100644 --- a/homeassistant/components/emoncms_history/manifest.json +++ b/homeassistant/components/emoncms_history/manifest.json @@ -3,5 +3,6 @@ "name": "Emoncms History", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/emoncms_history", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/emonitor/config_flow.py b/homeassistant/components/emonitor/config_flow.py index b924c7df522..833b80f9d47 100644 --- a/homeassistant/components/emonitor/config_flow.py +++ b/homeassistant/components/emonitor/config_flow.py @@ -92,6 +92,7 @@ class EmonitorConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Attempt to confirm.""" + assert self.discovered_ip is not None if user_input is not None: return self.async_create_entry( title=self.discovered_info["title"], diff --git a/homeassistant/components/energy/data.py b/homeassistant/components/energy/data.py index 9c5a9fbacd1..ff86177cf41 100644 --- a/homeassistant/components/energy/data.py +++ b/homeassistant/components/energy/data.py @@ -331,7 +331,7 @@ class EnergyManager: "device_consumption", ): if key in update: - data[key] = update[key] # type: ignore[literal-required] + data[key] = update[key] self.data = data self._store.async_delay_save(lambda: data, 60) diff --git a/homeassistant/components/energyzero/manifest.json b/homeassistant/components/energyzero/manifest.json index 807a0419967..bb867e88d85 100644 --- a/homeassistant/components/energyzero/manifest.json +++ b/homeassistant/components/energyzero/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/energyzero", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["energyzero==2.1.1"] } diff --git a/homeassistant/components/enphase_envoy/config_flow.py b/homeassistant/components/enphase_envoy/config_flow.py index d04f77d8e88..23c769293c8 100644 --- a/homeassistant/components/enphase_envoy/config_flow.py +++ b/homeassistant/components/enphase_envoy/config_flow.py @@ -16,7 +16,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -66,9 +66,11 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> EnvoyOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> EnvoyOptionsFlowHandler: """Options flow handler for Enphase_Envoy.""" - return EnvoyOptionsFlowHandler(config_entry) + return EnvoyOptionsFlowHandler() @callback def _async_generate_schema(self) -> vol.Schema: @@ -288,7 +290,7 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN): ) -class EnvoyOptionsFlowHandler(OptionsFlowWithConfigEntry): +class EnvoyOptionsFlowHandler(OptionsFlow): """Envoy config flow options handler.""" 
async def async_step_init( diff --git a/homeassistant/components/enphase_envoy/manifest.json b/homeassistant/components/enphase_envoy/manifest.json index aa06a1ff79f..bdc90e6c634 100644 --- a/homeassistant/components/enphase_envoy/manifest.json +++ b/homeassistant/components/enphase_envoy/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/enphase_envoy", "iot_class": "local_polling", "loggers": ["pyenphase"], - "requirements": ["pyenphase==1.22.0"], + "requirements": ["pyenphase==1.23.0"], "zeroconf": [ { "type": "_enphase-envoy._tcp.local." diff --git a/homeassistant/components/entur_public_transport/manifest.json b/homeassistant/components/entur_public_transport/manifest.json index f75099c2c27..5e25eb4b4a7 100644 --- a/homeassistant/components/entur_public_transport/manifest.json +++ b/homeassistant/components/entur_public_transport/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/entur_public_transport", "iot_class": "cloud_polling", "loggers": ["enturclient"], + "quality_scale": "legacy", "requirements": ["enturclient==0.2.4"] } diff --git a/homeassistant/components/envisalink/manifest.json b/homeassistant/components/envisalink/manifest.json index 0cf9f165aa2..42587aa7c2f 100644 --- a/homeassistant/components/envisalink/manifest.json +++ b/homeassistant/components/envisalink/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/envisalink", "iot_class": "local_push", "loggers": ["pyenvisalink"], + "quality_scale": "legacy", "requirements": ["pyenvisalink==4.7"] } diff --git a/homeassistant/components/ephember/manifest.json b/homeassistant/components/ephember/manifest.json index dd7938ccbd2..547ab2918f5 100644 --- a/homeassistant/components/ephember/manifest.json +++ b/homeassistant/components/ephember/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ephember", "iot_class": "local_polling", "loggers": ["pyephember"], + "quality_scale": "legacy", "requirements": ["pyephember==0.3.1"] } diff --git a/homeassistant/components/eq3btsmart/__init__.py b/homeassistant/components/eq3btsmart/__init__.py index f63e627ea7d..4493f944db3 100644 --- a/homeassistant/components/eq3btsmart/__init__.py +++ b/homeassistant/components/eq3btsmart/__init__.py @@ -15,17 +15,24 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import DOMAIN, SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED +from .const import SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED from .models import Eq3Config, Eq3ConfigEntryData PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.CLIMATE, + Platform.NUMBER, + Platform.SENSOR, + Platform.SWITCH, ] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type Eq3ConfigEntry = ConfigEntry[Eq3ConfigEntryData] + + +async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: """Handle config entry setup.""" mac_address: str | None = entry.unique_id @@ -53,12 +60,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ble_device=device, ) - eq3_config_entry = Eq3ConfigEntryData(eq3_config=eq3_config, thermostat=thermostat) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = eq3_config_entry - + entry.runtime_data = Eq3ConfigEntryData( + eq3_config=eq3_config, 
thermostat=thermostat + ) entry.async_on_unload(entry.add_update_listener(update_listener)) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_create_background_task( hass, _async_run_thermostat(hass, entry), entry.entry_id ) @@ -66,29 +72,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool: """Handle config entry unload.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN].pop(entry.entry_id) - await eq3_config_entry.thermostat.async_disconnect() + await entry.runtime_data.thermostat.async_disconnect() return unload_ok -async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_listener(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None: """Handle config entry update.""" await hass.config_entries.async_reload(entry.entry_id) -async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None: """Run the thermostat.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id] - thermostat = eq3_config_entry.thermostat - mac_address = eq3_config_entry.eq3_config.mac_address - scan_interval = eq3_config_entry.eq3_config.scan_interval + thermostat = entry.runtime_data.thermostat + mac_address = entry.runtime_data.eq3_config.mac_address + scan_interval = entry.runtime_data.eq3_config.scan_interval await _async_reconnect_thermostat(hass, entry) @@ -117,13 +121,14 @@ async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None await asyncio.sleep(scan_interval) -async def _async_reconnect_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def _async_reconnect_thermostat( + hass: HomeAssistant, entry: Eq3ConfigEntry +) -> None: """Reconnect the thermostat.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id] - thermostat = eq3_config_entry.thermostat - mac_address = eq3_config_entry.eq3_config.mac_address - scan_interval = eq3_config_entry.eq3_config.scan_interval + thermostat = entry.runtime_data.thermostat + mac_address = entry.runtime_data.eq3_config.mac_address + scan_interval = entry.runtime_data.eq3_config.scan_interval while True: try: diff --git a/homeassistant/components/eq3btsmart/binary_sensor.py b/homeassistant/components/eq3btsmart/binary_sensor.py new file mode 100644 index 00000000000..27525d47972 --- /dev/null +++ b/homeassistant/components/eq3btsmart/binary_sensor.py @@ -0,0 +1,86 @@ +"""Platform for eq3 binary sensor entities.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from eq3btsmart.models import Status + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import Eq3ConfigEntry +from .const import ENTITY_KEY_BATTERY, ENTITY_KEY_DST, ENTITY_KEY_WINDOW +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3BinarySensorEntityDescription(BinarySensorEntityDescription): + """Entity description for eq3 binary sensors.""" + + value_func: Callable[[Status], bool] + + +BINARY_SENSOR_ENTITY_DESCRIPTIONS = [ + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_low_battery, + key=ENTITY_KEY_BATTERY, + device_class=BinarySensorDeviceClass.BATTERY, + entity_category=EntityCategory.DIAGNOSTIC, + ), + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_window_open, + key=ENTITY_KEY_WINDOW, + device_class=BinarySensorDeviceClass.WINDOW, + ), + Eq3BinarySensorEntityDescription( + value_func=lambda status: status.is_dst, + key=ENTITY_KEY_DST, + translation_key=ENTITY_KEY_DST, + entity_category=EntityCategory.DIAGNOSTIC, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3BinarySensorEntity(entry, entity_description) + for entity_description in BINARY_SENSOR_ENTITY_DESCRIPTIONS + ) + + +class Eq3BinarySensorEntity(Eq3Entity, BinarySensorEntity): + """Base class for eQ-3 binary sensor entities.""" + + entity_description: Eq3BinarySensorEntityDescription + + def __init__( + self, + entry: Eq3ConfigEntry, + entity_description: Eq3BinarySensorEntityDescription, + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + @property + def is_on(self) -> bool: + """Return the state of the binary sensor.""" + + if TYPE_CHECKING: + assert self._thermostat.status is not None + + return self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/eq3btsmart/climate.py b/homeassistant/components/eq3btsmart/climate.py index 7b8ccb6c990..ae01d0fc9a7 100644 --- a/homeassistant/components/eq3btsmart/climate.py +++ b/homeassistant/components/eq3btsmart/climate.py @@ -3,7 +3,6 @@ import logging from typing import Any -from eq3btsmart import Thermostat from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode from eq3btsmart.exceptions import Eq3Exception @@ -15,45 +14,35 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util import slugify +from . 
import Eq3ConfigEntry from .const import ( - DEVICE_MODEL, - DOMAIN, EQ_TO_HA_HVAC, HA_TO_EQ_HVAC, - MANUFACTURER, - SIGNAL_THERMOSTAT_CONNECTED, - SIGNAL_THERMOSTAT_DISCONNECTED, CurrentTemperatureSelector, Preset, TargetTemperatureSelector, ) from .entity import Eq3Entity -from .models import Eq3Config, Eq3ConfigEntryData _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: Eq3ConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Handle config entry setup.""" - eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - [Eq3Climate(eq3_config_entry.eq3_config, eq3_config_entry.thermostat)], + [Eq3Climate(entry)], ) @@ -80,53 +69,6 @@ class Eq3Climate(Eq3Entity, ClimateEntity): _attr_preset_mode: str | None = None _target_temperature: float | None = None - def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None: - """Initialize the climate entity.""" - - super().__init__(eq3_config, thermostat) - self._attr_unique_id = dr.format_mac(eq3_config.mac_address) - self._attr_device_info = DeviceInfo( - name=slugify(self._eq3_config.mac_address), - manufacturer=MANUFACTURER, - model=DEVICE_MODEL, - connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, - ) - - async def async_added_to_hass(self) -> None: - """Run when entity about to be added to hass.""" - - self._thermostat.register_update_callback(self._async_on_updated) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}", - self._async_on_disconnected, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}", - self._async_on_connected, - ) - ) - - async def async_will_remove_from_hass(self) -> None: - """Run when entity will be removed from hass.""" - - self._thermostat.unregister_update_callback(self._async_on_updated) - - @callback - def _async_on_disconnected(self) -> None: - self._attr_available = False - self.async_write_ha_state() - - @callback - def _async_on_connected(self) -> None: - self._attr_available = True - self.async_write_ha_state() - @callback def _async_on_updated(self) -> None: """Handle updated data from the thermostat.""" @@ -137,12 +79,15 @@ class Eq3Climate(Eq3Entity, ClimateEntity): if self._thermostat.device_data is not None: self._async_on_device_updated() - self.async_write_ha_state() + super()._async_on_updated() @callback def _async_on_status_updated(self) -> None: """Handle updated status from the thermostat.""" + if self._thermostat.status is None: + return + self._target_temperature = self._thermostat.status.target_temperature.value self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode] self._attr_current_temperature = self._get_current_temperature() @@ -154,13 +99,16 @@ class Eq3Climate(Eq3Entity, ClimateEntity): def _async_on_device_updated(self) -> None: """Handle updated device data from the thermostat.""" + if self._thermostat.device_data is None: + return + device_registry = dr.async_get(self.hass) if device := device_registry.async_get_device( connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, ): device_registry.async_update_device( device.id, - sw_version=self._thermostat.device_data.firmware_version, + sw_version=str(self._thermostat.device_data.firmware_version), serial_number=self._thermostat.device_data.device_serial.value, ) @@ -265,7 
+213,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity): self.async_write_ha_state() try: - await self._thermostat.async_set_temperature(self._target_temperature) + await self._thermostat.async_set_temperature(temperature) except Eq3Exception: _LOGGER.error( "[%s] Failed setting temperature", self._eq3_config.mac_address diff --git a/homeassistant/components/eq3btsmart/const.py b/homeassistant/components/eq3btsmart/const.py index 111c4d0eba4..a5f7ea2ff95 100644 --- a/homeassistant/components/eq3btsmart/const.py +++ b/homeassistant/components/eq3btsmart/const.py @@ -18,8 +18,21 @@ DOMAIN = "eq3btsmart" MANUFACTURER = "eQ-3 AG" DEVICE_MODEL = "CC-RT-BLE-EQ" -GET_DEVICE_TIMEOUT = 5 # seconds +ENTITY_KEY_DST = "dst" +ENTITY_KEY_BATTERY = "battery" +ENTITY_KEY_WINDOW = "window" +ENTITY_KEY_LOCK = "lock" +ENTITY_KEY_BOOST = "boost" +ENTITY_KEY_AWAY = "away" +ENTITY_KEY_COMFORT = "comfort" +ENTITY_KEY_ECO = "eco" +ENTITY_KEY_OFFSET = "offset" +ENTITY_KEY_WINDOW_OPEN_TEMPERATURE = "window_open_temperature" +ENTITY_KEY_WINDOW_OPEN_TIMEOUT = "window_open_timeout" +ENTITY_KEY_VALVE = "valve" +ENTITY_KEY_AWAY_UNTIL = "away_until" +GET_DEVICE_TIMEOUT = 5 # seconds EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = { OperationMode.OFF: HVACMode.OFF, @@ -71,3 +84,5 @@ DEFAULT_SCAN_INTERVAL = 10 # seconds SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected" SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected" + +EQ3BT_STEP = 0.5 diff --git a/homeassistant/components/eq3btsmart/entity.py b/homeassistant/components/eq3btsmart/entity.py index e8c00d4e3cf..e68545c08c7 100644 --- a/homeassistant/components/eq3btsmart/entity.py +++ b/homeassistant/components/eq3btsmart/entity.py @@ -1,10 +1,22 @@ """Base class for all eQ-3 entities.""" -from eq3btsmart.thermostat import Thermostat - +from homeassistant.core import callback +from homeassistant.helpers.device_registry import ( + CONNECTION_BLUETOOTH, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity +from homeassistant.util import slugify -from .models import Eq3Config +from . 
import Eq3ConfigEntry +from .const import ( + DEVICE_MODEL, + MANUFACTURER, + SIGNAL_THERMOSTAT_CONNECTED, + SIGNAL_THERMOSTAT_DISCONNECTED, +) class Eq3Entity(Entity): @@ -12,8 +24,70 @@ class Eq3Entity(Entity): _attr_has_entity_name = True - def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None: + def __init__( + self, + entry: Eq3ConfigEntry, + unique_id_key: str | None = None, + ) -> None: """Initialize the eq3 entity.""" - self._eq3_config = eq3_config - self._thermostat = thermostat + self._eq3_config = entry.runtime_data.eq3_config + self._thermostat = entry.runtime_data.thermostat + self._attr_device_info = DeviceInfo( + name=slugify(self._eq3_config.mac_address), + manufacturer=MANUFACTURER, + model=DEVICE_MODEL, + connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)}, + ) + suffix = f"_{unique_id_key}" if unique_id_key else "" + self._attr_unique_id = f"{format_mac(self._eq3_config.mac_address)}{suffix}" + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + + self._thermostat.register_update_callback(self._async_on_updated) + + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}", + self._async_on_disconnected, + ) + ) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}", + self._async_on_connected, + ) + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + + self._thermostat.unregister_update_callback(self._async_on_updated) + + def _async_on_updated(self) -> None: + """Handle updated data from the thermostat.""" + + self.async_write_ha_state() + + @callback + def _async_on_disconnected(self) -> None: + """Handle disconnection from the thermostat.""" + + self._attr_available = False + self.async_write_ha_state() + + @callback + def _async_on_connected(self) -> None: + """Handle connection to the thermostat.""" + + self._attr_available = True + self.async_write_ha_state() + + @property + def available(self) -> bool: + """Whether the entity is available.""" + + return self._thermostat.status is not None and self._attr_available diff --git a/homeassistant/components/eq3btsmart/icons.json b/homeassistant/components/eq3btsmart/icons.json new file mode 100644 index 00000000000..892352c2ea4 --- /dev/null +++ b/homeassistant/components/eq3btsmart/icons.json @@ -0,0 +1,57 @@ +{ + "entity": { + "binary_sensor": { + "dst": { + "default": "mdi:sun-clock", + "state": { + "off": "mdi:sun-clock-outline" + } + } + }, + "number": { + "comfort": { + "default": "mdi:sun-thermometer" + }, + "eco": { + "default": "mdi:snowflake-thermometer" + }, + "offset": { + "default": "mdi:thermometer-plus" + }, + "window_open_temperature": { + "default": "mdi:window-open-variant" + }, + "window_open_timeout": { + "default": "mdi:timer-refresh" + } + }, + "sensor": { + "away_until": { + "default": "mdi:home-export-outline" + }, + "valve": { + "default": "mdi:pipe-valve" + } + }, + "switch": { + "away": { + "default": "mdi:home-account", + "state": { + "on": "mdi:home-export-outline" + } + }, + "lock": { + "default": "mdi:lock", + "state": { + "off": "mdi:lock-off" + } + }, + "boost": { + "default": "mdi:fire", + "state": { + "off": "mdi:fire-off" + } + } + } + } +} diff --git a/homeassistant/components/eq3btsmart/manifest.json b/homeassistant/components/eq3btsmart/manifest.json index e25c675bf82..ed80ad9aabf 100644 --- 
a/homeassistant/components/eq3btsmart/manifest.json +++ b/homeassistant/components/eq3btsmart/manifest.json @@ -22,6 +22,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["eq3btsmart"], - "quality_scale": "silver", - "requirements": ["eq3btsmart==1.2.0", "bleak-esphome==1.1.0"] + "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"] } diff --git a/homeassistant/components/eq3btsmart/models.py b/homeassistant/components/eq3btsmart/models.py index 8ea0955dbdd..858465effa8 100644 --- a/homeassistant/components/eq3btsmart/models.py +++ b/homeassistant/components/eq3btsmart/models.py @@ -2,7 +2,6 @@ from dataclasses import dataclass -from eq3btsmart.const import DEFAULT_AWAY_HOURS, DEFAULT_AWAY_TEMP from eq3btsmart.thermostat import Thermostat from .const import ( @@ -23,8 +22,6 @@ class Eq3Config: target_temp_selector: TargetTemperatureSelector = DEFAULT_TARGET_TEMP_SELECTOR external_temp_sensor: str = "" scan_interval: int = DEFAULT_SCAN_INTERVAL - default_away_hours: float = DEFAULT_AWAY_HOURS - default_away_temperature: float = DEFAULT_AWAY_TEMP @dataclass(slots=True) diff --git a/homeassistant/components/eq3btsmart/number.py b/homeassistant/components/eq3btsmart/number.py new file mode 100644 index 00000000000..2e069180fa3 --- /dev/null +++ b/homeassistant/components/eq3btsmart/number.py @@ -0,0 +1,158 @@ +"""Platform for eq3 number entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from eq3btsmart import Thermostat +from eq3btsmart.const import ( + EQ3BT_MAX_OFFSET, + EQ3BT_MAX_TEMP, + EQ3BT_MIN_OFFSET, + EQ3BT_MIN_TEMP, +) +from eq3btsmart.models import Presets + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.const import EntityCategory, UnitOfTemperature, UnitOfTime +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import Eq3ConfigEntry +from .const import ( + ENTITY_KEY_COMFORT, + ENTITY_KEY_ECO, + ENTITY_KEY_OFFSET, + ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, + ENTITY_KEY_WINDOW_OPEN_TIMEOUT, + EQ3BT_STEP, +) +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3NumberEntityDescription(NumberEntityDescription): + """Entity description for eq3 number entities.""" + + value_func: Callable[[Presets], float] + value_set_func: Callable[ + [Thermostat], + Callable[[float], Awaitable[None]], + ] + mode: NumberMode = NumberMode.BOX + entity_category: EntityCategory | None = EntityCategory.CONFIG + + +NUMBER_ENTITY_DESCRIPTIONS = [ + Eq3NumberEntityDescription( + key=ENTITY_KEY_COMFORT, + value_func=lambda presets: presets.comfort_temperature.value, + value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature, + translation_key=ENTITY_KEY_COMFORT, + native_min_value=EQ3BT_MIN_TEMP, + native_max_value=EQ3BT_MAX_TEMP, + native_step=EQ3BT_STEP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + ), + Eq3NumberEntityDescription( + key=ENTITY_KEY_ECO, + value_func=lambda presets: presets.eco_temperature.value, + value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature, + translation_key=ENTITY_KEY_ECO, + native_min_value=EQ3BT_MIN_TEMP, + native_max_value=EQ3BT_MAX_TEMP, + native_step=EQ3BT_STEP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + ), + Eq3NumberEntityDescription( + key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, + value_func=lambda presets: presets.window_open_temperature.value, + value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature, + translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE, + native_min_value=EQ3BT_MIN_TEMP, + native_max_value=EQ3BT_MAX_TEMP, + native_step=EQ3BT_STEP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + ), + Eq3NumberEntityDescription( + key=ENTITY_KEY_OFFSET, + value_func=lambda presets: presets.offset_temperature.value, + value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset, + translation_key=ENTITY_KEY_OFFSET, + native_min_value=EQ3BT_MIN_OFFSET, + native_max_value=EQ3BT_MAX_OFFSET, + native_step=EQ3BT_STEP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + ), + Eq3NumberEntityDescription( + key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT, + value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration, + value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60, + translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT, + native_min_value=0, + native_max_value=60, + native_step=5, + native_unit_of_measurement=UnitOfTime.MINUTES, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3NumberEntity(entry, entity_description) + for entity_description in NUMBER_ENTITY_DESCRIPTIONS + ) + + +class Eq3NumberEntity(Eq3Entity, NumberEntity): + """Base class for all eq3 number entities.""" + + entity_description: Eq3NumberEntityDescription + + def __init__( + self, entry: Eq3ConfigEntry, entity_description: Eq3NumberEntityDescription + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + @property + 
def native_value(self) -> float: + """Return the state of the entity.""" + + if TYPE_CHECKING: + assert self._thermostat.status is not None + assert self._thermostat.status.presets is not None + + return self.entity_description.value_func(self._thermostat.status.presets) + + async def async_set_native_value(self, value: float) -> None: + """Set the state of the entity.""" + + await self.entity_description.value_set_func(self._thermostat)(value) + + @property + def available(self) -> bool: + """Return whether the entity is available.""" + + return ( + self._thermostat.status is not None + and self._thermostat.status.presets is not None + and self._attr_available + ) diff --git a/homeassistant/components/eq3btsmart/sensor.py b/homeassistant/components/eq3btsmart/sensor.py new file mode 100644 index 00000000000..bd2605042f4 --- /dev/null +++ b/homeassistant/components/eq3btsmart/sensor.py @@ -0,0 +1,84 @@ +"""Platform for eq3 sensor entities.""" + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime +from typing import TYPE_CHECKING + +from eq3btsmart.models import Status + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) +from homeassistant.components.sensor.const import SensorStateClass +from homeassistant.const import PERCENTAGE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import Eq3ConfigEntry +from .const import ENTITY_KEY_AWAY_UNTIL, ENTITY_KEY_VALVE +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3SensorEntityDescription(SensorEntityDescription): + """Entity description for eq3 sensor entities.""" + + value_func: Callable[[Status], int | datetime | None] + + +SENSOR_ENTITY_DESCRIPTIONS = [ + Eq3SensorEntityDescription( + key=ENTITY_KEY_VALVE, + translation_key=ENTITY_KEY_VALVE, + value_func=lambda status: status.valve, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + Eq3SensorEntityDescription( + key=ENTITY_KEY_AWAY_UNTIL, + translation_key=ENTITY_KEY_AWAY_UNTIL, + value_func=lambda status: ( + status.away_until.value if status.away_until else None + ), + device_class=SensorDeviceClass.DATE, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3SensorEntity(entry, entity_description) + for entity_description in SENSOR_ENTITY_DESCRIPTIONS + ) + + +class Eq3SensorEntity(Eq3Entity, SensorEntity): + """Base class for eq3 sensor entities.""" + + entity_description: Eq3SensorEntityDescription + + def __init__( + self, entry: Eq3ConfigEntry, entity_description: Eq3SensorEntityDescription + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + @property + def native_value(self) -> int | datetime | None: + """Return the value reported by the sensor.""" + + if TYPE_CHECKING: + assert self._thermostat.status is not None + + return self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/eq3btsmart/strings.json b/homeassistant/components/eq3btsmart/strings.json index 5108baa1bcf..ab363f4d752 100644 --- a/homeassistant/components/eq3btsmart/strings.json +++ b/homeassistant/components/eq3btsmart/strings.json @@ -18,5 +18,48 @@ "error": { 
"invalid_mac_address": "Invalid MAC address" } + }, + "entity": { + "binary_sensor": { + "dst": { + "name": "Daylight saving time" + } + }, + "number": { + "comfort": { + "name": "Comfort temperature" + }, + "eco": { + "name": "Eco temperature" + }, + "offset": { + "name": "Offset temperature" + }, + "window_open_temperature": { + "name": "Window open temperature" + }, + "window_open_timeout": { + "name": "Window open timeout" + } + }, + "sensor": { + "away_until": { + "name": "Away until" + }, + "valve": { + "name": "Valve" + } + }, + "switch": { + "lock": { + "name": "Lock" + }, + "boost": { + "name": "Boost" + }, + "away": { + "name": "Away" + } + } } } diff --git a/homeassistant/components/eq3btsmart/switch.py b/homeassistant/components/eq3btsmart/switch.py new file mode 100644 index 00000000000..7525d8ca494 --- /dev/null +++ b/homeassistant/components/eq3btsmart/switch.py @@ -0,0 +1,94 @@ +"""Platform for eq3 switch entities.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any + +from eq3btsmart import Thermostat +from eq3btsmart.models import Status + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import Eq3ConfigEntry +from .const import ENTITY_KEY_AWAY, ENTITY_KEY_BOOST, ENTITY_KEY_LOCK +from .entity import Eq3Entity + + +@dataclass(frozen=True, kw_only=True) +class Eq3SwitchEntityDescription(SwitchEntityDescription): + """Entity description for eq3 switch entities.""" + + toggle_func: Callable[[Thermostat], Callable[[bool], Awaitable[None]]] + value_func: Callable[[Status], bool] + + +SWITCH_ENTITY_DESCRIPTIONS = [ + Eq3SwitchEntityDescription( + key=ENTITY_KEY_LOCK, + translation_key=ENTITY_KEY_LOCK, + toggle_func=lambda thermostat: thermostat.async_set_locked, + value_func=lambda status: status.is_locked, + ), + Eq3SwitchEntityDescription( + key=ENTITY_KEY_BOOST, + translation_key=ENTITY_KEY_BOOST, + toggle_func=lambda thermostat: thermostat.async_set_boost, + value_func=lambda status: status.is_boost, + ), + Eq3SwitchEntityDescription( + key=ENTITY_KEY_AWAY, + translation_key=ENTITY_KEY_AWAY, + toggle_func=lambda thermostat: thermostat.async_set_away, + value_func=lambda status: status.is_away, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: Eq3ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the entry.""" + + async_add_entities( + Eq3SwitchEntity(entry, entity_description) + for entity_description in SWITCH_ENTITY_DESCRIPTIONS + ) + + +class Eq3SwitchEntity(Eq3Entity, SwitchEntity): + """Base class for eq3 switch entities.""" + + entity_description: Eq3SwitchEntityDescription + + def __init__( + self, + entry: Eq3ConfigEntry, + entity_description: Eq3SwitchEntityDescription, + ) -> None: + """Initialize the entity.""" + + super().__init__(entry, entity_description.key) + self.entity_description = entity_description + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the switch.""" + + await self.entity_description.toggle_func(self._thermostat)(True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the switch.""" + + await self.entity_description.toggle_func(self._thermostat)(False) + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + + if TYPE_CHECKING: + assert self._thermostat.status is not None + + return 
self.entity_description.value_func(self._thermostat.status) diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py index dc513a03e02..f60668b0a06 100644 --- a/homeassistant/components/esphome/assist_satellite.py +++ b/homeassistant/components/esphome/assist_satellite.py @@ -95,11 +95,7 @@ async def async_setup_entry( if entry_data.device_info.voice_assistant_feature_flags_compat( entry_data.api_version ): - async_add_entities( - [ - EsphomeAssistSatellite(entry, entry_data), - ] - ) + async_add_entities([EsphomeAssistSatellite(entry, entry_data)]) class EsphomeAssistSatellite( @@ -198,6 +194,9 @@ class EsphomeAssistSatellite( self._satellite_config.max_active_wake_words = config.max_active_wake_words _LOGGER.debug("Received satellite configuration: %s", self._satellite_config) + # Inform listeners that config has been updated + self.entry_data.async_assist_satellite_config_updated(self._satellite_config) + async def async_added_to_hass(self) -> None: """Run when entity about to be added to hass.""" await super().async_added_to_hass() @@ -254,6 +253,13 @@ class EsphomeAssistSatellite( # Will use media player for TTS/announcements self._update_tts_format() + # Update wake word select when config is updated + self.async_on_remove( + self.entry_data.async_register_assist_satellite_set_wake_word_callback( + self.async_set_wake_word + ) + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() @@ -478,6 +484,17 @@ class EsphomeAssistSatellite( """Handle announcement finished message (also sent for TTS).""" self.tts_response_finished() + @callback + def async_set_wake_word(self, wake_word_id: str) -> None: + """Set active wake word and update config on satellite.""" + self._satellite_config.active_wake_words = [wake_word_id] + self.config_entry.async_create_background_task( + self.hass, + self.async_set_configuration(self._satellite_config), + "esphome_voice_assistant_set_config", + ) + _LOGGER.debug("Setting active wake word: %s", wake_word_id) + def _update_tts_format(self) -> None: """Update the TTS format from the first media player.""" for supported_format in chain(*self.entry_data.media_player_formats.values()): diff --git a/homeassistant/components/esphome/config_flow.py b/homeassistant/components/esphome/config_flow.py index 87061b0366f..cb892b314cd 100644 --- a/homeassistant/components/esphome/config_flow.py +++ b/homeassistant/components/esphome/config_flow.py @@ -257,6 +257,9 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): self, discovery_info: MqttServiceInfo ) -> ConfigFlowResult: """Handle MQTT discovery.""" + if not discovery_info.payload: + return self.async_abort(reason="mqtt_missing_payload") + device_info = json_loads_object(discovery_info.payload) if "mac" not in device_info: return self.async_abort(reason="mqtt_missing_mac") @@ -482,16 +485,12 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for esphome.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git 
a/homeassistant/components/esphome/coordinator.py b/homeassistant/components/esphome/coordinator.py index 284e17fd183..b31a74dcf3f 100644 --- a/homeassistant/components/esphome/coordinator.py +++ b/homeassistant/components/esphome/coordinator.py @@ -31,6 +31,7 @@ class ESPHomeDashboardCoordinator(DataUpdateCoordinator[dict[str, ConfiguredDevi super().__init__( hass, _LOGGER, + config_entry=None, name="ESPHome Dashboard", update_interval=timedelta(minutes=5), always_update=False, diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index f1b5218eec7..fc41ee99a00 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -48,6 +48,7 @@ from aioesphomeapi import ( from aioesphomeapi.model import ButtonInfo from bleak_esphome.backend.device import ESPHomeBluetoothDevice +from homeassistant.components.assist_satellite import AssistSatelliteConfiguration from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback @@ -152,6 +153,12 @@ class RuntimeEntryData: media_player_formats: dict[str, list[MediaPlayerSupportedFormat]] = field( default_factory=lambda: defaultdict(list) ) + assist_satellite_config_update_callbacks: list[ + Callable[[AssistSatelliteConfiguration], None] + ] = field(default_factory=list) + assist_satellite_set_wake_word_callbacks: list[Callable[[str], None]] = field( + default_factory=list + ) @property def name(self) -> str: @@ -504,3 +511,35 @@ class RuntimeEntryData: # We use this to determine if a deep sleep device should # be marked as unavailable or not. self.expected_disconnect = True + + @callback + def async_register_assist_satellite_config_updated_callback( + self, + callback_: Callable[[AssistSatelliteConfiguration], None], + ) -> CALLBACK_TYPE: + """Register to receive callbacks when the Assist satellite's configuration is updated.""" + self.assist_satellite_config_update_callbacks.append(callback_) + return lambda: self.assist_satellite_config_update_callbacks.remove(callback_) + + @callback + def async_assist_satellite_config_updated( + self, config: AssistSatelliteConfiguration + ) -> None: + """Notify listeners that the Assist satellite configuration has been updated.""" + for callback_ in self.assist_satellite_config_update_callbacks.copy(): + callback_(config) + + @callback + def async_register_assist_satellite_set_wake_word_callback( + self, + callback_: Callable[[str], None], + ) -> CALLBACK_TYPE: + """Register to receive callbacks when the Assist satellite's wake word is set.""" + self.assist_satellite_set_wake_word_callbacks.append(callback_) + return lambda: self.assist_satellite_set_wake_word_callbacks.remove(callback_) + + @callback + def async_assist_satellite_set_wake_word(self, wake_word_id: str) -> None: + """Notify listeners that the Assist satellite wake word has been set.""" + for callback_ in self.assist_satellite_set_wake_word_callbacks.copy(): + callback_(wake_word_id) diff --git a/homeassistant/components/esphome/ffmpeg_proxy.py b/homeassistant/components/esphome/ffmpeg_proxy.py index 2dacae52f75..9484d1e7593 100644 --- a/homeassistant/components/esphome/ffmpeg_proxy.py +++ b/homeassistant/components/esphome/ffmpeg_proxy.py @@ -212,6 +212,10 @@ class FFmpegConvertResponse(web.StreamResponse): assert proc.stdout is not None assert proc.stderr is not None + stderr_task = self.hass.async_create_background_task( + 
self._dump_ffmpeg_stderr(proc), "ESPHome media proxy dump stderr" + ) + try: # Pull audio chunks from ffmpeg and pass them to the HTTP client while ( @@ -230,18 +234,14 @@ class FFmpegConvertResponse(web.StreamResponse): raise # don't log error except: _LOGGER.exception("Unexpected error during ffmpeg conversion") - - # Process did not exit successfully - stderr_text = "" - while line := await proc.stderr.readline(): - stderr_text += line.decode() - _LOGGER.error("FFmpeg output: %s", stderr_text) - raise finally: # Allow conversion info to be removed self.convert_info.is_finished = True + # stop dumping ffmpeg stderr task + stderr_task.cancel() + # Terminate hangs, so kill is used if proc.returncode is None: proc.kill() @@ -250,6 +250,16 @@ class FFmpegConvertResponse(web.StreamResponse): if request.transport and not request.transport.is_closing(): await writer.write_eof() + async def _dump_ffmpeg_stderr( + self, + proc: asyncio.subprocess.Process, + ) -> None: + assert proc.stdout is not None + assert proc.stderr is not None + + while self.hass.is_running and (chunk := await proc.stderr.readline()): + _LOGGER.debug("ffmpeg[%s] output: %s", proc.pid, chunk.decode().rstrip()) + class FFmpegProxyView(HomeAssistantView): """FFmpeg web view to convert audio and stream back to client.""" diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index b9b6a98dcd1..77a3164d94c 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -15,9 +15,8 @@ "iot_class": "local_push", "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"], "mqtt": ["esphome/discover/#"], - "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==27.0.1", + "aioesphomeapi==27.0.3", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.1.0" ], diff --git a/homeassistant/components/esphome/media_player.py b/homeassistant/components/esphome/media_player.py index 3930b71d106..8a30814aa2c 100644 --- a/homeassistant/components/esphome/media_player.py +++ b/homeassistant/components/esphome/media_player.py @@ -20,6 +20,7 @@ from aioesphomeapi import ( from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_EXTRA, BrowseMedia, MediaPlayerDeviceClass, MediaPlayerEntity, @@ -50,6 +51,8 @@ _STATES: EsphomeEnumMapper[EspMediaPlayerState, MediaPlayerState] = EsphomeEnumM } ) +ATTR_BYPASS_PROXY = "bypass_proxy" + class EsphomeMediaPlayer( EsphomeEntity[MediaPlayerInfo, MediaPlayerEntityState], MediaPlayerEntity @@ -108,13 +111,15 @@ class EsphomeMediaPlayer( media_id = async_process_play_media_url(self.hass, media_id) announcement = kwargs.get(ATTR_MEDIA_ANNOUNCE) + bypass_proxy = kwargs.get(ATTR_MEDIA_EXTRA, {}).get(ATTR_BYPASS_PROXY) supported_formats: list[MediaPlayerSupportedFormat] | None = ( self._entry_data.media_player_formats.get(self._static_info.unique_id) ) if ( - supported_formats + not bypass_proxy + and supported_formats and _is_url(media_id) and ( proxy_url := self._get_proxy_url( diff --git a/homeassistant/components/esphome/select.py b/homeassistant/components/esphome/select.py index 623946503eb..71a21186d3d 100644 --- a/homeassistant/components/esphome/select.py +++ b/homeassistant/components/esphome/select.py @@ -8,8 +8,11 @@ from homeassistant.components.assist_pipeline.select import ( AssistPipelineSelect, VadSensitivitySelect, ) -from homeassistant.components.select import SelectEntity +from 
homeassistant.components.assist_satellite import AssistSatelliteConfiguration +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import restore_state from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -47,6 +50,7 @@ async def async_setup_entry( [ EsphomeAssistPipelineSelect(hass, entry_data), EsphomeVadSensitivitySelect(hass, entry_data), + EsphomeAssistSatelliteWakeWordSelect(hass, entry_data), ] ) @@ -89,3 +93,77 @@ class EsphomeVadSensitivitySelect(EsphomeAssistEntity, VadSensitivitySelect): """Initialize a VAD sensitivity selector.""" EsphomeAssistEntity.__init__(self, entry_data) VadSensitivitySelect.__init__(self, hass, self._device_info.mac_address) + + +class EsphomeAssistSatelliteWakeWordSelect( + EsphomeAssistEntity, SelectEntity, restore_state.RestoreEntity +): + """Wake word selector for esphome devices.""" + + entity_description = SelectEntityDescription( + key="wake_word", + translation_key="wake_word", + entity_category=EntityCategory.CONFIG, + ) + _attr_should_poll = False + _attr_current_option: str | None = None + _attr_options: list[str] = [] + + def __init__(self, hass: HomeAssistant, entry_data: RuntimeEntryData) -> None: + """Initialize a wake word selector.""" + EsphomeAssistEntity.__init__(self, entry_data) + + unique_id_prefix = self._device_info.mac_address + self._attr_unique_id = f"{unique_id_prefix}-wake_word" + + # name -> id + self._wake_words: dict[str, str] = {} + + @property + def available(self) -> bool: + """Return if entity is available.""" + return bool(self._attr_options) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + + # Update options when config is updated + self.async_on_remove( + self._entry_data.async_register_assist_satellite_config_updated_callback( + self.async_satellite_config_updated + ) + ) + + async def async_select_option(self, option: str) -> None: + """Select an option.""" + if wake_word_id := self._wake_words.get(option): + # _attr_current_option will be updated on + # async_satellite_config_updated after the device sets the wake + # word. 
+ self._entry_data.async_assist_satellite_set_wake_word(wake_word_id) + + def async_satellite_config_updated( + self, config: AssistSatelliteConfiguration + ) -> None: + """Update options with available wake words.""" + if (not config.available_wake_words) or (config.max_active_wake_words < 1): + self._attr_current_option = None + self._wake_words.clear() + self.async_write_ha_state() + return + + self._wake_words = {w.wake_word: w.id for w in config.available_wake_words} + self._attr_options = sorted(self._wake_words) + + if config.active_wake_words: + # Select first active wake word + wake_word_id = config.active_wake_words[0] + for wake_word in config.available_wake_words: + if wake_word.id == wake_word_id: + self._attr_current_option = wake_word.wake_word + else: + # Select first available wake word + self._attr_current_option = config.available_wake_words[0].wake_word + + self.async_write_ha_state() diff --git a/homeassistant/components/esphome/strings.json b/homeassistant/components/esphome/strings.json index a764e9e6fd9..81b58de8df2 100644 --- a/homeassistant/components/esphome/strings.json +++ b/homeassistant/components/esphome/strings.json @@ -8,7 +8,8 @@ "service_received": "Action received", "mqtt_missing_mac": "Missing MAC address in MQTT properties.", "mqtt_missing_api": "Missing API port in MQTT properties.", - "mqtt_missing_ip": "Missing IP address in MQTT properties." + "mqtt_missing_ip": "Missing IP address in MQTT properties.", + "mqtt_missing_payload": "Missing MQTT Payload." }, "error": { "resolve_error": "Can't resolve address of the ESP. If this error persists, please set a static IP address", @@ -83,6 +84,12 @@ "aggressive": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::aggressive%]", "relaxed": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::relaxed%]" } + }, + "wake_word": { + "name": "Wake word", + "state": { + "okay_nabu": "Okay Nabu" + } } }, "climate": { diff --git a/homeassistant/components/esphome/update.py b/homeassistant/components/esphome/update.py index 5e571399ecb..2b593051742 100644 --- a/homeassistant/components/esphome/update.py +++ b/homeassistant/components/esphome/update.py @@ -61,6 +61,8 @@ async def async_setup_entry( if (dashboard := async_get_dashboard(hass)) is None: return entry_data = DomainData.get(hass).get_entry_data(entry) + assert entry_data.device_info is not None + device_name = entry_data.device_info.name unsubs: list[CALLBACK_TYPE] = [] @callback @@ -72,13 +74,22 @@ async def async_setup_entry( if not entry_data.available or not dashboard.last_update_success: return + # Do not add Dashboard Entity if this device is not known to the ESPHome dashboard. 
+ if dashboard.data is None or dashboard.data.get(device_name) is None: + return + for unsub in unsubs: unsub() unsubs.clear() async_add_entities([ESPHomeDashboardUpdateEntity(entry_data, dashboard)]) - if entry_data.available and dashboard.last_update_success: + if ( + entry_data.available + and dashboard.last_update_success + and dashboard.data is not None + and dashboard.data.get(device_name) + ): _async_setup_update_entity() return @@ -133,10 +144,8 @@ class ESPHomeDashboardUpdateEntity( self._attr_supported_features = NO_FEATURES self._attr_installed_version = device_info.esphome_version device = coordinator.data.get(device_info.name) - if device is None: - self._attr_latest_version = None - else: - self._attr_latest_version = device["current_version"] + assert device is not None + self._attr_latest_version = device["current_version"] @callback def _handle_coordinator_update(self) -> None: diff --git a/homeassistant/components/etherscan/manifest.json b/homeassistant/components/etherscan/manifest.json index 1b296e4e4be..e5099ffaf9c 100644 --- a/homeassistant/components/etherscan/manifest.json +++ b/homeassistant/components/etherscan/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/etherscan", "iot_class": "cloud_polling", "loggers": ["pyetherscan"], + "quality_scale": "legacy", "requirements": ["python-etherscan-api==0.0.3"] } diff --git a/homeassistant/components/eufy/manifest.json b/homeassistant/components/eufy/manifest.json index ccf15144f9e..6ad1b7de81b 100644 --- a/homeassistant/components/eufy/manifest.json +++ b/homeassistant/components/eufy/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/eufy", "iot_class": "local_polling", "loggers": ["lakeside"], + "quality_scale": "legacy", "requirements": ["lakeside==0.13"] } diff --git a/homeassistant/components/everlights/manifest.json b/homeassistant/components/everlights/manifest.json index 6f856b26087..a2deeab2666 100644 --- a/homeassistant/components/everlights/manifest.json +++ b/homeassistant/components/everlights/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/everlights", "iot_class": "local_polling", "loggers": ["pyeverlights"], + "quality_scale": "legacy", "requirements": ["pyeverlights==0.1.0"] } diff --git a/homeassistant/components/evohome/__init__.py b/homeassistant/components/evohome/__init__.py index 1097f19f47c..612131919d4 100644 --- a/homeassistant/components/evohome/__init__.py +++ b/homeassistant/components/evohome/__init__.py @@ -240,6 +240,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=f"{DOMAIN}_coordinator", update_interval=config[DOMAIN][CONF_SCAN_INTERVAL], update_method=broker.async_update, diff --git a/homeassistant/components/evohome/manifest.json b/homeassistant/components/evohome/manifest.json index e81e71c5b07..da3d197f6aa 100644 --- a/homeassistant/components/evohome/manifest.json +++ b/homeassistant/components/evohome/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/evohome", "iot_class": "cloud_polling", "loggers": ["evohomeasync", "evohomeasync2"], + "quality_scale": "legacy", "requirements": ["evohome-async==0.4.20"] } diff --git a/homeassistant/components/ezviz/config_flow.py b/homeassistant/components/ezviz/config_flow.py index aa998cc6f60..a7551737c10 100644 --- a/homeassistant/components/ezviz/config_flow.py +++ 
b/homeassistant/components/ezviz/config_flow.py @@ -150,7 +150,7 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> EzvizOptionsFlowHandler: """Get the options flow for this handler.""" - return EzvizOptionsFlowHandler(config_entry) + return EzvizOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -391,10 +391,6 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): class EzvizOptionsFlowHandler(OptionsFlow): """Handle EZVIZ client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/ezviz/manifest.json b/homeassistant/components/ezviz/manifest.json index 53976bf3002..7c796c74ef7 100644 --- a/homeassistant/components/ezviz/manifest.json +++ b/homeassistant/components/ezviz/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/ezviz", "iot_class": "cloud_polling", "loggers": ["paho_mqtt", "pyezviz"], - "requirements": ["pyezviz==0.2.1.2"] + "requirements": ["pyezviz==0.2.2.3"] } diff --git a/homeassistant/components/ezviz/update.py b/homeassistant/components/ezviz/update.py index 05735d152cf..25a506a0052 100644 --- a/homeassistant/components/ezviz/update.py +++ b/homeassistant/components/ezviz/update.py @@ -73,11 +73,9 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity): return self.data["version"] @property - def in_progress(self) -> bool | int | None: + def in_progress(self) -> bool: """Update installation progress.""" - if self.data["upgrade_in_progress"]: - return self.data["upgrade_percent"] - return False + return bool(self.data["upgrade_in_progress"]) @property def latest_version(self) -> str | None: @@ -93,6 +91,13 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity): return self.data["latest_firmware_info"].get("desc") return None + @property + def update_percentage(self) -> int | None: + """Update installation progress.""" + if self.data["upgrade_in_progress"]: + return self.data["upgrade_percent"] + return None + async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: diff --git a/homeassistant/components/facebook/manifest.json b/homeassistant/components/facebook/manifest.json index 5074489852e..5a7eb216ccc 100644 --- a/homeassistant/components/facebook/manifest.json +++ b/homeassistant/components/facebook/manifest.json @@ -3,5 +3,6 @@ "name": "Facebook Messenger", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/facebook", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/fail2ban/manifest.json b/homeassistant/components/fail2ban/manifest.json index e348db1c695..1570afda6eb 100644 --- a/homeassistant/components/fail2ban/manifest.json +++ b/homeassistant/components/fail2ban/manifest.json @@ -3,5 +3,6 @@ "name": "Fail2Ban", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/fail2ban", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/familyhub/manifest.json b/homeassistant/components/familyhub/manifest.json index f57030efb27..cf4bf0ba68f 100644 --- a/homeassistant/components/familyhub/manifest.json +++ b/homeassistant/components/familyhub/manifest.json @@ -5,5 +5,6 @@ 
"documentation": "https://www.home-assistant.io/integrations/familyhub", "iot_class": "local_polling", "loggers": ["pyfamilyhublocal"], + "quality_scale": "legacy", "requirements": ["python-family-hub-local==0.0.2"] } diff --git a/homeassistant/components/fan/__init__.py b/homeassistant/components/fan/__init__.py index b1c2b748520..b31a18d0eac 100644 --- a/homeassistant/components/fan/__init__.py +++ b/homeassistant/components/fan/__init__.py @@ -234,10 +234,10 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): entity_description: FanEntityDescription _attr_current_direction: str | None = None _attr_oscillating: bool | None = None - _attr_percentage: int | None - _attr_preset_mode: str | None - _attr_preset_modes: list[str] | None - _attr_speed_count: int + _attr_percentage: int | None = 0 + _attr_preset_mode: str | None = None + _attr_preset_modes: list[str] | None = None + _attr_speed_count: int = 100 _attr_supported_features: FanEntityFeature = FanEntityFeature(0) __mod_supported_features: FanEntityFeature = FanEntityFeature(0) @@ -245,14 +245,14 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): # once migrated and set the feature flags TURN_ON/TURN_OFF as needed. _enable_turn_on_off_backwards_compatibility: bool = True - def __getattribute__(self, __name: str) -> Any: + def __getattribute__(self, name: str, /) -> Any: """Get attribute. Modify return of `supported_features` to include `_mod_supported_features` if attribute is set. """ - if __name != "supported_features": - return super().__getattribute__(__name) + if name != "supported_features": + return super().__getattribute__(name) # Convert the supported features to ClimateEntityFeature. # Remove this compatibility shim in 2025.1 or later. @@ -463,16 +463,12 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @cached_property def percentage(self) -> int | None: """Return the current speed as a percentage.""" - if hasattr(self, "_attr_percentage"): - return self._attr_percentage - return 0 + return self._attr_percentage @cached_property def speed_count(self) -> int: """Return the number of speeds the fan supports.""" - if hasattr(self, "_attr_speed_count"): - return self._attr_speed_count - return 100 + return self._attr_speed_count @property def percentage_step(self) -> float: @@ -538,9 +534,7 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Requires FanEntityFeature.SET_SPEED. """ - if hasattr(self, "_attr_preset_mode"): - return self._attr_preset_mode - return None + return self._attr_preset_mode @cached_property def preset_modes(self) -> list[str] | None: @@ -548,9 +542,7 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): Requires FanEntityFeature.SET_SPEED. 
""" - if hasattr(self, "_attr_preset_modes"): - return self._attr_preset_modes - return None + return self._attr_preset_modes # These can be removed if no deprecated constant are in this module anymore diff --git a/homeassistant/components/fastdotcom/manifest.json b/homeassistant/components/fastdotcom/manifest.json index 9e2e077858c..10b6fdb5b5d 100644 --- a/homeassistant/components/fastdotcom/manifest.json +++ b/homeassistant/components/fastdotcom/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/fastdotcom", "iot_class": "cloud_polling", "loggers": ["fastdotcom"], - "quality_scale": "gold", "requirements": ["fastdotcom==0.0.3"], "single_config_entry": true } diff --git a/homeassistant/components/feedreader/config_flow.py b/homeassistant/components/feedreader/config_flow.py index e05cf9a63e5..72042de25ed 100644 --- a/homeassistant/components/feedreader/config_flow.py +++ b/homeassistant/components/feedreader/config_flow.py @@ -16,7 +16,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_URL from homeassistant.core import HomeAssistant, callback @@ -47,9 +46,11 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: """Get the options flow for this handler.""" - return FeedReaderOptionsFlowHandler(config_entry) + return FeedReaderOptionsFlowHandler() def show_user_form( self, @@ -148,7 +149,7 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="reconfigure_successful") -class FeedReaderOptionsFlowHandler(OptionsFlowWithConfigEntry): +class FeedReaderOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -163,7 +164,9 @@ class FeedReaderOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_MAX_ENTRIES, - default=self.options.get(CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES), + default=self.config_entry.options.get( + CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES + ), ): cv.positive_int, } ) diff --git a/homeassistant/components/ffmpeg_motion/manifest.json b/homeassistant/components/ffmpeg_motion/manifest.json index 0115ed712e3..f51a6206e2b 100644 --- a/homeassistant/components/ffmpeg_motion/manifest.json +++ b/homeassistant/components/ffmpeg_motion/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["ffmpeg"], "documentation": "https://www.home-assistant.io/integrations/ffmpeg_motion", - "iot_class": "calculated" + "iot_class": "calculated", + "quality_scale": "legacy" } diff --git a/homeassistant/components/ffmpeg_noise/manifest.json b/homeassistant/components/ffmpeg_noise/manifest.json index 6352fed88c4..f1c0cc9f673 100644 --- a/homeassistant/components/ffmpeg_noise/manifest.json +++ b/homeassistant/components/ffmpeg_noise/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["ffmpeg"], "documentation": "https://www.home-assistant.io/integrations/ffmpeg_noise", - "iot_class": "calculated" + "iot_class": "calculated", + "quality_scale": "legacy" } diff --git a/homeassistant/components/fibaro/light.py b/homeassistant/components/fibaro/light.py index 17831a36a4a..18f86b6df7d 100644 --- a/homeassistant/components/fibaro/light.py +++ b/homeassistant/components/fibaro/light.py @@ -132,32 +132,25 @@ class FibaroLight(FibaroEntity, LightEntity): """Turn the light off.""" 
self.call_turn_off() - @property - def is_on(self) -> bool | None: - """Return true if device is on. - - Dimmable and RGB lights can be on based on different - properties, so we need to check here several values. - - JSON for HC2 uses always string, HC3 uses int for integers. - """ - if self.current_binary_state: - return True - with suppress(TypeError): - if self.fibaro_device.brightness != 0: - return True - with suppress(TypeError): - if self.fibaro_device.current_program != 0: - return True - with suppress(TypeError): - if self.fibaro_device.current_program_id != 0: - return True - - return False - def update(self) -> None: """Update the state.""" super().update() + + # Dimmable and RGB lights can be on based on different + # properties, so we need to check here several values + # to see if the light is on. + light_is_on = self.current_binary_state + with suppress(TypeError): + if self.fibaro_device.brightness != 0: + light_is_on = True + with suppress(TypeError): + if self.fibaro_device.current_program != 0: + light_is_on = True + with suppress(TypeError): + if self.fibaro_device.current_program_id != 0: + light_is_on = True + self._attr_is_on = light_is_on + # Brightness handling if brightness_supported(self.supported_color_modes): self._attr_brightness = scaleto255(self.fibaro_device.value.int_value()) @@ -172,7 +165,7 @@ class FibaroLight(FibaroEntity, LightEntity): if rgbw == (0, 0, 0, 0) and self.fibaro_device.last_color_set.has_color: rgbw = self.fibaro_device.last_color_set.rgbw_color - if self._attr_color_mode == ColorMode.RGB: + if self.color_mode == ColorMode.RGB: self._attr_rgb_color = rgbw[:3] else: self._attr_rgbw_color = rgbw diff --git a/homeassistant/components/fido/manifest.json b/homeassistant/components/fido/manifest.json index dc440304646..23949a56ee2 100644 --- a/homeassistant/components/fido/manifest.json +++ b/homeassistant/components/fido/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/fido", "iot_class": "cloud_polling", "loggers": ["pyfido"], + "quality_scale": "legacy", "requirements": ["pyfido==2.1.2"] } diff --git a/homeassistant/components/file/__init__.py b/homeassistant/components/file/__init__.py index 0c9cfee5f4d..7bc206057c8 100644 --- a/homeassistant/components/file/__init__.py +++ b/homeassistant/components/file/__init__.py @@ -3,88 +3,16 @@ from copy import deepcopy from typing import Any -from homeassistant.components.notify import migrate_notify_issue -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_FILE_PATH, - CONF_NAME, - CONF_PLATFORM, - CONF_SCAN_INTERVAL, - Platform, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_FILE_PATH, CONF_NAME, CONF_PLATFORM, Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - discovery, - issue_registry as ir, -) -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN -from .notify import PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA -from .sensor import PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA - -IMPORT_SCHEMA = { - Platform.SENSOR: SENSOR_PLATFORM_SCHEMA, - Platform.NOTIFY: NOTIFY_PLATFORM_SCHEMA, -} - -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [Platform.NOTIFY, Platform.SENSOR] -async def async_setup(hass: 
HomeAssistant, config: ConfigType) -> bool: - """Set up the file integration.""" - - hass.data[DOMAIN] = config - if hass.config_entries.async_entries(DOMAIN): - # We skip import in case we already have config entries - return True - # The use of the legacy notify service was deprecated with HA Core 2024.6.0 - # and will be removed with HA Core 2024.12 - migrate_notify_issue(hass, DOMAIN, "File", "2024.12.0") - # The YAML config was imported with HA Core 2024.6.0 and will be removed with - # HA Core 2024.12 - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - learn_more_url="https://www.home-assistant.io/integrations/file/", - severity=ir.IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "File", - }, - ) - - # Import the YAML config into separate config entries - platforms_config: dict[Platform, list[ConfigType]] = { - domain: config[domain] for domain in PLATFORMS if domain in config - } - for domain, items in platforms_config.items(): - for item in items: - if item[CONF_PLATFORM] == DOMAIN: - file_config_item = IMPORT_SCHEMA[domain](item) - file_config_item[CONF_PLATFORM] = domain - if CONF_SCAN_INTERVAL in file_config_item: - del file_config_item[CONF_SCAN_INTERVAL] - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=file_config_item, - ) - ) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a file component entry.""" config = {**entry.data, **entry.options} @@ -102,20 +30,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, [Platform(entry.data[CONF_PLATFORM])] ) entry.async_on_unload(entry.add_update_listener(update_listener)) - if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data: - # New notify entities are being setup through the config entry, - # but during the deprecation period we want to keep the legacy notify platform, - # so we forward the setup config through discovery. - # Only the entities from yaml will still be available as legacy service. 
- hass.async_create_task( - discovery.async_load_platform( - hass, - Platform.NOTIFY, - DOMAIN, - config, - hass.data[DOMAIN], - ) - ) return True diff --git a/homeassistant/components/file/config_flow.py b/homeassistant/components/file/config_flow.py index d74e36ce935..992635d05fd 100644 --- a/homeassistant/components/file/config_flow.py +++ b/homeassistant/components/file/config_flow.py @@ -1,7 +1,8 @@ """Config flow for file integration.""" +from __future__ import annotations + from copy import deepcopy -import os from typing import Any import voluptuous as vol @@ -11,11 +12,9 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_FILE_PATH, - CONF_FILENAME, CONF_NAME, CONF_PLATFORM, CONF_UNIT_OF_MEASUREMENT, @@ -74,9 +73,11 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FileOptionsFlowHandler: """Get the options flow for this handler.""" - return FileOptionsFlowHandler(config_entry) + return FileOptionsFlowHandler() async def validate_file_path(self, file_path: str) -> bool: """Ensure the file path is valid.""" @@ -129,29 +130,8 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle file sensor config flow.""" return await self._async_handle_step(Platform.SENSOR.value, user_input) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import `file`` config from configuration.yaml.""" - self._async_abort_entries_match(import_data) - platform = import_data[CONF_PLATFORM] - name: str = import_data.get(CONF_NAME, DEFAULT_NAME) - file_name: str - if platform == Platform.NOTIFY: - file_name = import_data.pop(CONF_FILENAME) - file_path: str = os.path.join(self.hass.config.config_dir, file_name) - import_data[CONF_FILE_PATH] = file_path - else: - file_path = import_data[CONF_FILE_PATH] - title = f"{name} [{file_path}]" - data = deepcopy(import_data) - options = {} - for key, value in import_data.items(): - if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): - data.pop(key) - options[key] = value - return self.async_create_entry(title=title, data=data, options=options) - -class FileOptionsFlowHandler(OptionsFlowWithConfigEntry): +class FileOptionsFlowHandler(OptionsFlow): """Handle File options.""" async def async_step_init( diff --git a/homeassistant/components/file/notify.py b/homeassistant/components/file/notify.py index 9411b7cf1a8..10e3d4a4ac6 100644 --- a/homeassistant/components/file/notify.py +++ b/homeassistant/components/file/notify.py @@ -2,104 +2,23 @@ from __future__ import annotations -from functools import partial -import logging import os from typing import Any, TextIO -import voluptuous as vol - from homeassistant.components.notify import ( - ATTR_TITLE, ATTR_TITLE_DEFAULT, - PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, - BaseNotificationService, NotifyEntity, NotifyEntityFeature, - migrate_notify_issue, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME, CONF_NAME +from homeassistant.const import CONF_FILE_PATH, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing 
import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN, FILE_ICON -_LOGGER = logging.getLogger(__name__) - -# The legacy platform schema uses a filename, after import -# The full file path is stored in the config entry -PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_FILENAME): cv.string, - vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean, - } -) - - -async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> FileNotificationService | None: - """Get the file notification service.""" - if discovery_info is None: - # We only set up through discovery - return None - file_path: str = discovery_info[CONF_FILE_PATH] - timestamp: bool = discovery_info[CONF_TIMESTAMP] - - return FileNotificationService(file_path, timestamp) - - -class FileNotificationService(BaseNotificationService): - """Implement the notification service for the File service.""" - - def __init__(self, file_path: str, add_timestamp: bool) -> None: - """Initialize the service.""" - self._file_path = file_path - self.add_timestamp = add_timestamp - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to a file.""" - # The use of the legacy notify service was deprecated with HA Core 2024.6.0 - # and will be removed with HA Core 2024.12 - migrate_notify_issue( - self.hass, DOMAIN, "File", "2024.12.0", service_name=self._service_name - ) - await self.hass.async_add_executor_job( - partial(self.send_message, message, **kwargs) - ) - - def send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to a file.""" - file: TextIO - filepath = self._file_path - try: - with open(filepath, "a", encoding="utf8") as file: - if os.stat(filepath).st_size == 0: - title = ( - f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log" - f" started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" - ) - file.write(title) - - if self.add_timestamp: - text = f"{dt_util.utcnow().isoformat()} {message}\n" - else: - text = f"{message}\n" - file.write(text) - except OSError as exc: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="write_access_failed", - translation_placeholders={"filename": filepath, "exc": f"{exc!r}"}, - ) from exc - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/file/sensor.py b/homeassistant/components/file/sensor.py index e37a3df86a6..879c06e29f3 100644 --- a/homeassistant/components/file/sensor.py +++ b/homeassistant/components/file/sensor.py @@ -6,12 +6,8 @@ import logging import os from file_read_backwards import FileReadBackwards -import voluptuous as vol -from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, - SensorEntity, -) +from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_FILE_PATH, @@ -20,38 +16,13 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.template import Template -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import DEFAULT_NAME, FILE_ICON _LOGGER = logging.getLogger(__name__) -PLATFORM_SCHEMA = 
SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_FILE_PATH): cv.isfile, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_VALUE_TEMPLATE): cv.string, - vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, - } -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the file sensor from YAML. - - The YAML platform config is automatically - imported to a config entry, this method can be removed - when YAML support is removed. - """ - async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/file/strings.json b/homeassistant/components/file/strings.json index 8806c67cd96..bd8f23602e3 100644 --- a/homeassistant/components/file/strings.json +++ b/homeassistant/components/file/strings.json @@ -18,7 +18,7 @@ }, "data_description": { "file_path": "The local file path to retrieve the sensor value from", - "value_template": "A template to render the sensors value based on the file content", + "value_template": "A template to render the sensor's value based on the file content", "unit_of_measurement": "Unit of measurement for the sensor" } }, diff --git a/homeassistant/components/filesize/config_flow.py b/homeassistant/components/filesize/config_flow.py index 51eff46bdb3..8ffe3f94353 100644 --- a/homeassistant/components/filesize/config_flow.py +++ b/homeassistant/components/filesize/config_flow.py @@ -11,7 +11,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_FILE_PATH from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from .const import DOMAIN @@ -20,20 +19,20 @@ DATA_SCHEMA = vol.Schema({vol.Required(CONF_FILE_PATH): str}) _LOGGER = logging.getLogger(__name__) -def validate_path(hass: HomeAssistant, path: str) -> str: +def validate_path(hass: HomeAssistant, path: str) -> tuple[str | None, dict[str, str]]: """Validate path.""" get_path = pathlib.Path(path) if not get_path.exists() or not get_path.is_file(): _LOGGER.error("Can not access file %s", path) - raise NotValidError + return (None, {"base": "not_valid"}) if not hass.config.is_allowed_path(path): _LOGGER.error("Filepath %s is not allowed", path) - raise NotAllowedError + return (None, {"base": "not_allowed"}) full_path = get_path.absolute() - return str(full_path) + return (str(full_path), {}) class FilesizeConfigFlow(ConfigFlow, domain=DOMAIN): @@ -45,18 +44,13 @@ class FilesizeConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - errors: dict[str, Any] = {} + errors: dict[str, str] = {} if user_input is not None: - try: - full_path = await self.hass.async_add_executor_job( - validate_path, self.hass, user_input[CONF_FILE_PATH] - ) - except NotValidError: - errors["base"] = "not_valid" - except NotAllowedError: - errors["base"] = "not_allowed" - else: + full_path, errors = await self.hass.async_add_executor_job( + validate_path, self.hass, user_input[CONF_FILE_PATH] + ) + if not errors: await self.async_set_unique_id(full_path) self._abort_if_unique_id_configured() @@ -70,10 +64,29 @@ class FilesizeConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> 
ConfigFlowResult: + """Handle a reconfigure flow initialized by the user.""" + errors: dict[str, str] = {} -class NotValidError(HomeAssistantError): - """Path is not valid error.""" + if user_input is not None: + reconfigure_entry = self._get_reconfigure_entry() + full_path, errors = await self.hass.async_add_executor_job( + validate_path, self.hass, user_input[CONF_FILE_PATH] + ) + if not errors: + await self.async_set_unique_id(full_path) + self._abort_if_unique_id_configured() + name = str(user_input[CONF_FILE_PATH]).rsplit("/", maxsplit=1)[-1] + return self.async_update_reload_and_abort( + reconfigure_entry, + title=name, + unique_id=self.unique_id, + data_updates={CONF_FILE_PATH: user_input[CONF_FILE_PATH]}, + ) -class NotAllowedError(HomeAssistantError): - """Path is not allowed error.""" + return self.async_show_form( + step_id="reconfigure", data_schema=DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/filesize/coordinator.py b/homeassistant/components/filesize/coordinator.py index c0dbb14555e..8350cee91bf 100644 --- a/homeassistant/components/filesize/coordinator.py +++ b/homeassistant/components/filesize/coordinator.py @@ -60,12 +60,14 @@ class FileSizeCoordinator(DataUpdateCoordinator[dict[str, int | float | datetime statinfo = await self.hass.async_add_executor_job(self._update) size = statinfo.st_size last_updated = dt_util.utc_from_timestamp(statinfo.st_mtime) + created = dt_util.utc_from_timestamp(statinfo.st_ctime) _LOGGER.debug("size %s, last updated %s", size, last_updated) data: dict[str, int | float | datetime] = { "file": round(size / 1e6, 2), "bytes": size, "last_updated": last_updated, + "created": created, } return data diff --git a/homeassistant/components/filesize/icons.json b/homeassistant/components/filesize/icons.json index 15829589853..059a51a9e34 100644 --- a/homeassistant/components/filesize/icons.json +++ b/homeassistant/components/filesize/icons.json @@ -9,6 +9,9 @@ }, "last_updated": { "default": "mdi:file" + }, + "created": { + "default": "mdi:file" } } } diff --git a/homeassistant/components/filesize/sensor.py b/homeassistant/components/filesize/sensor.py index 71a4e50edfe..2eb170af99d 100644 --- a/homeassistant/components/filesize/sensor.py +++ b/homeassistant/components/filesize/sensor.py @@ -47,6 +47,13 @@ SENSOR_TYPES = ( device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, ), + SensorEntityDescription( + key="created", + translation_key="created", + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + entity_category=EntityCategory.DIAGNOSTIC, + ), ) @@ -75,7 +82,6 @@ class FilesizeEntity(CoordinatorEntity[FileSizeCoordinator], SensorEntity): ) -> None: """Initialize the Filesize sensor.""" super().__init__(coordinator) - base_name = str(coordinator.path.absolute()).rsplit("/", maxsplit=1)[-1] self._attr_unique_id = ( entry_id if description.key == "file" else f"{entry_id}-{description.key}" ) @@ -83,7 +89,6 @@ class FilesizeEntity(CoordinatorEntity[FileSizeCoordinator], SensorEntity): self._attr_device_info = DeviceInfo( entry_type=DeviceEntryType.SERVICE, identifiers={(DOMAIN, entry_id)}, - name=base_name, ) @property diff --git a/homeassistant/components/filesize/strings.json b/homeassistant/components/filesize/strings.json index 3323c3411b2..6623cf9c375 100644 --- a/homeassistant/components/filesize/strings.json +++ b/homeassistant/components/filesize/strings.json @@ -5,6 +5,11 @@ "data": { "file_path": "Path to file" } + }, + "reconfigure": { + "data": { + 
"file_path": "[%key:component::filesize::config::step::user::data::file_path%]" + } } }, "error": { @@ -12,7 +17,8 @@ "not_allowed": "Path is not allowed" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "title": "Filesize", @@ -26,6 +32,9 @@ }, "last_updated": { "name": "Last updated" + }, + "created": { + "name": "Created" } } } diff --git a/homeassistant/components/fints/manifest.json b/homeassistant/components/fints/manifest.json index 063e612d35d..0a9c5389cd9 100644 --- a/homeassistant/components/fints/manifest.json +++ b/homeassistant/components/fints/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["fints", "mt_940", "sepaxml"], + "quality_scale": "legacy", "requirements": ["fints==3.1.0"] } diff --git a/homeassistant/components/firmata/manifest.json b/homeassistant/components/firmata/manifest.json index a35b6f179ce..363b5bd60c6 100644 --- a/homeassistant/components/firmata/manifest.json +++ b/homeassistant/components/firmata/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/firmata", "iot_class": "local_push", "loggers": ["pymata_express"], + "quality_scale": "legacy", "requirements": ["pymata-express==1.19"] } diff --git a/homeassistant/components/fitbit/config_flow.py b/homeassistant/components/fitbit/config_flow.py index cb4e3fb4ea3..d5b33a731e3 100644 --- a/homeassistant/components/fitbit/config_flow.py +++ b/homeassistant/components/fitbit/config_flow.py @@ -86,7 +86,3 @@ class OAuth2FlowHandler( self._abort_if_unique_id_configured() return self.async_create_entry(title=profile.display_name, data=data) - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Handle import from YAML.""" - return await self.async_oauth_create_entry(import_data) diff --git a/homeassistant/components/fitbit/sensor.py b/homeassistant/components/fitbit/sensor.py index ab9a593e195..d58dad4ca67 100644 --- a/homeassistant/components/fitbit/sensor.py +++ b/homeassistant/components/fitbit/sensor.py @@ -6,30 +6,16 @@ from collections.abc import Callable from dataclasses import dataclass import datetime import logging -import os from typing import Any, Final, cast -from fitbit import Fitbit -from oauthlib.oauth2.rfc6749.errors import OAuth2Error -import voluptuous as vol - -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_TOKEN, - CONF_UNIT_SYSTEM, PERCENTAGE, EntityCategory, UnitOfLength, @@ -38,33 +24,13 @@ from homeassistant.const import ( UnitOfVolume, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.data_entry_flow import FlowResultType -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import 
AddEntitiesCallback from homeassistant.helpers.icon import icon_for_battery_level -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from homeassistant.util.json import load_json_object from .api import FitbitApi -from .const import ( - ATTR_ACCESS_TOKEN, - ATTR_LAST_SAVED_AT, - ATTR_REFRESH_TOKEN, - ATTRIBUTION, - BATTERY_LEVELS, - CONF_CLOCK_FORMAT, - CONF_MONITORED_RESOURCES, - DEFAULT_CLOCK_FORMAT, - DEFAULT_CONFIG, - DOMAIN, - FITBIT_CONFIG_FILE, - FITBIT_DEFAULT_RESOURCES, - FitbitScope, - FitbitUnitSystem, -) +from .const import ATTRIBUTION, BATTERY_LEVELS, DOMAIN, FitbitScope, FitbitUnitSystem from .coordinator import FitbitData, FitbitDeviceCoordinator from .exceptions import FitbitApiException, FitbitAuthException from .model import FitbitDevice, config_from_entry_data @@ -75,6 +41,8 @@ _CONFIGURING: dict[str, str] = {} SCAN_INTERVAL: Final = datetime.timedelta(minutes=30) +FITBIT_TRACKER_SUBSTRING = "/tracker/" + def _default_value_fn(result: dict[str, Any]) -> str: """Parse a Fitbit timeseries API responses.""" @@ -156,11 +124,34 @@ class FitbitSensorEntityDescription(SensorEntityDescription): unit_fn: Callable[[FitbitUnitSystem], str | None] = lambda x: None scope: FitbitScope | None = None + @property + def is_tracker(self) -> bool: + """Return if the entity is a tracker.""" + return FITBIT_TRACKER_SUBSTRING in self.key + + +def _build_device_info( + config_entry: ConfigEntry, entity_description: FitbitSensorEntityDescription +) -> DeviceInfo: + """Build device info for sensor entities info across devices.""" + unique_id = cast(str, config_entry.unique_id) + if entity_description.is_tracker: + return DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, f"{unique_id}_tracker")}, + translation_key="tracker", + translation_placeholders={"display_name": config_entry.title}, + ) + return DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, unique_id)}, + ) + FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( FitbitSensorEntityDescription( key="activities/activityCalories", - name="Activity Calories", + translation_key="activity_calories", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -169,7 +160,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/calories", - name="Calories", + translation_key="calories", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -177,7 +168,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/caloriesBMR", - name="Calories BMR", + translation_key="calories_bmr", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -187,7 +178,6 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/distance", - name="Distance", icon="mdi:map-marker", device_class=SensorDeviceClass.DISTANCE, value_fn=_distance_value_fn, @@ -197,7 +187,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/elevation", - name="Elevation", + translation_key="elevation", icon="mdi:walk", device_class=SensorDeviceClass.DISTANCE, unit_fn=_elevation_unit, @@ 
-207,7 +197,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/floors", - name="Floors", + translation_key="floors", native_unit_of_measurement="floors", icon="mdi:walk", scope=FitbitScope.ACTIVITY, @@ -216,7 +206,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/heart", - name="Resting Heart Rate", + translation_key="resting_heart_rate", native_unit_of_measurement="bpm", icon="mdi:heart-pulse", value_fn=_int_value_or_none("restingHeartRate"), @@ -225,7 +215,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/minutesFairlyActive", - name="Minutes Fairly Active", + translation_key="minutes_fairly_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:walk", device_class=SensorDeviceClass.DURATION, @@ -235,7 +225,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/minutesLightlyActive", - name="Minutes Lightly Active", + translation_key="minutes_lightly_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:walk", device_class=SensorDeviceClass.DURATION, @@ -245,7 +235,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/minutesSedentary", - name="Minutes Sedentary", + translation_key="minutes_sedentary", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:seat-recline-normal", device_class=SensorDeviceClass.DURATION, @@ -255,7 +245,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/minutesVeryActive", - name="Minutes Very Active", + translation_key="minutes_very_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:run", device_class=SensorDeviceClass.DURATION, @@ -265,7 +255,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/steps", - name="Steps", + translation_key="steps", native_unit_of_measurement="steps", icon="mdi:walk", scope=FitbitScope.ACTIVITY, @@ -273,7 +263,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/activityCalories", - name="Tracker Activity Calories", + translation_key="activity_calories", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -283,7 +273,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/calories", - name="Tracker Calories", + translation_key="calories", native_unit_of_measurement="cal", icon="mdi:fire", scope=FitbitScope.ACTIVITY, @@ -293,7 +283,6 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/distance", - name="Tracker Distance", icon="mdi:map-marker", device_class=SensorDeviceClass.DISTANCE, value_fn=_distance_value_fn, @@ -305,7 +294,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/elevation", - name="Tracker Elevation", + translation_key="elevation", icon="mdi:walk", device_class=SensorDeviceClass.DISTANCE, unit_fn=_elevation_unit, @@ -316,7 +305,7 @@ FITBIT_RESOURCES_LIST: 
Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/floors", - name="Tracker Floors", + translation_key="floors", native_unit_of_measurement="floors", icon="mdi:walk", scope=FitbitScope.ACTIVITY, @@ -326,7 +315,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/minutesFairlyActive", - name="Tracker Minutes Fairly Active", + translation_key="minutes_fairly_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:walk", device_class=SensorDeviceClass.DURATION, @@ -337,7 +326,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/minutesLightlyActive", - name="Tracker Minutes Lightly Active", + translation_key="minutes_lightly_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:walk", device_class=SensorDeviceClass.DURATION, @@ -348,7 +337,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/minutesSedentary", - name="Tracker Minutes Sedentary", + translation_key="minutes_sedentary", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:seat-recline-normal", device_class=SensorDeviceClass.DURATION, @@ -359,7 +348,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/minutesVeryActive", - name="Tracker Minutes Very Active", + translation_key="minutes_very_active", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:run", device_class=SensorDeviceClass.DURATION, @@ -370,7 +359,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="activities/tracker/steps", - name="Tracker Steps", + translation_key="steps", native_unit_of_measurement="steps", icon="mdi:walk", scope=FitbitScope.ACTIVITY, @@ -380,7 +369,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="body/bmi", - name="BMI", + translation_key="bmi", native_unit_of_measurement="BMI", icon="mdi:human", state_class=SensorStateClass.MEASUREMENT, @@ -391,7 +380,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="body/fat", - name="Body Fat", + translation_key="body_fat", native_unit_of_measurement=PERCENTAGE, icon="mdi:human", state_class=SensorStateClass.MEASUREMENT, @@ -402,7 +391,6 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="body/weight", - name="Weight", icon="mdi:human", state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.WEIGHT, @@ -412,7 +400,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/awakeningsCount", - name="Awakenings Count", + translation_key="awakenings_count", native_unit_of_measurement="times awaken", icon="mdi:sleep", scope=FitbitScope.SLEEP, @@ -421,7 +409,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/efficiency", - name="Sleep Efficiency", + translation_key="sleep_efficiency", native_unit_of_measurement=PERCENTAGE, icon="mdi:sleep", state_class=SensorStateClass.MEASUREMENT, @@ -430,7 +418,7 @@ FITBIT_RESOURCES_LIST: 
Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/minutesAfterWakeup", - name="Minutes After Wakeup", + translation_key="minutes_after_wakeup", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:sleep", device_class=SensorDeviceClass.DURATION, @@ -440,7 +428,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/minutesAsleep", - name="Sleep Minutes Asleep", + translation_key="sleep_minutes_asleep", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:sleep", device_class=SensorDeviceClass.DURATION, @@ -450,7 +438,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/minutesAwake", - name="Sleep Minutes Awake", + translation_key="sleep_minutes_awake", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:sleep", device_class=SensorDeviceClass.DURATION, @@ -460,7 +448,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/minutesToFallAsleep", - name="Sleep Minutes to Fall Asleep", + translation_key="sleep_minutes_to_fall_asleep", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:sleep", device_class=SensorDeviceClass.DURATION, @@ -470,7 +458,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="sleep/timeInBed", - name="Sleep Time in Bed", + translation_key="sleep_time_in_bed", native_unit_of_measurement=UnitOfTime.MINUTES, icon="mdi:hotel", device_class=SensorDeviceClass.DURATION, @@ -480,7 +468,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="foods/log/caloriesIn", - name="Calories In", + translation_key="calories_in", native_unit_of_measurement="cal", icon="mdi:food-apple", state_class=SensorStateClass.TOTAL_INCREASING, @@ -489,7 +477,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( ), FitbitSensorEntityDescription( key="foods/log/water", - name="Water", + translation_key="water", icon="mdi:cup-water", unit_fn=_water_unit, state_class=SensorStateClass.TOTAL_INCREASING, @@ -501,14 +489,14 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = ( # Different description depending on clock format SLEEP_START_TIME = FitbitSensorEntityDescription( key="sleep/startTime", - name="Sleep Start Time", + translation_key="sleep_start_time", icon="mdi:clock", scope=FitbitScope.SLEEP, entity_category=EntityCategory.DIAGNOSTIC, ) SLEEP_START_TIME_12HR = FitbitSensorEntityDescription( key="sleep/startTime", - name="Sleep Start Time", + translation_key="sleep_start_time", icon="mdi:clock", value_fn=_clock_format_12h, scope=FitbitScope.SLEEP, @@ -533,126 +521,6 @@ FITBIT_RESOURCE_BATTERY_LEVEL = FitbitSensorEntityDescription( native_unit_of_measurement=PERCENTAGE, ) -FITBIT_RESOURCES_KEYS: Final[list[str]] = [ - desc.key - for desc in (*FITBIT_RESOURCES_LIST, FITBIT_RESOURCE_BATTERY, SLEEP_START_TIME) -] - -PLATFORM_SCHEMA: Final = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional( - CONF_MONITORED_RESOURCES, default=FITBIT_DEFAULT_RESOURCES - ): vol.All(cv.ensure_list, [vol.In(FITBIT_RESOURCES_KEYS)]), - vol.Optional(CONF_CLOCK_FORMAT, default=DEFAULT_CLOCK_FORMAT): vol.In( - ["12H", "24H"] - ), - vol.Optional(CONF_UNIT_SYSTEM, default=FitbitUnitSystem.LEGACY_DEFAULT): vol.In( - [ - FitbitUnitSystem.EN_GB, - FitbitUnitSystem.EN_US, - 
FitbitUnitSystem.METRIC, - FitbitUnitSystem.LEGACY_DEFAULT, - ] - ), - } -) - -# Only import configuration if it was previously created successfully with all -# of the following fields. -FITBIT_CONF_KEYS = [ - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - ATTR_ACCESS_TOKEN, - ATTR_REFRESH_TOKEN, - ATTR_LAST_SAVED_AT, -] - - -def load_config_file(config_path: str) -> dict[str, Any] | None: - """Load existing valid fitbit.conf from disk for import.""" - if os.path.isfile(config_path): - config_file = load_json_object(config_path) - if config_file != DEFAULT_CONFIG and all( - key in config_file for key in FITBIT_CONF_KEYS - ): - return config_file - return None - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up the Fitbit sensor.""" - config_path = hass.config.path(FITBIT_CONFIG_FILE) - config_file = await hass.async_add_executor_job(load_config_file, config_path) - _LOGGER.debug("loaded config file: %s", config_file) - - if config_file is not None: - _LOGGER.debug("Importing existing fitbit.conf application credentials") - - # Refresh the token before importing to ensure it is working and not - # expired on first initialization. - authd_client = Fitbit( - config_file[CONF_CLIENT_ID], - config_file[CONF_CLIENT_SECRET], - access_token=config_file[ATTR_ACCESS_TOKEN], - refresh_token=config_file[ATTR_REFRESH_TOKEN], - expires_at=config_file[ATTR_LAST_SAVED_AT], - refresh_cb=lambda x: None, - ) - try: - updated_token = await hass.async_add_executor_job( - authd_client.client.refresh_token - ) - except OAuth2Error as err: - _LOGGER.debug("Unable to import fitbit OAuth2 credentials: %s", err) - translation_key = "deprecated_yaml_import_issue_cannot_connect" - else: - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential( - config_file[CONF_CLIENT_ID], config_file[CONF_CLIENT_SECRET] - ), - ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - "auth_implementation": DOMAIN, - CONF_TOKEN: { - ATTR_ACCESS_TOKEN: updated_token[ATTR_ACCESS_TOKEN], - ATTR_REFRESH_TOKEN: updated_token[ATTR_REFRESH_TOKEN], - "expires_at": updated_token["expires_at"], - "scope": " ".join(updated_token.get("scope", [])), - }, - CONF_CLOCK_FORMAT: config[CONF_CLOCK_FORMAT], - CONF_UNIT_SYSTEM: config[CONF_UNIT_SYSTEM], - CONF_MONITORED_RESOURCES: config[CONF_MONITORED_RESOURCES], - }, - ) - translation_key = "deprecated_yaml_import" - if ( - result.get("type") == FlowResultType.ABORT - and result.get("reason") == "cannot_connect" - ): - translation_key = "deprecated_yaml_import_issue_cannot_connect" - else: - translation_key = "deprecated_yaml_no_import" - - async_create_issue( - hass, - DOMAIN, - "deprecated_yaml", - breaks_in_ha_version="2024.5.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key=translation_key, - ) - async def async_setup_entry( hass: HomeAssistant, @@ -694,6 +562,7 @@ async def async_setup_entry( description, units=description.unit_fn(unit_system), enable_default_override=is_explicit_enable(description), + device_info=_build_device_info(entry, description), ) for description in resource_list if is_allowed_resource(description) @@ -728,6 +597,7 @@ class FitbitSensor(SensorEntity): entity_description: FitbitSensorEntityDescription _attr_attribution = ATTRIBUTION + _attr_has_entity_name = True def __init__( self, @@ -737,6 +607,7 @@ class FitbitSensor(SensorEntity): 
description: FitbitSensorEntityDescription, units: str | None, enable_default_override: bool, + device_info: DeviceInfo, ) -> None: """Initialize the Fitbit sensor.""" self.config_entry = config_entry @@ -744,6 +615,7 @@ class FitbitSensor(SensorEntity): self.api = api self._attr_unique_id = f"{user_profile_id}_{description.key}" + self._attr_device_info = device_info if units is not None: self._attr_native_unit_of_measurement = units diff --git a/homeassistant/components/fitbit/strings.json b/homeassistant/components/fitbit/strings.json index e1ca1b01f7a..9029a8265bb 100644 --- a/homeassistant/components/fitbit/strings.json +++ b/homeassistant/components/fitbit/strings.json @@ -38,21 +38,82 @@ }, "battery_level": { "name": "Battery level" + }, + "activity_calories": { + "name": "Activity calories" + }, + "calories": { + "name": "Calories" + }, + "calories_bmr": { + "name": "Calories BMR" + }, + "elevation": { + "name": "Elevation" + }, + "floors": { + "name": "Floors" + }, + "resting_heart_rate": { + "name": "Resting heart rate" + }, + "minutes_fairly_active": { + "name": "Minutes fairly active" + }, + "minutes_lightly_active": { + "name": "Minutes lightly active" + }, + "minutes_sedentary": { + "name": "Minutes sedentary" + }, + "minutes_very_active": { + "name": "Minutes very active" + }, + "sleep_start_time": { + "name": "Sleep start time" + }, + "steps": { + "name": "Steps" + }, + "bmi": { + "name": "BMI" + }, + "body_fat": { + "name": "Body fat" + }, + "awakenings_count": { + "name": "Awakenings count" + }, + "sleep_efficiency": { + "name": "Sleep efficiency" + }, + "minutes_after_wakeup": { + "name": "Minutes after wakeup" + }, + "sleep_minutes_asleep": { + "name": "Sleep minutes asleep" + }, + "sleep_minutes_awake": { + "name": "Sleep minutes awake" + }, + "sleep_minutes_to_fall_asleep": { + "name": "Sleep minutes to fall asleep" + }, + "sleep_time_in_bed": { + "name": "Sleep time in bed" + }, + "calories_in": { + "name": "Calories in" + }, + "water": { + "name": "Water" } } }, - "issues": { - "deprecated_yaml_no_import": { - "title": "Fitbit YAML configuration is being removed", - "description": "Configuring Fitbit using YAML is being removed.\n\nRemove the `fitbit` configuration from your configuration.yaml file and remove fitbit.conf if it exists and restart Home Assistant and [set up the integration](/config/integrations/dashboard/add?domain=fitbit) manually." - }, - "deprecated_yaml_import": { - "title": "Fitbit YAML configuration is being removed", - "description": "Configuring Fitbit using YAML is being removed.\n\nYour existing YAML configuration has been imported into the UI automatically, including OAuth Application Credentials.\n\nRemove the `fitbit` configuration from your configuration.yaml file and remove fitbit.conf and restart Home Assistant to fix this issue." - }, - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Fitbit YAML configuration import failed", - "description": "Configuring Fitbit using YAML is being removed but there was a connection error importing your YAML configuration.\n\nRestart Home Assistant to try again or remove the Fitbit YAML configuration from your configuration.yaml file and remove the fitbit.conf and continue to [set up the integration](/config/integrations/dashboard/add?domain=fitbit) manually." 
+ + "device": { + "tracker": { + "name": "{display_name} tracker" } } } diff --git a/homeassistant/components/fixer/manifest.json b/homeassistant/components/fixer/manifest.json index 052a594b745..3c457919ac3 100644 --- a/homeassistant/components/fixer/manifest.json +++ b/homeassistant/components/fixer/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/fixer", "iot_class": "cloud_polling", "loggers": ["fixerio"], + "quality_scale": "legacy", "requirements": ["fixerio==1.0.0a0"] } diff --git a/homeassistant/components/fleetgo/manifest.json b/homeassistant/components/fleetgo/manifest.json index 9e916bd7fcd..ad00ca3b7b1 100644 --- a/homeassistant/components/fleetgo/manifest.json +++ b/homeassistant/components/fleetgo/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/fleetgo", "iot_class": "cloud_polling", "loggers": ["geopy", "ritassist"], + "quality_scale": "legacy", "requirements": ["ritassist==0.9.2"] } diff --git a/homeassistant/components/flexit/manifest.json b/homeassistant/components/flexit/manifest.json index 98e5a3734a8..b3b66fb871e 100644 --- a/homeassistant/components/flexit/manifest.json +++ b/homeassistant/components/flexit/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["modbus"], "documentation": "https://www.home-assistant.io/integrations/flexit", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/flic/manifest.json b/homeassistant/components/flic/manifest.json index 0442e4a7b7b..67a9a2e901c 100644 --- a/homeassistant/components/flic/manifest.json +++ b/homeassistant/components/flic/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/flic", "iot_class": "local_push", "loggers": ["pyflic"], + "quality_scale": "legacy", "requirements": ["pyflic==2.0.4"] } diff --git a/homeassistant/components/flock/manifest.json b/homeassistant/components/flock/manifest.json index 29c3e1c881f..c4cd5cdadb3 100644 --- a/homeassistant/components/flock/manifest.json +++ b/homeassistant/components/flock/manifest.json @@ -3,5 +3,6 @@ "name": "Flock", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/flock", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/flux_led/config_flow.py b/homeassistant/components/flux_led/config_flow.py index d78fc699579..9a02120f33a 100644 --- a/homeassistant/components/flux_led/config_flow.py +++ b/homeassistant/components/flux_led/config_flow.py @@ -71,9 +71,11 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FluxLedOptionsFlow: """Get the options flow for the Flux LED component.""" - return FluxLedOptionsFlow(config_entry) + return FluxLedOptionsFlow() async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo @@ -320,10 +322,6 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN): class FluxLedOptionsFlow(OptionsFlow): """Handle flux_led options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the flux_led options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -332,7 +330,7 @@ class FluxLedOptionsFlow(OptionsFlow): if user_input is not None: return 
self.async_create_entry(title="", data=user_input) - options = self._config_entry.options + options = self.config_entry.options options_schema = vol.Schema( { vol.Optional( diff --git a/homeassistant/components/folder/manifest.json b/homeassistant/components/folder/manifest.json index 2436d5dbe9a..984b287c2c0 100644 --- a/homeassistant/components/folder/manifest.json +++ b/homeassistant/components/folder/manifest.json @@ -3,5 +3,6 @@ "name": "Folder", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/folder", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/foobot/manifest.json b/homeassistant/components/foobot/manifest.json index a517f1fea6f..147a0037a18 100644 --- a/homeassistant/components/foobot/manifest.json +++ b/homeassistant/components/foobot/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/foobot", "iot_class": "cloud_polling", "loggers": ["foobot_async"], + "quality_scale": "legacy", "requirements": ["foobot_async==1.0.0"] } diff --git a/homeassistant/components/forecast_solar/config_flow.py b/homeassistant/components/forecast_solar/config_flow.py index 982f32eb07b..9a64ce6e1fb 100644 --- a/homeassistant/components/forecast_solar/config_flow.py +++ b/homeassistant/components/forecast_solar/config_flow.py @@ -41,7 +41,7 @@ class ForecastSolarFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> ForecastSolarOptionFlowHandler: """Get the options flow for this handler.""" - return ForecastSolarOptionFlowHandler(config_entry) + return ForecastSolarOptionFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -91,10 +91,6 @@ class ForecastSolarFlowHandler(ConfigFlow, domain=DOMAIN): class ForecastSolarOptionFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/forecast_solar/manifest.json b/homeassistant/components/forecast_solar/manifest.json index f5dd79281e6..1eb9c98701d 100644 --- a/homeassistant/components/forecast_solar/manifest.json +++ b/homeassistant/components/forecast_solar/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/forecast_solar", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "platinum", - "requirements": ["forecast-solar==3.1.0"] + "requirements": ["forecast-solar==4.0.0"] } diff --git a/homeassistant/components/forked_daapd/config_flow.py b/homeassistant/components/forked_daapd/config_flow.py index 5f061aa4be1..5fb9f08f1c0 100644 --- a/homeassistant/components/forked_daapd/config_flow.py +++ b/homeassistant/components/forked_daapd/config_flow.py @@ -52,10 +52,6 @@ TEST_CONNECTION_ERROR_DICT = { class ForkedDaapdOptionsFlowHandler(OptionsFlow): """Handle a forked-daapd options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -122,7 +118,7 @@ class ForkedDaapdFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> ForkedDaapdOptionsFlowHandler: """Return options flow handler.""" - return ForkedDaapdOptionsFlowHandler(config_entry) + return 
ForkedDaapdOptionsFlowHandler() async def validate_input(self, user_input): """Validate the user input.""" diff --git a/homeassistant/components/fortios/manifest.json b/homeassistant/components/fortios/manifest.json index 93e55071178..22c44acfd82 100644 --- a/homeassistant/components/fortios/manifest.json +++ b/homeassistant/components/fortios/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/fortios", "iot_class": "local_polling", "loggers": ["fortiosapi", "paramiko"], + "quality_scale": "legacy", "requirements": ["fortiosapi==1.0.5"] } diff --git a/homeassistant/components/foursquare/manifest.json b/homeassistant/components/foursquare/manifest.json index ce1c87814d7..0503ea4abb5 100644 --- a/homeassistant/components/foursquare/manifest.json +++ b/homeassistant/components/foursquare/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/foursquare", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/free_mobile/manifest.json b/homeassistant/components/free_mobile/manifest.json index 61a1f94c19d..9ce9bc72c76 100644 --- a/homeassistant/components/free_mobile/manifest.json +++ b/homeassistant/components/free_mobile/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/free_mobile", "iot_class": "cloud_push", "loggers": ["freesms"], + "quality_scale": "legacy", "requirements": ["freesms==0.2.0"] } diff --git a/homeassistant/components/freedns/manifest.json b/homeassistant/components/freedns/manifest.json index ac320a51d93..7c6bceb11a6 100644 --- a/homeassistant/components/freedns/manifest.json +++ b/homeassistant/components/freedns/manifest.json @@ -3,5 +3,6 @@ "name": "FreeDNS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/freedns", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/fritz/config_flow.py b/homeassistant/components/fritz/config_flow.py index 0d27894c8ab..920ecda1c52 100644 --- a/homeassistant/components/fritz/config_flow.py +++ b/homeassistant/components/fritz/config_flow.py @@ -23,7 +23,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_HOST, @@ -58,15 +57,18 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _host: str + @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> FritzBoxToolsOptionsFlowHandler: """Get the options flow for this handler.""" - return FritzBoxToolsOptionsFlowHandler(config_entry) + return FritzBoxToolsOptionsFlowHandler() def __init__(self) -> None: """Initialize FRITZ!Box Tools flow.""" - self._host: str | None = None self._name: str = "" self._password: str = "" self._use_tls: bool = False @@ -111,7 +113,6 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): async def async_check_configured_entry(self) -> ConfigEntry | None: """Check if entry is configured.""" - assert self._host current_host = await self.hass.async_add_executor_job( socket.gethostbyname, self._host ) @@ -153,15 +154,17 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle a flow initialized by discovery.""" ssdp_location: ParseResult = 
urlparse(discovery_info.ssdp_location or "") - self._host = ssdp_location.hostname + host = ssdp_location.hostname + if not host or ipaddress.ip_address(host).is_link_local: + return self.async_abort(reason="ignore_ip6_link_local") + + self._host = host self._name = ( discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME) or discovery_info.upnp[ssdp.ATTR_UPNP_MODEL_NAME] ) - if not self._host or ipaddress.ip_address(self._host).is_link_local: - return self.async_abort(reason="ignore_ip6_link_local") - + uuid: str | None if uuid := discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN): if uuid.startswith("uuid:"): uuid = uuid[5:] @@ -393,7 +396,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN): ) -class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry): +class FritzBoxToolsOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -404,19 +407,18 @@ class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry): if user_input is not None: return self.async_create_entry(title="", data=user_input) + options = self.config_entry.options data_schema = vol.Schema( { vol.Optional( CONF_CONSIDER_HOME, - default=self.options.get( + default=options.get( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds() ), ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)), vol.Optional( CONF_OLD_DISCOVERY, - default=self.options.get( - CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY - ), + default=options.get(CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY), ): bool, } ) diff --git a/homeassistant/components/fritz/coordinator.py b/homeassistant/components/fritz/coordinator.py index 31d8ff81491..90bd6068ecb 100644 --- a/homeassistant/components/fritz/coordinator.py +++ b/homeassistant/components/fritz/coordinator.py @@ -326,7 +326,11 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): "call_deflections" ] = await self.async_update_call_deflections() except FRITZ_EXCEPTIONS as ex: - raise UpdateFailed(ex) from ex + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + translation_placeholders={"error": str(ex)}, + ) from ex _LOGGER.debug("enity_data: %s", entity_data) return entity_data diff --git a/homeassistant/components/fritz/manifest.json b/homeassistant/components/fritz/manifest.json index 35250d9d34d..27aa42d9b2c 100644 --- a/homeassistant/components/fritz/manifest.json +++ b/homeassistant/components/fritz/manifest.json @@ -1,7 +1,7 @@ { "domain": "fritz", "name": "AVM FRITZ!Box Tools", - "codeowners": ["@mammuth", "@AaronDavidSchneider", "@chemelli74", "@mib1185"], + "codeowners": ["@AaronDavidSchneider", "@chemelli74", "@mib1185"], "config_flow": true, "dependencies": ["network"], "documentation": "https://www.home-assistant.io/integrations/fritz", diff --git a/homeassistant/components/fritz/quality_scale.yaml b/homeassistant/components/fritz/quality_scale.yaml new file mode 100644 index 00000000000..b832492cf9d --- /dev/null +++ b/homeassistant/components/fritz/quality_scale.yaml @@ -0,0 +1,98 @@ +rules: + # Bronze + action-setup: + status: todo + comment: still in async_setup_entry, needs to be moved to async_setup + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: one coverage miss in line 110 + config-flow: + status: todo + comment: data_description are missing + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: + status: 
todo + comment: include the proper docs snippet + entity-event-setup: done + entity-unique-id: done + has-entity-name: + status: todo + comment: partially done + runtime-data: + status: todo + comment: still uses hass.data + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: + status: todo + comment: add the proper configuration_basic block + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: todo + comment: not set at the moment, we use a coordinator + reauthentication-flow: done + test-coverage: + status: todo + comment: we are close to the goal of 95% + + # Gold + devices: done + diagnostics: done + discovery-update-info: todo + discovery: done + docs-data-update: todo + docs-examples: done + docs-known-limitations: + status: exempt + comment: no known limitations, yet + docs-supported-devices: + status: todo + comment: add the known supported devices + docs-supported-functions: + status: todo + comment: need to be overhauled + docs-troubleshooting: done + docs-use-cases: + status: todo + comment: need to be overhauled + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: no known use cases for repair issues or flows, yet + stale-devices: + status: todo + comment: automate the current cleanup process and deprecate the corresponding button + + # Platinum + async-dependency: + status: todo + comment: | + the fritzconnection lib is not async + changing this might need a bit more effort + inject-websession: + status: todo + comment: | + the fritzconnection lib is not async and relies on requests + changing this might need a bit more effort + strict-typing: done diff --git a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json index 96eb6243529..06a07cba79e 100644 --- a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -176,6 +176,9 @@ }, "unable_to_connect": { "message": "Unable to establish a connection" + }, + "update_failed": { + "message": "Error while updating the data: {error}" } } } diff --git a/homeassistant/components/fritzbox/config_flow.py b/homeassistant/components/fritzbox/config_flow.py index 76754fc5082..ffec4a9ea29 100644 --- a/homeassistant/components/fritzbox/config_flow.py +++ b/homeassistant/components/fritzbox/config_flow.py @@ -43,10 +43,11 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + _name: str + def __init__(self) -> None: """Initialize flow.""" self._host: str | None = None - self._name: str | None = None self._password: str | None = None self._username: str | None = None @@ -158,7 +159,6 @@ class FritzboxConfigFlow(ConfigFlow, domain=DOMAIN): result = await self.async_try_connect() if result == RESULT_SUCCESS: - assert self._name is not None return self._get_entry(self._name) if result != RESULT_INVALID_AUTH: return self.async_abort(reason=result) diff --git a/homeassistant/components/fritzbox/manifest.json b/homeassistant/components/fritzbox/manifest.json index 3735c16571e..1a127597b81 100644 --- a/homeassistant/components/fritzbox/manifest.json +++
b/homeassistant/components/fritzbox/manifest.json @@ -7,7 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["pyfritzhome"], - "quality_scale": "gold", "requirements": ["pyfritzhome==0.6.12"], "ssdp": [ { diff --git a/homeassistant/components/fritzbox_callmonitor/config_flow.py b/homeassistant/components/fritzbox_callmonitor/config_flow.py index 69efceae281..7bd0eacb66a 100644 --- a/homeassistant/components/fritzbox_callmonitor/config_flow.py +++ b/homeassistant/components/fritzbox_callmonitor/config_flow.py @@ -141,7 +141,7 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> FritzBoxCallMonitorOptionsFlowHandler: """Get the options flow for this handler.""" - return FritzBoxCallMonitorOptionsFlowHandler(config_entry) + return FritzBoxCallMonitorOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -278,10 +278,6 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN): class FritzBoxCallMonitorOptionsFlowHandler(OptionsFlow): """Handle a fritzbox_callmonitor options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - @classmethod def _are_prefixes_valid(cls, prefixes: str | None) -> bool: """Check if prefixes are valid.""" diff --git a/homeassistant/components/fronius/manifest.json b/homeassistant/components/fronius/manifest.json index c2f635119aa..227234f9937 100644 --- a/homeassistant/components/fronius/manifest.json +++ b/homeassistant/components/fronius/manifest.json @@ -11,6 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/fronius", "iot_class": "local_polling", "loggers": ["pyfronius"], - "quality_scale": "platinum", "requirements": ["PyFronius==0.7.3"] } diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 4dc5a2b0ae4..97a67cbc082 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20241106.2"] + "requirements": ["home-assistant-frontend==20241127.4"] } diff --git a/homeassistant/components/fujitsu_fglair/manifest.json b/homeassistant/components/fujitsu_fglair/manifest.json index f7f3af8d037..ea08a2cfe02 100644 --- a/homeassistant/components/fujitsu_fglair/manifest.json +++ b/homeassistant/components/fujitsu_fglair/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/fujitsu_fglair", "iot_class": "cloud_polling", - "requirements": ["ayla-iot-unofficial==1.4.3"] + "requirements": ["ayla-iot-unofficial==1.4.4"] } diff --git a/homeassistant/components/fully_kiosk/__init__.py b/homeassistant/components/fully_kiosk/__init__.py index 99b477c2989..074ec3feaa0 100644 --- a/homeassistant/components/fully_kiosk/__init__.py +++ b/homeassistant/components/fully_kiosk/__init__.py @@ -10,6 +10,8 @@ from .const import DOMAIN from .coordinator import FullyKioskDataUpdateCoordinator from .services import async_setup_services +type FullyKioskConfigEntry = ConfigEntry[FullyKioskDataUpdateCoordinator] + PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, @@ -33,13 +35,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: 
ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: FullyKioskConfigEntry) -> bool: """Set up Fully Kiosk Browser from a config entry.""" coordinator = FullyKioskDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) coordinator.async_update_listeners() @@ -47,10 +49,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: FullyKioskConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/fully_kiosk/binary_sensor.py b/homeassistant/components/fully_kiosk/binary_sensor.py index 3cf9adea1d5..c039baa0397 100644 --- a/homeassistant/components/fully_kiosk/binary_sensor.py +++ b/homeassistant/components/fully_kiosk/binary_sensor.py @@ -7,12 +7,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -38,13 +37,11 @@ SENSORS: tuple[BinarySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser sensor.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data async_add_entities( FullyBinarySensor(coordinator, description) diff --git a/homeassistant/components/fully_kiosk/button.py b/homeassistant/components/fully_kiosk/button.py index 94c34b50de1..4b172d45ae2 100644 --- a/homeassistant/components/fully_kiosk/button.py +++ b/homeassistant/components/fully_kiosk/button.py @@ -13,12 +13,11 @@ from homeassistant.components.button import ( ButtonEntity, ButtonEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -68,13 +67,11 @@ BUTTONS: tuple[FullyButtonEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser button entities.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data async_add_entities( FullyButtonEntity(coordinator, description) for description in BUTTONS diff --git a/homeassistant/components/fully_kiosk/camera.py b/homeassistant/components/fully_kiosk/camera.py index d55875e094f..7dfbe9e9257 100644 --- a/homeassistant/components/fully_kiosk/camera.py +++ b/homeassistant/components/fully_kiosk/camera.py @@ -5,21 +5,22 @@ from __future__ import annotations from fullykiosk import FullyKioskError from homeassistant.components.camera import Camera, CameraEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: FullyKioskConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the cameras.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities([FullyCameraEntity(coordinator)]) diff --git a/homeassistant/components/fully_kiosk/diagnostics.py b/homeassistant/components/fully_kiosk/diagnostics.py index 0ff567b0b46..c8364c77753 100644 --- a/homeassistant/components/fully_kiosk/diagnostics.py +++ b/homeassistant/components/fully_kiosk/diagnostics.py @@ -5,11 +5,10 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .const import DOMAIN +from . 
import FullyKioskConfigEntry DEVICE_INFO_TO_REDACT = { "serial", @@ -57,10 +56,10 @@ SETTINGS_TO_REDACT = { async def async_get_device_diagnostics( - hass: HomeAssistant, entry: ConfigEntry, device: dr.DeviceEntry + hass: HomeAssistant, entry: FullyKioskConfigEntry, device: dr.DeviceEntry ) -> dict[str, Any]: """Return device diagnostics.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data data = coordinator.data data["settings"] = async_redact_data(data["settings"], SETTINGS_TO_REDACT) return async_redact_data(data, DEVICE_INFO_TO_REDACT) diff --git a/homeassistant/components/fully_kiosk/image.py b/homeassistant/components/fully_kiosk/image.py index fbf3481e38b..00318a77ab5 100644 --- a/homeassistant/components/fully_kiosk/image.py +++ b/homeassistant/components/fully_kiosk/image.py @@ -9,13 +9,12 @@ from typing import Any from fullykiosk import FullyKiosk, FullyKioskError from homeassistant.components.image import ImageEntity, ImageEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -37,10 +36,12 @@ IMAGES: tuple[FullyImageEntityDescription, ...] = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: FullyKioskConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser image entities.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( FullyImageEntity(coordinator, description) for description in IMAGES ) diff --git a/homeassistant/components/fully_kiosk/manifest.json b/homeassistant/components/fully_kiosk/manifest.json index 4d7d1a2d7da..1fbbb6656a2 100644 --- a/homeassistant/components/fully_kiosk/manifest.json +++ b/homeassistant/components/fully_kiosk/manifest.json @@ -12,5 +12,6 @@ "documentation": "https://www.home-assistant.io/integrations/fully_kiosk", "iot_class": "local_polling", "mqtt": ["fully/deviceInfo/+"], + "quality_scale": "bronze", "requirements": ["python-fullykiosk==0.0.14"] } diff --git a/homeassistant/components/fully_kiosk/media_player.py b/homeassistant/components/fully_kiosk/media_player.py index ae61a39bb81..24f002a7544 100644 --- a/homeassistant/components/fully_kiosk/media_player.py +++ b/homeassistant/components/fully_kiosk/media_player.py @@ -12,23 +12,23 @@ from homeassistant.components.media_player import ( MediaType, async_process_play_media_url, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import AUDIOMANAGER_STREAM_MUSIC, DOMAIN, MEDIA_SUPPORT_FULLYKIOSK +from . 
import FullyKioskConfigEntry +from .const import AUDIOMANAGER_STREAM_MUSIC, MEDIA_SUPPORT_FULLYKIOSK from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser media player entity.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data async_add_entities([FullyMediaPlayer(coordinator)]) diff --git a/homeassistant/components/fully_kiosk/notify.py b/homeassistant/components/fully_kiosk/notify.py index aa47c178f03..bddc07439b3 100644 --- a/homeassistant/components/fully_kiosk/notify.py +++ b/homeassistant/components/fully_kiosk/notify.py @@ -7,12 +7,11 @@ from dataclasses import dataclass from fullykiosk import FullyKioskError from homeassistant.components.notify import NotifyEntity, NotifyEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -39,10 +38,12 @@ NOTIFIERS: tuple[FullyNotifyEntityDescription, ...] = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: FullyKioskConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser notify entities.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( FullyNotifyEntity(coordinator, description) for description in NOTIFIERS ) diff --git a/homeassistant/components/fully_kiosk/number.py b/homeassistant/components/fully_kiosk/number.py index 59c249fd1c2..ef25a69f1ee 100644 --- a/homeassistant/components/fully_kiosk/number.py +++ b/homeassistant/components/fully_kiosk/number.py @@ -5,12 +5,11 @@ from __future__ import annotations from contextlib import suppress from homeassistant.components.number import NumberEntity, NumberEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -54,11 +53,11 @@ ENTITY_TYPES: tuple[NumberEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser number entities.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data async_add_entities( FullyNumberEntity(coordinator, entity) diff --git a/homeassistant/components/fully_kiosk/quality_scale.yaml b/homeassistant/components/fully_kiosk/quality_scale.yaml new file mode 100644 index 00000000000..68fa7b9c3f9 --- /dev/null +++ b/homeassistant/components/fully_kiosk/quality_scale.yaml @@ -0,0 +1,66 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: done + dependency-transparency: done + action-setup: done + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: done + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: todo + reauthentication-flow: todo + parallel-updates: todo + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: This integration does not utilize an options flow. + + # Gold + entity-translations: todo + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: done + stale-devices: + status: exempt + comment: Each config entry maps to a single device + diagnostics: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: Each config entry maps to a single device + discovery-update-info: done + repair-issues: todo + docs-use-cases: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-data-update: todo + docs-known-limitations: done + docs-troubleshooting: todo + docs-examples: done + + # Platinum + async-dependency: todo + inject-websession: done + strict-typing: todo diff --git a/homeassistant/components/fully_kiosk/sensor.py b/homeassistant/components/fully_kiosk/sensor.py index 48fc8e51425..ed95323547f 100644 --- a/homeassistant/components/fully_kiosk/sensor.py +++ b/homeassistant/components/fully_kiosk/sensor.py @@ -12,13 +12,12 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfInformation from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -114,13 +113,11 @@ SENSORS: tuple[FullySensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser sensor.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data async_add_entities( FullySensor(coordinator, description) for description in SENSORS diff --git a/homeassistant/components/fully_kiosk/services.py b/homeassistant/components/fully_kiosk/services.py index b9369198940..089ae1d4246 100644 --- a/homeassistant/components/fully_kiosk/services.py +++ b/homeassistant/components/fully_kiosk/services.py @@ -53,7 +53,7 @@ async def async_setup_services(hass: HomeAssistant) -> None: for config_entry in config_entries: if config_entry.state != ConfigEntryState.LOADED: raise HomeAssistantError(f"{config_entry.title} is not loaded") - coordinators.append(hass.data[DOMAIN][config_entry.entry_id]) + coordinators.append(config_entry.runtime_data) return coordinators async def async_load_url(call: ServiceCall) -> None: diff --git a/homeassistant/components/fully_kiosk/strings.json b/homeassistant/components/fully_kiosk/strings.json index 9c0049d3e5f..ec7bd7b1c03 100644 --- a/homeassistant/components/fully_kiosk/strings.json +++ b/homeassistant/components/fully_kiosk/strings.json @@ -1,10 +1,16 @@ { + "common": { + "data_description_password": "The Remote Admin Password from the Fully Kiosk Browser app settings." + }, "config": { "step": { "discovery_confirm": { "description": "Do you want to set up {name} ({host})?", "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "[%key:component::fully_kiosk::common::data_description_password%]" } }, "user": { @@ -15,7 +21,8 @@ "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "host": "The hostname or IP address of the device running your Fully Kiosk Browser application." + "host": "The hostname or IP address of the device running your Fully Kiosk Browser application.", + "password": "[%key:component::fully_kiosk::common::data_description_password%]" } } }, diff --git a/homeassistant/components/fully_kiosk/switch.py b/homeassistant/components/fully_kiosk/switch.py index 9d5af87abe9..4adf8e8c924 100644 --- a/homeassistant/components/fully_kiosk/switch.py +++ b/homeassistant/components/fully_kiosk/switch.py @@ -9,12 +9,11 @@ from typing import Any from fullykiosk import FullyKiosk from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import FullyKioskConfigEntry from .coordinator import FullyKioskDataUpdateCoordinator from .entity import FullyKioskEntity @@ -84,13 +83,11 @@ SWITCHES: tuple[FullySwitchEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: FullyKioskConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Fully Kiosk Browser switch.""" - coordinator: FullyKioskDataUpdateCoordinator = hass.data[DOMAIN][ - config_entry.entry_id - ] + coordinator = config_entry.runtime_data async_add_entities( FullySwitchEntity(coordinator, description) for description in SWITCHES diff --git a/homeassistant/components/futurenow/manifest.json b/homeassistant/components/futurenow/manifest.json index dbe1b2d06fb..32a8761b1db 100644 --- a/homeassistant/components/futurenow/manifest.json +++ b/homeassistant/components/futurenow/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/futurenow", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pyfnip==0.2"] } diff --git a/homeassistant/components/fyta/__init__.py b/homeassistant/components/fyta/__init__.py index efbb1453456..b29789be87e 100644 --- a/homeassistant/components/fyta/__init__.py +++ b/homeassistant/components/fyta/__init__.py @@ -15,6 +15,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.util.dt import async_get_time_zone from .const import CONF_EXPIRATION @@ -39,7 +40,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: FytaConfigEntry) -> bool entry.data[CONF_EXPIRATION] ).astimezone(await async_get_time_zone(tz)) - fyta = FytaConnector(username, password, access_token, expiration, tz) + fyta = FytaConnector( + username, password, access_token, expiration, tz, async_get_clientsession(hass) + ) coordinator = FytaCoordinator(hass, fyta) diff --git a/homeassistant/components/fyta/coordinator.py b/homeassistant/components/fyta/coordinator.py index c4aa9bfe589..553960bdcc6 100644 --- a/homeassistant/components/fyta/coordinator.py +++ b/homeassistant/components/fyta/coordinator.py @@ -61,7 +61,9 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, Plant]]): try: data = await self.fyta.update_all_plants() except (FytaConnectionError, FytaPlantError) as err: - raise UpdateFailed(err) from err + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="update_error" + ) from err _LOGGER.debug("Data successfully updated") # data must be assigned before _async_add_remove_devices, as it is uses to set-up possible new devices @@ -122,9 +124,14 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, Plant]]): try: credentials = await self.fyta.login() except FytaConnectionError as ex: - raise ConfigEntryNotReady from ex + raise ConfigEntryNotReady( + translation_domain=DOMAIN, translation_key="config_entry_not_ready" + ) from ex except (FytaAuthentificationError, FytaPasswordError) as ex: - raise ConfigEntryAuthFailed from ex + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from ex new_config_entry = {**self.config_entry.data} new_config_entry[CONF_ACCESS_TOKEN] = credentials.access_token diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index 17fe5199eee..0df9eca2e38 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -7,6 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["fyta_cli"], - "quality_scale": "platinum", - "requirements": ["fyta_cli==0.6.10"] + 
"requirements": ["fyta_cli==0.7.0"] } diff --git a/homeassistant/components/fyta/strings.json b/homeassistant/components/fyta/strings.json index bacd24555b0..5adde02c0cb 100644 --- a/homeassistant/components/fyta/strings.json +++ b/homeassistant/components/fyta/strings.json @@ -7,6 +7,10 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "The username to login to your FYTA account.", + "password": "The password to login to your FYTA account." } }, "reauth_confirm": { @@ -14,6 +18,10 @@ "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::fyta::config::step::user::data_description::username%]", + "password": "[%key:component::fyta::config::step::user::data_description::password%]" } } }, @@ -93,5 +101,16 @@ "name": "Salinity" } } + }, + "exceptions": { + "update_error": { + "message": "Error while updating data from the API." + }, + "config_entry_not_ready": { + "message": "Error while loading the config entry." + }, + "auth_failed": { + "message": "Error while logging in to the API." + } } } diff --git a/homeassistant/components/garadget/manifest.json b/homeassistant/components/garadget/manifest.json index c7a30a465d2..bd1920a7c4c 100644 --- a/homeassistant/components/garadget/manifest.json +++ b/homeassistant/components/garadget/manifest.json @@ -3,5 +3,6 @@ "name": "Garadget", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/garadget", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/garages_amsterdam/__init__.py b/homeassistant/components/garages_amsterdam/__init__.py index 81ec72d9fbf..99d751cfcc8 100644 --- a/homeassistant/components/garages_amsterdam/__init__.py +++ b/homeassistant/components/garages_amsterdam/__init__.py @@ -1,62 +1,38 @@ """The Garages Amsterdam integration.""" -import asyncio -from datetime import timedelta -import logging +from __future__ import annotations -from odp_amsterdam import ODPAmsterdam, VehicleType +from odp_amsterdam import ODPAmsterdam from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import aiohttp_client -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN +from .coordinator import GaragesAmsterdamDataUpdateCoordinator -PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] + +type GaragesAmsterdamConfigEntry = ConfigEntry[GaragesAmsterdamDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: GaragesAmsterdamConfigEntry +) -> bool: """Set up Garages Amsterdam from a config entry.""" - await get_coordinator(hass) + client = ODPAmsterdam(session=async_get_clientsession(hass)) + coordinator = GaragesAmsterdamDataUpdateCoordinator(hass, client) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: 
ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: GaragesAmsterdamConfigEntry +) -> bool: """Unload Garages Amsterdam config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if len(hass.config_entries.async_entries(DOMAIN)) == 1: - hass.data.pop(DOMAIN) - - return unload_ok - - -async def get_coordinator( - hass: HomeAssistant, -) -> DataUpdateCoordinator: - """Get the data update coordinator.""" - if DOMAIN in hass.data: - return hass.data[DOMAIN] - - async def async_get_garages(): - async with asyncio.timeout(10): - return { - garage.garage_name: garage - for garage in await ODPAmsterdam( - session=aiohttp_client.async_get_clientsession(hass) - ).all_garages(vehicle=VehicleType.CAR) - } - - coordinator = DataUpdateCoordinator( - hass, - logging.getLogger(__name__), - name=DOMAIN, - update_method=async_get_garages, - update_interval=timedelta(minutes=10), - ) - await coordinator.async_config_entry_first_refresh() - - hass.data[DOMAIN] = coordinator - return coordinator + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/garages_amsterdam/binary_sensor.py b/homeassistant/components/garages_amsterdam/binary_sensor.py index 0aebe36baeb..b93b43e1173 100644 --- a/homeassistant/components/garages_amsterdam/binary_sensor.py +++ b/homeassistant/components/garages_amsterdam/binary_sensor.py @@ -2,47 +2,77 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass + +from odp_amsterdam import Garage + from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, + BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import get_coordinator +from . import GaragesAmsterdamConfigEntry +from .coordinator import GaragesAmsterdamDataUpdateCoordinator from .entity import GaragesAmsterdamEntity -BINARY_SENSORS = { - "state", -} + +@dataclass(frozen=True, kw_only=True) +class GaragesAmsterdamBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing Garages Amsterdam binary sensor entity.""" + + is_on: Callable[[Garage], bool] + + +BINARY_SENSORS: tuple[GaragesAmsterdamBinarySensorEntityDescription, ...] 
= ( + GaragesAmsterdamBinarySensorEntityDescription( + key="state", + translation_key="state", + device_class=BinarySensorDeviceClass.PROBLEM, + is_on=lambda garage: garage.state != "ok", + ), +) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: GaragesAmsterdamConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Defer sensor setup to the shared sensor module.""" - coordinator = await get_coordinator(hass) + coordinator = entry.runtime_data async_add_entities( GaragesAmsterdamBinarySensor( - coordinator, config_entry.data["garage_name"], info_type + coordinator=coordinator, + garage_name=entry.data["garage_name"], + description=description, ) - for info_type in BINARY_SENSORS + for description in BINARY_SENSORS ) class GaragesAmsterdamBinarySensor(GaragesAmsterdamEntity, BinarySensorEntity): """Binary Sensor representing garages amsterdam data.""" - _attr_device_class = BinarySensorDeviceClass.PROBLEM - _attr_name = None + entity_description: GaragesAmsterdamBinarySensorEntityDescription + + def __init__( + self, + *, + coordinator: GaragesAmsterdamDataUpdateCoordinator, + garage_name: str, + description: GaragesAmsterdamBinarySensorEntityDescription, + ) -> None: + """Initialize garages amsterdam binary sensor.""" + super().__init__(coordinator, garage_name) + self.entity_description = description + self._attr_unique_id = f"{garage_name}-{description.key}" @property def is_on(self) -> bool: """If the binary sensor is currently on or off.""" - return ( - getattr(self.coordinator.data[self._garage_name], self._info_type) != "ok" - ) + return self.entity_description.is_on(self.coordinator.data[self._garage_name]) diff --git a/homeassistant/components/garages_amsterdam/const.py b/homeassistant/components/garages_amsterdam/const.py index ae7801a9abd..be5e2216a81 100644 --- a/homeassistant/components/garages_amsterdam/const.py +++ b/homeassistant/components/garages_amsterdam/const.py @@ -1,4 +1,13 @@ """Constants for the Garages Amsterdam integration.""" -DOMAIN = "garages_amsterdam" -ATTRIBUTION = f'{"Data provided by municipality of Amsterdam"}' +from __future__ import annotations + +from datetime import timedelta +import logging +from typing import Final + +DOMAIN: Final = "garages_amsterdam" +ATTRIBUTION = "Data provided by municipality of Amsterdam" + +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(minutes=10) diff --git a/homeassistant/components/garages_amsterdam/coordinator.py b/homeassistant/components/garages_amsterdam/coordinator.py new file mode 100644 index 00000000000..3d06aba79e2 --- /dev/null +++ b/homeassistant/components/garages_amsterdam/coordinator.py @@ -0,0 +1,34 @@ +"""Coordinator for the Garages Amsterdam integration.""" + +from __future__ import annotations + +from odp_amsterdam import Garage, ODPAmsterdam, VehicleType + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + + +class GaragesAmsterdamDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Garage]]): + """Class to manage fetching Garages Amsterdam data from single endpoint.""" + + def __init__( + self, + hass: HomeAssistant, + client: ODPAmsterdam, + ) -> None: + """Initialize global Garages Amsterdam data updater.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = client + + async def _async_update_data(self) -> dict[str, Garage]: + return { + 
garage.garage_name: garage + for garage in await self.client.all_garages(vehicle=VehicleType.CAR) + } diff --git a/homeassistant/components/garages_amsterdam/entity.py b/homeassistant/components/garages_amsterdam/entity.py index 671405235d4..433bc75b962 100644 --- a/homeassistant/components/garages_amsterdam/entity.py +++ b/homeassistant/components/garages_amsterdam/entity.py @@ -3,28 +3,26 @@ from __future__ import annotations from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ATTRIBUTION, DOMAIN +from .coordinator import GaragesAmsterdamDataUpdateCoordinator -class GaragesAmsterdamEntity(CoordinatorEntity): +class GaragesAmsterdamEntity(CoordinatorEntity[GaragesAmsterdamDataUpdateCoordinator]): """Base Entity for garages amsterdam data.""" _attr_attribution = ATTRIBUTION _attr_has_entity_name = True def __init__( - self, coordinator: DataUpdateCoordinator, garage_name: str, info_type: str + self, + coordinator: GaragesAmsterdamDataUpdateCoordinator, + garage_name: str, ) -> None: """Initialize garages amsterdam entity.""" super().__init__(coordinator) - self._attr_unique_id = f"{garage_name}-{info_type}" self._garage_name = garage_name - self._info_type = info_type self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, garage_name)}, name=garage_name, diff --git a/homeassistant/components/garages_amsterdam/sensor.py b/homeassistant/components/garages_amsterdam/sensor.py index b6fc950a843..b562fff841a 100644 --- a/homeassistant/components/garages_amsterdam/sensor.py +++ b/homeassistant/components/garages_amsterdam/sensor.py @@ -2,49 +2,93 @@ from __future__ import annotations -from homeassistant.components.sensor import SensorEntity -from homeassistant.config_entries import ConfigEntry +from collections.abc import Callable +from dataclasses import dataclass + +from odp_amsterdam import Garage + +from homeassistant.components.sensor import ( + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.typing import StateType -from . import get_coordinator +from . import GaragesAmsterdamConfigEntry +from .coordinator import GaragesAmsterdamDataUpdateCoordinator from .entity import GaragesAmsterdamEntity -SENSORS = { - "free_space_short", - "free_space_long", - "short_capacity", - "long_capacity", -} + +@dataclass(frozen=True, kw_only=True) +class GaragesAmsterdamSensorEntityDescription(SensorEntityDescription): + """Class describing Garages Amsterdam sensor entity.""" + + value_fn: Callable[[Garage], StateType] + + +SENSORS: tuple[GaragesAmsterdamSensorEntityDescription, ...] 
= ( + GaragesAmsterdamSensorEntityDescription( + key="free_space_short", + translation_key="free_space_short", + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda garage: garage.free_space_short, + ), + GaragesAmsterdamSensorEntityDescription( + key="free_space_long", + translation_key="free_space_long", + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda garage: garage.free_space_long, + ), + GaragesAmsterdamSensorEntityDescription( + key="short_capacity", + translation_key="short_capacity", + value_fn=lambda garage: garage.short_capacity, + ), + GaragesAmsterdamSensorEntityDescription( + key="long_capacity", + translation_key="long_capacity", + value_fn=lambda garage: garage.long_capacity, + ), +) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: GaragesAmsterdamConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Defer sensor setup to the shared sensor module.""" - coordinator = await get_coordinator(hass) + coordinator = entry.runtime_data async_add_entities( - GaragesAmsterdamSensor(coordinator, config_entry.data["garage_name"], info_type) - for info_type in SENSORS - if getattr(coordinator.data[config_entry.data["garage_name"]], info_type) != "" + GaragesAmsterdamSensor( + coordinator=coordinator, + garage_name=entry.data["garage_name"], + description=description, + ) + for description in SENSORS + if description.value_fn(coordinator.data[entry.data["garage_name"]]) is not None ) class GaragesAmsterdamSensor(GaragesAmsterdamEntity, SensorEntity): """Sensor representing garages amsterdam data.""" - _attr_native_unit_of_measurement = "cars" + entity_description: GaragesAmsterdamSensorEntityDescription def __init__( - self, coordinator: DataUpdateCoordinator, garage_name: str, info_type: str + self, + *, + coordinator: GaragesAmsterdamDataUpdateCoordinator, + garage_name: str, + description: GaragesAmsterdamSensorEntityDescription, ) -> None: """Initialize garages amsterdam sensor.""" - super().__init__(coordinator, garage_name, info_type) - self._attr_translation_key = info_type + super().__init__(coordinator, garage_name) + self.entity_description = description + self._attr_unique_id = f"{garage_name}-{description.key}" @property def available(self) -> bool: @@ -54,6 +98,8 @@ class GaragesAmsterdamSensor(GaragesAmsterdamEntity, SensorEntity): ) @property - def native_value(self) -> str: + def native_value(self) -> StateType: """Return the state of the sensor.""" - return getattr(self.coordinator.data[self._garage_name], self._info_type) + return self.entity_description.value_fn( + self.coordinator.data[self._garage_name] + ) diff --git a/homeassistant/components/garages_amsterdam/strings.json b/homeassistant/components/garages_amsterdam/strings.json index 89a85f97448..19157afdafb 100644 --- a/homeassistant/components/garages_amsterdam/strings.json +++ b/homeassistant/components/garages_amsterdam/strings.json @@ -3,8 +3,13 @@ "config": { "step": { "user": { - "title": "Pick a garage to monitor", - "data": { "garage_name": "Garage name" } + "description": "Select a garage from the list", + "data": { + "garage_name": "Garage name" + }, + "data_description": { + "garage_name": "The name of the garage you want to monitor." 
+ } } }, "abort": { @@ -16,16 +21,25 @@ "entity": { "sensor": { "free_space_short": { - "name": "Short parking free space" + "name": "Short parking free space", + "unit_of_measurement": "cars" }, "free_space_long": { - "name": "Long parking free space" + "name": "Long parking free space", + "unit_of_measurement": "cars" }, "short_capacity": { - "name": "Short parking capacity" + "name": "Short parking capacity", + "unit_of_measurement": "cars" }, "long_capacity": { - "name": "Long parking capacity" + "name": "Long parking capacity", + "unit_of_measurement": "cars" + } + }, + "binary_sensor": { + "state": { + "name": "State" } } } diff --git a/homeassistant/components/gc100/manifest.json b/homeassistant/components/gc100/manifest.json index b4af14a323b..687e09f5c89 100644 --- a/homeassistant/components/gc100/manifest.json +++ b/homeassistant/components/gc100/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/gc100", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["python-gc100==1.0.3a0"] } diff --git a/homeassistant/components/gdacs/manifest.json b/homeassistant/components/gdacs/manifest.json index fab47e00904..a40dc8cf91b 100644 --- a/homeassistant/components/gdacs/manifest.json +++ b/homeassistant/components/gdacs/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_georss_gdacs", "aio_georss_client"], - "quality_scale": "platinum", "requirements": ["aio-georss-gdacs==0.10"] } diff --git a/homeassistant/components/generic/config_flow.py b/homeassistant/components/generic/config_flow.py index a8f3f6f386b..84243101bd6 100644 --- a/homeassistant/components/generic/config_flow.py +++ b/homeassistant/components/generic/config_flow.py @@ -324,7 +324,7 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> GenericOptionsFlowHandler: """Get the options flow for this handler.""" - return GenericOptionsFlowHandler(config_entry) + return GenericOptionsFlowHandler() def check_for_existing(self, options: dict[str, Any]) -> bool: """Check whether an existing entry is using the same URLs.""" @@ -413,9 +413,8 @@ class GenericIPCamConfigFlow(ConfigFlow, domain=DOMAIN): class GenericOptionsFlowHandler(OptionsFlow): """Handle Generic IP Camera options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize Generic IP Camera options flow.""" - self.config_entry = config_entry self.preview_cam: dict[str, Any] = {} self.user_input: dict[str, Any] = {} diff --git a/homeassistant/components/generic/manifest.json b/homeassistant/components/generic/manifest.json index b19d6d6293e..c1fbc16d9be 100644 --- a/homeassistant/components/generic/manifest.json +++ b/homeassistant/components/generic/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/generic", "integration_type": "device", "iot_class": "local_push", - "requirements": ["ha-av==10.1.1", "Pillow==10.4.0"] + "requirements": ["av==13.1.0", "Pillow==11.0.0"] } diff --git a/homeassistant/components/generic_hygrostat/strings.json b/homeassistant/components/generic_hygrostat/strings.json index a21ab68c628..7b8d56dbaa5 100644 --- a/homeassistant/components/generic_hygrostat/strings.json +++ b/homeassistant/components/generic_hygrostat/strings.json @@ -3,8 +3,8 @@ "config": { "step": { "user": { - "title": "Add generic hygrostat", - "description": "Create a entity that control the humidity via a switch and 
sensor.", + "title": "Create generic hygrostat", + "description": "Create a humidifier entity that controls the humidity via a switch and sensor.", "data": { "device_class": "Device class", "dry_tolerance": "Dry tolerance", @@ -17,7 +17,7 @@ "data_description": { "dry_tolerance": "The minimum amount of difference between the humidity read by the sensor specified in the target sensor option and the target humidity that must change prior to being switched on.", "humidifier": "Humidifier or dehumidifier switch; must be a toggle device.", - "min_cycle_duration": "Set a minimum amount of time that the switch specified in the humidifier option must be in its current state prior to being switched either off or on.", + "min_cycle_duration": "Set a minimum duration for which the specified switch must remain in its current state before it can be toggled off or on.", "target_sensor": "Sensor with current humidity.", "wet_tolerance": "The minimum amount of difference between the humidity read by the sensor specified in the target sensor option and the target humidity that must change prior to being switched off." } diff --git a/homeassistant/components/generic_thermostat/strings.json b/homeassistant/components/generic_thermostat/strings.json index 51549dc844e..fd89bec6349 100644 --- a/homeassistant/components/generic_thermostat/strings.json +++ b/homeassistant/components/generic_thermostat/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add generic thermostat", + "title": "Create generic thermostat", "description": "Create a climate entity that controls the temperature via a switch and sensor.", "data": { "ac_mode": "Cooling mode", diff --git a/homeassistant/components/geniushub/__init__.py b/homeassistant/components/geniushub/__init__.py index f3081e50289..9ca6ecfcfe0 100644 --- a/homeassistant/components/geniushub/__init__.py +++ b/homeassistant/components/geniushub/__init__.py @@ -9,7 +9,6 @@ import aiohttp from geniushubclient import GeniusHub import voluptuous as vol -from homeassistant import config_entries from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ENTITY_ID, @@ -21,20 +20,12 @@ from homeassistant.const import ( CONF_USERNAME, Platform, ) -from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, - HomeAssistant, - ServiceCall, - callback, -) -from homeassistant.data_entry_flow import FlowResultType +from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.event import async_track_time_interval -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.service import verify_domain_control -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN @@ -45,27 +36,6 @@ SCAN_INTERVAL = timedelta(seconds=60) MAC_ADDRESS_REGEXP = r"^([0-9A-F]{2}:){5}([0-9A-F]{2})$" -CLOUD_API_SCHEMA = vol.Schema( - { - vol.Required(CONF_TOKEN): cv.string, - vol.Required(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), - } -) - - -LOCAL_API_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_MAC): vol.Match(MAC_ADDRESS_REGEXP), - } -) - -CONFIG_SCHEMA = vol.Schema( - {DOMAIN: vol.Any(LOCAL_API_SCHEMA, 
CLOUD_API_SCHEMA)}, extra=vol.ALLOW_EXTRA -) - ATTR_ZONE_MODE = "mode" ATTR_DURATION = "duration" @@ -100,56 +70,6 @@ PLATFORMS = [ ] -async def _async_import(hass: HomeAssistant, base_config: ConfigType) -> None: - """Import a config entry from configuration.yaml.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=base_config[DOMAIN], - ) - if ( - result["type"] is FlowResultType.CREATE_ENTRY - or result["reason"] == "already_configured" - ): - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Genius Hub", - }, - ) - return - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Genius Hub", - }, - ) - - -async def async_setup(hass: HomeAssistant, base_config: ConfigType) -> bool: - """Set up a Genius Hub system.""" - if DOMAIN in base_config: - hass.async_create_task(_async_import(hass, base_config)) - return True - - type GeniusHubConfigEntry = ConfigEntry[GeniusBroker] diff --git a/homeassistant/components/geniushub/config_flow.py b/homeassistant/components/geniushub/config_flow.py index 601eac6c2f2..b106f9907bb 100644 --- a/homeassistant/components/geniushub/config_flow.py +++ b/homeassistant/components/geniushub/config_flow.py @@ -13,7 +13,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME -from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN @@ -123,14 +122,3 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="cloud_api", errors=errors, data_schema=CLOUD_API_SCHEMA ) - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import the yaml config.""" - if CONF_HOST in import_data: - result = await self.async_step_local_api(import_data) - else: - result = await self.async_step_cloud_api(import_data) - if result["type"] is FlowResultType.FORM: - assert result["errors"] - return self.async_abort(reason=result["errors"]["base"]) - return result diff --git a/homeassistant/components/geo_json_events/manifest.json b/homeassistant/components/geo_json_events/manifest.json index 8f4b36657dd..c41796514a5 100644 --- a/homeassistant/components/geo_json_events/manifest.json +++ b/homeassistant/components/geo_json_events/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_geojson_generic_client"], - "requirements": ["aio-geojson-generic-client==0.4"] + "requirements": ["aio-geojson-generic-client==0.5"] } diff --git a/homeassistant/components/geo_rss_events/manifest.json b/homeassistant/components/geo_rss_events/manifest.json index 17640e37278..7c089bfa4e9 100644 --- a/homeassistant/components/geo_rss_events/manifest.json +++ b/homeassistant/components/geo_rss_events/manifest.json @@ -5,5 +5,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/geo_rss_events", "iot_class": "cloud_polling", "loggers": ["georss_client", "georss_generic_client"], + "quality_scale": "legacy", "requirements": ["georss-generic-client==0.8"] } diff --git a/homeassistant/components/geonetnz_quakes/manifest.json b/homeassistant/components/geonetnz_quakes/manifest.json index 2314dabcf0f..e8f4ee1a8c1 100644 --- a/homeassistant/components/geonetnz_quakes/manifest.json +++ b/homeassistant/components/geonetnz_quakes/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_geojson_geonetnz_quakes"], - "quality_scale": "platinum", "requirements": ["aio-geojson-geonetnz-quakes==0.16"] } diff --git a/homeassistant/components/gios/manifest.json b/homeassistant/components/gios/manifest.json index b1eae512688..3d2e719fab6 100644 --- a/homeassistant/components/gios/manifest.json +++ b/homeassistant/components/gios/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["dacite", "gios"], - "quality_scale": "platinum", "requirements": ["gios==5.0.0"] } diff --git a/homeassistant/components/github/config_flow.py b/homeassistant/components/github/config_flow.py index 25d8782618f..9977f9d84cc 100644 --- a/homeassistant/components/github/config_flow.py +++ b/homeassistant/components/github/config_flow.py @@ -211,16 +211,12 @@ class GitHubConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for GitHub.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None, diff --git a/homeassistant/components/github/sensor.py b/homeassistant/components/github/sensor.py index 9a2b5ef5ac4..614ebe254c4 100644 --- a/homeassistant/components/github/sensor.py +++ b/homeassistant/components/github/sensor.py @@ -37,7 +37,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="discussions_count", translation_key="discussions_count", - native_unit_of_measurement="Discussions", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["discussion"]["total"], @@ -45,7 +44,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="stargazers_count", translation_key="stargazers_count", - native_unit_of_measurement="Stars", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["stargazers_count"], @@ -53,7 +51,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="subscribers_count", translation_key="subscribers_count", - native_unit_of_measurement="Watchers", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["watchers"]["total"], @@ -61,7 +58,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] 
= ( GitHubSensorEntityDescription( key="forks_count", translation_key="forks_count", - native_unit_of_measurement="Forks", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["forks_count"], @@ -69,7 +65,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="issues_count", translation_key="issues_count", - native_unit_of_measurement="Issues", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["issue"]["total"], @@ -77,7 +72,6 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = ( GitHubSensorEntityDescription( key="pulls_count", translation_key="pulls_count", - native_unit_of_measurement="Pull Requests", entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, value_fn=lambda data: data["pull_request"]["total"], diff --git a/homeassistant/components/github/strings.json b/homeassistant/components/github/strings.json index 38b796e2fd2..bcda47d72fb 100644 --- a/homeassistant/components/github/strings.json +++ b/homeassistant/components/github/strings.json @@ -19,22 +19,28 @@ "entity": { "sensor": { "discussions_count": { - "name": "Discussions" + "name": "Discussions", + "unit_of_measurement": "discussions" }, "stargazers_count": { - "name": "Stars" + "name": "Stars", + "unit_of_measurement": "stars" }, "subscribers_count": { - "name": "Watchers" + "name": "Watchers", + "unit_of_measurement": "watchers" }, "forks_count": { - "name": "Forks" + "name": "Forks", + "unit_of_measurement": "forks" }, "issues_count": { - "name": "Issues" + "name": "Issues", + "unit_of_measurement": "issues" }, "pulls_count": { - "name": "Pull requests" + "name": "Pull requests", + "unit_of_measurement": "pull requests" }, "latest_commit": { "name": "Latest commit" diff --git a/homeassistant/components/gitlab_ci/manifest.json b/homeassistant/components/gitlab_ci/manifest.json index 36fb356dae4..58fd827ff31 100644 --- a/homeassistant/components/gitlab_ci/manifest.json +++ b/homeassistant/components/gitlab_ci/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/gitlab_ci", "iot_class": "cloud_polling", "loggers": ["gitlab"], + "quality_scale": "legacy", "requirements": ["python-gitlab==1.6.0"] } diff --git a/homeassistant/components/gitter/manifest.json b/homeassistant/components/gitter/manifest.json index 009746a06c6..c578f7c2242 100644 --- a/homeassistant/components/gitter/manifest.json +++ b/homeassistant/components/gitter/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/gitter", "iot_class": "cloud_polling", "loggers": ["gitterpy"], + "quality_scale": "legacy", "requirements": ["gitterpy==0.1.7"] } diff --git a/homeassistant/components/glances/__init__.py b/homeassistant/components/glances/__init__.py index 0ddd8a86979..9d09e63606e 100644 --- a/homeassistant/components/glances/__init__.py +++ b/homeassistant/components/glances/__init__.py @@ -28,9 +28,7 @@ from homeassistant.exceptions import ( HomeAssistantError, ) from homeassistant.helpers.httpx_client import get_async_client -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from .const import DOMAIN from .coordinator import GlancesDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] @@ -71,7 +69,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: GlancesConfigEntry) -> async def get_api(hass: HomeAssistant, entry_data: 
dict[str, Any]) -> Glances: """Return the api from glances_api.""" httpx_client = get_async_client(hass, verify_ssl=entry_data[CONF_VERIFY_SSL]) - for version in (4, 3, 2): + for version in (4, 3): api = Glances( host=entry_data[CONF_HOST], port=entry_data[CONF_PORT], @@ -86,19 +84,9 @@ async def get_api(hass: HomeAssistant, entry_data: dict[str, Any]) -> Glances: except GlancesApiNoDataAvailable as err: _LOGGER.debug("Failed to connect to Glances API v%s: %s", version, err) continue - if version == 2: - async_create_issue( - hass, - DOMAIN, - "deprecated_version", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_version", - ) _LOGGER.debug("Connected to Glances API v%s", version) return api - raise ServerVersionMismatch("Could not connect to Glances API version 2, 3 or 4") + raise ServerVersionMismatch("Could not connect to Glances API version 3 or 4") class ServerVersionMismatch(HomeAssistantError): diff --git a/homeassistant/components/glances/strings.json b/homeassistant/components/glances/strings.json index 11735601ce9..92aa1b47e31 100644 --- a/homeassistant/components/glances/strings.json +++ b/homeassistant/components/glances/strings.json @@ -123,11 +123,5 @@ "name": "{sensor_label} TX" } } - }, - "issues": { - "deprecated_version": { - "title": "Glances servers with version 2 is deprecated", - "description": "Glances servers with version 2 is deprecated and will not be supported in future versions of HA. It is recommended to update your server to Glances version 3 then reload the integration." - } } } diff --git a/homeassistant/components/go2rtc/__init__.py b/homeassistant/components/go2rtc/__init__.py index f1f6e44abc1..31acdd2de50 100644 --- a/homeassistant/components/go2rtc/__init__.py +++ b/homeassistant/components/go2rtc/__init__.py @@ -16,7 +16,7 @@ from go2rtc_client.ws import ( WsError, ) import voluptuous as vol -from webrtc_models import RTCIceCandidate +from webrtc_models import RTCIceCandidateInit from homeassistant.components.camera import ( Camera, @@ -264,7 +264,7 @@ class WebRTCProvider(CameraWebRTCProvider): value: WebRTCMessage match message: case WebRTCCandidate(): - value = HAWebRTCCandidate(RTCIceCandidate(message.candidate)) + value = HAWebRTCCandidate(RTCIceCandidateInit(message.candidate)) case WebRTCAnswer(): value = HAWebRTCAnswer(message.sdp) case WsError(): @@ -277,7 +277,7 @@ class WebRTCProvider(CameraWebRTCProvider): await ws_client.send(WebRTCOffer(offer_sdp, config.configuration.ice_servers)) async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Handle the WebRTC candidate.""" diff --git a/homeassistant/components/go2rtc/manifest.json b/homeassistant/components/go2rtc/manifest.json index 201b7168847..bedee99f930 100644 --- a/homeassistant/components/go2rtc/manifest.json +++ b/homeassistant/components/go2rtc/manifest.json @@ -7,6 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/go2rtc", "integration_type": "system", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["go2rtc-client==0.1.1"], "single_config_entry": true } diff --git a/homeassistant/components/goalzero/manifest.json b/homeassistant/components/goalzero/manifest.json index f1bfc7de876..a9fcbf26d36 100644 --- a/homeassistant/components/goalzero/manifest.json +++ b/homeassistant/components/goalzero/manifest.json @@ -15,6 +15,5 @@ "integration_type": "device", "iot_class": 
"local_polling", "loggers": ["goalzero"], - "quality_scale": "silver", "requirements": ["goalzero==0.2.2"] } diff --git a/homeassistant/components/google/config_flow.py b/homeassistant/components/google/config_flow.py index 39b3c2d5666..8ae09b58957 100644 --- a/homeassistant/components/google/config_flow.py +++ b/homeassistant/components/google/config_flow.py @@ -238,16 +238,12 @@ class OAuth2FlowHandler( config_entry: ConfigEntry, ) -> OptionsFlow: """Create an options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Google Calendar options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/google/strings.json b/homeassistant/components/google/strings.json index 2ea45239a53..acc69c3799a 100644 --- a/homeassistant/components/google/strings.json +++ b/homeassistant/components/google/strings.json @@ -45,7 +45,7 @@ } }, "application_credentials": { - "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Calendar. You also need to create Application Credentials linked to your Calendar:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **TV and Limited Input devices** for the Application Type." + "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Calendar. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and select **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type." 
}, "services": { "add_event": { diff --git a/homeassistant/components/google_assistant/const.py b/homeassistant/components/google_assistant/const.py index 04c85639e07..8132ecaae2c 100644 --- a/homeassistant/components/google_assistant/const.py +++ b/homeassistant/components/google_assistant/const.py @@ -78,6 +78,7 @@ TYPE_AWNING = f"{PREFIX_TYPES}AWNING" TYPE_BLINDS = f"{PREFIX_TYPES}BLINDS" TYPE_CAMERA = f"{PREFIX_TYPES}CAMERA" TYPE_CURTAIN = f"{PREFIX_TYPES}CURTAIN" +TYPE_CARBON_MONOXIDE_DETECTOR = f"{PREFIX_TYPES}CARBON_MONOXIDE_DETECTOR" TYPE_DEHUMIDIFIER = f"{PREFIX_TYPES}DEHUMIDIFIER" TYPE_DOOR = f"{PREFIX_TYPES}DOOR" TYPE_DOORBELL = f"{PREFIX_TYPES}DOORBELL" @@ -93,6 +94,7 @@ TYPE_SCENE = f"{PREFIX_TYPES}SCENE" TYPE_SENSOR = f"{PREFIX_TYPES}SENSOR" TYPE_SETTOP = f"{PREFIX_TYPES}SETTOP" TYPE_SHUTTER = f"{PREFIX_TYPES}SHUTTER" +TYPE_SMOKE_DETECTOR = f"{PREFIX_TYPES}SMOKE_DETECTOR" TYPE_SPEAKER = f"{PREFIX_TYPES}SPEAKER" TYPE_SWITCH = f"{PREFIX_TYPES}SWITCH" TYPE_THERMOSTAT = f"{PREFIX_TYPES}THERMOSTAT" @@ -136,6 +138,7 @@ EVENT_SYNC_RECEIVED = "google_assistant_sync" DOMAIN_TO_GOOGLE_TYPES = { alarm_control_panel.DOMAIN: TYPE_ALARM, + binary_sensor.DOMAIN: TYPE_SENSOR, button.DOMAIN: TYPE_SCENE, camera.DOMAIN: TYPE_CAMERA, climate.DOMAIN: TYPE_THERMOSTAT, @@ -168,6 +171,14 @@ DEVICE_CLASS_TO_GOOGLE_TYPES = { binary_sensor.DOMAIN, binary_sensor.BinarySensorDeviceClass.GARAGE_DOOR, ): TYPE_GARAGE, + ( + binary_sensor.DOMAIN, + binary_sensor.BinarySensorDeviceClass.SMOKE, + ): TYPE_SMOKE_DETECTOR, + ( + binary_sensor.DOMAIN, + binary_sensor.BinarySensorDeviceClass.CO, + ): TYPE_CARBON_MONOXIDE_DETECTOR, (cover.DOMAIN, cover.CoverDeviceClass.AWNING): TYPE_AWNING, (cover.DOMAIN, cover.CoverDeviceClass.CURTAIN): TYPE_CURTAIN, (cover.DOMAIN, cover.CoverDeviceClass.DOOR): TYPE_DOOR, diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index df56885995a..f99f1574038 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -2706,6 +2706,21 @@ class SensorStateTrait(_Trait): ), } + binary_sensor_types = { + binary_sensor.BinarySensorDeviceClass.CO: ( + "CarbonMonoxideLevel", + ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], + ), + binary_sensor.BinarySensorDeviceClass.SMOKE: ( + "SmokeLevel", + ["smoke detected", "no smoke detected", "unknown"], + ), + binary_sensor.BinarySensorDeviceClass.MOISTURE: ( + "WaterLeak", + ["leak", "no leak", "unknown"], + ), + } + name = TRAIT_SENSOR_STATE commands: list[str] = [] @@ -2728,24 +2743,37 @@ class SensorStateTrait(_Trait): @classmethod def supported(cls, domain, features, device_class, _): """Test if state is supported.""" - return domain == sensor.DOMAIN and device_class in cls.sensor_types + return (domain == sensor.DOMAIN and device_class in cls.sensor_types) or ( + domain == binary_sensor.DOMAIN and device_class in cls.binary_sensor_types + ) def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) - data = self.sensor_types.get(device_class) - if device_class is None or data is None: - return {} + def create_sensor_state( + name: str, + raw_value_unit: str | None = None, + available_states: list[str] | None = None, + ) -> dict[str, Any]: + sensor_state: dict[str, Any] = { + "name": name, + } + if raw_value_unit: + sensor_state["numericCapabilities"] = {"rawValueUnit": raw_value_unit} + if available_states: 
+ sensor_state["descriptiveCapabilities"] = { + "availableStates": available_states + } + return {"sensorStatesSupported": [sensor_state]} - sensor_state = { - "name": data[0], - "numericCapabilities": {"rawValueUnit": data[1]}, - } - - if device_class == sensor.SensorDeviceClass.AQI: - sensor_state["descriptiveCapabilities"] = { - "availableStates": [ + if self.state.domain == sensor.DOMAIN: + sensor_data = self.sensor_types.get(device_class) + if device_class is None or sensor_data is None: + return {} + available_states: list[str] | None = None + if device_class == sensor.SensorDeviceClass.AQI: + available_states = [ "healthy", "moderate", "unhealthy for sensitive groups", @@ -2753,30 +2781,53 @@ class SensorStateTrait(_Trait): "very unhealthy", "hazardous", "unknown", - ], - } - - return {"sensorStatesSupported": [sensor_state]} + ] + return create_sensor_state(sensor_data[0], sensor_data[1], available_states) + binary_sensor_data = self.binary_sensor_types.get(device_class) + if device_class is None or binary_sensor_data is None: + return {} + return create_sensor_state( + binary_sensor_data[0], available_states=binary_sensor_data[1] + ) def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) - data = self.sensor_types.get(device_class) - if device_class is None or data is None: + def create_sensor_state( + name: str, raw_value: float | None = None, current_state: str | None = None + ) -> dict[str, Any]: + sensor_state: dict[str, Any] = { + "name": name, + "rawValue": raw_value, + } + if current_state: + sensor_state["currentSensorState"] = current_state + return {"currentSensorStateData": [sensor_state]} + + if self.state.domain == sensor.DOMAIN: + sensor_data = self.sensor_types.get(device_class) + if device_class is None or sensor_data is None: + return {} + try: + value = float(self.state.state) + except ValueError: + value = None + if self.state.state == STATE_UNKNOWN: + value = None + current_state: str | None = None + if device_class == sensor.SensorDeviceClass.AQI: + current_state = self._air_quality_description_for_aqi(value) + return create_sensor_state(sensor_data[0], value, current_state) + + binary_sensor_data = self.binary_sensor_types.get(device_class) + if device_class is None or binary_sensor_data is None: return {} - - try: - value = float(self.state.state) - except ValueError: - value = None - if self.state.state == STATE_UNKNOWN: - value = None - sensor_data = {"name": data[0], "rawValue": value} - - if device_class == sensor.SensorDeviceClass.AQI: - sensor_data["currentSensorState"] = self._air_quality_description_for_aqi( - value - ) - - return {"currentSensorStateData": [sensor_data]} + value = { + STATE_ON: 0, + STATE_OFF: 1, + STATE_UNKNOWN: 2, + }[self.state.state] + return create_sensor_state( + binary_sensor_data[0], current_state=binary_sensor_data[1][value] + ) diff --git a/homeassistant/components/google_assistant_sdk/config_flow.py b/homeassistant/components/google_assistant_sdk/config_flow.py index ea1ebe9e24a..48c92832483 100644 --- a/homeassistant/components/google_assistant_sdk/config_flow.py +++ b/homeassistant/components/google_assistant_sdk/config_flow.py @@ -66,10 +66,6 @@ class OAuth2FlowHandler( self._get_reauth_entry(), data=data ) - if self._async_current_entries(): - # Config entry already exists, only one allowed. 
- return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry( title=DEFAULT_NAME, data=data, @@ -84,16 +80,12 @@ class OAuth2FlowHandler( config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Google Assistant SDK options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/google_assistant_sdk/manifest.json b/homeassistant/components/google_assistant_sdk/manifest.json index b6281e2a4f0..85469a464b3 100644 --- a/homeassistant/components/google_assistant_sdk/manifest.json +++ b/homeassistant/components/google_assistant_sdk/manifest.json @@ -7,6 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/google_assistant_sdk", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "platinum", - "requirements": ["gassist-text==0.0.11"] + "requirements": ["gassist-text==0.0.11"], + "single_config_entry": true } diff --git a/homeassistant/components/google_cloud/config_flow.py b/homeassistant/components/google_cloud/config_flow.py index dec849de4e6..fa6c952022b 100644 --- a/homeassistant/components/google_cloud/config_flow.py +++ b/homeassistant/components/google_cloud/config_flow.py @@ -15,7 +15,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.core import callback from homeassistant.helpers.selector import ( @@ -135,10 +135,10 @@ class GoogleCloudConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> GoogleCloudOptionsFlowHandler: """Create the options flow.""" - return GoogleCloudOptionsFlowHandler(config_entry) + return GoogleCloudOptionsFlowHandler() -class GoogleCloudOptionsFlowHandler(OptionsFlowWithConfigEntry): +class GoogleCloudOptionsFlowHandler(OptionsFlow): """Google Cloud options flow.""" async def async_step_init( @@ -169,7 +169,7 @@ class GoogleCloudOptionsFlowHandler(OptionsFlowWithConfigEntry): ) ), **tts_options_schema( - self.options, voices, from_config_flow=True + self.config_entry.options, voices, from_config_flow=True ).schema, vol.Optional( CONF_STT_MODEL, @@ -182,6 +182,6 @@ class GoogleCloudOptionsFlowHandler(OptionsFlowWithConfigEntry): ), } ), - self.options, + self.config_entry.options, ), ) diff --git a/homeassistant/components/google_cloud/helpers.py b/homeassistant/components/google_cloud/helpers.py index 3c614156132..f6e89fae7fa 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -52,7 +52,7 @@ async def async_tts_voices( def tts_options_schema( - config_options: dict[str, Any], + config_options: Mapping[str, Any], voices: dict[str, list[str]], from_config_flow: bool = False, ) -> vol.Schema: diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py index bccc7d1fb84..83eec25ed15 100644 --- a/homeassistant/components/google_generative_ai_conversation/config_flow.py +++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py @@ -163,7 +163,6 @@ class GoogleGenerativeAIOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> 
None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) diff --git a/homeassistant/components/google_generative_ai_conversation/manifest.json b/homeassistant/components/google_generative_ai_conversation/manifest.json index f390b1f83e9..7b687b7da6f 100644 --- a/homeassistant/components/google_generative_ai_conversation/manifest.json +++ b/homeassistant/components/google_generative_ai_conversation/manifest.json @@ -8,6 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["google-generativeai==0.8.2"] } diff --git a/homeassistant/components/google_maps/manifest.json b/homeassistant/components/google_maps/manifest.json index d7364e834a3..8311f75b732 100644 --- a/homeassistant/components/google_maps/manifest.json +++ b/homeassistant/components/google_maps/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/google_maps", "iot_class": "cloud_polling", "loggers": ["locationsharinglib"], + "quality_scale": "legacy", "requirements": ["locationsharinglib==5.0.1"] } diff --git a/homeassistant/components/google_photos/strings.json b/homeassistant/components/google_photos/strings.json index bd565a6122d..fa3f4669dac 100644 --- a/homeassistant/components/google_photos/strings.json +++ b/homeassistant/components/google_photos/strings.json @@ -48,7 +48,7 @@ "message": "`{filename}` is not an image" }, "missing_upload_permission": { - "message": "Home Assistnt was not granted permission to upload to Google Photos" + "message": "Home Assistant was not granted permission to upload to Google Photos" }, "upload_error": { "message": "Failed to upload content: {message}" diff --git a/homeassistant/components/google_pubsub/manifest.json b/homeassistant/components/google_pubsub/manifest.json index aa13f1808c4..9ea747898b2 100644 --- a/homeassistant/components/google_pubsub/manifest.json +++ b/homeassistant/components/google_pubsub/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/google_pubsub", "iot_class": "cloud_push", + "quality_scale": "legacy", "requirements": ["google-cloud-pubsub==2.23.0"] } diff --git a/homeassistant/components/google_travel_time/config_flow.py b/homeassistant/components/google_travel_time/config_flow.py index ee809a23aea..08de293bc7d 100644 --- a/homeassistant/components/google_travel_time/config_flow.py +++ b/homeassistant/components/google_travel_time/config_flow.py @@ -148,10 +148,6 @@ def default_options(hass: HomeAssistant) -> dict[str, str]: class GoogleOptionsFlow(OptionsFlow): """Handle an options flow for Google Travel Time.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize google options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: @@ -213,7 +209,7 @@ class GoogleTravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> GoogleOptionsFlow: """Get the options flow for this handler.""" - return GoogleOptionsFlow(config_entry) + return GoogleOptionsFlow() async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" diff --git a/homeassistant/components/google_travel_time/sensor.py 
b/homeassistant/components/google_travel_time/sensor.py index 618dda50bd4..a764036321b 100644 --- a/homeassistant/components/google_travel_time/sensor.py +++ b/homeassistant/components/google_travel_time/sensor.py @@ -7,6 +7,7 @@ import logging from googlemaps import Client from googlemaps.distance_matrix import distance_matrix +from googlemaps.exceptions import ApiError, Timeout, TransportError from homeassistant.components.sensor import ( SensorDeviceClass, @@ -172,9 +173,13 @@ class GoogleTravelTimeSensor(SensorEntity): self._resolved_destination, ) if self._resolved_destination is not None and self._resolved_origin is not None: - self._matrix = distance_matrix( - self._client, - self._resolved_origin, - self._resolved_destination, - **options_copy, - ) + try: + self._matrix = distance_matrix( + self._client, + self._resolved_origin, + self._resolved_destination, + **options_copy, + ) + except (ApiError, TransportError, Timeout) as ex: + _LOGGER.error("Error getting travel time: %s", ex) + self._matrix = None diff --git a/homeassistant/components/google_wifi/manifest.json b/homeassistant/components/google_wifi/manifest.json index 200684b2e1c..a71558a7d6f 100644 --- a/homeassistant/components/google_wifi/manifest.json +++ b/homeassistant/components/google_wifi/manifest.json @@ -3,5 +3,6 @@ "name": "Google Wifi", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/google_wifi", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/graphite/manifest.json b/homeassistant/components/graphite/manifest.json index da249a22829..cd50a5933f1 100644 --- a/homeassistant/components/graphite/manifest.json +++ b/homeassistant/components/graphite/manifest.json @@ -3,5 +3,6 @@ "name": "Graphite", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/graphite", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/greeneye_monitor/manifest.json b/homeassistant/components/greeneye_monitor/manifest.json index fcf4d004d26..15c4c2123e3 100644 --- a/homeassistant/components/greeneye_monitor/manifest.json +++ b/homeassistant/components/greeneye_monitor/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/greeneye_monitor", "iot_class": "local_push", "loggers": ["greeneye"], + "quality_scale": "legacy", "requirements": ["greeneye_monitor==3.0.3"] } diff --git a/homeassistant/components/greenwave/manifest.json b/homeassistant/components/greenwave/manifest.json index 5cb3255192f..422d3bc512e 100644 --- a/homeassistant/components/greenwave/manifest.json +++ b/homeassistant/components/greenwave/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/greenwave", "iot_class": "local_polling", "loggers": ["greenwavereality"], + "quality_scale": "legacy", "requirements": ["greenwavereality==0.5.1"] } diff --git a/homeassistant/components/group/strings.json b/homeassistant/components/group/strings.json index dbb6fb01f7b..cf694af0d98 100644 --- a/homeassistant/components/group/strings.json +++ b/homeassistant/components/group/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Group", + "title": "Create Group", "description": "Groups allow you to create a new entity that represents multiple entities of the same type.", "menu_options": { "binary_sensor": "Binary sensor group", @@ -283,20 +283,20 @@ }, "issues": { 
"uoms_not_matching_device_class": { - "title": "Unit of measurements are not correct", - "description": "Unit of measurements `{uoms}` of input sensors `{source_entities}` are not compatible and can't be converted with the device class `{device_class}` of sensor group `{entity_id}`.\n\nPlease correct the unit of measurements on the source entities and reload the group sensor to fix this issue." + "title": "Units of measurement are not correct", + "description": "Units of measurement `{uoms}` of input sensors `{source_entities}` are not compatible and can't be converted with the device class `{device_class}` of sensor group `{entity_id}`.\n\nPlease correct the unit of measurement on the source entities and reload the group sensor to fix this issue." }, "uoms_not_matching_no_device_class": { - "title": "Unit of measurements is not correct", - "description": "Unit of measurements `{uoms}` of input sensors `{source_entities}` are not compatible when not using a device class on sensor group `{entity_id}`.\n\nPlease correct the unit of measurements on the source entities or set a proper device class on the sensor group and reload the group sensor to fix this issue." + "title": "Units of measurement are not correct", + "description": "Units of measurement `{uoms}` of input sensors `{source_entities}` are not compatible when not using a device class on sensor group `{entity_id}`.\n\nPlease correct the unit of measurement on the source entities or set a proper device class on the sensor group and reload the group sensor to fix this issue." }, "device_classes_not_matching": { - "title": "Device classes is not correct", - "description": "Device classes `{device_classes}` on source entities `{source_entities}` needs to be same for sensor group `{entity_id}`.\n\nPlease correct the device classes on the source entities and reload the group sensor to fix this issue." + "title": "Device classes are not correct", + "description": "Device classes `{device_classes}` on source entities `{source_entities}` need to be identical for sensor group `{entity_id}`.\n\nPlease correct the device classes on the source entities and reload the group sensor to fix this issue." }, "state_classes_not_matching": { - "title": "State classes is not correct", - "description": "State classes `{state_classes}` on source entities `{source_entities}` needs to be same for sensor group `{entity_id}`.\n\nPlease correct the state classes on the source entities and reload the group sensor to fix this issue." + "title": "State classes are not correct", + "description": "State classes `{state_classes}` on source entities `{source_entities}` need to be identical for sensor group `{entity_id}`.\n\nPlease correct the state classes on the source entities and reload the group sensor to fix this issue." 
} } } diff --git a/homeassistant/components/gstreamer/manifest.json b/homeassistant/components/gstreamer/manifest.json index 95df94ef834..3ea9010a9d7 100644 --- a/homeassistant/components/gstreamer/manifest.json +++ b/homeassistant/components/gstreamer/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/gstreamer", "iot_class": "local_push", "loggers": ["gsp"], + "quality_scale": "legacy", "requirements": ["gstreamer-player==1.1.2"] } diff --git a/homeassistant/components/gtfs/manifest.json b/homeassistant/components/gtfs/manifest.json index 73a5998ea92..3bf41a1c763 100644 --- a/homeassistant/components/gtfs/manifest.json +++ b/homeassistant/components/gtfs/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/gtfs", "iot_class": "local_polling", "loggers": ["pygtfs"], + "quality_scale": "legacy", "requirements": ["pygtfs==0.1.9"] } diff --git a/homeassistant/components/habitica/__init__.py b/homeassistant/components/habitica/__init__.py index 502f52609dd..5843e14d63e 100644 --- a/homeassistant/components/habitica/__init__.py +++ b/homeassistant/components/habitica/__init__.py @@ -30,6 +30,7 @@ CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [ + Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CALENDAR, Platform.SENSOR, diff --git a/homeassistant/components/habitica/binary_sensor.py b/homeassistant/components/habitica/binary_sensor.py new file mode 100644 index 00000000000..bc79370ea63 --- /dev/null +++ b/homeassistant/components/habitica/binary_sensor.py @@ -0,0 +1,85 @@ +"""Binary sensor platform for Habitica integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from enum import StrEnum +from typing import Any + +from homeassistant.components.binary_sensor import ( + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import ASSETS_URL +from .entity import HabiticaBase +from .types import HabiticaConfigEntry + + +@dataclass(kw_only=True, frozen=True) +class HabiticaBinarySensorEntityDescription(BinarySensorEntityDescription): + """Habitica Binary Sensor Description.""" + + value_fn: Callable[[dict[str, Any]], bool | None] + entity_picture: Callable[[dict[str, Any]], str | None] + + +class HabiticaBinarySensor(StrEnum): + """Habitica Entities.""" + + PENDING_QUEST = "pending_quest" + + +def get_scroll_image_for_pending_quest_invitation(user: dict[str, Any]) -> str | None: + """Entity picture for pending quest invitation.""" + if user["party"]["quest"].get("key") and user["party"]["quest"]["RSVPNeeded"]: + return f"inventory_quest_scroll_{user["party"]["quest"]["key"]}.png" + return None + + +BINARY_SENSOR_DESCRIPTIONS: tuple[HabiticaBinarySensorEntityDescription, ...] 
= ( + HabiticaBinarySensorEntityDescription( + key=HabiticaBinarySensor.PENDING_QUEST, + translation_key=HabiticaBinarySensor.PENDING_QUEST, + value_fn=lambda user: user["party"]["quest"]["RSVPNeeded"], + entity_picture=get_scroll_image_for_pending_quest_invitation, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: HabiticaConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the habitica binary sensors.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + HabiticaBinarySensorEntity(coordinator, description) + for description in BINARY_SENSOR_DESCRIPTIONS + ) + + +class HabiticaBinarySensorEntity(HabiticaBase, BinarySensorEntity): + """Representation of a Habitica binary sensor.""" + + entity_description: HabiticaBinarySensorEntityDescription + + @property + def is_on(self) -> bool | None: + """If the binary sensor is on.""" + return self.entity_description.value_fn(self.coordinator.data.user) + + @property + def entity_picture(self) -> str | None: + """Return the entity picture to use in the frontend, if any.""" + if entity_picture := self.entity_description.entity_picture( + self.coordinator.data.user + ): + return f"{ASSETS_URL}{entity_picture}" + return None diff --git a/homeassistant/components/habitica/button.py b/homeassistant/components/habitica/button.py index 8b41fb8c987..30e326f79a0 100644 --- a/homeassistant/components/habitica/button.py +++ b/homeassistant/components/habitica/button.py @@ -25,6 +25,8 @@ from .coordinator import HabiticaData, HabiticaDataUpdateCoordinator from .entity import HabiticaBase from .types import HabiticaConfigEntry +PARALLEL_UPDATES = 1 + @dataclass(kw_only=True, frozen=True) class HabiticaButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/habitica/calendar.py b/homeassistant/components/habitica/calendar.py index 5a0470c3440..be4433cb355 100644 --- a/homeassistant/components/habitica/calendar.py +++ b/homeassistant/components/habitica/calendar.py @@ -28,6 +28,8 @@ class HabiticaCalendar(StrEnum): DAILIES = "dailys" TODOS = "todos" + TODO_REMINDERS = "todo_reminders" + DAILY_REMINDERS = "daily_reminders" async def async_setup_entry( @@ -42,6 +44,8 @@ async def async_setup_entry( [ HabiticaTodosCalendarEntity(coordinator), HabiticaDailiesCalendarEntity(coordinator), + HabiticaTodoRemindersCalendarEntity(coordinator), + HabiticaDailyRemindersCalendarEntity(coordinator), ] ) @@ -225,3 +229,177 @@ class HabiticaDailiesCalendarEntity(HabiticaCalendarEntity): return { "yesterdaily": self.event.start < self.today.date() if self.event else None } + + +class HabiticaTodoRemindersCalendarEntity(HabiticaCalendarEntity): + """Habitica to-do reminders calendar entity.""" + + entity_description = CalendarEntityDescription( + key=HabiticaCalendar.TODO_REMINDERS, + translation_key=HabiticaCalendar.TODO_REMINDERS, + ) + + def reminders( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Reminders for todos.""" + + events = [] + + for task in self.coordinator.data.tasks: + if task["type"] != HabiticaTaskType.TODO or task["completed"]: + continue + + for reminder in task.get("reminders", []): + # reminders are returned by the API in local time but with wrong + # timezone (UTC) and arbitrary added seconds/microseconds. When + # creating reminders in Habitica only hours and minutes can be defined. 
+ start = datetime.fromisoformat(reminder["time"]).replace( + tzinfo=dt_util.DEFAULT_TIME_ZONE, second=0, microsecond=0 + ) + end = start + timedelta(hours=1) + + if end < start_date: + # Event ends before date range + continue + + if end_date and start > end_date: + # Event starts after date range + continue + + events.append( + CalendarEvent( + start=start, + end=end, + summary=task["text"], + description=task["notes"], + uid=f"{task["id"]}_{reminder["id"]}", + ) + ) + + return sorted( + events, + key=lambda event: event.start, + ) + + @property + def event(self) -> CalendarEvent | None: + """Return the next upcoming event.""" + return next(iter(self.reminders(dt_util.now())), None) + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Return calendar events within a datetime range.""" + + return self.reminders(start_date, end_date) + + +class HabiticaDailyRemindersCalendarEntity(HabiticaCalendarEntity): + """Habitica daily reminders calendar entity.""" + + entity_description = CalendarEntityDescription( + key=HabiticaCalendar.DAILY_REMINDERS, + translation_key=HabiticaCalendar.DAILY_REMINDERS, + ) + + def start(self, reminder_time: str, reminder_date: date) -> datetime: + """Generate reminder times for dailies. + + Reminders for dailies have a datetime but the date part is arbitrary, + only the time part is evaluated. The dates for the reminders are the + dailies' due dates. + """ + return datetime.combine( + reminder_date, + datetime.fromisoformat(reminder_time) + .replace( + second=0, + microsecond=0, + ) + .time(), + tzinfo=dt_util.DEFAULT_TIME_ZONE, + ) + + @property + def today(self) -> datetime: + """Habitica daystart.""" + return dt_util.start_of_local_day( + datetime.fromisoformat(self.coordinator.data.user["lastCron"]) + ) + + def get_recurrence_dates( + self, recurrences: rrule, start_date: datetime, end_date: datetime | None = None + ) -> list[datetime]: + """Calculate recurrence dates based on start_date and end_date.""" + if end_date: + return recurrences.between( + start_date, end_date - timedelta(days=1), inc=True + ) + # if no end_date is given, return only the next recurrence + return [recurrences.after(self.today, inc=True)] + + def reminders( + self, start_date: datetime, end_date: datetime | None = None + ) -> list[CalendarEvent]: + """Reminders for dailies.""" + + events = [] + if end_date and end_date < self.today: + return [] + start_date = max(start_date, self.today) + + for task in self.coordinator.data.tasks: + if not (task["type"] == HabiticaTaskType.DAILY and task["everyX"]): + continue + + recurrences = build_rrule(task) + recurrences_start = self.today + + recurrence_dates = self.get_recurrence_dates( + recurrences, recurrences_start, end_date + ) + for recurrence in recurrence_dates: + is_future_event = recurrence > self.today + is_current_event = recurrence <= self.today and not task["completed"] + + if not is_future_event and not is_current_event: + continue + + for reminder in task.get("reminders", []): + start = self.start(reminder["time"], recurrence) + end = start + timedelta(hours=1) + + if end < start_date: + # Event ends before date range + continue + + if end_date and start > end_date: + # Event starts after date range + continue + events.append( + CalendarEvent( + start=start, + end=end, + summary=task["text"], + description=task["notes"], + uid=f"{task["id"]}_{reminder["id"]}", + ) + ) + + return sorted( + events, + key=lambda event: event.start, + ) + + @property 
+ def event(self) -> CalendarEvent | None: + """Return the next upcoming event.""" + return next(iter(self.reminders(dt_util.now())), None) + + async def async_get_events( + self, hass: HomeAssistant, start_date: datetime, end_date: datetime + ) -> list[CalendarEvent]: + """Return calendar events within a datetime range.""" + + return self.reminders(start_date, end_date) diff --git a/homeassistant/components/habitica/config_flow.py b/homeassistant/components/habitica/config_flow.py index 88f3d1b803c..d168a5f57b4 100644 --- a/homeassistant/components/habitica/config_flow.py +++ b/homeassistant/components/habitica/config_flow.py @@ -25,7 +25,15 @@ from homeassistant.helpers.selector import ( TextSelectorType, ) -from .const import CONF_API_USER, DEFAULT_URL, DOMAIN +from .const import ( + CONF_API_USER, + DEFAULT_URL, + DOMAIN, + FORGOT_PASSWORD_URL, + HABITICANS_URL, + SIGN_UP_URL, + SITE_DATA_URL, +) STEP_ADVANCED_DATA_SCHEMA = vol.Schema( { @@ -69,6 +77,10 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_menu( step_id="user", menu_options=["login", "advanced"], + description_placeholders={ + "signup": SIGN_UP_URL, + "habiticans": HABITICANS_URL, + }, ) async def async_step_login( @@ -125,6 +137,7 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=STEP_LOGIN_DATA_SCHEMA, suggested_values=user_input ), errors=errors, + description_placeholders={"forgot_password": FORGOT_PASSWORD_URL}, ) async def async_step_advanced( @@ -175,4 +188,8 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=STEP_ADVANCED_DATA_SCHEMA, suggested_values=user_input ), errors=errors, + description_placeholders={ + "site_data": SITE_DATA_URL, + "default_url": DEFAULT_URL, + }, ) diff --git a/homeassistant/components/habitica/const.py b/homeassistant/components/habitica/const.py index 55322a13e6a..dce417b60a5 100644 --- a/homeassistant/components/habitica/const.py +++ b/homeassistant/components/habitica/const.py @@ -6,6 +6,11 @@ CONF_API_USER = "api_user" DEFAULT_URL = "https://habitica.com" ASSETS_URL = "https://habitica-assets.s3.amazonaws.com/mobileApp/images/" +SITE_DATA_URL = "https://habitica.com/user/settings/siteData" +FORGOT_PASSWORD_URL = "https://habitica.com/forgot-password" +SIGN_UP_URL = "https://habitica.com/register" +HABITICANS_URL = "https://habitica.com/static/img/home-main@3x.ffc32b12.png" + DOMAIN = "habitica" # service constants @@ -25,7 +30,21 @@ UNIT_TASKS = "tasks" ATTR_CONFIG_ENTRY = "config_entry" ATTR_SKILL = "skill" ATTR_TASK = "task" +ATTR_DIRECTION = "direction" +ATTR_TARGET = "target" +ATTR_ITEM = "item" SERVICE_CAST_SKILL = "cast_skill" +SERVICE_START_QUEST = "start_quest" +SERVICE_ACCEPT_QUEST = "accept_quest" +SERVICE_CANCEL_QUEST = "cancel_quest" +SERVICE_ABORT_QUEST = "abort_quest" +SERVICE_REJECT_QUEST = "reject_quest" +SERVICE_LEAVE_QUEST = "leave_quest" +SERVICE_SCORE_HABIT = "score_habit" +SERVICE_SCORE_REWARD = "score_reward" + +SERVICE_TRANSFORMATION = "transformation" + WARRIOR = "warrior" ROGUE = "rogue" diff --git a/homeassistant/components/habitica/coordinator.py b/homeassistant/components/habitica/coordinator.py index cce2c684ba8..f9ffb1b53bd 100644 --- a/homeassistant/components/habitica/coordinator.py +++ b/homeassistant/components/habitica/coordinator.py @@ -51,12 +51,17 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): ), ) self.api = habitipy + self.content: dict[str, Any] = {} async def _async_update_data(self) -> HabiticaData: try: user_response = await self.api.user.get() 
tasks_response = await self.api.tasks.user.get() tasks_response.extend(await self.api.tasks.user.get(type="completedTodos")) + if not self.content: + self.content = await self.api.content.get( + language=user_response["preferences"]["language"] + ) except ClientResponseError as error: if error.status == HTTPStatus.TOO_MANY_REQUESTS: _LOGGER.debug("Rate limit exceeded, will try again later") diff --git a/homeassistant/components/habitica/diagnostics.py b/homeassistant/components/habitica/diagnostics.py new file mode 100644 index 00000000000..bca79946503 --- /dev/null +++ b/homeassistant/components/habitica/diagnostics.py @@ -0,0 +1,27 @@ +"""Diagnostics platform for Habitica integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant + +from .const import CONF_API_USER +from .types import HabiticaConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: HabiticaConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + habitica_data = await config_entry.runtime_data.api.user.anonymized.get() + + return { + "config_entry_data": { + CONF_URL: config_entry.data[CONF_URL], + CONF_API_USER: config_entry.data[CONF_API_USER], + }, + "habitica_data": habitica_data, + } diff --git a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index 617f08a4e58..d4ca5dba10d 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -64,6 +64,12 @@ }, "dailys": { "default": "mdi:calendar-multiple" + }, + "todo_reminders": { + "default": "mdi:reminder" + }, + "daily_reminders": { + "default": "mdi:reminder" } }, "sensor": { @@ -126,6 +132,18 @@ }, "rewards": { "default": "mdi:treasure-chest" + }, + "strength": { + "default": "mdi:arm-flex-outline" + }, + "intelligence": { + "default": "mdi:head-snowflake-outline" + }, + "perception": { + "default": "mdi:eye-outline" + }, + "constitution": { + "default": "mdi:run-fast" } }, "switch": { @@ -135,6 +153,14 @@ "on": "mdi:sleep" } } + }, + "binary_sensor": { + "pending_quest": { + "default": "mdi:script-outline", + "state": { + "on": "mdi:script-text-outline" + } + } } }, "services": { @@ -143,6 +169,33 @@ }, "cast_skill": { "service": "mdi:creation-outline" + }, + "accept_quest": { + "service": "mdi:script-text" + }, + "reject_quest": { + "service": "mdi:script-text" + }, + "leave_quest": { + "service": "mdi:script-text" + }, + "abort_quest": { + "service": "mdi:script-text-key" + }, + "cancel_quest": { + "service": "mdi:script-text-key" + }, + "start_quest": { + "service": "mdi:script-text-key" + }, + "score_habit": { + "service": "mdi:counter" + }, + "score_reward": { + "service": "mdi:sack" + }, + "transformation": { + "service": "mdi:flask-round-bottom" } } } diff --git a/homeassistant/components/habitica/manifest.json b/homeassistant/components/habitica/manifest.json index 8e3396d32cf..a01697c3945 100644 --- a/homeassistant/components/habitica/manifest.json +++ b/homeassistant/components/habitica/manifest.json @@ -1,7 +1,7 @@ { "domain": "habitica", "name": "Habitica", - "codeowners": ["@ASMfreaK", "@leikoilja", "@tr4nt0r"], + "codeowners": ["@tr4nt0r"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/habitica", "iot_class": "cloud_polling", diff --git a/homeassistant/components/habitica/sensor.py b/homeassistant/components/habitica/sensor.py index 
77356f88265..41d0168d748 100644 --- a/homeassistant/components/habitica/sensor.py +++ b/homeassistant/components/habitica/sensor.py @@ -24,10 +24,10 @@ from homeassistant.helpers.issue_registry import ( ) from homeassistant.helpers.typing import StateType -from .const import DOMAIN, UNIT_TASKS +from .const import ASSETS_URL, DOMAIN, UNIT_TASKS from .entity import HabiticaBase from .types import HabiticaConfigEntry -from .util import entity_used_in +from .util import entity_used_in, get_attribute_points, get_attributes_total _LOGGER = logging.getLogger(__name__) @@ -36,7 +36,11 @@ _LOGGER = logging.getLogger(__name__) class HabitipySensorEntityDescription(SensorEntityDescription): """Habitipy Sensor Description.""" - value_fn: Callable[[dict[str, Any]], StateType] + value_fn: Callable[[dict[str, Any], dict[str, Any]], StateType] + attributes_fn: ( + Callable[[dict[str, Any], dict[str, Any]], dict[str, Any] | None] | None + ) = None + entity_picture: str | None = None @dataclass(kw_only=True, frozen=True) @@ -65,90 +69,128 @@ class HabitipySensorEntity(StrEnum): REWARDS = "rewards" GEMS = "gems" TRINKETS = "trinkets" + STRENGTH = "strength" + INTELLIGENCE = "intelligence" + CONSTITUTION = "constitution" + PERCEPTION = "perception" SENSOR_DESCRIPTIONS: tuple[HabitipySensorEntityDescription, ...] = ( HabitipySensorEntityDescription( key=HabitipySensorEntity.DISPLAY_NAME, translation_key=HabitipySensorEntity.DISPLAY_NAME, - value_fn=lambda user: user.get("profile", {}).get("name"), + value_fn=lambda user, _: user.get("profile", {}).get("name"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH, translation_key=HabitipySensorEntity.HEALTH, native_unit_of_measurement="HP", suggested_display_precision=0, - value_fn=lambda user: user.get("stats", {}).get("hp"), + value_fn=lambda user, _: user.get("stats", {}).get("hp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.HEALTH_MAX, translation_key=HabitipySensorEntity.HEALTH_MAX, native_unit_of_measurement="HP", entity_registry_enabled_default=False, - value_fn=lambda user: user.get("stats", {}).get("maxHealth"), + value_fn=lambda user, _: user.get("stats", {}).get("maxHealth"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA, translation_key=HabitipySensorEntity.MANA, native_unit_of_measurement="MP", suggested_display_precision=0, - value_fn=lambda user: user.get("stats", {}).get("mp"), + value_fn=lambda user, _: user.get("stats", {}).get("mp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.MANA_MAX, translation_key=HabitipySensorEntity.MANA_MAX, native_unit_of_measurement="MP", - value_fn=lambda user: user.get("stats", {}).get("maxMP"), + value_fn=lambda user, _: user.get("stats", {}).get("maxMP"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE, translation_key=HabitipySensorEntity.EXPERIENCE, native_unit_of_measurement="XP", - value_fn=lambda user: user.get("stats", {}).get("exp"), + value_fn=lambda user, _: user.get("stats", {}).get("exp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.EXPERIENCE_MAX, translation_key=HabitipySensorEntity.EXPERIENCE_MAX, native_unit_of_measurement="XP", - value_fn=lambda user: user.get("stats", {}).get("toNextLevel"), + value_fn=lambda user, _: user.get("stats", {}).get("toNextLevel"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.LEVEL, translation_key=HabitipySensorEntity.LEVEL, - value_fn=lambda user: user.get("stats", {}).get("lvl"), + value_fn=lambda user, _: user.get("stats", {}).get("lvl"), 
), HabitipySensorEntityDescription( key=HabitipySensorEntity.GOLD, translation_key=HabitipySensorEntity.GOLD, native_unit_of_measurement="GP", suggested_display_precision=2, - value_fn=lambda user: user.get("stats", {}).get("gp"), + value_fn=lambda user, _: user.get("stats", {}).get("gp"), ), HabitipySensorEntityDescription( key=HabitipySensorEntity.CLASS, translation_key=HabitipySensorEntity.CLASS, - value_fn=lambda user: user.get("stats", {}).get("class"), + value_fn=lambda user, _: user.get("stats", {}).get("class"), device_class=SensorDeviceClass.ENUM, options=["warrior", "healer", "wizard", "rogue"], ), HabitipySensorEntityDescription( key=HabitipySensorEntity.GEMS, translation_key=HabitipySensorEntity.GEMS, - value_fn=lambda user: user.get("balance", 0) * 4, + value_fn=lambda user, _: user.get("balance", 0) * 4, suggested_display_precision=0, native_unit_of_measurement="gems", + entity_picture="shop_gem.png", ), HabitipySensorEntityDescription( key=HabitipySensorEntity.TRINKETS, translation_key=HabitipySensorEntity.TRINKETS, value_fn=( - lambda user: user.get("purchased", {}) + lambda user, _: user.get("purchased", {}) .get("plan", {}) .get("consecutive", {}) .get("trinkets", 0) ), suggested_display_precision=0, native_unit_of_measurement="⧖", + entity_picture="notif_subscriber_reward.png", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.STRENGTH, + translation_key=HabitipySensorEntity.STRENGTH, + value_fn=lambda user, content: get_attributes_total(user, content, "str"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "str"), + suggested_display_precision=0, + native_unit_of_measurement="STR", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.INTELLIGENCE, + translation_key=HabitipySensorEntity.INTELLIGENCE, + value_fn=lambda user, content: get_attributes_total(user, content, "int"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "int"), + suggested_display_precision=0, + native_unit_of_measurement="INT", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.PERCEPTION, + translation_key=HabitipySensorEntity.PERCEPTION, + value_fn=lambda user, content: get_attributes_total(user, content, "per"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "per"), + suggested_display_precision=0, + native_unit_of_measurement="PER", + ), + HabitipySensorEntityDescription( + key=HabitipySensorEntity.CONSTITUTION, + translation_key=HabitipySensorEntity.CONSTITUTION, + value_fn=lambda user, content: get_attributes_total(user, content, "con"), + attributes_fn=lambda user, content: get_attribute_points(user, content, "con"), + suggested_display_precision=0, + native_unit_of_measurement="CON", ), ) @@ -243,7 +285,23 @@ class HabitipySensor(HabiticaBase, SensorEntity): def native_value(self) -> StateType: """Return the state of the device.""" - return self.entity_description.value_fn(self.coordinator.data.user) + return self.entity_description.value_fn( + self.coordinator.data.user, self.coordinator.content + ) + + @property + def extra_state_attributes(self) -> dict[str, float | None] | None: + """Return entity specific state attributes.""" + if func := self.entity_description.attributes_fn: + return func(self.coordinator.data.user, self.coordinator.content) + return None + + @property + def entity_picture(self) -> str | None: + """Return the entity picture to use in the frontend, if any.""" + if entity_picture := self.entity_description.entity_picture: + return 
f"{ASSETS_URL}{entity_picture}" + return None class HabitipyTaskSensor(HabiticaBase, SensorEntity): diff --git a/homeassistant/components/habitica/services.py b/homeassistant/components/habitica/services.py index 8ca80ff63ad..7f2d66e4690 100644 --- a/homeassistant/components/habitica/services.py +++ b/homeassistant/components/habitica/services.py @@ -9,6 +9,7 @@ from typing import Any from aiohttp import ClientResponseError import voluptuous as vol +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_NAME, CONF_NAME from homeassistant.core import ( HomeAssistant, @@ -18,19 +19,32 @@ from homeassistant.core import ( ) from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.selector import ConfigEntrySelector from .const import ( ATTR_ARGS, ATTR_CONFIG_ENTRY, ATTR_DATA, + ATTR_DIRECTION, + ATTR_ITEM, ATTR_PATH, ATTR_SKILL, + ATTR_TARGET, ATTR_TASK, DOMAIN, EVENT_API_CALL_SUCCESS, + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, SERVICE_API_CALL, + SERVICE_CANCEL_QUEST, SERVICE_CAST_SKILL, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_SCORE_HABIT, + SERVICE_SCORE_REWARD, + SERVICE_START_QUEST, + SERVICE_TRANSFORMATION, ) from .types import HabiticaConfigEntry @@ -53,11 +67,60 @@ SERVICE_CAST_SKILL_SCHEMA = vol.Schema( } ) +SERVICE_MANAGE_QUEST_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + } +) +SERVICE_SCORE_TASK_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_TASK): cv.string, + vol.Optional(ATTR_DIRECTION): cv.string, + } +) -def async_setup_services(hass: HomeAssistant) -> None: +SERVICE_TRANSFORMATION_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(), + vol.Required(ATTR_ITEM): cv.string, + vol.Required(ATTR_TARGET): cv.string, + } +) + + +def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry: + """Return config entry or raise if not found or not loaded.""" + if not (entry := hass.config_entries.async_get_entry(entry_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_found", + ) + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="entry_not_loaded", + ) + return entry + + +def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901 """Set up services for Habitica integration.""" async def handle_api_call(call: ServiceCall) -> None: + async_create_issue( + hass, + DOMAIN, + "deprecated_api_call", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_api_call", + ) + _LOGGER.warning( + "Deprecated action called: 'habitica.api_call' is deprecated and will be removed in Home Assistant version 2025.6.0" + ) + name = call.data[ATTR_NAME] path = call.data[ATTR_PATH] entries = hass.config_entries.async_entries(DOMAIN) @@ -86,14 +149,7 @@ def async_setup_services(hass: HomeAssistant) -> None: async def cast_skill(call: ServiceCall) -> ServiceResponse: """Skill action.""" - entry: HabiticaConfigEntry | None - if not ( - entry := hass.config_entries.async_get_entry(call.data[ATTR_CONFIG_ENTRY]) - ): - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="entry_not_found", - ) + entry = 
get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) coordinator = entry.runtime_data skill = { "pickpocket": {"spellId": "pickPocket", "cost": "10 MP"}, @@ -151,6 +207,181 @@ def async_setup_services(hass: HomeAssistant) -> None: await coordinator.async_request_refresh() return response + async def manage_quests(call: ServiceCall) -> ServiceResponse: + """Accept, reject, start, leave or cancel quests.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + + COMMAND_MAP = { + SERVICE_ABORT_QUEST: "abort", + SERVICE_ACCEPT_QUEST: "accept", + SERVICE_CANCEL_QUEST: "cancel", + SERVICE_LEAVE_QUEST: "leave", + SERVICE_REJECT_QUEST: "reject", + SERVICE_START_QUEST: "force-start", + } + try: + return await coordinator.api.groups.party.quests[ + COMMAND_MAP[call.service] + ].post() + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="quest_action_unallowed" + ) from e + if e.status == HTTPStatus.NOT_FOUND: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="quest_not_found" + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, translation_key="service_call_exception" + ) from e + + for service in ( + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, + SERVICE_CANCEL_QUEST, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_START_QUEST, + ): + hass.services.async_register( + DOMAIN, + service, + manage_quests, + schema=SERVICE_MANAGE_QUEST_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + async def score_task(call: ServiceCall) -> ServiceResponse: + """Score a task action.""" + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + try: + task_id, task_value = next( + (task["id"], task.get("value")) + for task in coordinator.data.tasks + if call.data[ATTR_TASK] in (task["id"], task.get("alias")) + or call.data[ATTR_TASK] == task["text"] + ) + except StopIteration as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="task_not_found", + translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"}, + ) from e + + try: + response: dict[str, Any] = ( + await coordinator.api.tasks[task_id] + .score[call.data.get(ATTR_DIRECTION, "up")] + .post() + ) + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED and task_value is not None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="not_enough_gold", + translation_placeholders={ + "gold": f"{coordinator.data.user["stats"]["gp"]:.2f} GP", + "cost": f"{task_value} GP", + }, + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await coordinator.async_request_refresh() + return response + + async def transformation(call: ServiceCall) -> ServiceResponse: + """Use a transformation item on a player character.""" + + entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY]) + coordinator = entry.runtime_data + ITEMID_MAP = { + "snowball": {"itemId": "snowball"}, + "spooky_sparkles": {"itemId": "spookySparkles"}, + "seafoam":
{"itemId": "seafoam"}, + "shiny_seed": {"itemId": "shinySeed"}, + } + # check if target is self + if call.data[ATTR_TARGET] in ( + coordinator.data.user["id"], + coordinator.data.user["profile"]["name"], + coordinator.data.user["auth"]["local"]["username"], + ): + target_id = coordinator.data.user["id"] + else: + # check if target is a party member + try: + party = await coordinator.api.groups.party.members.get() + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.NOT_FOUND: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="party_not_found", + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + try: + target_id = next( + member["id"] + for member in party + if call.data[ATTR_TARGET].lower() + in ( + member["id"], + member["auth"]["local"]["username"].lower(), + member["profile"]["name"].lower(), + ) + ) + except StopIteration as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="target_not_found", + translation_placeholders={"target": f"'{call.data[ATTR_TARGET]}'"}, + ) from e + try: + response: dict[str, Any] = await coordinator.api.user.class_.cast[ + ITEMID_MAP[call.data[ATTR_ITEM]]["itemId"] + ].post(targetId=target_id) + except ClientResponseError as e: + if e.status == HTTPStatus.TOO_MANY_REQUESTS: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="setup_rate_limit_exception", + ) from e + if e.status == HTTPStatus.UNAUTHORIZED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="item_not_found", + translation_placeholders={"item": call.data[ATTR_ITEM]}, + ) from e + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + return response + hass.services.async_register( DOMAIN, SERVICE_API_CALL, @@ -165,3 +396,26 @@ def async_setup_services(hass: HomeAssistant) -> None: schema=SERVICE_CAST_SKILL_SCHEMA, supports_response=SupportsResponse.ONLY, ) + + hass.services.async_register( + DOMAIN, + SERVICE_SCORE_HABIT, + score_task, + schema=SERVICE_SCORE_TASK_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + hass.services.async_register( + DOMAIN, + SERVICE_SCORE_REWARD, + score_task, + schema=SERVICE_SCORE_TASK_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_TRANSFORMATION, + transformation, + schema=SERVICE_TRANSFORMATION_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/habitica/services.yaml b/homeassistant/components/habitica/services.yaml index 546ac8c1c34..a89c935b630 100644 --- a/homeassistant/components/habitica/services.yaml +++ b/homeassistant/components/habitica/services.yaml @@ -17,7 +17,7 @@ api_call: object: cast_skill: fields: - config_entry: + config_entry: &config_entry required: true selector: config_entry: @@ -33,7 +33,64 @@ cast_skill: - "fireball" mode: dropdown translation_key: "skill_select" - task: + task: &task + required: true + selector: + text: +accept_quest: + fields: + config_entry: *config_entry +reject_quest: + fields: + config_entry: *config_entry +start_quest: + fields: + config_entry: *config_entry +cancel_quest: + fields: + config_entry: *config_entry +abort_quest: + fields: + config_entry: *config_entry +leave_quest: + 
fields: + config_entry: *config_entry +score_habit: + fields: + config_entry: *config_entry + task: *task + direction: + required: true + selector: + select: + options: + - value: up + label: "➕" + - value: down + label: "➖" +score_reward: + fields: + config_entry: *config_entry + task: *task +transformation: + fields: + config_entry: + required: true + selector: + config_entry: + integration: habitica + item: + required: true + selector: + select: + options: + - "snowball" + - "spooky_sparkles" + - "seafoam" + - "shiny_seed" + mode: dropdown + translation_key: "transformation_item_select" + target: required: true selector: text: diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 690cdab09ad..81691327aec 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -1,7 +1,8 @@ { "common": { "todos": "To-Do's", - "dailies": "Dailies" + "dailies": "Dailies", + "config_entry_name": "Select character" }, "config": { "abort": { @@ -14,30 +15,48 @@ }, "step": { "user": { + "title": "Habitica - Gamify your life", "menu_options": { "login": "Login to Habitica", "advanced": "Login to other instances" }, - "description": "Connect your Habitica profile to allow monitoring of your user's profile and tasks." + "description": "![Habiticans]({habiticans}) Connect your Habitica account to keep track of your adventurer's stats, progress, and manage your to-dos and daily tasks.\n\n[Don't have a Habitica account? Sign up here.]({signup})" }, "login": { + "title": "[%key:component::habitica::config::step::user::menu_options::login%]", "data": { "username": "Email or username (case-sensitive)", "password": "[%key:common::config_flow::data::password%]" - } + }, + "data_description": { + "username": "Email or username (case-sensitive) to connect Home Assistant to your Habitica account", + "password": "Password for the account to connect Home Assistant to Habitica" + }, + "description": "Enter your login details to start using Habitica with Home Assistant\n\n[Forgot your password?]({forgot_password})" }, "advanced": { + "title": "[%key:component::habitica::config::step::user::menu_options::advanced%]", "data": { "url": "[%key:common::config_flow::data::url%]", "api_user": "User ID", "api_key": "API Token", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, - "description": "You can retrieve your `User ID` and `API Token` from **Settings -> Site Data** on Habitica or the instance you want to connect to" + "data_description": { + "url": "URL of the Habitica installation to connect to. 
Defaults to `{default_url}`", + "api_user": "User ID of your Habitica account", + "api_key": "API Token of the Habitica account" + }, + "description": "You can retrieve your `User ID` and `API Token` from [**Settings -> Site Data**]({site_data}) on Habitica or the instance you want to connect to" } } }, "entity": { + "binary_sensor": { + "pending_quest": { + "name": "Pending quest invitation" + } + }, "button": { "run_cron": { "name": "Start my day" @@ -103,6 +122,12 @@ } } } + }, + "todo_reminders": { + "name": "To-do reminders" + }, + "daily_reminders": { + "name": "Daily reminders" } }, "sensor": { @@ -159,6 +184,86 @@ }, "rewards": { "name": "Rewards" + }, + "strength": { + "name": "Strength", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "Battle gear" + }, + "class": { + "name": "Class equip bonus" + }, + "allocated": { + "name": "Allocated attribute points" + }, + "buffs": { + "name": "Buffs" + } + } + }, + "intelligence": { + "name": "Intelligence", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } + }, + "perception": { + "name": "Perception", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } + }, + "constitution": { + "name": "Constitution", + "state_attributes": { + "level": { + "name": "[%key:component::habitica::entity::sensor::level::name%]" + }, + "equipment": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::equipment::name%]" + }, + "class": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::class::name%]" + }, + "allocated": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::allocated::name%]" + }, + "buffs": { + "name": "[%key:component::habitica::entity::sensor::strength::state_attributes::buffs::name%]" + } + } } }, "switch": { @@ -215,20 +320,45 @@ "not_enough_mana": { "message": "Unable to cast skill, not enough mana. Your character has {mana}, but the skill costs {cost}." }, + "not_enough_gold": { + "message": "Unable to buy reward, not enough gold. Your character has {gold}, but the reward costs {cost}." + }, "skill_not_found": { "message": "Unable to cast skill, your character does not have the skill or spell {skill}." }, "entry_not_found": { - "message": "The selected character is currently not configured or loaded in Home Assistant." + "message": "The selected character is not configured in Home Assistant." 
+ }, + "entry_not_loaded": { + "message": "The selected character is currently not loaded or disabled in Home Assistant." }, "task_not_found": { - "message": "Unable to cast skill, could not find the task {task}" + "message": "Unable to complete action, could not find the task {task}" + }, + "quest_action_unallowed": { + "message": "Action not allowed, only quest leader or group leader can perform this action" + }, + "quest_not_found": { + "message": "Unable to complete action, quest or group not found" + }, + "target_not_found": { + "message": "Unable to find target {target} in your party" + }, + "party_not_found": { + "message": "Unable to find target, you are currently not in a party. You can only target yourself" + }, + "item_not_found": { + "message": "Unable to use {item}, you don't own this item." + } }, "issues": { "deprecated_task_entity": { "title": "The Habitica {task_name} sensor is deprecated", "description": "The Habitica entity `{entity}` is deprecated and will be removed in a future release.\nPlease update your automations and scripts to replace the sensor entity with the newly added todo entity.\nWhen you are done migrating you can disable `{entity}`." + }, + "deprecated_api_call": { + "title": "The Habitica action habitica.api_call is deprecated", + "description": "The Habitica action `habitica.api_call` is deprecated and will be removed in Home Assistant 2025.6.0.\n\nPlease update your automations and scripts to use other Habitica actions and entities." + } }, "services": { @@ -255,7 +385,7 @@ "description": "Use a skill or spell from your Habitica character on a specific task to affect its progress or status.", "fields": { "config_entry": { - "name": "Select character", + "name": "[%key:component::habitica::common::config_entry_name%]", "description": "Choose the Habitica character to cast the skill." }, "skill": { @@ -267,6 +397,116 @@ "description": "The name (or task ID) of the task you want to target with the skill or spell." } } + }, + "accept_quest": { + "name": "Accept a quest invitation", + "description": "Accept a pending invitation to a quest.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Choose the Habitica character for which to perform the action." + } + } + }, + "reject_quest": { + "name": "Reject a quest invitation", + "description": "Reject a pending invitation to a quest.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "leave_quest": { + "name": "Leave a quest", + "description": "Leave the current quest you are participating in.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "abort_quest": { + "name": "Abort an active quest", + "description": "Terminate your party's ongoing quest. All progress will be lost and the quest scroll returned to the owner's inventory.
Only quest leader or group leader can perform this action.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "cancel_quest": { + "name": "Cancel a pending quest", + "description": "Cancel a quest that has not yet started. All accepted and pending invitations will be canceled and the quest scroll returned to the owner's inventory. Only quest leader or group leader can perform this action.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "start_quest": { + "name": "Force-start a pending quest", + "description": "Begin the quest immediately, bypassing any pending invitations that haven't been accepted or rejected. Only quest leader or group leader can perform this action.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "[%key:component::habitica::services::accept_quest::fields::config_entry::description%]" + } + } + }, + "score_habit": { + "name": "Track a habit", + "description": "Increase the positive or negative streak of a habit to track its progress.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Select the Habitica character tracking your habit." + }, + "task": { + "name": "Habit name", + "description": "The name (or task ID) of the Habitica habit." + }, + "direction": { + "name": "Reward or loss", + "description": "Whether you want to track positive or negative progress for your habit." + } + } + }, + "score_reward": { + "name": "Buy a reward", + "description": "Reward yourself and buy one of your custom rewards with gold earned by fulfilling tasks.", + "fields": { + "config_entry": { + "name": "[%key:component::habitica::common::config_entry_name%]", + "description": "Select the Habitica character buying the reward." + }, + "task": { + "name": "Reward name", + "description": "The name (or task ID) of the custom reward." + } + } + }, + "transformation": { + "name": "Use a transformation item", + "description": "Use a transformation item from your Habitica character's inventory on a member of your party or yourself.", + "fields": { + "config_entry": { + "name": "Select character", + "description": "Choose the Habitica character to use the transformation item." + }, + "item": { + "name": "Transformation item", + "description": "Select the transformation item you want to use. Item must be in the character's inventory." + }, + "target": { + "name": "Target character", + "description": "The name of the character you want to use the transformation item on. You can also specify the player's username or user ID."
+ } + } } }, "selector": { @@ -277,6 +517,14 @@ "backstab": "Rogue: Backstab", "smash": "Warrior: Brutal smash" } + }, + "transformation_item_select": { + "options": { + "snowball": "Snowball", + "spooky_sparkles": "Spooky sparkles", + "seafoam": "Seafoam", + "shiny_seed": "Shiny seed" + } } } } diff --git a/homeassistant/components/habitica/switch.py b/homeassistant/components/habitica/switch.py index 6682911e892..de0cc533050 100644 --- a/homeassistant/components/habitica/switch.py +++ b/homeassistant/components/habitica/switch.py @@ -19,6 +19,8 @@ from .coordinator import HabiticaData, HabiticaDataUpdateCoordinator from .entity import HabiticaBase from .types import HabiticaConfigEntry +PARALLEL_UPDATES = 1 + @dataclass(kw_only=True, frozen=True) class HabiticaSwitchEntityDescription(SwitchEntityDescription): diff --git a/homeassistant/components/habitica/todo.py b/homeassistant/components/habitica/todo.py index 0fff7b66605..0ca5f723c45 100644 --- a/homeassistant/components/habitica/todo.py +++ b/homeassistant/components/habitica/todo.py @@ -27,6 +27,8 @@ from .entity import HabiticaBase from .types import HabiticaConfigEntry, HabiticaTaskType from .util import next_due_date +PARALLEL_UPDATES = 1 + class HabiticaTodoList(StrEnum): """Habitica Entities.""" diff --git a/homeassistant/components/habitica/util.py b/homeassistant/components/habitica/util.py index 93a7c234a5d..b2b4430c490 100644 --- a/homeassistant/components/habitica/util.py +++ b/homeassistant/components/habitica/util.py @@ -3,6 +3,7 @@ from __future__ import annotations import datetime +from math import floor from typing import TYPE_CHECKING, Any from dateutil.rrule import ( @@ -139,3 +140,52 @@ def get_recurrence_rule(recurrence: rrule) -> str: """ return str(recurrence).split("RRULE:")[1] + + +def get_attribute_points( + user: dict[str, Any], content: dict[str, Any], attribute: str +) -> dict[str, float]: + """Get modifiers contributing to strength attribute.""" + + gear_set = { + "weapon", + "armor", + "head", + "shield", + "back", + "headAccessory", + "eyewear", + "body", + } + + equipment = sum( + stats[attribute] + for gear in gear_set + if (equipped := user["items"]["gear"]["equipped"].get(gear)) + and (stats := content["gear"]["flat"].get(equipped)) + ) + + class_bonus = sum( + stats[attribute] / 2 + for gear in gear_set + if (equipped := user["items"]["gear"]["equipped"].get(gear)) + and (stats := content["gear"]["flat"].get(equipped)) + and stats["klass"] == user["stats"]["class"] + ) + + return { + "level": min(floor(user["stats"]["lvl"] / 2), 50), + "equipment": equipment, + "class": class_bonus, + "allocated": user["stats"][attribute], + "buffs": user["stats"]["buffs"][attribute], + } + + +def get_attributes_total( + user: dict[str, Any], content: dict[str, Any], attribute: str +) -> int: + """Get total attribute points.""" + return floor( + sum(value for value in get_attribute_points(user, content, attribute).values()) + ) diff --git a/homeassistant/components/harman_kardon_avr/manifest.json b/homeassistant/components/harman_kardon_avr/manifest.json index c28504cf2d8..e56aeebafe4 100644 --- a/homeassistant/components/harman_kardon_avr/manifest.json +++ b/homeassistant/components/harman_kardon_avr/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/harman_kardon_avr", "iot_class": "local_polling", "loggers": ["hkavr"], + "quality_scale": "legacy", "requirements": ["hkavr==0.0.5"] } diff --git a/homeassistant/components/harmony/config_flow.py 
b/homeassistant/components/harmony/config_flow.py index 87eb657a0a9..b75ad617b39 100644 --- a/homeassistant/components/harmony/config_flow.py +++ b/homeassistant/components/harmony/config_flow.py @@ -28,7 +28,6 @@ from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from .const import DOMAIN, PREVIOUS_ACTIVE_ACTIVITY, UNIQUE_ID -from .data import HarmonyConfigEntry from .util import ( find_best_name_for_remote, find_unique_id_for_remote, @@ -156,7 +155,7 @@ class HarmonyConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def _async_create_entry_from_valid_input( self, validated: dict[str, Any], user_input: dict[str, Any] @@ -186,10 +185,6 @@ def _options_from_user_input(user_input: dict[str, Any]) -> dict[str, Any]: class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Harmony.""" - def __init__(self, config_entry: HarmonyConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/hassio/const.py b/homeassistant/components/hassio/const.py index b337017147b..82ce74832c2 100644 --- a/homeassistant/components/hassio/const.py +++ b/homeassistant/components/hassio/const.py @@ -137,17 +137,3 @@ class SupervisorEntityModel(StrEnum): CORE = "Home Assistant Core" SUPERVIOSR = "Home Assistant Supervisor" HOST = "Home Assistant Host" - - -class SupervisorIssueContext(StrEnum): - """Context for supervisor issues.""" - - ADDON = "addon" - CORE = "core" - DNS_SERVER = "dns_server" - MOUNT = "mount" - OS = "os" - PLUGIN = "plugin" - SUPERVISOR = "supervisor" - STORE = "store" - SYSTEM = "system" diff --git a/homeassistant/components/hassio/handler.py b/homeassistant/components/hassio/handler.py index f69ee40293b..58f2aa8c144 100644 --- a/homeassistant/components/hassio/handler.py +++ b/homeassistant/components/hassio/handler.py @@ -91,15 +91,6 @@ async def async_create_backup( return await hassio.send_command(command, payload=payload, timeout=None) -@bind_hass -@_api_bool -async def async_apply_suggestion(hass: HomeAssistant, suggestion_uuid: str) -> dict: - """Apply a suggestion from supervisor's resolution center.""" - hassio: HassIO = hass.data[DOMAIN] - command = f"/resolution/suggestion/{suggestion_uuid}" - return await hassio.send_command(command, timeout=None) - - @api_data async def async_get_green_settings(hass: HomeAssistant) -> dict[str, bool]: """Return settings specific to Home Assistant Green.""" @@ -245,26 +236,6 @@ class HassIO: """ return self.send_command("/ingress/panels", method="get") - @api_data - def get_resolution_info(self) -> Coroutine: - """Return data for Supervisor resolution center. - - This method returns a coroutine. - """ - return self.send_command("/resolution/info", method="get") - - @api_data - def get_suggestions_for_issue( - self, issue_id: str - ) -> Coroutine[Any, Any, dict[str, Any]]: - """Return suggestions for issue from Supervisor resolution center. - - This method returns a coroutine. 
- """ - return self.send_command( - f"/resolution/issue/{issue_id}/suggestions", method="get" - ) - @_api_bool async def update_hass_api( self, http_config: dict[str, Any], refresh_token: RefreshToken @@ -304,14 +275,6 @@ class HassIO: "/supervisor/options", payload={"diagnostics": diagnostics} ) - @_api_bool - def apply_suggestion(self, suggestion_uuid: str) -> Coroutine: - """Apply a suggestion from supervisor's resolution center. - - This method returns a coroutine. - """ - return self.send_command(f"/resolution/suggestion/{suggestion_uuid}") - async def send_command( self, command: str, diff --git a/homeassistant/components/hassio/issues.py b/homeassistant/components/hassio/issues.py index 944bc99a6b9..16697659077 100644 --- a/homeassistant/components/hassio/issues.py +++ b/homeassistant/components/hassio/issues.py @@ -7,6 +7,10 @@ from dataclasses import dataclass, field from datetime import datetime import logging from typing import Any, NotRequired, TypedDict +from uuid import UUID + +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ContextType, Issue as SupervisorIssue from homeassistant.core import HassJob, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -20,12 +24,8 @@ from homeassistant.helpers.issue_registry import ( from .const import ( ATTR_DATA, ATTR_HEALTHY, - ATTR_ISSUES, - ATTR_SUGGESTIONS, ATTR_SUPPORTED, - ATTR_UNHEALTHY, ATTR_UNHEALTHY_REASONS, - ATTR_UNSUPPORTED, ATTR_UNSUPPORTED_REASONS, ATTR_UPDATE_KEY, ATTR_WS_EVENT, @@ -45,10 +45,9 @@ from .const import ( PLACEHOLDER_KEY_REFERENCE, REQUEST_REFRESH_DELAY, UPDATE_KEY_SUPERVISOR, - SupervisorIssueContext, ) from .coordinator import get_addons_info -from .handler import HassIO, HassioAPIError +from .handler import HassIO, get_supervisor_client ISSUE_KEY_UNHEALTHY = "unhealthy" ISSUE_KEY_UNSUPPORTED = "unsupported" @@ -120,9 +119,9 @@ class SuggestionDataType(TypedDict): class Suggestion: """Suggestion from Supervisor which resolves an issue.""" - uuid: str + uuid: UUID type: str - context: SupervisorIssueContext + context: ContextType reference: str | None = None @property @@ -134,9 +133,9 @@ class Suggestion: def from_dict(cls, data: SuggestionDataType) -> Suggestion: """Convert from dictionary representation.""" return cls( - uuid=data["uuid"], + uuid=UUID(data["uuid"]), type=data["type"], - context=SupervisorIssueContext(data["context"]), + context=ContextType(data["context"]), reference=data["reference"], ) @@ -155,9 +154,9 @@ class IssueDataType(TypedDict): class Issue: """Issue from Supervisor.""" - uuid: str + uuid: UUID type: str - context: SupervisorIssueContext + context: ContextType reference: str | None = None suggestions: list[Suggestion] = field(default_factory=list, compare=False) @@ -171,9 +170,9 @@ class Issue: """Convert from dictionary representation.""" suggestions: list[SuggestionDataType] = data.get("suggestions", []) return cls( - uuid=data["uuid"], + uuid=UUID(data["uuid"]), type=data["type"], - context=SupervisorIssueContext(data["context"]), + context=ContextType(data["context"]), reference=data["reference"], suggestions=[ Suggestion.from_dict(suggestion) for suggestion in suggestions @@ -190,7 +189,8 @@ class SupervisorIssues: self._client = client self._unsupported_reasons: set[str] = set() self._unhealthy_reasons: set[str] = set() - self._issues: dict[str, Issue] = {} + self._issues: dict[UUID, Issue] = {} + self._supervisor_client = get_supervisor_client(hass) @property def unhealthy_reasons(self) -> set[str]: 
@@ -283,7 +283,7 @@ class SupervisorIssues: async_create_issue( self._hass, DOMAIN, - issue.uuid, + issue.uuid.hex, is_fixable=bool(issue.suggestions), severity=IssueSeverity.WARNING, translation_key=issue.key, @@ -292,19 +292,37 @@ class SupervisorIssues: self._issues[issue.uuid] = issue - async def add_issue_from_data(self, data: IssueDataType) -> None: + async def add_issue_from_data(self, data: SupervisorIssue) -> None: """Add issue from data to list after getting latest suggestions.""" try: - data["suggestions"] = ( - await self._client.get_suggestions_for_issue(data["uuid"]) - )[ATTR_SUGGESTIONS] - except HassioAPIError: + suggestions = ( + await self._supervisor_client.resolution.suggestions_for_issue( + data.uuid + ) + ) + except SupervisorError: _LOGGER.error( "Could not get suggestions for supervisor issue %s, skipping it", - data["uuid"], + data.uuid.hex, ) return - self.add_issue(Issue.from_dict(data)) + self.add_issue( + Issue( + uuid=data.uuid, + type=str(data.type), + context=data.context, + reference=data.reference, + suggestions=[ + Suggestion( + uuid=suggestion.uuid, + type=str(suggestion.type), + context=suggestion.context, + reference=suggestion.reference, + ) + for suggestion in suggestions + ], + ) + ) def remove_issue(self, issue: Issue) -> None: """Remove an issue from the list. Delete a repair if necessary.""" @@ -312,13 +330,13 @@ class SupervisorIssues: return if issue.key in ISSUE_KEYS_FOR_REPAIRS: - async_delete_issue(self._hass, DOMAIN, issue.uuid) + async_delete_issue(self._hass, DOMAIN, issue.uuid.hex) del self._issues[issue.uuid] def get_issue(self, issue_id: str) -> Issue | None: """Get issue from key.""" - return self._issues.get(issue_id) + return self._issues.get(UUID(issue_id)) async def setup(self) -> None: """Create supervisor events listener.""" @@ -331,8 +349,8 @@ class SupervisorIssues: async def _update(self, _: datetime | None = None) -> None: """Update issues from Supervisor resolution center.""" try: - data = await self._client.get_resolution_info() - except HassioAPIError as err: + data = await self._supervisor_client.resolution.info() + except SupervisorError as err: _LOGGER.error("Failed to update supervisor issues: %r", err) async_call_later( self._hass, @@ -340,18 +358,16 @@ class SupervisorIssues: HassJob(self._update, cancel_on_shutdown=True), ) return - self.unhealthy_reasons = set(data[ATTR_UNHEALTHY]) - self.unsupported_reasons = set(data[ATTR_UNSUPPORTED]) + self.unhealthy_reasons = set(data.unhealthy) + self.unsupported_reasons = set(data.unsupported) # Remove any cached issues that weren't returned - for issue_id in set(self._issues.keys()) - { - issue["uuid"] for issue in data[ATTR_ISSUES] - }: + for issue_id in set(self._issues) - {issue.uuid for issue in data.issues}: self.remove_issue(self._issues[issue_id]) # Add/update any issues that came back await asyncio.gather( - *[self.add_issue_from_data(issue) for issue in data[ATTR_ISSUES]] + *[self.add_issue_from_data(issue) for issue in data.issues] ) @callback diff --git a/homeassistant/components/hassio/repairs.py b/homeassistant/components/hassio/repairs.py index 0fcd96ace38..0e8122c08b9 100644 --- a/homeassistant/components/hassio/repairs.py +++ b/homeassistant/components/hassio/repairs.py @@ -6,6 +6,8 @@ from collections.abc import Callable, Coroutine from types import MethodType from typing import Any +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ContextType import voluptuous as vol from homeassistant.components.repairs import RepairsFlow 
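The _update() hunk above reconciles the cached issues against whatever the resolution center returns: anything cached but no longer reported is removed (deleting its repair), and everything reported is (re)added. The same set-difference pattern, reduced to plain dictionaries as a rough sketch with invented sample data:

# Sketch of the cache-reconciliation pattern from SupervisorIssues._update().
# "cached" and "reported" are invented sample data, keyed by UUID like the real cache.
from uuid import uuid4

a, b, c = uuid4(), uuid4(), uuid4()

cached = {a: "issue A", b: "issue B"}          # issues we already created repairs for
reported = {b: "issue B", c: "issue C (new)"}  # what resolution.info() just returned

# Remove any cached issues that weren't returned...
for stale_id in set(cached) - set(reported):
    cached.pop(stale_id)                       # the real code also deletes the repair

# ...then add/update everything that was returned.
for issue_id, issue in reported.items():
    cached[issue_id] = issue                   # the real code awaits add_issue_from_data()

assert set(cached) == {b, c}

Keying the cache by UUID rather than by raw string is what lets the set difference line up directly with the typed issues returned by the Supervisor client.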
@@ -20,9 +22,8 @@ from .const import ( PLACEHOLDER_KEY_ADDON, PLACEHOLDER_KEY_COMPONENTS, PLACEHOLDER_KEY_REFERENCE, - SupervisorIssueContext, ) -from .handler import async_apply_suggestion +from .handler import get_supervisor_client from .issues import Issue, Suggestion HELP_URLS = { @@ -51,9 +52,10 @@ class SupervisorIssueRepairFlow(RepairsFlow): _data: dict[str, Any] | None = None _issue: Issue | None = None - def __init__(self, issue_id: str) -> None: + def __init__(self, hass: HomeAssistant, issue_id: str) -> None: """Initialize repair flow.""" self._issue_id = issue_id + self._supervisor_client = get_supervisor_client(hass) super().__init__() @property @@ -124,9 +126,12 @@ class SupervisorIssueRepairFlow(RepairsFlow): if not confirmed and suggestion.key in SUGGESTION_CONFIRMATION_REQUIRED: return self._async_form_for_suggestion(suggestion) - if await async_apply_suggestion(self.hass, suggestion.uuid): - return self.async_create_entry(data={}) - return self.async_abort(reason="apply_suggestion_fail") + try: + await self._supervisor_client.resolution.apply_suggestion(suggestion.uuid) + except SupervisorError: + return self.async_abort(reason="apply_suggestion_fail") + + return self.async_create_entry(data={}) @staticmethod def _async_step( @@ -163,9 +168,9 @@ class DockerConfigIssueRepairFlow(SupervisorIssueRepairFlow): if issue.key == self.issue.key or issue.type != self.issue.type: continue - if issue.context == SupervisorIssueContext.CORE: + if issue.context == ContextType.CORE: components.insert(0, "Home Assistant") - elif issue.context == SupervisorIssueContext.ADDON: + elif issue.context == ContextType.ADDON: components.append( next( ( @@ -210,11 +215,11 @@ async def async_create_fix_flow( supervisor_issues = get_issues_info(hass) issue = supervisor_issues and supervisor_issues.get_issue(issue_id) if issue and issue.key == ISSUE_KEY_SYSTEM_DOCKER_CONFIG: - return DockerConfigIssueRepairFlow(issue_id) + return DockerConfigIssueRepairFlow(hass, issue_id) if issue and issue.key in { ISSUE_KEY_ADDON_DETACHED_ADDON_REMOVED, ISSUE_KEY_ADDON_BOOT_FAIL, }: - return AddonIssueRepairFlow(issue_id) + return AddonIssueRepairFlow(hass, issue_id) - return SupervisorIssueRepairFlow(issue_id) + return SupervisorIssueRepairFlow(hass, issue_id) diff --git a/homeassistant/components/hassio/strings.json b/homeassistant/components/hassio/strings.json index de42a317cc7..556a5a13f95 100644 --- a/homeassistant/components/hassio/strings.json +++ b/homeassistant/components/hassio/strings.json @@ -279,7 +279,7 @@ } }, "addon_restart": { - "name": "Restart add-on.", + "name": "Restart add-on", "description": "Restarts an add-on.", "fields": { "addon": { @@ -289,7 +289,7 @@ } }, "addon_stdin": { - "name": "Write data to add-on stdin.", + "name": "Write data to add-on stdin", "description": "Writes data to the add-on's standard input.", "fields": { "addon": { @@ -299,7 +299,7 @@ } }, "addon_stop": { - "name": "Stop add-on.", + "name": "Stop add-on", "description": "Stops an add-on.", "fields": { "addon": { @@ -309,7 +309,7 @@ } }, "addon_update": { - "name": "Update add-on.", + "name": "Update add-on", "description": "Updates an add-on. This action should be used with caution since add-on updates can contain breaking changes. It is highly recommended that you review release notes/change logs before updating an add-on.", "fields": { "addon": { @@ -319,15 +319,15 @@ } }, "host_reboot": { - "name": "Reboot the host system.", + "name": "Reboot the host system", "description": "Reboots the host system." 
}, "host_shutdown": { - "name": "Power off the host system.", + "name": "Power off the host system", "description": "Powers off the host system." }, "backup_full": { - "name": "Create a full backup.", + "name": "Create a full backup", "description": "Creates a full backup.", "fields": { "name": { @@ -353,7 +353,7 @@ } }, "backup_partial": { - "name": "Create a partial backup.", + "name": "Create a partial backup", "description": "Creates a partial backup.", "fields": { "homeassistant": { @@ -391,7 +391,7 @@ } }, "restore_full": { - "name": "Restore from full backup.", + "name": "Restore from full backup", "description": "Restores from full backup.", "fields": { "slug": { @@ -405,7 +405,7 @@ } }, "restore_partial": { - "name": "Restore from partial backup.", + "name": "Restore from partial backup", "description": "Restores from a partial backup.", "fields": { "slug": { diff --git a/homeassistant/components/haveibeenpwned/manifest.json b/homeassistant/components/haveibeenpwned/manifest.json index 2451871f0c8..eb9ad4c356f 100644 --- a/homeassistant/components/haveibeenpwned/manifest.json +++ b/homeassistant/components/haveibeenpwned/manifest.json @@ -3,5 +3,6 @@ "name": "HaveIBeenPwned", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/haveibeenpwned", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/hddtemp/manifest.json b/homeassistant/components/hddtemp/manifest.json index 8dd2676596c..4fe23233870 100644 --- a/homeassistant/components/hddtemp/manifest.json +++ b/homeassistant/components/hddtemp/manifest.json @@ -3,5 +3,6 @@ "name": "hddtemp", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/hddtemp", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/hdmi_cec/entity.py b/homeassistant/components/hdmi_cec/entity.py index b1bcb2720d4..bdb796e6a36 100644 --- a/homeassistant/components/hdmi_cec/entity.py +++ b/homeassistant/components/hdmi_cec/entity.py @@ -36,7 +36,7 @@ class CecEntity(Entity): """Initialize the device.""" self._device = device self._logical_address = logical - self.entity_id = "%s.%d" % (DOMAIN, self._logical_address) + self.entity_id = f"{DOMAIN}.{self._logical_address}" self._set_attr_name() self._attr_icon = ICONS_BY_TYPE.get(self._device.type, ICON_UNKNOWN) diff --git a/homeassistant/components/hdmi_cec/manifest.json b/homeassistant/components/hdmi_cec/manifest.json index fbd9e2304d9..2e37e908e16 100644 --- a/homeassistant/components/hdmi_cec/manifest.json +++ b/homeassistant/components/hdmi_cec/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/hdmi_cec", "iot_class": "local_push", "loggers": ["pycec"], + "quality_scale": "legacy", "requirements": ["pyCEC==0.5.2"] } diff --git a/homeassistant/components/heatmiser/manifest.json b/homeassistant/components/heatmiser/manifest.json index f3f33f79b04..c7ffeb237ed 100644 --- a/homeassistant/components/heatmiser/manifest.json +++ b/homeassistant/components/heatmiser/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/heatmiser", "iot_class": "local_polling", "loggers": ["heatmiserV3"], + "quality_scale": "legacy", "requirements": ["heatmiserV3==2.0.3"] } diff --git a/homeassistant/components/here_travel_time/config_flow.py b/homeassistant/components/here_travel_time/config_flow.py index 4376ae793c0..c2b70de148c 100644 --- 
a/homeassistant/components/here_travel_time/config_flow.py +++ b/homeassistant/components/here_travel_time/config_flow.py @@ -113,7 +113,7 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> HERETravelTimeOptionsFlow: """Get the options flow.""" - return HERETravelTimeOptionsFlow(config_entry) + return HERETravelTimeOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -297,9 +297,8 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): class HERETravelTimeOptionsFlow(OptionsFlow): """Handle HERE Travel Time options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize HERE Travel Time options flow.""" - self.config_entry = config_entry self._config: dict[str, Any] = {} async def async_step_init( diff --git a/homeassistant/components/hikvision/manifest.json b/homeassistant/components/hikvision/manifest.json index e37e149ccda..a0832732105 100644 --- a/homeassistant/components/hikvision/manifest.json +++ b/homeassistant/components/hikvision/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/hikvision", "iot_class": "local_push", "loggers": ["pyhik"], + "quality_scale": "legacy", "requirements": ["pyHik==0.3.2"] } diff --git a/homeassistant/components/hikvisioncam/manifest.json b/homeassistant/components/hikvisioncam/manifest.json index 28f677512b7..badb38a52d5 100644 --- a/homeassistant/components/hikvisioncam/manifest.json +++ b/homeassistant/components/hikvisioncam/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/hikvisioncam", "iot_class": "local_polling", "loggers": ["hikvision"], + "quality_scale": "legacy", "requirements": ["hikvision==0.4"] } diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index 365be06fd2d..7241e1fac9a 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -22,7 +22,7 @@ import homeassistant.util.dt as dt_util from . import websocket_api from .const import DOMAIN -from .helpers import entities_may_have_state_changes_after, has_recorder_run_after +from .helpers import entities_may_have_state_changes_after, has_states_before CONF_ORDER = "use_include_order" @@ -107,7 +107,10 @@ class HistoryPeriodView(HomeAssistantView): no_attributes = "no_attributes" in request.query if ( - (end_time and not has_recorder_run_after(hass, end_time)) + # has_states_before will return True if there are states older than + # end_time. If it's false, we know there are no states in the + # database up until end_time. 
+ (end_time and not has_states_before(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/history/helpers.py b/homeassistant/components/history/helpers.py index bd477e7e4ed..2010b7373ff 100644 --- a/homeassistant/components/history/helpers.py +++ b/homeassistant/components/history/helpers.py @@ -6,7 +6,6 @@ from collections.abc import Iterable from datetime import datetime as dt from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import process_timestamp from homeassistant.core import HomeAssistant @@ -26,8 +25,10 @@ def entities_may_have_state_changes_after( return False -def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool: - """Check if the recorder has any runs after a specific time.""" - return run_time >= process_timestamp( - get_instance(hass).recorder_runs_manager.first.start - ) +def has_states_before(hass: HomeAssistant, run_time: dt) -> bool: + """Check if the recorder has states as old or older than run_time. + + Returns True if there may be such states. + """ + oldest_ts = get_instance(hass).states_manager.oldest_ts + return oldest_ts is not None and run_time.timestamp() >= oldest_ts diff --git a/homeassistant/components/history/websocket_api.py b/homeassistant/components/history/websocket_api.py index c85d975c3c9..35f8ed5f1ac 100644 --- a/homeassistant/components/history/websocket_api.py +++ b/homeassistant/components/history/websocket_api.py @@ -39,7 +39,7 @@ from homeassistant.util.async_ import create_eager_task import homeassistant.util.dt as dt_util from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES -from .helpers import entities_may_have_state_changes_after, has_recorder_run_after +from .helpers import entities_may_have_state_changes_after, has_states_before _LOGGER = logging.getLogger(__name__) @@ -142,7 +142,10 @@ async def ws_get_history_during_period( no_attributes = msg["no_attributes"] if ( - (end_time and not has_recorder_run_after(hass, end_time)) + # has_states_before will return True if there are states older than + # end_time. If it's false, we know there are no states in the + # database up until end_time. 
+ (end_time and not has_states_before(hass, end_time)) or not include_start_time_state and entity_ids and not entities_may_have_state_changes_after( diff --git a/homeassistant/components/history_stats/data.py b/homeassistant/components/history_stats/data.py index 544e1772b01..f9b79d74cb4 100644 --- a/homeassistant/components/history_stats/data.py +++ b/homeassistant/components/history_stats/data.py @@ -4,6 +4,8 @@ from __future__ import annotations from dataclasses import dataclass import datetime +import logging +import math from homeassistant.components.recorder import get_instance, history from homeassistant.core import Event, EventStateChangedData, HomeAssistant, State @@ -14,6 +16,8 @@ from .helpers import async_calculate_period, floored_timestamp MIN_TIME_UTC = datetime.datetime.min.replace(tzinfo=dt_util.UTC) +_LOGGER = logging.getLogger(__name__) + @dataclass class HistoryStatsState: @@ -176,26 +180,32 @@ class HistoryStats: # state_changes_during_period is called with include_start_time_state=True # which is the default and always provides the state at the start # of the period - previous_state_matches = ( - self._history_current_period - and self._history_current_period[0].state in self._entity_states - ) - last_state_change_timestamp = start_timestamp + previous_state_matches = False + last_state_change_timestamp = 0.0 elapsed = 0.0 - match_count = 1 if previous_state_matches else 0 + match_count = 0 # Make calculations for history_state in self._history_current_period: current_state_matches = history_state.state in self._entity_states state_change_timestamp = history_state.last_changed + if math.floor(state_change_timestamp) > now_timestamp: + # Shouldn't count states that are in the future + _LOGGER.debug( + "Skipping future timestamp %s (now %s)", + state_change_timestamp, + now_timestamp, + ) + continue + if previous_state_matches: elapsed += state_change_timestamp - last_state_change_timestamp elif current_state_matches: match_count += 1 previous_state_matches = current_state_matches - last_state_change_timestamp = state_change_timestamp + last_state_change_timestamp = max(start_timestamp, state_change_timestamp) # Count time elapsed between last history state and end of measure if previous_state_matches: diff --git a/homeassistant/components/history_stats/strings.json b/homeassistant/components/history_stats/strings.json index 8961d66118d..aff2ac50bef 100644 --- a/homeassistant/components/history_stats/strings.json +++ b/homeassistant/components/history_stats/strings.json @@ -9,7 +9,7 @@ }, "step": { "user": { - "description": "Add a history stats sensor", + "description": "Create a history stats sensor", "data": { "name": "[%key:common::config_flow::data::name%]", "entity_id": "Entity", diff --git a/homeassistant/components/hitron_coda/manifest.json b/homeassistant/components/hitron_coda/manifest.json index 2f18707c95e..15f71b62cf3 100644 --- a/homeassistant/components/hitron_coda/manifest.json +++ b/homeassistant/components/hitron_coda/manifest.json @@ -3,5 +3,6 @@ "name": "Rogers Hitron CODA", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/hitron_coda", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/hive/config_flow.py b/homeassistant/components/hive/config_flow.py index d6be2d1efab..a997954f4cc 100644 --- a/homeassistant/components/hive/config_flow.py +++ b/homeassistant/components/hive/config_flow.py @@ -182,7 +182,6 @@ class 
HiveOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Hive options flow.""" self.hive = None - self.config_entry = config_entry self.interval = config_entry.options.get(CONF_SCAN_INTERVAL, 120) async def async_step_init( diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index a3c0a4514d3..7edc140da11 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.61", "babel==2.15.0"] + "requirements": ["holidays==0.62", "babel==2.15.0"] } diff --git a/homeassistant/components/home_connect/__init__.py b/homeassistant/components/home_connect/__init__.py index c60515eb57f..6e89fd2c9f7 100644 --- a/homeassistant/components/home_connect/__init__.py +++ b/homeassistant/components/home_connect/__init__.py @@ -4,7 +4,8 @@ from __future__ import annotations from datetime import timedelta import logging -from typing import Any +import re +from typing import Any, cast from requests import HTTPError import voluptuous as vol @@ -40,8 +41,12 @@ from .const import ( SERVICE_START_PROGRAM, ) +type HomeConnectConfigEntry = ConfigEntry[api.ConfigEntryAuth] + _LOGGER = logging.getLogger(__name__) +RE_CAMEL_CASE = re.compile(r"(?<!^)(?=[A-Z])|(?<=\D)(?=\d)") + -def _get_appliance_by_device_id( - hass: HomeAssistant, device_id: str -) -> api.HomeConnectDevice: - """Return a Home Connect appliance instance given an device_id.""" - for hc_api in hass.data[DOMAIN].values(): - for device in hc_api.devices: - if device.device_id == device_id: - return device.appliance - raise ValueError(f"Appliance for device id {device_id} not found") +def _get_appliance( + hass: HomeAssistant, + device_id: str | None = None, + device_entry: dr.DeviceEntry | None = None, + entry: HomeConnectConfigEntry | None = None, +) -> api.HomeConnectAppliance: + """Return a Home Connect appliance instance given a device id or a device entry.""" + if device_id is not None and device_entry is None: + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get(device_id) + assert device_entry, "Either a device id or a device entry must be provided" + + ha_id = next( + ( + identifier[1] + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + ), + None, + ) + assert ha_id + + def find_appliance( + entry: HomeConnectConfigEntry, + ) -> api.HomeConnectAppliance | None: + for device in entry.runtime_data.devices: + appliance = device.appliance + if appliance.haId == ha_id: + return appliance + return None + + if entry is None: + for entry_id in device_entry.config_entries: + entry = hass.config_entries.async_get_entry(entry_id) + assert entry + if entry.domain == DOMAIN: + entry = cast(HomeConnectConfigEntry, entry) + if (appliance := find_appliance(entry)) is not None: + return appliance + elif (appliance := find_appliance(entry)) is not None: + return appliance + raise ValueError(f"Appliance for device id {device_entry.id} not found") async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Home Connect component.""" - hass.data[DOMAIN] = {} async def _async_service_program(call, method): """Execute calls to services taking a program.""" @@ -121,14 +159,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: options.append(option) - appliance = _get_appliance_by_device_id(hass, device_id) + appliance = _get_appliance(hass, device_id) await
hass.async_add_executor_job(getattr(appliance, method), program, options) async def _async_service_command(call, command): """Execute calls to services executing a command.""" device_id = call.data[ATTR_DEVICE_ID] - appliance = _get_appliance_by_device_id(hass, device_id) + appliance = _get_appliance(hass, device_id) await hass.async_add_executor_job(appliance.execute_command, command) async def _async_service_key_value(call, method): @@ -138,7 +176,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: unit = call.data.get(ATTR_UNIT) device_id = call.data[ATTR_DEVICE_ID] - appliance = _get_appliance_by_device_id(hass, device_id) + appliance = _get_appliance(hass, device_id) if unit is not None: await hass.async_add_executor_job( getattr(appliance, method), @@ -224,7 +262,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: HomeConnectConfigEntry) -> bool: """Set up Home Connect from a config entry.""" implementation = ( await config_entry_oauth2_flow.async_get_config_entry_implementation( @@ -232,9 +270,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) ) - hc_api = api.ConfigEntryAuth(hass, entry, implementation) - - hass.data[DOMAIN][entry.entry_id] = hc_api + entry.runtime_data = api.ConfigEntryAuth(hass, entry, implementation) await update_all_devices(hass, entry) @@ -243,45 +279,35 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: HomeConnectConfigEntry +) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @Throttle(SCAN_INTERVAL) -async def update_all_devices(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def update_all_devices( + hass: HomeAssistant, entry: HomeConnectConfigEntry +) -> None: """Update all the devices.""" - data = hass.data[DOMAIN] - hc_api = data[entry.entry_id] + hc_api = entry.runtime_data - device_registry = dr.async_get(hass) try: await hass.async_add_executor_job(hc_api.get_devices) for device in hc_api.devices: - device_entry = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - identifiers={(DOMAIN, device.appliance.haId)}, - name=device.appliance.name, - manufacturer=device.appliance.brand, - model=device.appliance.vib, - ) - - device.device_id = device_entry.id - await hass.async_add_executor_job(device.initialize) except HTTPError as err: _LOGGER.warning("Cannot update devices: %s", err.response.status_code) -async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_migrate_entry( + hass: HomeAssistant, entry: HomeConnectConfigEntry +) -> bool: """Migrate old entry.""" - _LOGGER.debug("Migrating from version %s", config_entry.version) + _LOGGER.debug("Migrating from version %s", entry.version) - if config_entry.version == 1 and config_entry.minor_version == 1: + if entry.version == 1 and entry.minor_version == 1: @callback def update_unique_id( @@ -297,20 +323,31 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> } return None 
- await async_migrate_entries(hass, config_entry.entry_id, update_unique_id) + await async_migrate_entries(hass, entry.entry_id, update_unique_id) - hass.config_entries.async_update_entry(config_entry, minor_version=2) + hass.config_entries.async_update_entry(entry, minor_version=2) - _LOGGER.debug("Migration to version %s successful", config_entry.version) + _LOGGER.debug("Migration to version %s successful", entry.version) return True def get_dict_from_home_connect_error(err: api.HomeConnectError) -> dict[str, Any]: """Return a dict from a Home Connect error.""" - return ( - err.args[0] + return { + "description": cast(dict[str, Any], err.args[0]).get("description", "?") if len(err.args) > 0 and isinstance(err.args[0], dict) - else {"description": err.args[0]} + else err.args[0] if len(err.args) > 0 and isinstance(err.args[0], str) - else {} - ) + else "?", + } + + +def bsh_key_to_translation_key(bsh_key: str) -> str: + """Convert a BSH key to a translation key format. + + This function takes a BSH key, such as `Dishcare.Dishwasher.Program.Eco50`, + and converts it to a translation key format, such as `dishcare_dishwasher_bsh_key_eco50`. + """ + return "_".join( + RE_CAMEL_CASE.sub("_", split) for split in bsh_key.split(".") + ).lower() diff --git a/homeassistant/components/home_connect/binary_sensor.py b/homeassistant/components/home_connect/binary_sensor.py index f044a3fdfb4..f9775918f16 100644 --- a/homeassistant/components/home_connect/binary_sensor.py +++ b/homeassistant/components/home_connect/binary_sensor.py @@ -10,8 +10,8 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntityDescription, ) from homeassistant.components.script import scripts_with_entity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.issue_registry import ( IssueSeverity, @@ -19,6 +19,7 @@ from homeassistant.helpers.issue_registry import ( async_delete_issue, ) +from . 
import HomeConnectConfigEntry from .api import HomeConnectDevice from .const import ( ATTR_VALUE, @@ -117,15 +118,14 @@ BINARY_SENSORS = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect binary sensor.""" def get_entities() -> list[BinarySensorEntity]: entities: list[BinarySensorEntity] = [] - hc_api = hass.data[DOMAIN][config_entry.entry_id] - for device in hc_api.devices: + for device in entry.runtime_data.devices: entities.extend( HomeConnectBinarySensor(device, description) for description in BINARY_SENSORS @@ -192,11 +192,32 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): async def async_added_to_hass(self) -> None: """Call when entity is added to hass.""" await super().async_added_to_hass() - entity_automations = automations_with_entity(self.hass, self.entity_id) - entity_scripts = scripts_with_entity(self.hass, self.entity_id) - items = entity_automations + entity_scripts + automations = automations_with_entity(self.hass, self.entity_id) + scripts = scripts_with_entity(self.hass, self.entity_id) + items = automations + scripts if not items: return + + entity_reg: er.EntityRegistry = er.async_get(self.hass) + entity_automations = [ + automation_entity + for automation_id in automations + if (automation_entity := entity_reg.async_get(automation_id)) + ] + entity_scripts = [ + script_entity + for script_id in scripts + if (script_entity := entity_reg.async_get(script_id)) + ] + + items_list = [ + f"- [{item.original_name}](/config/automation/edit/{item.unique_id})" + for item in entity_automations + ] + [ + f"- [{item.original_name}](/config/script/edit/{item.unique_id})" + for item in entity_scripts + ] + async_create_issue( self.hass, DOMAIN, @@ -207,7 +228,7 @@ class HomeConnectDoorBinarySensor(HomeConnectBinarySensor): translation_key="deprecated_binary_common_door_sensor", translation_placeholders={ "entity": self.entity_id, - "items": "\n".join([f"- {item}" for item in items]), + "items": "\n".join(items_list), }, ) diff --git a/homeassistant/components/home_connect/const.py b/homeassistant/components/home_connect/const.py index e49a56b9b97..e9f32b0e772 100644 --- a/homeassistant/components/home_connect/const.py +++ b/homeassistant/components/home_connect/const.py @@ -5,10 +5,23 @@ DOMAIN = "home_connect" OAUTH2_AUTHORIZE = "https://api.home-connect.com/security/oauth/authorize" OAUTH2_TOKEN = "https://api.home-connect.com/security/oauth/token" +APPLIANCES_WITH_PROGRAMS = ( + "CleaningRobot", + "CoffeeMaker", + "Dishwasher", + "Dryer", + "Hood", + "Oven", + "WarmingDrawer", + "Washer", + "WasherDryer", +) + BSH_POWER_STATE = "BSH.Common.Setting.PowerState" BSH_POWER_ON = "BSH.Common.EnumType.PowerState.On" BSH_POWER_OFF = "BSH.Common.EnumType.PowerState.Off" BSH_POWER_STANDBY = "BSH.Common.EnumType.PowerState.Standby" +BSH_SELECTED_PROGRAM = "BSH.Common.Root.SelectedProgram" BSH_ACTIVE_PROGRAM = "BSH.Common.Root.ActiveProgram" BSH_REMOTE_CONTROL_ACTIVATION_STATE = "BSH.Common.Status.RemoteControlActive" BSH_REMOTE_START_ALLOWANCE_STATE = "BSH.Common.Status.RemoteControlStartAllowed" diff --git a/homeassistant/components/home_connect/diagnostics.py b/homeassistant/components/home_connect/diagnostics.py new file mode 100644 index 00000000000..d2505853d23 --- /dev/null +++ b/homeassistant/components/home_connect/diagnostics.py @@ -0,0 +1,46 @@ +"""Diagnostics support for Home Connect Diagnostics.""" + +from __future__ import 
annotations + +from typing import Any + +from homeconnect.api import HomeConnectAppliance + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntry + +from . import HomeConnectConfigEntry, _get_appliance +from .api import HomeConnectDevice + + +def _generate_appliance_diagnostics(appliance: HomeConnectAppliance) -> dict[str, Any]: + return { + "status": appliance.status, + "programs": appliance.get_programs_available(), + } + + +def _generate_entry_diagnostics( + devices: list[HomeConnectDevice], +) -> dict[str, dict[str, Any]]: + return { + device.appliance.haId: _generate_appliance_diagnostics(device.appliance) + for device in devices + } + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: HomeConnectConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + return await hass.async_add_executor_job( + _generate_entry_diagnostics, entry.runtime_data.devices + ) + + +async def async_get_device_diagnostics( + hass: HomeAssistant, entry: HomeConnectConfigEntry, device: DeviceEntry +) -> dict[str, Any]: + """Return diagnostics for a device.""" + appliance = _get_appliance(hass, device_entry=device, entry=entry) + return await hass.async_add_executor_job(_generate_appliance_diagnostics, appliance) diff --git a/homeassistant/components/home_connect/light.py b/homeassistant/components/home_connect/light.py index 873e7d24f93..97efc0413ab 100644 --- a/homeassistant/components/home_connect/light.py +++ b/homeassistant/components/home_connect/light.py @@ -15,14 +15,13 @@ from homeassistant.components.light import ( LightEntity, LightEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.color as color_util -from . import get_dict_from_home_connect_error -from .api import ConfigEntryAuth, HomeConnectDevice +from . import HomeConnectConfigEntry, get_dict_from_home_connect_error +from .api import HomeConnectDevice from .const import ( ATTR_VALUE, BSH_AMBIENT_LIGHT_BRIGHTNESS, @@ -88,18 +87,17 @@ LIGHTS: tuple[HomeConnectLightEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect light.""" def get_entities() -> list[LightEntity]: """Get a list of entities.""" - hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] return [ HomeConnectLight(device, description) for description in LIGHTS - for device in hc_api.devices + for device in entry.runtime_data.devices if description.key in device.appliance.status ] diff --git a/homeassistant/components/home_connect/number.py b/homeassistant/components/home_connect/number.py index ad853df77d0..d1063a2026f 100644 --- a/homeassistant/components/home_connect/number.py +++ b/homeassistant/components/home_connect/number.py @@ -11,13 +11,11 @@ from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import get_dict_from_home_connect_error -from .api import ConfigEntryAuth +from . 
import HomeConnectConfigEntry, get_dict_from_home_connect_error from .const import ( ATTR_CONSTRAINTS, ATTR_STEPSIZE, @@ -84,18 +82,17 @@ NUMBERS = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect number.""" def get_entities() -> list[HomeConnectNumberEntity]: """Get a list of entities.""" - hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] return [ HomeConnectNumberEntity(device, description) for description in NUMBERS - for device in hc_api.devices + for device in entry.runtime_data.devices if description.key in device.appliance.status ] diff --git a/homeassistant/components/home_connect/select.py b/homeassistant/components/home_connect/select.py new file mode 100644 index 00000000000..fdd1f38bf97 --- /dev/null +++ b/homeassistant/components/home_connect/select.py @@ -0,0 +1,300 @@ +"""Provides a select platform for Home Connect.""" + +import contextlib +import logging + +from homeconnect.api import HomeConnectError + +from homeassistant.components.select import SelectEntity, SelectEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ( + HomeConnectConfigEntry, + bsh_key_to_translation_key, + get_dict_from_home_connect_error, +) +from .api import HomeConnectDevice +from .const import ( + APPLIANCES_WITH_PROGRAMS, + ATTR_VALUE, + BSH_ACTIVE_PROGRAM, + BSH_SELECTED_PROGRAM, + DOMAIN, +) +from .entity import HomeConnectEntity + +_LOGGER = logging.getLogger(__name__) + +TRANSLATION_KEYS_PROGRAMS_MAP = { + bsh_key_to_translation_key(program): program + for program in ( + "ConsumerProducts.CleaningRobot.Program.Cleaning.CleanAll", + "ConsumerProducts.CleaningRobot.Program.Cleaning.CleanMap", + "ConsumerProducts.CleaningRobot.Program.Basic.GoHome", + "ConsumerProducts.CoffeeMaker.Program.Beverage.Ristretto", + "ConsumerProducts.CoffeeMaker.Program.Beverage.Espresso", + "ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoDoppio", + "ConsumerProducts.CoffeeMaker.Program.Beverage.Coffee", + "ConsumerProducts.CoffeeMaker.Program.Beverage.XLCoffee", + "ConsumerProducts.CoffeeMaker.Program.Beverage.CaffeGrande", + "ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoMacchiato", + "ConsumerProducts.CoffeeMaker.Program.Beverage.Cappuccino", + "ConsumerProducts.CoffeeMaker.Program.Beverage.LatteMacchiato", + "ConsumerProducts.CoffeeMaker.Program.Beverage.CaffeLatte", + "ConsumerProducts.CoffeeMaker.Program.Beverage.MilkFroth", + "ConsumerProducts.CoffeeMaker.Program.Beverage.WarmMilk", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.KleinerBrauner", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.GrosserBrauner", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Verlaengerter", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.VerlaengerterBraun", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.WienerMelange", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.FlatWhite", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Cortado", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.CafeCortado", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.CafeConLeche", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.CafeAuLait", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Doppio", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Kaapi", + 
"ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.KoffieVerkeerd", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Galao", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Garoto", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.Americano", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.RedEye", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.BlackEye", + "ConsumerProducts.CoffeeMaker.Program.CoffeeWorld.DeadEye", + "ConsumerProducts.CoffeeMaker.Program.Beverage.HotWater", + "Dishcare.Dishwasher.Program.PreRinse", + "Dishcare.Dishwasher.Program.Auto1", + "Dishcare.Dishwasher.Program.Auto2", + "Dishcare.Dishwasher.Program.Auto3", + "Dishcare.Dishwasher.Program.Eco50", + "Dishcare.Dishwasher.Program.Quick45", + "Dishcare.Dishwasher.Program.Intensiv70", + "Dishcare.Dishwasher.Program.Normal65", + "Dishcare.Dishwasher.Program.Glas40", + "Dishcare.Dishwasher.Program.GlassCare", + "Dishcare.Dishwasher.Program.NightWash", + "Dishcare.Dishwasher.Program.Quick65", + "Dishcare.Dishwasher.Program.Normal45", + "Dishcare.Dishwasher.Program.Intensiv45", + "Dishcare.Dishwasher.Program.AutoHalfLoad", + "Dishcare.Dishwasher.Program.IntensivPower", + "Dishcare.Dishwasher.Program.MagicDaily", + "Dishcare.Dishwasher.Program.Super60", + "Dishcare.Dishwasher.Program.Kurz60", + "Dishcare.Dishwasher.Program.ExpressSparkle65", + "Dishcare.Dishwasher.Program.MachineCare", + "Dishcare.Dishwasher.Program.SteamFresh", + "Dishcare.Dishwasher.Program.MaximumCleaning", + "Dishcare.Dishwasher.Program.MixedLoad", + "LaundryCare.Dryer.Program.Cotton", + "LaundryCare.Dryer.Program.Synthetic", + "LaundryCare.Dryer.Program.Mix", + "LaundryCare.Dryer.Program.Blankets", + "LaundryCare.Dryer.Program.BusinessShirts", + "LaundryCare.Dryer.Program.DownFeathers", + "LaundryCare.Dryer.Program.Hygiene", + "LaundryCare.Dryer.Program.Jeans", + "LaundryCare.Dryer.Program.Outdoor", + "LaundryCare.Dryer.Program.SyntheticRefresh", + "LaundryCare.Dryer.Program.Towels", + "LaundryCare.Dryer.Program.Delicates", + "LaundryCare.Dryer.Program.Super40", + "LaundryCare.Dryer.Program.Shirts15", + "LaundryCare.Dryer.Program.Pillow", + "LaundryCare.Dryer.Program.AntiShrink", + "LaundryCare.Dryer.Program.MyTime.MyDryingTime", + "LaundryCare.Dryer.Program.TimeCold", + "LaundryCare.Dryer.Program.TimeWarm", + "LaundryCare.Dryer.Program.InBasket", + "LaundryCare.Dryer.Program.TimeColdFix.TimeCold20", + "LaundryCare.Dryer.Program.TimeColdFix.TimeCold30", + "LaundryCare.Dryer.Program.TimeColdFix.TimeCold60", + "LaundryCare.Dryer.Program.TimeWarmFix.TimeWarm30", + "LaundryCare.Dryer.Program.TimeWarmFix.TimeWarm40", + "LaundryCare.Dryer.Program.TimeWarmFix.TimeWarm60", + "LaundryCare.Dryer.Program.Dessous", + "Cooking.Common.Program.Hood.Automatic", + "Cooking.Common.Program.Hood.Venting", + "Cooking.Common.Program.Hood.DelayedShutOff", + "Cooking.Oven.Program.HeatingMode.PreHeating", + "Cooking.Oven.Program.HeatingMode.HotAir", + "Cooking.Oven.Program.HeatingMode.HotAirEco", + "Cooking.Oven.Program.HeatingMode.HotAirGrilling", + "Cooking.Oven.Program.HeatingMode.TopBottomHeating", + "Cooking.Oven.Program.HeatingMode.TopBottomHeatingEco", + "Cooking.Oven.Program.HeatingMode.BottomHeating", + "Cooking.Oven.Program.HeatingMode.PizzaSetting", + "Cooking.Oven.Program.HeatingMode.SlowCook", + "Cooking.Oven.Program.HeatingMode.IntensiveHeat", + "Cooking.Oven.Program.HeatingMode.KeepWarm", + "Cooking.Oven.Program.HeatingMode.PreheatOvenware", + "Cooking.Oven.Program.HeatingMode.FrozenHeatupSpecial", + "Cooking.Oven.Program.HeatingMode.Desiccation", 
+ "Cooking.Oven.Program.HeatingMode.Defrost", + "Cooking.Oven.Program.HeatingMode.Proof", + "Cooking.Oven.Program.HeatingMode.HotAir30Steam", + "Cooking.Oven.Program.HeatingMode.HotAir60Steam", + "Cooking.Oven.Program.HeatingMode.HotAir80Steam", + "Cooking.Oven.Program.HeatingMode.HotAir100Steam", + "Cooking.Oven.Program.HeatingMode.SabbathProgramme", + "Cooking.Oven.Program.Microwave.90Watt", + "Cooking.Oven.Program.Microwave.180Watt", + "Cooking.Oven.Program.Microwave.360Watt", + "Cooking.Oven.Program.Microwave.600Watt", + "Cooking.Oven.Program.Microwave.900Watt", + "Cooking.Oven.Program.Microwave.1000Watt", + "Cooking.Oven.Program.Microwave.Max", + "Cooking.Oven.Program.HeatingMode.WarmingDrawer", + "LaundryCare.Washer.Program.Cotton", + "LaundryCare.Washer.Program.Cotton.CottonEco", + "LaundryCare.Washer.Program.Cotton.Eco4060", + "LaundryCare.Washer.Program.Cotton.Colour", + "LaundryCare.Washer.Program.EasyCare", + "LaundryCare.Washer.Program.Mix", + "LaundryCare.Washer.Program.Mix.NightWash", + "LaundryCare.Washer.Program.DelicatesSilk", + "LaundryCare.Washer.Program.Wool", + "LaundryCare.Washer.Program.Sensitive", + "LaundryCare.Washer.Program.Auto30", + "LaundryCare.Washer.Program.Auto40", + "LaundryCare.Washer.Program.Auto60", + "LaundryCare.Washer.Program.Chiffon", + "LaundryCare.Washer.Program.Curtains", + "LaundryCare.Washer.Program.DarkWash", + "LaundryCare.Washer.Program.Dessous", + "LaundryCare.Washer.Program.Monsoon", + "LaundryCare.Washer.Program.Outdoor", + "LaundryCare.Washer.Program.PlushToy", + "LaundryCare.Washer.Program.ShirtsBlouses", + "LaundryCare.Washer.Program.SportFitness", + "LaundryCare.Washer.Program.Towels", + "LaundryCare.Washer.Program.WaterProof", + "LaundryCare.Washer.Program.PowerSpeed59", + "LaundryCare.Washer.Program.Super153045.Super15", + "LaundryCare.Washer.Program.Super153045.Super1530", + "LaundryCare.Washer.Program.DownDuvet.Duvet", + "LaundryCare.Washer.Program.Rinse.RinseSpinDrain", + "LaundryCare.Washer.Program.DrumClean", + "LaundryCare.WasherDryer.Program.Cotton", + "LaundryCare.WasherDryer.Program.Cotton.Eco4060", + "LaundryCare.WasherDryer.Program.Mix", + "LaundryCare.WasherDryer.Program.EasyCare", + "LaundryCare.WasherDryer.Program.WashAndDry60", + "LaundryCare.WasherDryer.Program.WashAndDry90", + ) +} + +PROGRAMS_TRANSLATION_KEYS_MAP = { + value: key for key, value in TRANSLATION_KEYS_PROGRAMS_MAP.items() +} + +PROGRAM_SELECT_ENTITY_DESCRIPTIONS = ( + SelectEntityDescription( + key=BSH_ACTIVE_PROGRAM, + translation_key="active_program", + ), + SelectEntityDescription( + key=BSH_SELECTED_PROGRAM, + translation_key="selected_program", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: HomeConnectConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Home Connect select entities.""" + + def get_entities() -> list[HomeConnectProgramSelectEntity]: + """Get a list of entities.""" + entities: list[HomeConnectProgramSelectEntity] = [] + programs_not_found = set() + for device in entry.runtime_data.devices: + if device.appliance.type in APPLIANCES_WITH_PROGRAMS: + with contextlib.suppress(HomeConnectError): + programs = device.appliance.get_programs_available() + if programs: + for program in programs: + if program not in PROGRAMS_TRANSLATION_KEYS_MAP: + programs.remove(program) + if program not in programs_not_found: + _LOGGER.info( + 'The program "%s" is not part of the official Home Connect API specification', + program, + ) + programs_not_found.add(program) + entities.extend( + 
HomeConnectProgramSelectEntity(device, programs, desc) + for desc in PROGRAM_SELECT_ENTITY_DESCRIPTIONS + ) + return entities + + async_add_entities(await hass.async_add_executor_job(get_entities), True) + + +class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity): + """Select class for Home Connect programs.""" + + def __init__( + self, + device: HomeConnectDevice, + programs: list[str], + desc: SelectEntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__( + device, + desc, + ) + self._attr_options = [ + PROGRAMS_TRANSLATION_KEYS_MAP[program] for program in programs + ] + self.start_on_select = desc.key == BSH_ACTIVE_PROGRAM + + async def async_update(self) -> None: + """Update the program selection status.""" + program = self.device.appliance.status.get(self.bsh_key, {}).get(ATTR_VALUE) + if not program: + program_translation_key = None + elif not ( + program_translation_key := PROGRAMS_TRANSLATION_KEYS_MAP.get(program) + ): + _LOGGER.debug( + 'The program "%s" is not part of the official Home Connect API specification', + program, + ) + self._attr_current_option = program_translation_key + _LOGGER.debug("Updated, new program: %s", self._attr_current_option) + + async def async_select_option(self, option: str) -> None: + """Select new program.""" + bsh_key = TRANSLATION_KEYS_PROGRAMS_MAP[option] + _LOGGER.debug( + "Starting program: %s" if self.start_on_select else "Selecting program: %s", + bsh_key, + ) + if self.start_on_select: + target = self.device.appliance.start_program + else: + target = self.device.appliance.select_program + try: + await self.hass.async_add_executor_job(target, bsh_key) + except HomeConnectError as err: + if self.start_on_select: + translation_key = "start_program" + else: + translation_key = "select_program" + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key=translation_key, + translation_placeholders={ + **get_dict_from_home_connect_error(err), + "program": bsh_key, + }, + ) from err + self.async_entity_update() diff --git a/homeassistant/components/home_connect/sensor.py b/homeassistant/components/home_connect/sensor.py index 70096313d86..3ccf55bac6e 100644 --- a/homeassistant/components/home_connect/sensor.py +++ b/homeassistant/components/home_connect/sensor.py @@ -14,14 +14,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import PERCENTAGE, UnitOfTime, UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import slugify import homeassistant.util.dt as dt_util -from .api import ConfigEntryAuth +from . 
import HomeConnectConfigEntry from .const import ( ATTR_VALUE, BSH_DOOR_STATE, @@ -34,7 +33,6 @@ from .const import ( COFFEE_EVENT_WATER_TANK_EMPTY, DISHWASHER_EVENT_RINSE_AID_NEARLY_EMPTY, DISHWASHER_EVENT_SALT_NEARLY_EMPTY, - DOMAIN, REFRIGERATION_EVENT_DOOR_ALARM_FREEZER, REFRIGERATION_EVENT_DOOR_ALARM_REFRIGERATOR, REFRIGERATION_EVENT_TEMP_ALARM_FREEZER, @@ -253,7 +251,7 @@ EVENT_SENSORS = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect sensor.""" @@ -261,8 +259,7 @@ async def async_setup_entry( def get_entities() -> list[SensorEntity]: """Get a list of entities.""" entities: list[SensorEntity] = [] - hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] - for device in hc_api.devices: + for device in entry.runtime_data.devices: entities.extend( HomeConnectSensor( device, diff --git a/homeassistant/components/home_connect/strings.json b/homeassistant/components/home_connect/strings.json index eb57d822b15..f9524763020 100644 --- a/homeassistant/components/home_connect/strings.json +++ b/homeassistant/components/home_connect/strings.json @@ -38,13 +38,16 @@ "message": "Error while trying to set color of {entity_id}: {description}" }, "set_setting": { - "message": "Error while trying to assign the value \"{value}\" to the setting \"{key}\" for {entity_id}: {description}" + "message": "Error while trying to assign the value \"{value}\" to the setting \"{setting_key}\" for {entity_id}: {description}" }, "turn_on": { - "message": "Error while trying to turn on {entity_id} ({key}): {description}" + "message": "Error while trying to turn on {entity_id} ({setting_key}): {description}" }, "turn_off": { - "message": "Error while trying to turn off {entity_id} ({key}): {description}" + "message": "Error while trying to turn off {entity_id} ({setting_key}): {description}" + }, + "select_program": { + "message": "Error while trying to select program {program}: {description}" }, "start_program": { "message": "Error while trying to start program {program}: {description}" @@ -267,6 +270,326 @@ "name": "Wine compartment 3 temperature" } }, + "select": { + "selected_program": { + "name": "Selected program", + "state": { + "consumer_products_cleaning_robot_program_cleaning_clean_all": "Clean all", + "consumer_products_cleaning_robot_program_cleaning_clean_map": "Clean map", + "consumer_products_cleaning_robot_program_basic_go_home": "Go home", + "consumer_products_coffee_maker_program_beverage_ristretto": "Ristretto", + "consumer_products_coffee_maker_program_beverage_espresso": "Espresso", + "consumer_products_coffee_maker_program_beverage_espresso_doppio": "Espresso doppio", + "consumer_products_coffee_maker_program_beverage_coffee": "Coffee", + "consumer_products_coffee_maker_program_beverage_x_l_coffee": "XL coffee", + "consumer_products_coffee_maker_program_beverage_caffe_grande": "Caffe grande", + "consumer_products_coffee_maker_program_beverage_espresso_macchiato": "Espresso macchiato", + "consumer_products_coffee_maker_program_beverage_cappuccino": "Cappuccino", + "consumer_products_coffee_maker_program_beverage_latte_macchiato": "Latte macchiato", + "consumer_products_coffee_maker_program_beverage_caffe_latte": "Caffe latte", + "consumer_products_coffee_maker_program_beverage_milk_froth": "Milk froth", + "consumer_products_coffee_maker_program_beverage_warm_milk": "Warm milk", + 
"consumer_products_coffee_maker_program_coffee_world_kleiner_brauner": "Kleiner brauner", + "consumer_products_coffee_maker_program_coffee_world_grosser_brauner": "Grosser brauner", + "consumer_products_coffee_maker_program_coffee_world_verlaengerter": "Verlaengerter", + "consumer_products_coffee_maker_program_coffee_world_verlaengerter_braun": "Verlaengerter braun", + "consumer_products_coffee_maker_program_coffee_world_wiener_melange": "Wiener melange", + "consumer_products_coffee_maker_program_coffee_world_flat_white": "Flat white", + "consumer_products_coffee_maker_program_coffee_world_cortado": "Cortado", + "consumer_products_coffee_maker_program_coffee_world_cafe_cortado": "Cafe cortado", + "consumer_products_coffee_maker_program_coffee_world_cafe_con_leche": "Cafe con leche", + "consumer_products_coffee_maker_program_coffee_world_cafe_au_lait": "Cafe au lait", + "consumer_products_coffee_maker_program_coffee_world_doppio": "Doppio", + "consumer_products_coffee_maker_program_coffee_world_kaapi": "Kaapi", + "consumer_products_coffee_maker_program_coffee_world_koffie_verkeerd": "Koffie verkeerd", + "consumer_products_coffee_maker_program_coffee_world_galao": "Galao", + "consumer_products_coffee_maker_program_coffee_world_garoto": "Garoto", + "consumer_products_coffee_maker_program_coffee_world_americano": "Americano", + "consumer_products_coffee_maker_program_coffee_world_red_eye": "Red eye", + "consumer_products_coffee_maker_program_coffee_world_black_eye": "Black eye", + "consumer_products_coffee_maker_program_coffee_world_dead_eye": "Dead eye", + "consumer_products_coffee_maker_program_beverage_hot_water": "Hot water", + "dishcare_dishwasher_program_pre_rinse": "Pre_rinse", + "dishcare_dishwasher_program_auto_1": "Auto 1", + "dishcare_dishwasher_program_auto_2": "Auto 2", + "dishcare_dishwasher_program_auto_3": "Auto 3", + "dishcare_dishwasher_program_eco_50": "Eco 50ºC", + "dishcare_dishwasher_program_quick_45": "Quick 45ºC", + "dishcare_dishwasher_program_intensiv_70": "Intensive 70ºC", + "dishcare_dishwasher_program_normal_65": "Normal 65ºC", + "dishcare_dishwasher_program_glas_40": "Glass 40ºC", + "dishcare_dishwasher_program_glass_care": "Glass care", + "dishcare_dishwasher_program_night_wash": "Night wash", + "dishcare_dishwasher_program_quick_65": "Quick 65ºC", + "dishcare_dishwasher_program_normal_45": "Normal 45ºC", + "dishcare_dishwasher_program_intensiv_45": "Intensive 45ºC", + "dishcare_dishwasher_program_auto_half_load": "Auto half load", + "dishcare_dishwasher_program_intensiv_power": "Intensive power", + "dishcare_dishwasher_program_magic_daily": "Magic daily", + "dishcare_dishwasher_program_super_60": "Super 60ºC", + "dishcare_dishwasher_program_kurz_60": "Kurz 60ºC", + "dishcare_dishwasher_program_express_sparkle_65": "Express sparkle 65ºC", + "dishcare_dishwasher_program_machine_care": "Machine care", + "dishcare_dishwasher_program_steam_fresh": "Steam fresh", + "dishcare_dishwasher_program_maximum_cleaning": "Maximum cleaning", + "dishcare_dishwasher_program_mixed_load": "Mixed load", + "laundry_care_dryer_program_cotton": "Cotton", + "laundry_care_dryer_program_synthetic": "Synthetic", + "laundry_care_dryer_program_mix": "Mix", + "laundry_care_dryer_program_blankets": "Blankets", + "laundry_care_dryer_program_business_shirts": "Business shirts", + "laundry_care_dryer_program_down_feathers": "Down feathers", + "laundry_care_dryer_program_hygiene": "Hygiene", + "laundry_care_dryer_program_jeans": "Jeans", + "laundry_care_dryer_program_outdoor": "Outdoor", + 
"laundry_care_dryer_program_synthetic_refresh": "Synthetic refresh", + "laundry_care_dryer_program_towels": "Towels", + "laundry_care_dryer_program_delicates": "Delicates", + "laundry_care_dryer_program_super_40": "Super 40ºC", + "laundry_care_dryer_program_shirts_15": "Shirts 15ºC", + "laundry_care_dryer_program_pillow": "Pillow", + "laundry_care_dryer_program_anti_shrink": "Anti shrink", + "laundry_care_dryer_program_my_time_my_drying_time": "My drying time", + "laundry_care_dryer_program_time_cold": "Cold (variable time)", + "laundry_care_dryer_program_time_warm": "Warm (variable time)", + "laundry_care_dryer_program_in_basket": "In basket", + "laundry_care_dryer_program_time_cold_fix_time_cold_20": "Cold (20 min)", + "laundry_care_dryer_program_time_cold_fix_time_cold_30": "Cold (30 min)", + "laundry_care_dryer_program_time_cold_fix_time_cold_60": "Cold (60 min)", + "laundry_care_dryer_program_time_warm_fix_time_warm_30": "Warm (30 min)", + "laundry_care_dryer_program_time_warm_fix_time_warm_40": "Warm (40 min)", + "laundry_care_dryer_program_time_warm_fix_time_warm_60": "Warm (60 min)", + "laundry_care_dryer_program_dessous": "Dessous", + "cooking_common_program_hood_automatic": "Automatic", + "cooking_common_program_hood_venting": "Venting", + "cooking_common_program_hood_delayed_shut_off": "Delayed shut off", + "cooking_oven_program_heating_mode_pre_heating": "Pre-heating", + "cooking_oven_program_heating_mode_hot_air": "Hot air", + "cooking_oven_program_heating_mode_hot_air_eco": "Hot air eco", + "cooking_oven_program_heating_mode_hot_air_grilling": "Hot air grilling", + "cooking_oven_program_heating_mode_top_bottom_heating": "Top bottom heating", + "cooking_oven_program_heating_mode_top_bottom_heating_eco": "Top bottom heating eco", + "cooking_oven_program_heating_mode_bottom_heating": "Bottom heating", + "cooking_oven_program_heating_mode_pizza_setting": "Pizza setting", + "cooking_oven_program_heating_mode_slow_cook": "Slow cook", + "cooking_oven_program_heating_mode_intensive_heat": "Intensive heat", + "cooking_oven_program_heating_mode_keep_warm": "Keep warm", + "cooking_oven_program_heating_mode_preheat_ovenware": "Preheat ovenware", + "cooking_oven_program_heating_mode_frozen_heatup_special": "Special Heat-Up for frozen products", + "cooking_oven_program_heating_mode_desiccation": "Desiccation", + "cooking_oven_program_heating_mode_defrost": "Defrost", + "cooking_oven_program_heating_mode_proof": "Proof", + "cooking_oven_program_heating_mode_hot_air_30_steam": "Hot air + 30 RH", + "cooking_oven_program_heating_mode_hot_air_60_steam": "Hot air + 60 RH", + "cooking_oven_program_heating_mode_hot_air_80_steam": "Hot air + 80 RH", + "cooking_oven_program_heating_mode_hot_air_100_steam": "Hot air + 100 RH", + "cooking_oven_program_heating_mode_sabbath_programme": "Sabbath programme", + "cooking_oven_program_microwave_90_watt": "90 Watt", + "cooking_oven_program_microwave_180_watt": "180 Watt", + "cooking_oven_program_microwave_360_watt": "360 Watt", + "cooking_oven_program_microwave_600_watt": "600 Watt", + "cooking_oven_program_microwave_900_watt": "900 Watt", + "cooking_oven_program_microwave_1000_watt": "1000 Watt", + "cooking_oven_program_microwave_max": "Max", + "cooking_oven_program_heating_mode_warming_drawer": "Warming drawer", + "laundry_care_washer_program_cotton": "Cotton", + "laundry_care_washer_program_cotton_cotton_eco": "Cotton eco", + "laundry_care_washer_program_cotton_eco_4060": "Cotton eco 40/60ºC", + "laundry_care_washer_program_cotton_colour": "Cotton color", 
+ "laundry_care_washer_program_easy_care": "Easy care", + "laundry_care_washer_program_mix": "Mix", + "laundry_care_washer_program_mix_night_wash": "Mix night wash", + "laundry_care_washer_program_delicates_silk": "Delicates silk", + "laundry_care_washer_program_wool": "Wool", + "laundry_care_washer_program_sensitive": "Sensitive", + "laundry_care_washer_program_auto_30": "Auto 30ºC", + "laundry_care_washer_program_auto_40": "Auto 40ºC", + "laundry_care_washer_program_auto_60": "Auto 60ºC", + "laundry_care_washer_program_chiffon": "Chiffon", + "laundry_care_washer_program_curtains": "Curtains", + "laundry_care_washer_program_dark_wash": "Dark wash", + "laundry_care_washer_program_dessous": "Dessous", + "laundry_care_washer_program_monsoon": "Monsoon", + "laundry_care_washer_program_outdoor": "Outdoor", + "laundry_care_washer_program_plush_toy": "Plush toy", + "laundry_care_washer_program_shirts_blouses": "Shirts blouses", + "laundry_care_washer_program_sport_fitness": "Sport fitness", + "laundry_care_washer_program_towels": "Towels", + "laundry_care_washer_program_water_proof": "Water proof", + "laundry_care_washer_program_power_speed_59": "Power speed <60 min", + "laundry_care_washer_program_super_153045_super_15": "Super 15 min", + "laundry_care_washer_program_super_153045_super_1530": "Super 15/30 min", + "laundry_care_washer_program_down_duvet_duvet": "Down duvet", + "laundry_care_washer_program_rinse_rinse_spin_drain": "Rinse spin drain", + "laundry_care_washer_program_drum_clean": "Drum clean", + "laundry_care_washer_dryer_program_cotton": "Cotton", + "laundry_care_washer_dryer_program_cotton_eco_4060": "Cotton eco 40/60 ºC", + "laundry_care_washer_dryer_program_mix": "Mix", + "laundry_care_washer_dryer_program_easy_care": "Easy care", + "laundry_care_washer_dryer_program_wash_and_dry_60": "Wash and dry (60 min)", + "laundry_care_washer_dryer_program_wash_and_dry_90": "Wash and dry (90 min)" + } + }, + "active_program": { + "name": "Active program", + "state": { + "consumer_products_cleaning_robot_program_cleaning_clean_all": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_cleaning_robot_program_cleaning_clean_all%]", + "consumer_products_cleaning_robot_program_cleaning_clean_map": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_cleaning_robot_program_cleaning_clean_map%]", + "consumer_products_cleaning_robot_program_basic_go_home": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_cleaning_robot_program_basic_go_home%]", + "consumer_products_coffee_maker_program_beverage_ristretto": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_ristretto%]", + "consumer_products_coffee_maker_program_beverage_espresso": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_espresso%]", + "consumer_products_coffee_maker_program_beverage_espresso_doppio": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_espresso_doppio%]", + "consumer_products_coffee_maker_program_beverage_coffee": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_coffee%]", + "consumer_products_coffee_maker_program_beverage_x_l_coffee": 
"[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_x_l_coffee%]", + "consumer_products_coffee_maker_program_beverage_caffe_grande": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_caffe_grande%]", + "consumer_products_coffee_maker_program_beverage_espresso_macchiato": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_espresso_macchiato%]", + "consumer_products_coffee_maker_program_beverage_cappuccino": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_cappuccino%]", + "consumer_products_coffee_maker_program_beverage_latte_macchiato": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_latte_macchiato%]", + "consumer_products_coffee_maker_program_beverage_caffe_latte": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_caffe_latte%]", + "consumer_products_coffee_maker_program_beverage_milk_froth": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_milk_froth%]", + "consumer_products_coffee_maker_program_beverage_warm_milk": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_warm_milk%]", + "consumer_products_coffee_maker_program_coffee_world_kleiner_brauner": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_kleiner_brauner%]", + "consumer_products_coffee_maker_program_coffee_world_grosser_brauner": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_grosser_brauner%]", + "consumer_products_coffee_maker_program_coffee_world_verlaengerter": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_verlaengerter%]", + "consumer_products_coffee_maker_program_coffee_world_verlaengerter_braun": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_verlaengerter_braun%]", + "consumer_products_coffee_maker_program_coffee_world_wiener_melange": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_wiener_melange%]", + "consumer_products_coffee_maker_program_coffee_world_flat_white": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_flat_white%]", + "consumer_products_coffee_maker_program_coffee_world_cortado": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_cortado%]", + "consumer_products_coffee_maker_program_coffee_world_cafe_cortado": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_cafe_cortado%]", + "consumer_products_coffee_maker_program_coffee_world_cafe_con_leche": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_cafe_con_leche%]", + 
"consumer_products_coffee_maker_program_coffee_world_cafe_au_lait": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_cafe_au_lait%]", + "consumer_products_coffee_maker_program_coffee_world_doppio": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_doppio%]", + "consumer_products_coffee_maker_program_coffee_world_kaapi": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_kaapi%]", + "consumer_products_coffee_maker_program_coffee_world_koffie_verkeerd": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_koffie_verkeerd%]", + "consumer_products_coffee_maker_program_coffee_world_galao": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_galao%]", + "consumer_products_coffee_maker_program_coffee_world_garoto": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_garoto%]", + "consumer_products_coffee_maker_program_coffee_world_americano": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_americano%]", + "consumer_products_coffee_maker_program_coffee_world_red_eye": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_red_eye%]", + "consumer_products_coffee_maker_program_coffee_world_black_eye": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_black_eye%]", + "consumer_products_coffee_maker_program_coffee_world_dead_eye": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_coffee_world_dead_eye%]", + "consumer_products_coffee_maker_program_beverage_hot_water": "[%key:component::home_connect::entity::select::selected_program::state::consumer_products_coffee_maker_program_beverage_hot_water%]", + "dishcare_dishwasher_program_pre_rinse": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_pre_rinse%]", + "dishcare_dishwasher_program_auto_1": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_auto_1%]", + "dishcare_dishwasher_program_auto_2": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_auto_2%]", + "dishcare_dishwasher_program_auto_3": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_auto_3%]", + "dishcare_dishwasher_program_eco_50": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_eco_50%]", + "dishcare_dishwasher_program_quick_45": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_quick_45%]", + "dishcare_dishwasher_program_intensiv_70": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_intensiv_70%]", + "dishcare_dishwasher_program_normal_65": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_normal_65%]", + "dishcare_dishwasher_program_glas_40": 
"[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_glas_40%]", + "dishcare_dishwasher_program_glass_care": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_glass_care%]", + "dishcare_dishwasher_program_night_wash": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_night_wash%]", + "dishcare_dishwasher_program_quick_65": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_quick_65%]", + "dishcare_dishwasher_program_normal_45": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_normal_45%]", + "dishcare_dishwasher_program_intensiv_45": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_intensiv_45%]", + "dishcare_dishwasher_program_auto_half_load": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_auto_half_load%]", + "dishcare_dishwasher_program_intensiv_power": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_intensiv_power%]", + "dishcare_dishwasher_program_magic_daily": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_magic_daily%]", + "dishcare_dishwasher_program_super_60": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_super_60%]", + "dishcare_dishwasher_program_kurz_60": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_kurz_60%]", + "dishcare_dishwasher_program_express_sparkle_65": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_express_sparkle_65%]", + "dishcare_dishwasher_program_machine_care": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_machine_care%]", + "dishcare_dishwasher_program_steam_fresh": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_steam_fresh%]", + "dishcare_dishwasher_program_maximum_cleaning": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_maximum_cleaning%]", + "dishcare_dishwasher_program_mixed_load": "[%key:component::home_connect::entity::select::selected_program::state::dishcare_dishwasher_program_mixed_load%]", + "laundry_care_dryer_program_cotton": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_cotton%]", + "laundry_care_dryer_program_synthetic": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_synthetic%]", + "laundry_care_dryer_program_mix": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_mix%]", + "laundry_care_dryer_program_blankets": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_blankets%]", + "laundry_care_dryer_program_business_shirts": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_business_shirts%]", + "laundry_care_dryer_program_down_feathers": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_down_feathers%]", + "laundry_care_dryer_program_hygiene": 
"[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_hygiene%]", + "laundry_care_dryer_program_jeans": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_jeans%]", + "laundry_care_dryer_program_outdoor": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_outdoor%]", + "laundry_care_dryer_program_synthetic_refresh": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_synthetic_refresh%]", + "laundry_care_dryer_program_towels": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_towels%]", + "laundry_care_dryer_program_delicates": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_delicates%]", + "laundry_care_dryer_program_super_40": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_super_40%]", + "laundry_care_dryer_program_shirts_15": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_shirts_15%]", + "laundry_care_dryer_program_pillow": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_pillow%]", + "laundry_care_dryer_program_anti_shrink": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_anti_shrink%]", + "laundry_care_dryer_program_my_time_my_drying_time": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_my_time_my_drying_time%]", + "laundry_care_dryer_program_time_cold": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_cold%]", + "laundry_care_dryer_program_time_warm": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_warm%]", + "laundry_care_dryer_program_in_basket": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_in_basket%]", + "laundry_care_dryer_program_time_cold_fix_time_cold_20": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_cold_fix_time_cold_20%]", + "laundry_care_dryer_program_time_cold_fix_time_cold_30": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_cold_fix_time_cold_30%]", + "laundry_care_dryer_program_time_cold_fix_time_cold_60": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_cold_fix_time_cold_60%]", + "laundry_care_dryer_program_time_warm_fix_time_warm_30": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_warm_fix_time_warm_30%]", + "laundry_care_dryer_program_time_warm_fix_time_warm_40": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_warm_fix_time_warm_40%]", + "laundry_care_dryer_program_time_warm_fix_time_warm_60": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_time_warm_fix_time_warm_60%]", + "laundry_care_dryer_program_dessous": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_dryer_program_dessous%]", + "cooking_common_program_hood_automatic": 
"[%key:component::home_connect::entity::select::selected_program::state::cooking_common_program_hood_automatic%]", + "cooking_common_program_hood_venting": "[%key:component::home_connect::entity::select::selected_program::state::cooking_common_program_hood_venting%]", + "cooking_common_program_hood_delayed_shut_off": "[%key:component::home_connect::entity::select::selected_program::state::cooking_common_program_hood_delayed_shut_off%]", + "cooking_oven_program_heating_mode_pre_heating": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_pre_heating%]", + "cooking_oven_program_heating_mode_hot_air": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air%]", + "cooking_oven_program_heating_mode_hot_air_eco": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_eco%]", + "cooking_oven_program_heating_mode_hot_air_grilling": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_grilling%]", + "cooking_oven_program_heating_mode_top_bottom_heating": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_top_bottom_heating%]", + "cooking_oven_program_heating_mode_top_bottom_heating_eco": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_top_bottom_heating_eco%]", + "cooking_oven_program_heating_mode_bottom_heating": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_bottom_heating%]", + "cooking_oven_program_heating_mode_pizza_setting": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_pizza_setting%]", + "cooking_oven_program_heating_mode_slow_cook": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_slow_cook%]", + "cooking_oven_program_heating_mode_intensive_heat": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_intensive_heat%]", + "cooking_oven_program_heating_mode_keep_warm": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_keep_warm%]", + "cooking_oven_program_heating_mode_preheat_ovenware": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_preheat_ovenware%]", + "cooking_oven_program_heating_mode_frozen_heatup_special": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_frozen_heatup_special%]", + "cooking_oven_program_heating_mode_desiccation": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_desiccation%]", + "cooking_oven_program_heating_mode_defrost": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_defrost%]", + "cooking_oven_program_heating_mode_proof": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_proof%]", + "cooking_oven_program_heating_mode_hot_air_30_steam": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_30_steam%]", + "cooking_oven_program_heating_mode_hot_air_60_steam": 
"[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_60_steam%]", + "cooking_oven_program_heating_mode_hot_air_80_steam": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_80_steam%]", + "cooking_oven_program_heating_mode_hot_air_100_steam": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_hot_air_100_steam%]", + "cooking_oven_program_heating_mode_sabbath_programme": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_sabbath_programme%]", + "cooking_oven_program_microwave_90_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_90_watt%]", + "cooking_oven_program_microwave_180_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_180_watt%]", + "cooking_oven_program_microwave_360_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_360_watt%]", + "cooking_oven_program_microwave_600_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_600_watt%]", + "cooking_oven_program_microwave_900_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_900_watt%]", + "cooking_oven_program_microwave_1000_watt": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_1000_watt%]", + "cooking_oven_program_microwave_max": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_microwave_max%]", + "cooking_oven_program_heating_mode_warming_drawer": "[%key:component::home_connect::entity::select::selected_program::state::cooking_oven_program_heating_mode_warming_drawer%]", + "laundry_care_washer_program_cotton": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_cotton%]", + "laundry_care_washer_program_cotton_cotton_eco": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_cotton_cotton_eco%]", + "laundry_care_washer_program_cotton_eco_4060": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_cotton_eco_4060%]", + "laundry_care_washer_program_cotton_colour": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_cotton_colour%]", + "laundry_care_washer_program_easy_care": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_easy_care%]", + "laundry_care_washer_program_mix": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_mix%]", + "laundry_care_washer_program_mix_night_wash": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_mix_night_wash%]", + "laundry_care_washer_program_delicates_silk": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_delicates_silk%]", + "laundry_care_washer_program_wool": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_wool%]", + "laundry_care_washer_program_sensitive": 
"[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_sensitive%]", + "laundry_care_washer_program_auto_30": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_auto_30%]", + "laundry_care_washer_program_auto_40": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_auto_40%]", + "laundry_care_washer_program_auto_60": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_auto_60%]", + "laundry_care_washer_program_chiffon": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_chiffon%]", + "laundry_care_washer_program_curtains": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_curtains%]", + "laundry_care_washer_program_dark_wash": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_dark_wash%]", + "laundry_care_washer_program_dessous": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_dessous%]", + "laundry_care_washer_program_monsoon": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_monsoon%]", + "laundry_care_washer_program_outdoor": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_outdoor%]", + "laundry_care_washer_program_plush_toy": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_plush_toy%]", + "laundry_care_washer_program_shirts_blouses": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_shirts_blouses%]", + "laundry_care_washer_program_sport_fitness": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_sport_fitness%]", + "laundry_care_washer_program_towels": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_towels%]", + "laundry_care_washer_program_water_proof": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_water_proof%]", + "laundry_care_washer_program_power_speed_59": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_power_speed_59%]", + "laundry_care_washer_program_super_153045_super_15": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_super_153045_super_15%]", + "laundry_care_washer_program_super_153045_super_1530": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_super_153045_super_1530%]", + "laundry_care_washer_program_down_duvet_duvet": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_down_duvet_duvet%]", + "laundry_care_washer_program_rinse_rinse_spin_drain": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_rinse_rinse_spin_drain%]", + "laundry_care_washer_program_drum_clean": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_program_drum_clean%]", + "laundry_care_washer_dryer_program_cotton": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_cotton%]", + 
"laundry_care_washer_dryer_program_cotton_eco_4060": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_cotton_eco_4060%]", + "laundry_care_washer_dryer_program_mix": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_mix%]", + "laundry_care_washer_dryer_program_easy_care": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_easy_care%]", + "laundry_care_washer_dryer_program_wash_and_dry_60": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_wash_and_dry_60%]", + "laundry_care_washer_dryer_program_wash_and_dry_90": "[%key:component::home_connect::entity::select::selected_program::state::laundry_care_washer_dryer_program_wash_and_dry_90%]" + } + } + }, "sensor": { "program_progress": { "name": "Program progress" diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 25bbb85278a..2fe3ff0a010 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -7,14 +7,13 @@ from typing import Any from homeconnect.api import HomeConnectError from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import get_dict_from_home_connect_error -from .api import ConfigEntryAuth +from . import HomeConnectConfigEntry, get_dict_from_home_connect_error from .const import ( + APPLIANCES_WITH_PROGRAMS, ATTR_ALLOWED_VALUES, ATTR_CONSTRAINTS, ATTR_VALUE, @@ -38,18 +37,6 @@ from .entity import HomeConnectDevice, HomeConnectEntity _LOGGER = logging.getLogger(__name__) -APPLIANCES_WITH_PROGRAMS = ( - "CleaningRobot", - "CoffeeMaker", - "Dishwasher", - "Dryer", - "Hood", - "Oven", - "WarmingDrawer", - "Washer", - "WasherDryer", -) - SWITCHES = ( SwitchEntityDescription( @@ -105,7 +92,7 @@ SWITCHES = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect switch.""" @@ -113,8 +100,7 @@ async def async_setup_entry( def get_entities() -> list[SwitchEntity]: """Get a list of entities.""" entities: list[SwitchEntity] = [] - hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] - for device in hc_api.devices: + for device in entry.runtime_data.devices: if device.appliance.type in APPLIANCES_WITH_PROGRAMS: with contextlib.suppress(HomeConnectError): programs = device.appliance.get_programs_available() diff --git a/homeassistant/components/home_connect/time.py b/homeassistant/components/home_connect/time.py index 946a2354938..f28339b3595 100644 --- a/homeassistant/components/home_connect/time.py +++ b/homeassistant/components/home_connect/time.py @@ -6,13 +6,11 @@ import logging from homeconnect.api import HomeConnectError from homeassistant.components.time import TimeEntity, TimeEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import get_dict_from_home_connect_error -from .api import ConfigEntryAuth +from . import HomeConnectConfigEntry, get_dict_from_home_connect_error from .const import ( ATTR_VALUE, DOMAIN, @@ -35,18 +33,17 @@ TIME_ENTITIES = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: HomeConnectConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Home Connect switch.""" def get_entities() -> list[HomeConnectTimeEntity]: """Get a list of entities.""" - hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] return [ HomeConnectTimeEntity(device, description) for description in TIME_ENTITIES - for device in hc_api.devices + for device in entry.runtime_data.devices if description.key in device.appliance.status ] diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index 0dd4eff507d..da8a1015d79 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -134,7 +134,7 @@ }, "elevation": { "name": "[%key:common::config_flow::data::elevation%]", - "description": "Elevation of your location." + "description": "Elevation of your location above sea level." } } }, diff --git a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py index 37d12d2bd61..a91fb00c142 100644 --- a/homeassistant/components/homeassistant_hardware/firmware_config_flow.py +++ b/homeassistant/components/homeassistant_hardware/firmware_config_flow.py @@ -24,7 +24,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import callback from homeassistant.data_entry_flow import AbortFlow @@ -496,13 +495,15 @@ class BaseFirmwareConfigFlow(BaseFirmwareInstallFlow, ConfigFlow): return await self.async_step_pick_firmware() -class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlowWithConfigEntry): +class BaseFirmwareOptionsFlow(BaseFirmwareInstallFlow, OptionsFlow): """Zigbee and Thread options flow handlers.""" - def __init__(self, *args: Any, **kwargs: Any) -> None: + def __init__(self, config_entry: ConfigEntry, *args: Any, **kwargs: Any) -> None: """Instantiate options flow.""" super().__init__(*args, **kwargs) + self._config_entry = config_entry + self._probed_firmware_type = ApplicationType(self.config_entry.data["firmware"]) # Make `context` a regular dictionary diff --git a/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py b/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py index 14ae57391ef..2b08031405f 100644 --- a/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py +++ b/homeassistant/components/homeassistant_hardware/silabs_multiprotocol_addon.py @@ -318,7 +318,6 @@ class OptionsFlowHandler(OptionsFlow, ABC): self.start_task: asyncio.Task | None = None self.stop_task: asyncio.Task | None = None self._zha_migration_mgr: ZhaMultiPANMigrationHelper | None = None - self.config_entry = config_entry self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None diff --git a/homeassistant/components/homekit/config_flow.py b/homeassistant/components/homekit/config_flow.py index a63e365ead7..53db7774821 100644 --- a/homeassistant/components/homekit/config_flow.py +++ b/homeassistant/components/homekit/config_flow.py @@ -362,15 +362,14 @@ 
class HomeKitConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for homekit.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.hk_options: dict[str, Any] = {} self.included_cameras: list[str] = [] diff --git a/homeassistant/components/homekit/util.py b/homeassistant/components/homekit/util.py index ae7e35030be..8395c1a8c9a 100644 --- a/homeassistant/components/homekit/util.py +++ b/homeassistant/components/homekit/util.py @@ -114,7 +114,7 @@ _LOGGER = logging.getLogger(__name__) NUMBERS_ONLY_RE = re.compile(r"[^\d.]+") VERSION_RE = re.compile(r"([0-9]+)(\.[0-9]+)?(\.[0-9]+)?") -INVALID_END_CHARS = "-_" +INVALID_END_CHARS = "-_ " MAX_VERSION_PART = 2**32 - 1 @@ -424,20 +424,12 @@ def cleanup_name_for_homekit(name: str | None) -> str: def temperature_to_homekit(temperature: float, unit: str) -> float: """Convert temperature to Celsius for HomeKit.""" - return round( - TemperatureConverter.convert(temperature, unit, UnitOfTemperature.CELSIUS), 1 - ) + return TemperatureConverter.convert(temperature, unit, UnitOfTemperature.CELSIUS) def temperature_to_states(temperature: float, unit: str) -> float: """Convert temperature back from Celsius to Home Assistant unit.""" - return ( - round( - TemperatureConverter.convert(temperature, UnitOfTemperature.CELSIUS, unit) - * 2 - ) - / 2 - ) + return TemperatureConverter.convert(temperature, UnitOfTemperature.CELSIUS, unit) def density_to_air_quality(density: float) -> int: diff --git a/homeassistant/components/homekit_controller/manifest.json b/homeassistant/components/homekit_controller/manifest.json index cddd61a12c1..b7c82b9fd51 100644 --- a/homeassistant/components/homekit_controller/manifest.json +++ b/homeassistant/components/homekit_controller/manifest.json @@ -14,6 +14,6 @@ "documentation": "https://www.home-assistant.io/integrations/homekit_controller", "iot_class": "local_push", "loggers": ["aiohomekit", "commentjson"], - "requirements": ["aiohomekit==3.2.6"], + "requirements": ["aiohomekit==3.2.7"], "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."] } diff --git a/homeassistant/components/homematic/manifest.json b/homeassistant/components/homematic/manifest.json index 9c67a5da0b2..749bd7b44e8 100644 --- a/homeassistant/components/homematic/manifest.json +++ b/homeassistant/components/homematic/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/homematic", "iot_class": "local_push", "loggers": ["pyhomematic"], + "quality_scale": "legacy", "requirements": ["pyhomematic==0.1.77"] } diff --git a/homeassistant/components/homematicip_cloud/manifest.json b/homeassistant/components/homematicip_cloud/manifest.json index 97af964ffc7..7878a8b4e0a 100644 --- a/homeassistant/components/homematicip_cloud/manifest.json +++ b/homeassistant/components/homematicip_cloud/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/homematicip_cloud", "iot_class": "cloud_push", "loggers": ["homematicip"], - "quality_scale": "silver", "requirements": ["homematicip==1.1.3"] } diff --git a/homeassistant/components/homematicip_cloud/sensor.py b/homeassistant/components/homematicip_cloud/sensor.py index eab7ba4f09e..c44d280c190 100644 --- 
a/homeassistant/components/homematicip_cloud/sensor.py +++ b/homeassistant/components/homematicip_cloud/sensor.py @@ -420,6 +420,7 @@ class HomematicipWindspeedSensor(HomematicipGenericEntity, SensorEntity): _attr_device_class = SensorDeviceClass.WIND_SPEED _attr_native_unit_of_measurement = UnitOfSpeed.KILOMETERS_PER_HOUR + _attr_state_class = SensorStateClass.MEASUREMENT def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize the windspeed sensor.""" @@ -451,6 +452,7 @@ class HomematicipTodayRainSensor(HomematicipGenericEntity, SensorEntity): _attr_device_class = SensorDeviceClass.PRECIPITATION _attr_native_unit_of_measurement = UnitOfPrecipitationDepth.MILLIMETERS + _attr_state_class = SensorStateClass.MEASUREMENT def __init__(self, hap: HomematicipHAP, device) -> None: """Initialize the device.""" diff --git a/homeassistant/components/homewizard/button.py b/homeassistant/components/homewizard/button.py index a9cc19d72a7..7b05cb95271 100644 --- a/homeassistant/components/homewizard/button.py +++ b/homeassistant/components/homewizard/button.py @@ -10,6 +10,8 @@ from .coordinator import HWEnergyDeviceUpdateCoordinator from .entity import HomeWizardEntity from .helpers import homewizard_exception_handler +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/homewizard/config_flow.py b/homeassistant/components/homewizard/config_flow.py index d52e53cf39b..a6e4356328e 100644 --- a/homeassistant/components/homewizard/config_flow.py +++ b/homeassistant/components/homewizard/config_flow.py @@ -6,16 +6,18 @@ from collections.abc import Mapping import logging from typing import Any, NamedTuple -from homewizard_energy import HomeWizardEnergy +from homewizard_energy import HomeWizardEnergyV1 from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError -from homewizard_energy.models import Device -from voluptuous import Required, Schema +from homewizard_energy.v1.models import Device +import voluptuous as vol from homeassistant.components import onboarding, zeroconf +from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_IP_ADDRESS, CONF_PATH from homeassistant.data_entry_flow import AbortFlow from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.selector import TextSelector from .const import ( CONF_API_ENABLED, @@ -68,11 +70,11 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): user_input = user_input or {} return self.async_show_form( step_id="user", - data_schema=Schema( + data_schema=vol.Schema( { - Required( + vol.Required( CONF_IP_ADDRESS, default=user_input.get(CONF_IP_ADDRESS) - ): str, + ): TextSelector(), } ), errors=errors, @@ -110,6 +112,32 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_discovery_confirm() + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle dhcp discovery to update existing entries. + + This flow is triggered only by DHCP discovery of known devices. 
+ """ + try: + device = await self._async_try_connect(discovery_info.ip) + except RecoverableError as ex: + _LOGGER.error(ex) + return self.async_abort(reason="unknown") + + await self.async_set_unique_id( + f"{device.product_type}_{discovery_info.macaddress}" + ) + + self._abort_if_unique_id_configured( + updates={CONF_IP_ADDRESS: discovery_info.ip} + ) + + # This situation should never happen, as Home Assistant will only + # send updates for existing entries. In case it does, we'll just + # abort the flow with an unknown error. + return self.async_abort(reason="unknown") + async def async_step_discovery_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -170,6 +198,43 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="reauth_confirm", errors=errors) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the integration.""" + errors: dict[str, str] = {} + if user_input: + try: + device_info = await self._async_try_connect(user_input[CONF_IP_ADDRESS]) + except RecoverableError as ex: + _LOGGER.error(ex) + errors = {"base": ex.error_code} + else: + await self.async_set_unique_id( + f"{device_info.product_type}_{device_info.serial}" + ) + self._abort_if_unique_id_mismatch(reason="wrong_device") + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), + data_updates=user_input, + ) + reconfigure_entry = self._get_reconfigure_entry() + return self.async_show_form( + step_id="reconfigure", + data_schema=vol.Schema( + { + vol.Required( + CONF_IP_ADDRESS, + default=reconfigure_entry.data.get(CONF_IP_ADDRESS), + ): TextSelector(), + } + ), + description_placeholders={ + "title": reconfigure_entry.title, + }, + errors=errors, + ) + @staticmethod async def _async_try_connect(ip_address: str) -> Device: """Try to connect. @@ -177,7 +242,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN): Make connection with device to test the connection and to get info for unique_id. 
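The reconfigure step added above only accepts a new IP address when the device answering there is the same device the entry was created for: it rebuilds the unique ID from `product_type` and `serial` and aborts with `wrong_device` on a mismatch. A minimal sketch of that comparison follows; the helper name and the identifier values are illustrative assumptions, not part of the patch.

```python
# Editor's sketch of the unique-ID guard behind
# _abort_if_unique_id_mismatch(reason="wrong_device") in the flow above.
def is_same_device(stored_unique_id: str, product_type: str, serial: str) -> bool:
    """Return True when the probed device matches the configured entry."""
    return stored_unique_id == f"{product_type}_{serial}"


# The same P1 meter reached on a new IP keeps its unique ID ...
assert is_same_device("HWE-P1_3c39e7aabbcc", "HWE-P1", "3c39e7aabbcc")
# ... while a different device at that address triggers the "wrong_device" abort.
assert not is_same_device("HWE-P1_3c39e7aabbcc", "HWE-SKT", "5c2faf010203")
```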
""" - energy_api = HomeWizardEnergy(ip_address) + energy_api = HomeWizardEnergyV1(ip_address) try: return await energy_api.device() diff --git a/homeassistant/components/homewizard/const.py b/homeassistant/components/homewizard/const.py index 8cee8350268..809ecc1416b 100644 --- a/homeassistant/components/homewizard/const.py +++ b/homeassistant/components/homewizard/const.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from datetime import timedelta import logging -from homewizard_energy.models import Data, Device, State, System +from homewizard_energy.v1.models import Data, Device, State, System from homeassistant.const import Platform diff --git a/homeassistant/components/homewizard/coordinator.py b/homeassistant/components/homewizard/coordinator.py index 61b304eb39c..8f5045d3b94 100644 --- a/homeassistant/components/homewizard/coordinator.py +++ b/homeassistant/components/homewizard/coordinator.py @@ -4,10 +4,10 @@ from __future__ import annotations import logging -from homewizard_energy import HomeWizardEnergy -from homewizard_energy.const import SUPPORTS_IDENTIFY, SUPPORTS_STATE +from homewizard_energy import HomeWizardEnergyV1 from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError -from homewizard_energy.models import Device +from homewizard_energy.v1.const import SUPPORTS_IDENTIFY, SUPPORTS_STATE +from homewizard_energy.v1.models import Device from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_IP_ADDRESS @@ -23,7 +23,7 @@ _LOGGER = logging.getLogger(__name__) class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry]): """Gather data for the energy device.""" - api: HomeWizardEnergy + api: HomeWizardEnergyV1 api_disabled: bool = False _unsupported_error: bool = False @@ -36,7 +36,7 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry] ) -> None: """Initialize update coordinator.""" super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=UPDATE_INTERVAL) - self.api = HomeWizardEnergy( + self.api = HomeWizardEnergyV1( self.config_entry.data[CONF_IP_ADDRESS], clientsession=async_get_clientsession(hass), ) @@ -66,7 +66,9 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry] ) except RequestError as ex: - raise UpdateFailed(ex) from ex + raise UpdateFailed( + ex, translation_domain=DOMAIN, translation_key="communication_error" + ) from ex except DisabledError as ex: if not self.api_disabled: @@ -79,7 +81,9 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry] self.config_entry.entry_id ) - raise UpdateFailed(ex) from ex + raise UpdateFailed( + ex, translation_domain=DOMAIN, translation_key="api_disabled" + ) from ex self.api_disabled = False diff --git a/homeassistant/components/homewizard/manifest.json b/homeassistant/components/homewizard/manifest.json index 65672903eb8..13bfc512551 100644 --- a/homeassistant/components/homewizard/manifest.json +++ b/homeassistant/components/homewizard/manifest.json @@ -3,10 +3,15 @@ "name": "HomeWizard Energy", "codeowners": ["@DCSBL"], "config_flow": true, + "dhcp": [ + { + "registered_devices": true + } + ], "documentation": "https://www.home-assistant.io/integrations/homewizard", "iot_class": "local_polling", "loggers": ["homewizard_energy"], "quality_scale": "platinum", - "requirements": ["python-homewizard-energy==v6.3.0"], + "requirements": ["python-homewizard-energy==v7.0.0"], "zeroconf": ["_hwenergy._tcp.local."] } diff --git 
a/homeassistant/components/homewizard/number.py b/homeassistant/components/homewizard/number.py index 1af77859a0f..1ed4c642f6b 100644 --- a/homeassistant/components/homewizard/number.py +++ b/homeassistant/components/homewizard/number.py @@ -13,6 +13,8 @@ from .coordinator import HWEnergyDeviceUpdateCoordinator from .entity import HomeWizardEntity from .helpers import homewizard_exception_handler +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, @@ -62,4 +64,4 @@ class HWEnergyNumberEntity(HomeWizardEntity, NumberEntity): or (brightness := self.coordinator.data.state.brightness) is None ): return None - return brightness_to_value((0, 100), brightness) + return round(brightness_to_value((0, 100), brightness)) diff --git a/homeassistant/components/homewizard/quality_scale.yaml b/homeassistant/components/homewizard/quality_scale.yaml new file mode 100644 index 00000000000..423bc4dea49 --- /dev/null +++ b/homeassistant/components/homewizard/quality_scale.yaml @@ -0,0 +1,81 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + The integration does not provide any additional actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + The integration does not provide any additional actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + Entities of this integration do not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + The integration connects to a single device per configuration entry. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device per configuration entry.
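The coordinator hunk earlier in this change raises `UpdateFailed` with a translation domain and key instead of a bare exception, so the frontend can localize the failure text defined under `exceptions` in the strings.json hunk that follows. A hedged sketch of that pattern is below; the wrapper function is illustrative, only the `UpdateFailed` keyword arguments come from the patch.

```python
# Editor's sketch: mapping a coordinator failure to a translatable message.
# The "communication_error" key is defined in the homewizard strings.json below.
from homeassistant.helpers.update_coordinator import UpdateFailed

DOMAIN = "homewizard"


def wrap_request_error(err: Exception) -> UpdateFailed:
    """Build the translated UpdateFailed raised when a device request fails."""
    return UpdateFailed(
        err,
        translation_domain=DOMAIN,
        translation_key="communication_error",
    )
```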
+ + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/homewizard/sensor.py b/homeassistant/components/homewizard/sensor.py index 57071875edb..24ed5933d06 100644 --- a/homeassistant/components/homewizard/sensor.py +++ b/homeassistant/components/homewizard/sensor.py @@ -6,7 +6,7 @@ from collections.abc import Callable from dataclasses import dataclass from typing import Final -from homewizard_energy.models import Data, ExternalDevice +from homewizard_energy.v1.models import Data, ExternalDevice from homeassistant.components.sensor import ( DEVICE_CLASS_UNITS, diff --git a/homeassistant/components/homewizard/strings.json b/homeassistant/components/homewizard/strings.json index 751c1ec450d..4309664c4c8 100644 --- a/homeassistant/components/homewizard/strings.json +++ b/homeassistant/components/homewizard/strings.json @@ -6,6 +6,9 @@ "description": "Enter the IP address of your HomeWizard Energy device to integrate with Home Assistant.", "data": { "ip_address": "[%key:common::config_flow::data::ip%]" + }, + "data_description": { + "ip_address": "The IP address of your HomeWizard Energy device." } }, "discovery_confirm": { @@ -14,10 +17,19 @@ }, "reauth_confirm": { "description": "The local API is disabled. Go to the HomeWizard Energy app and enable the API in the device settings." + }, + "reconfigure": { + "description": "Update configuration for {title}.", + "data": { + "ip_address": "[%key:common::config_flow::data::ip%]" + }, + "data_description": { + "ip_address": "[%key:component::homewizard::config::step::user::data_description::ip_address%]" + } } }, "error": { - "api_not_enabled": "The API is not enabled. Enable API in the HomeWizard Energy App under settings", + "api_not_enabled": "The local API is disabled. Go to the HomeWizard Energy app and enable the API in the device settings.", "network_error": "Device unreachable, make sure that you have entered the correct IP address and that the device is available in your network" }, "abort": { @@ -26,7 +38,9 @@ "device_not_supported": "This device is not supported", "unknown_error": "[%key:common::config_flow::error::unknown%]", "unsupported_api_version": "Detected unsupported API version", - "reauth_successful": "Enabling API was successful" + "reauth_successful": "Enabling API was successful", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", + "wrong_device": "The configured device is not the same as the one found at this IP address." } }, "entity": { @@ -120,7 +134,7 @@ }, "exceptions": { "api_disabled": { - "message": "The local API of the HomeWizard device is disabled" + "message": "The local API is disabled." 
}, "communication_error": { "message": "An error occurred while communicating with HomeWizard device" diff --git a/homeassistant/components/homewizard/switch.py b/homeassistant/components/homewizard/switch.py index 14c6e0778f1..aa0af17f578 100644 --- a/homeassistant/components/homewizard/switch.py +++ b/homeassistant/components/homewizard/switch.py @@ -6,7 +6,7 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from homewizard_energy import HomeWizardEnergy +from homewizard_energy import HomeWizardEnergyV1 from homeassistant.components.switch import ( SwitchDeviceClass, @@ -23,6 +23,8 @@ from .coordinator import HWEnergyDeviceUpdateCoordinator from .entity import HomeWizardEntity from .helpers import homewizard_exception_handler +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class HomeWizardSwitchEntityDescription(SwitchEntityDescription): @@ -31,7 +33,7 @@ class HomeWizardSwitchEntityDescription(SwitchEntityDescription): available_fn: Callable[[DeviceResponseEntry], bool] create_fn: Callable[[HWEnergyDeviceUpdateCoordinator], bool] is_on_fn: Callable[[DeviceResponseEntry], bool | None] - set_fn: Callable[[HomeWizardEnergy, bool], Awaitable[Any]] + set_fn: Callable[[HomeWizardEnergyV1, bool], Awaitable[Any]] SWITCHES = [ diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index 98cbae4eb7e..d4e5ee10a6b 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -398,7 +398,7 @@ class HoneywellUSThermostat(ClimateEntity): raise ServiceValidationError( translation_domain=DOMAIN, translation_key="temp_failed_value", - translation_placeholders={"temp": temperature}, + translation_placeholders={"temperature": temperature}, ) from err async def async_set_temperature(self, **kwargs: Any) -> None: @@ -422,7 +422,7 @@ class HoneywellUSThermostat(ClimateEntity): raise ServiceValidationError( translation_domain=DOMAIN, translation_key="temp_failed_value", - translation_placeholders={"temp": str(temperature)}, + translation_placeholders={"temperature": str(temperature)}, ) from err async def async_set_fan_mode(self, fan_mode: str) -> None: diff --git a/homeassistant/components/honeywell/config_flow.py b/homeassistant/components/honeywell/config_flow.py index c9b1dfb950a..c7cda500692 100644 --- a/homeassistant/components/honeywell/config_flow.py +++ b/homeassistant/components/honeywell/config_flow.py @@ -129,16 +129,12 @@ class HoneywellConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> HoneywellOptionsFlowHandler: """Options callback for Honeywell.""" - return HoneywellOptionsFlowHandler(config_entry) + return HoneywellOptionsFlowHandler() class HoneywellOptionsFlowHandler(OptionsFlow): """Config flow options for Honeywell.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Honeywell options flow.""" - self.config_entry = entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/honeywell/strings.json b/homeassistant/components/honeywell/strings.json index aa6e53620a5..a64f1a6fce0 100644 --- a/homeassistant/components/honeywell/strings.json +++ b/homeassistant/components/honeywell/strings.json @@ -16,6 +16,9 @@ } } }, + "abort": { + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + }, "error": { "invalid_auth": 
"[%key:common::config_flow::error::invalid_auth%]", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" diff --git a/homeassistant/components/horizon/manifest.json b/homeassistant/components/horizon/manifest.json index d1280a6fe65..d30e2f39e34 100644 --- a/homeassistant/components/horizon/manifest.json +++ b/homeassistant/components/horizon/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/horizon", "iot_class": "local_polling", "loggers": ["horimote"], + "quality_scale": "legacy", "requirements": ["horimote==0.4.1"] } diff --git a/homeassistant/components/hp_ilo/manifest.json b/homeassistant/components/hp_ilo/manifest.json index 378a9ac1865..9f2dfb21783 100644 --- a/homeassistant/components/hp_ilo/manifest.json +++ b/homeassistant/components/hp_ilo/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/hp_ilo", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["python-hpilo==4.4.3"] } diff --git a/homeassistant/components/http/__init__.py b/homeassistant/components/http/__init__.py index a8721720dfb..95cdee9ab9e 100644 --- a/homeassistant/components/http/__init__.py +++ b/homeassistant/components/http/__init__.py @@ -326,7 +326,8 @@ class HomeAssistantApplication(web.Application): protocol, writer, task, - loop=self._loop, + # loop will never be None when called from aiohttp + loop=self._loop, # type: ignore[arg-type] client_max_size=self._client_max_size, ) @@ -505,15 +506,14 @@ class HomeAssistantHTTP: self, url_path: str, path: str, cache_headers: bool = True ) -> None: """Register a folder or file to serve as a static path.""" - frame.report( + frame.report_usage( "calls hass.http.register_static_path which is deprecated because " "it does blocking I/O in the event loop, instead " "call `await hass.http.async_register_static_paths(" - f'[StaticPathConfig("{url_path}", "{path}", {cache_headers})])`; ' - "This function will be removed in 2025.7", + f'[StaticPathConfig("{url_path}", "{path}", {cache_headers})])`', exclude_integrations={"http"}, - error_if_core=False, - error_if_integration=False, + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.7", ) configs = [StaticPathConfig(url_path, path, cache_headers)] resources = self._make_static_resources(configs) diff --git a/homeassistant/components/huawei_lte/config_flow.py b/homeassistant/components/huawei_lte/config_flow.py index 02349b2ae7f..08fdae50c51 100644 --- a/homeassistant/components/huawei_lte/config_flow.py +++ b/homeassistant/components/huawei_lte/config_flow.py @@ -69,7 +69,7 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def _async_show_user_form( self, @@ -345,10 +345,6 @@ class ConfigFlowHandler(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Huawei LTE options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/huawei_lte/manifest.json b/homeassistant/components/huawei_lte/manifest.json index 908092ba2ca..6720d6718ef 100644 --- a/homeassistant/components/huawei_lte/manifest.json +++ b/homeassistant/components/huawei_lte/manifest.json @@ -7,7 +7,7 @@ "iot_class": 
"local_polling", "loggers": ["huawei_lte_api.Session"], "requirements": [ - "huawei-lte-api==1.9.3", + "huawei-lte-api==1.10.0", "stringcase==1.2.0", "url-normalize==1.4.3" ], diff --git a/homeassistant/components/hue/config_flow.py b/homeassistant/components/hue/config_flow.py index e73ae8fe11d..8d17f810461 100644 --- a/homeassistant/components/hue/config_flow.py +++ b/homeassistant/components/hue/config_flow.py @@ -57,8 +57,8 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): ) -> HueV1OptionsFlowHandler | HueV2OptionsFlowHandler: """Get the options flow for this handler.""" if config_entry.data.get(CONF_API_VERSION, 1) == 1: - return HueV1OptionsFlowHandler(config_entry) - return HueV2OptionsFlowHandler(config_entry) + return HueV1OptionsFlowHandler() + return HueV2OptionsFlowHandler() def __init__(self) -> None: """Initialize the Hue flow.""" @@ -280,10 +280,6 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): class HueV1OptionsFlowHandler(OptionsFlow): """Handle Hue options for V1 implementation.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Hue options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -315,10 +311,6 @@ class HueV1OptionsFlowHandler(OptionsFlow): class HueV2OptionsFlowHandler(OptionsFlow): """Handle Hue options for V2 implementation.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Hue options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/hue/manifest.json b/homeassistant/components/hue/manifest.json index dbd9b511977..22f1d3991e7 100644 --- a/homeassistant/components/hue/manifest.json +++ b/homeassistant/components/hue/manifest.json @@ -10,7 +10,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["aiohue"], - "quality_scale": "platinum", "requirements": ["aiohue==4.7.3"], "zeroconf": ["_hue._tcp.local."] } diff --git a/homeassistant/components/hue/strings.json b/homeassistant/components/hue/strings.json index ab1d0fb58ad..2f7f2e55561 100644 --- a/homeassistant/components/hue/strings.json +++ b/homeassistant/components/hue/strings.json @@ -137,15 +137,15 @@ "services": { "hue_activate_scene": { "name": "Activate scene", - "description": "Activates a hue scene stored in the hue hub.", + "description": "Activates a Hue scene stored in the Hue hub.", "fields": { "group_name": { "name": "Group", - "description": "Name of hue group/room from the hue app." + "description": "Name of Hue group/room from the Hue app." }, "scene_name": { "name": "Scene", - "description": "Name of hue scene from the hue app." + "description": "Name of Hue scene from the Hue app." 
}, "dynamic": { "name": "Dynamic", diff --git a/homeassistant/components/humidifier/const.py b/homeassistant/components/humidifier/const.py index fc6b0fc14d4..03ff0774ca0 100644 --- a/homeassistant/components/humidifier/const.py +++ b/homeassistant/components/humidifier/const.py @@ -57,7 +57,7 @@ SERVICE_SET_HUMIDITY = "set_humidity" class HumidifierEntityFeature(IntFlag): - """Supported features of the alarm control panel entity.""" + """Supported features of the humidifier entity.""" MODES = 1 diff --git a/homeassistant/components/husqvarna_automower/api.py b/homeassistant/components/husqvarna_automower/api.py index f1d3e1ef4fa..8a9a31b926a 100644 --- a/homeassistant/components/husqvarna_automower/api.py +++ b/homeassistant/components/husqvarna_automower/api.py @@ -7,6 +7,7 @@ from aioautomower.auth import AbstractAuth from aioautomower.const import API_BASE_URL from aiohttp import ClientSession +from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.helpers import config_entry_oauth2_flow _LOGGER = logging.getLogger(__name__) @@ -28,3 +29,16 @@ class AsyncConfigEntryAuth(AbstractAuth): """Return a valid access token.""" await self._oauth_session.async_ensure_token_valid() return cast(str, self._oauth_session.token["access_token"]) + + +class AsyncConfigFlowAuth(AbstractAuth): + """Provide Automower AbstractAuth for the config flow.""" + + def __init__(self, websession: ClientSession, token: dict) -> None: + """Initialize Husqvarna Automower auth.""" + super().__init__(websession, API_BASE_URL) + self.token: dict = token + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + return cast(str, self.token[CONF_ACCESS_TOKEN]) diff --git a/homeassistant/components/husqvarna_automower/binary_sensor.py b/homeassistant/components/husqvarna_automower/binary_sensor.py index 5d1ccb6a074..f8b8f155458 100644 --- a/homeassistant/components/husqvarna_automower/binary_sensor.py +++ b/homeassistant/components/husqvarna_automower/binary_sensor.py @@ -3,24 +3,42 @@ from collections.abc import Callable from dataclasses import dataclass import logging +from typing import TYPE_CHECKING from aioautomower.model import MowerActivities, MowerAttributes +from homeassistant.components.automation import automations_with_entity from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) +from homeassistant.components.script import scripts_with_entity from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from . import AutomowerConfigEntry +from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerBaseEntity _LOGGER = logging.getLogger(__name__) +def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]: + """Get list of related automations and scripts.""" + used_in = automations_with_entity(hass, entity_id) + used_in += scripts_with_entity(hass, entity_id) + return used_in + + @dataclass(frozen=True, kw_only=True) class AutomowerBinarySensorEntityDescription(BinarySensorEntityDescription): """Describes Automower binary sensor entity.""" @@ -43,6 +61,7 @@ MOWER_BINARY_SENSOR_TYPES: tuple[AutomowerBinarySensorEntityDescription, ...] 
= key="returning_to_dock", translation_key="returning_to_dock", value_fn=lambda data: data.mower.activity == MowerActivities.GOING_HOME, + entity_registry_enabled_default=False, ), ) @@ -81,3 +100,39 @@ class AutomowerBinarySensorEntity(AutomowerBaseEntity, BinarySensorEntity): def is_on(self) -> bool: """Return the state of the binary sensor.""" return self.entity_description.value_fn(self.mower_attributes) + + async def async_added_to_hass(self) -> None: + """Raise issue when entity is registered and was not disabled.""" + if TYPE_CHECKING: + assert self.unique_id + if not ( + entity_id := er.async_get(self.hass).async_get_entity_id( + BINARY_SENSOR_DOMAIN, DOMAIN, self.unique_id + ) + ): + return + if ( + self.enabled + and self.entity_description.key == "returning_to_dock" + and entity_used_in(self.hass, entity_id) + ): + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_entity_{self.entity_description.key}", + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_entity", + translation_placeholders={ + "entity_name": str(self.name), + "entity": entity_id, + }, + ) + else: + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_entity_{self.entity_description.key}", + ) + await super().async_added_to_hass() diff --git a/homeassistant/components/husqvarna_automower/button.py b/homeassistant/components/husqvarna_automower/button.py index 22a732ec54c..ce303325496 100644 --- a/homeassistant/components/husqvarna_automower/button.py +++ b/homeassistant/components/husqvarna_automower/button.py @@ -22,6 +22,8 @@ from .entity import ( _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class AutomowerButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/husqvarna_automower/config_flow.py b/homeassistant/components/husqvarna_automower/config_flow.py index 3e76b9ac812..4da3bd14089 100644 --- a/homeassistant/components/husqvarna_automower/config_flow.py +++ b/homeassistant/components/husqvarna_automower/config_flow.py @@ -4,12 +4,15 @@ from collections.abc import Mapping import logging from typing import Any +from aioautomower.session import AutomowerSession from aioautomower.utils import structure_token from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, CONF_TOKEN -from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow +from homeassistant.util import dt as dt_util +from .api import AsyncConfigFlowAuth from .const import DOMAIN, NAME _LOGGER = logging.getLogger(__name__) @@ -46,9 +49,20 @@ class HusqvarnaConfigFlowHandler( self._abort_if_unique_id_configured() + websession = aiohttp_client.async_get_clientsession(self.hass) + tz = await dt_util.async_get_time_zone(str(dt_util.DEFAULT_TIME_ZONE)) + automower_api = AutomowerSession(AsyncConfigFlowAuth(websession, token), tz) + try: + status_data = await automower_api.get_status() + except Exception: # noqa: BLE001 + return self.async_abort(reason="unknown") + if status_data == {}: + return self.async_abort(reason="no_mower_connected") + structured_token = structure_token(token[CONF_ACCESS_TOKEN]) first_name = structured_token.user.first_name last_name = structured_token.user.last_name + return self.async_create_entry( title=f"{NAME} of {first_name} {last_name}", data=data, diff --git a/homeassistant/components/husqvarna_automower/lawn_mower.py 
b/homeassistant/components/husqvarna_automower/lawn_mower.py index eeabaa09f79..9b3ce7dab1a 100644 --- a/homeassistant/components/husqvarna_automower/lawn_mower.py +++ b/homeassistant/components/husqvarna_automower/lawn_mower.py @@ -22,6 +22,10 @@ from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerAvailableEntity, handle_sending_exception +_LOGGER = logging.getLogger(__name__) + +PARALLEL_UPDATES = 1 + DOCKED_ACTIVITIES = (MowerActivities.PARKED_IN_CS, MowerActivities.CHARGING) MOWING_ACTIVITIES = ( MowerActivities.MOWING, @@ -42,9 +46,6 @@ PARK = "park" OVERRIDE_MODES = [MOW, PARK] -_LOGGER = logging.getLogger(__name__) - - async def async_setup_entry( hass: HomeAssistant, entry: AutomowerConfigEntry, diff --git a/homeassistant/components/husqvarna_automower/number.py b/homeassistant/components/husqvarna_automower/number.py index d6d794f2d83..e69b52fab93 100644 --- a/homeassistant/components/husqvarna_automower/number.py +++ b/homeassistant/components/husqvarna_automower/number.py @@ -24,6 +24,8 @@ from .entity import ( _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 1 + @callback def _async_get_cutting_height(data: MowerAttributes) -> int: diff --git a/homeassistant/components/husqvarna_automower/select.py b/homeassistant/components/husqvarna_automower/select.py index a9431acaae3..65960e897e4 100644 --- a/homeassistant/components/husqvarna_automower/select.py +++ b/homeassistant/components/husqvarna_automower/select.py @@ -16,6 +16,7 @@ from .entity import AutomowerControlEntity, handle_sending_exception _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 1 HEADLIGHT_MODES: list = [ HeadlightModes.ALWAYS_OFF.lower(), diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index ebb68033918..70b5510de36 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -349,6 +349,7 @@ MOWER_SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( key="number_of_collisions", translation_key="number_of_collisions", entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, state_class=SensorStateClass.TOTAL, exists_fn=lambda data: data.statistics.number_of_collisions is not None, value_fn=attrgetter("statistics.number_of_collisions"), diff --git a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index 05a18bcb19f..d4c91e29f7d 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -27,7 +27,9 @@ "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "wrong_account": "You can only reauthenticate this entry with the same Husqvarna account.", - "missing_amc_scope": "The `Authentication API` and the `Automower Connect API` are not connected to your application in the Husqvarna Developer Portal." 
+ "no_mower_connected": "No mowers connected to this account.", + "missing_amc_scope": "The `Authentication API` and the `Automower Connect API` are not connected to your application in the Husqvarna Developer Portal.", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" @@ -311,6 +313,12 @@ } } }, + "issues": { + "deprecated_entity": { + "title": "The Husqvarna Automower {entity_name} sensor is deprecated", + "description": "The Husqvarna Automower entity `{entity}` is deprecated and will be removed in a future release.\nYou can use the new returning state of the lawn mower entity instead.\nPlease update your automations and scripts to replace the sensor entity with the newly added lawn mower entity.\nWhen you are done migrating you can disable `{entity}`." + } + }, "services": { "override_schedule": { "name": "Override schedule", diff --git a/homeassistant/components/husqvarna_automower/switch.py b/homeassistant/components/husqvarna_automower/switch.py index 2bbe5c87624..352b4c59ba1 100644 --- a/homeassistant/components/husqvarna_automower/switch.py +++ b/homeassistant/components/husqvarna_automower/switch.py @@ -19,6 +19,8 @@ from .entity import ( handle_sending_exception, ) +PARALLEL_UPDATES = 1 + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/husqvarna_automower_ble/manifest.json b/homeassistant/components/husqvarna_automower_ble/manifest.json index 3e72d9707c7..7566b5c9d32 100644 --- a/homeassistant/components/husqvarna_automower_ble/manifest.json +++ b/homeassistant/components/husqvarna_automower_ble/manifest.json @@ -10,7 +10,7 @@ "codeowners": ["@alistair23"], "config_flow": true, "dependencies": ["bluetooth_adapters"], - "documentation": "https://www.home-assistant.io/integrations/???", + "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower_ble", "iot_class": "local_polling", "requirements": ["automower-ble==0.2.0"] } diff --git a/homeassistant/components/huum/manifest.json b/homeassistant/components/huum/manifest.json index 7629f529b91..38562e1a072 100644 --- a/homeassistant/components/huum/manifest.json +++ b/homeassistant/components/huum/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/huum", "iot_class": "cloud_polling", - "requirements": ["huum==0.7.10"] + "requirements": ["huum==0.7.12"] } diff --git a/homeassistant/components/hvv_departures/config_flow.py b/homeassistant/components/hvv_departures/config_flow.py index 3e1b98d9a38..536b8f18259 100644 --- a/homeassistant/components/hvv_departures/config_flow.py +++ b/homeassistant/components/hvv_departures/config_flow.py @@ -141,16 +141,14 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Options flow handler.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize HVV Departures options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) self.departure_filters: dict[str, Any] = {} async def async_step_init( diff --git a/homeassistant/components/hydrawise/const.py b/homeassistant/components/hydrawise/const.py index 633c00ce659..6d846dd6127 100644 --- a/homeassistant/components/hydrawise/const.py +++ 
b/homeassistant/components/hydrawise/const.py @@ -10,7 +10,7 @@ DEFAULT_WATERING_TIME = timedelta(minutes=15) MANUFACTURER = "Hydrawise" -MAIN_SCAN_INTERVAL = timedelta(seconds=60) +MAIN_SCAN_INTERVAL = timedelta(minutes=5) WATER_USE_SCAN_INTERVAL = timedelta(minutes=60) SIGNAL_UPDATE_HYDRAWISE = "hydrawise_update" diff --git a/homeassistant/components/hydrawise/sensor.py b/homeassistant/components/hydrawise/sensor.py index 1d8c75d5437..96cc16832da 100644 --- a/homeassistant/components/hydrawise/sensor.py +++ b/homeassistant/components/hydrawise/sensor.py @@ -4,9 +4,11 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import timedelta from typing import Any +from pydrawise.schema import ControllerWaterUseSummary + from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -30,66 +32,8 @@ class HydrawiseSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[HydrawiseSensor], Any] -def _get_zone_watering_time(sensor: HydrawiseSensor) -> int: - if (current_run := sensor.zone.scheduled_runs.current_run) is not None: - return int(current_run.remaining_time.total_seconds() / 60) - return 0 - - -def _get_zone_next_cycle(sensor: HydrawiseSensor) -> datetime | None: - if (next_run := sensor.zone.scheduled_runs.next_run) is not None: - return dt_util.as_utc(next_run.start_time) - return None - - -def _get_zone_daily_active_water_use(sensor: HydrawiseSensor) -> float: - """Get active water use for the zone.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return float(daily_water_summary.active_use_by_zone_id.get(sensor.zone.id, 0.0)) - - -def _get_zone_daily_active_water_time(sensor: HydrawiseSensor) -> float | None: - """Get active water time for the zone.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.active_time_by_zone_id.get( - sensor.zone.id, timedelta() - ).total_seconds() - - -def _get_controller_daily_active_water_use(sensor: HydrawiseSensor) -> float | None: - """Get active water use for the controller.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.total_active_use - - -def _get_controller_daily_inactive_water_use(sensor: HydrawiseSensor) -> float | None: - """Get inactive water use for the controller.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.total_inactive_use - - -def _get_controller_daily_active_water_time(sensor: HydrawiseSensor) -> float: - """Get active water time for the controller.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.total_active_time.total_seconds() - - -def _get_controller_daily_total_water_use(sensor: HydrawiseSensor) -> float | None: - """Get inactive water use for the controller.""" - daily_water_summary = sensor.coordinator.data.daily_water_summary[ - sensor.controller.id - ] - return daily_water_summary.total_use +def _get_water_use(sensor: HydrawiseSensor) -> ControllerWaterUseSummary: + return sensor.coordinator.data.daily_water_summary[sensor.controller.id] WATER_USE_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = ( @@ -98,7 +42,9 @@ WATER_USE_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] 
= ( translation_key="daily_active_water_time", device_class=SensorDeviceClass.DURATION, native_unit_of_measurement=UnitOfTime.SECONDS, - value_fn=_get_controller_daily_active_water_time, + value_fn=lambda sensor: _get_water_use( + sensor + ).total_active_time.total_seconds(), ), ) @@ -109,7 +55,11 @@ WATER_USE_ZONE_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = ( translation_key="daily_active_water_time", device_class=SensorDeviceClass.DURATION, native_unit_of_measurement=UnitOfTime.SECONDS, - value_fn=_get_zone_daily_active_water_time, + value_fn=lambda sensor: ( + _get_water_use(sensor) + .active_time_by_zone_id.get(sensor.zone.id, timedelta()) + .total_seconds() + ), ), ) @@ -119,21 +69,21 @@ FLOW_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = ( translation_key="daily_total_water_use", device_class=SensorDeviceClass.VOLUME, suggested_display_precision=1, - value_fn=_get_controller_daily_total_water_use, + value_fn=lambda sensor: _get_water_use(sensor).total_use, ), HydrawiseSensorEntityDescription( key="daily_active_water_use", translation_key="daily_active_water_use", device_class=SensorDeviceClass.VOLUME, suggested_display_precision=1, - value_fn=_get_controller_daily_active_water_use, + value_fn=lambda sensor: _get_water_use(sensor).total_active_use, ), HydrawiseSensorEntityDescription( key="daily_inactive_water_use", translation_key="daily_inactive_water_use", device_class=SensorDeviceClass.VOLUME, suggested_display_precision=1, - value_fn=_get_controller_daily_inactive_water_use, + value_fn=lambda sensor: _get_water_use(sensor).total_inactive_use, ), ) @@ -143,7 +93,9 @@ FLOW_ZONE_SENSORS: tuple[SensorEntityDescription, ...] = ( translation_key="daily_active_water_use", device_class=SensorDeviceClass.VOLUME, suggested_display_precision=1, - value_fn=_get_zone_daily_active_water_use, + value_fn=lambda sensor: float( + _get_water_use(sensor).active_use_by_zone_id.get(sensor.zone.id, 0.0) + ), ), ) @@ -152,13 +104,24 @@ ZONE_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] 
= ( key="next_cycle", translation_key="next_cycle", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=_get_zone_next_cycle, + value_fn=lambda sensor: ( + dt_util.as_utc(sensor.zone.scheduled_runs.next_run.start_time) + if sensor.zone.scheduled_runs.next_run is not None + else None + ), ), HydrawiseSensorEntityDescription( key="watering_time", translation_key="watering_time", native_unit_of_measurement=UnitOfTime.MINUTES, - value_fn=_get_zone_watering_time, + value_fn=lambda sensor: ( + int( + sensor.zone.scheduled_runs.current_run.remaining_time.total_seconds() + / 60 + ) + if sensor.zone.scheduled_runs.current_run is not None + else 0 + ), ), ) diff --git a/homeassistant/components/hyperion/config_flow.py b/homeassistant/components/hyperion/config_flow.py index 161c531328d..b2b7dbdf531 100644 --- a/homeassistant/components/hyperion/config_flow.py +++ b/homeassistant/components/hyperion/config_flow.py @@ -424,24 +424,22 @@ class HyperionConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> HyperionOptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> HyperionOptionsFlow: """Get the Hyperion Options flow.""" - return HyperionOptionsFlow(config_entry) + return HyperionOptionsFlow() class HyperionOptionsFlow(OptionsFlow): """Hyperion options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize a Hyperion options flow.""" - self._config_entry = config_entry - def _create_client(self) -> client.HyperionClient: """Create and connect a client instance.""" return create_hyperion_client( - self._config_entry.data[CONF_HOST], - self._config_entry.data[CONF_PORT], - token=self._config_entry.data.get(CONF_TOKEN), + self.config_entry.data[CONF_HOST], + self.config_entry.data[CONF_PORT], + token=self.config_entry.data.get(CONF_TOKEN), ) async def async_step_init( @@ -470,8 +468,7 @@ class HyperionOptionsFlow(OptionsFlow): return self.async_create_entry(title="", data=user_input) default_effect_show_list = list( - set(effects) - - set(self._config_entry.options.get(CONF_EFFECT_HIDE_LIST, [])) + set(effects) - set(self.config_entry.options.get(CONF_EFFECT_HIDE_LIST, [])) ) return self.async_show_form( @@ -480,7 +477,7 @@ class HyperionOptionsFlow(OptionsFlow): { vol.Optional( CONF_PRIORITY, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_PRIORITY, DEFAULT_PRIORITY ), ): vol.All(vol.Coerce(int), vol.Range(min=0, max=255)), diff --git a/homeassistant/components/hyperion/manifest.json b/homeassistant/components/hyperion/manifest.json index f18491044fa..684fb276f53 100644 --- a/homeassistant/components/hyperion/manifest.json +++ b/homeassistant/components/hyperion/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/hyperion", "iot_class": "local_push", "loggers": ["hyperion"], - "quality_scale": "platinum", "requirements": ["hyperion-py==0.7.5"], "ssdp": [ { diff --git a/homeassistant/components/iammeter/manifest.json b/homeassistant/components/iammeter/manifest.json index f1ebecab00d..22831767e62 100644 --- a/homeassistant/components/iammeter/manifest.json +++ b/homeassistant/components/iammeter/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/iammeter", "iot_class": "local_polling", "loggers": ["iammeter"], + "quality_scale": "legacy", "requirements": ["iammeter==0.2.1"] } diff --git a/homeassistant/components/ibeacon/config_flow.py 
b/homeassistant/components/ibeacon/config_flow.py index feb5a801d51..c00398e39b0 100644 --- a/homeassistant/components/ibeacon/config_flow.py +++ b/homeassistant/components/ibeacon/config_flow.py @@ -44,16 +44,12 @@ class IBeaconConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return IBeaconOptionsFlow(config_entry) + return IBeaconOptionsFlow() class IBeaconOptionsFlow(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult: """Manage the options.""" errors = {} diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json index 17a5f519274..0f8c9eaafc9 100644 --- a/homeassistant/components/idasen_desk/manifest.json +++ b/homeassistant/components/idasen_desk/manifest.json @@ -11,6 +11,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/idasen_desk", "iot_class": "local_push", - "quality_scale": "silver", "requirements": ["idasen-ha==2.6.2"] } diff --git a/homeassistant/components/idteck_prox/manifest.json b/homeassistant/components/idteck_prox/manifest.json index e1d9b8a7ba8..92055908591 100644 --- a/homeassistant/components/idteck_prox/manifest.json +++ b/homeassistant/components/idteck_prox/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/idteck_prox", "iot_class": "local_push", "loggers": ["rfk101py"], + "quality_scale": "legacy", "requirements": ["rfk101py==0.0.1"] } diff --git a/homeassistant/components/iglo/manifest.json b/homeassistant/components/iglo/manifest.json index f270d06bcae..7ce4804a516 100644 --- a/homeassistant/components/iglo/manifest.json +++ b/homeassistant/components/iglo/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/iglo", "iot_class": "local_polling", "loggers": ["iglo"], + "quality_scale": "legacy", "requirements": ["iglo==1.2.7"] } diff --git a/homeassistant/components/ign_sismologia/manifest.json b/homeassistant/components/ign_sismologia/manifest.json index c76013f6821..d371f0d3614 100644 --- a/homeassistant/components/ign_sismologia/manifest.json +++ b/homeassistant/components/ign_sismologia/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["georss_ign_sismologia_client"], + "quality_scale": "legacy", "requirements": ["georss-ign-sismologia-client==0.8"] } diff --git a/homeassistant/components/ihc/manifest.json b/homeassistant/components/ihc/manifest.json index 2400206c3a0..68cc1b2c754 100644 --- a/homeassistant/components/ihc/manifest.json +++ b/homeassistant/components/ihc/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ihc", "iot_class": "local_push", "loggers": ["ihcsdk"], + "quality_scale": "legacy", "requirements": ["defusedxml==0.7.1", "ihcsdk==2.8.5"] } diff --git a/homeassistant/components/image_processing/__init__.py b/homeassistant/components/image_processing/__init__.py index 2c1d0f9304c..0ac8d39813b 100644 --- a/homeassistant/components/image_processing/__init__.py +++ b/homeassistant/components/image_processing/__init__.py @@ -223,7 +223,7 @@ class ImageProcessingFaceEntity(ImageProcessingEntity): confidence = f_co for attr in (ATTR_NAME, ATTR_MOTION): if attr in face: - state = face[attr] # 
type: ignore[literal-required] + state = face[attr] break return state diff --git a/homeassistant/components/image_upload/manifest.json b/homeassistant/components/image_upload/manifest.json index 963721a0476..bb8c33ba749 100644 --- a/homeassistant/components/image_upload/manifest.json +++ b/homeassistant/components/image_upload/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/image_upload", "integration_type": "system", "quality_scale": "internal", - "requirements": ["Pillow==10.4.0"] + "requirements": ["Pillow==11.0.0"] } diff --git a/homeassistant/components/imap/config_flow.py b/homeassistant/components/imap/config_flow.py index 5bbb8599cf2..994c53b5b3e 100644 --- a/homeassistant/components/imap/config_flow.py +++ b/homeassistant/components/imap/config_flow.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_NAME, @@ -213,12 +213,12 @@ class IMAPConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> ImapOptionsFlow: """Get the options flow for this handler.""" - return OptionsFlow(config_entry) + return ImapOptionsFlow() -class OptionsFlow(OptionsFlowWithConfigEntry): +class ImapOptionsFlow(OptionsFlow): """Option flow handler.""" async def async_step_init( @@ -226,13 +226,13 @@ class OptionsFlow(OptionsFlowWithConfigEntry): ) -> ConfigFlowResult: """Manage the options.""" errors: dict[str, str] | None = None - entry_data: dict[str, Any] = dict(self._config_entry.data) + entry_data: dict[str, Any] = dict(self.config_entry.data) if user_input is not None: try: self._async_abort_entries_match( { - CONF_SERVER: self._config_entry.data[CONF_SERVER], - CONF_USERNAME: self._config_entry.data[CONF_USERNAME], + CONF_SERVER: self.config_entry.data[CONF_SERVER], + CONF_USERNAME: self.config_entry.data[CONF_USERNAME], CONF_FOLDER: user_input[CONF_FOLDER], CONF_SEARCH: user_input[CONF_SEARCH], } diff --git a/homeassistant/components/imap/coordinator.py b/homeassistant/components/imap/coordinator.py index a9d0fdfbd48..2726b47a679 100644 --- a/homeassistant/components/imap/coordinator.py +++ b/homeassistant/components/imap/coordinator.py @@ -332,7 +332,17 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]): raise UpdateFailed( f"Invalid response for search '{self.config_entry.data[CONF_SEARCH]}': {result} / {lines[0]}" ) - if not (count := len(message_ids := lines[0].split())): + # Check we do have returned items. + # + # In rare cases, when no UID's are returned, + # only the status line is returned, and not an empty line. + # See: https://github.com/home-assistant/core/issues/132042 + # + # Strictly the RfC notes that 0 or more numbers should be returned + # delimited by a space. 
+ # + # See: https://datatracker.ietf.org/doc/html/rfc3501#section-7.2.5 + if len(lines) == 1 or not (count := len(message_ids := lines[0].split())): self._last_message_uid = None return 0 last_message_uid = ( diff --git a/homeassistant/components/imap/quality_scale.yaml b/homeassistant/components/imap/quality_scale.yaml new file mode 100644 index 00000000000..180aef93f91 --- /dev/null +++ b/homeassistant/components/imap/quality_scale.yaml @@ -0,0 +1,97 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: + status: todo + comment: | + The package is only tested, but not built and published inside a CI pipeline yet. + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: > + Per IMAP service instance there is one numeric sensor entity to reflect + the actual number of emails for a service. There is no event registration. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: + status: done + comment: | + Logs for unavailability are on debug level to avoid flooding the logs. + entity-unavailable: + status: done + comment: > + An entity is available as long as the service is loaded. + An `unknown` value is set if the mail service is temporarily unavailable. + action-exceptions: done + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: + status: done + comment: The only entity supplied returns the primary value for the service. + discovery: + status: exempt + comment: | + Discovery for IMAP services is not desirable. + stale-devices: + status: exempt + comment: > + The device class is a service. When removed, entities are removed as well. + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: + status: todo + comment: | + Options can be set through the option flow, reconfiguration is not supported yet. + dynamic-devices: + status: exempt + comment: | + The device class is a service. + discovery-update-info: + status: exempt + comment: Discovery is not desirable for this integration. + repair-issues: + status: exempt + comment: There are no repairs currently. + docs-use-cases: done + docs-supported-devices: + status: exempt + comment: The device class is a service. + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration does not use web sessions. 
+ strict-typing: done diff --git a/homeassistant/components/imap/sensor.py b/homeassistant/components/imap/sensor.py index 625af9ce6a1..b484586e057 100644 --- a/homeassistant/components/imap/sensor.py +++ b/homeassistant/components/imap/sensor.py @@ -7,7 +7,7 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import CONF_USERNAME +from homeassistant.const import CONF_USERNAME, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -19,10 +19,10 @@ from .coordinator import ImapDataUpdateCoordinator IMAP_MAIL_COUNT_DESCRIPTION = SensorEntityDescription( key="imap_mail_count", + entity_category=EntityCategory.DIAGNOSTIC, state_class=SensorStateClass.MEASUREMENT, suggested_display_precision=0, translation_key="imap_mail_count", - name=None, ) diff --git a/homeassistant/components/imap/strings.json b/homeassistant/components/imap/strings.json index 7c4a0d9a973..8ff5d838199 100644 --- a/homeassistant/components/imap/strings.json +++ b/homeassistant/components/imap/strings.json @@ -10,8 +10,21 @@ "charset": "Character set", "folder": "Folder", "search": "IMAP search", + "event_message_data": "Message data to be included in the `imap_content` event data:", "ssl_cipher_list": "SSL cipher list (Advanced)", "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" + }, + "data_description": { + "username": "The IMAP username.", + "password": "The IMAP password.", + "server": "The IMAP server.", + "port": "The IMAP port supporting SSL, usually 993.", + "charset": "The character set used. Common values are `utf-8` or `US-ASCII`.", + "folder": "Generally the folder is set to `INBOX`, but e.g. in case of a subfolder named `Test`, this should be `INBOX.Test`.", + "search": "The IMAP search command, which is `UnSeen UnDeleted` by default.", + "event_message_data": "Note that the event size is limited, and not all message text might be sent with the event if the message is too large.", + "ssl_cipher_list": "If the IMAP service only supports legacy encryption, try to change this.", + "verify_ssl": "Recommended to ensure the server certificate is valid. Turn off if the server certificate is not trusted (e.g. self-signed)." } }, "reauth_confirm": { @@ -19,6 +32,9 @@ "title": "[%key:common::config_flow::title::reauth%]", "data": { "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Correct the IMAP password." } } }, @@ -35,6 +51,14 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, + "entity": { + "sensor": { + "imap_mail_count": { + "name": "Messages", + "unit_of_measurement": "messages" + } + } + }, "exceptions": { "copy_failed": { "message": "Copying the message failed with \"{error}\"." @@ -73,7 +97,15 @@ "custom_event_data_template": "Template to create custom event data", "max_message_size": "Max message size (2048 < size < 30000)", "enable_push": "Enable Push-IMAP if the server supports it. Turn off if Push-IMAP updates are unreliable.", - "event_message_data": "Message data to be included in the `imap_content` event data:" + "event_message_data": "Message data to be included in the `imap_content` event data." 
+ }, + "data_description": { + "folder": "[%key:component::imap::config::step::user::data_description::folder%]", + "search": "[%key:component::imap::config::step::user::data_description::search%]", + "event_message_data": "[%key:component::imap::config::step::user::data_description::event_message_data%]", + "custom_event_data_template": "This template is evaluated when a new message was received, and the result is added to the `custom` attribute of the event data.", + "max_message_size": "Limit the maximum size of the event. Instead of passing the (whole) text message, using a template is a better option.", + "enable_push": "Using Push-IMAP is recommended. Polling will increase the time to respond." } } }, diff --git a/homeassistant/components/imgw_pib/manifest.json b/homeassistant/components/imgw_pib/manifest.json index c01be10fc68..b5c35f3f1eb 100644 --- a/homeassistant/components/imgw_pib/manifest.json +++ b/homeassistant/components/imgw_pib/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/imgw_pib", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["imgw_pib==1.0.6"] } diff --git a/homeassistant/components/influxdb/manifest.json b/homeassistant/components/influxdb/manifest.json index ad3f282eff7..55af2b37fb7 100644 --- a/homeassistant/components/influxdb/manifest.json +++ b/homeassistant/components/influxdb/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/influxdb", "iot_class": "local_push", "loggers": ["influxdb", "influxdb_client"], + "quality_scale": "legacy", "requirements": ["influxdb==5.3.1", "influxdb-client==1.24.0"] } diff --git a/homeassistant/components/input_number/strings.json b/homeassistant/components/input_number/strings.json index 8a2351ebad4..ed6b6fad208 100644 --- a/homeassistant/components/input_number/strings.json +++ b/homeassistant/components/input_number/strings.json @@ -41,7 +41,7 @@ }, "increment": { "name": "Increment", - "description": "Increments the value by 1 step." + "description": "Increments the current value by 1 step." 
}, "set_value": { "name": "Set", diff --git a/homeassistant/components/integration/strings.json b/homeassistant/components/integration/strings.json index 6186521aa1b..ed4f5de3ea7 100644 --- a/homeassistant/components/integration/strings.json +++ b/homeassistant/components/integration/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Riemann sum integral sensor", + "title": "Create Riemann sum integral sensor", "description": "Create a sensor that calculates a Riemann sum to estimate the integral of a sensor.", "data": { "method": "Integration method", diff --git a/homeassistant/components/intent/__init__.py b/homeassistant/components/intent/__init__.py index 1322576f115..1ffb8747d91 100644 --- a/homeassistant/components/intent/__init__.py +++ b/homeassistant/components/intent/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from datetime import datetime import logging from typing import Any, Protocol @@ -42,9 +41,11 @@ from homeassistant.const import ( from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.helpers import config_validation as cv, integration_platform, intent from homeassistant.helpers.typing import ConfigType +from homeassistant.util import dt as dt_util from .const import DOMAIN, TIMER_DATA from .timers import ( + CancelAllTimersIntentHandler, CancelTimerIntentHandler, DecreaseTimerIntentHandler, IncreaseTimerIntentHandler, @@ -130,6 +131,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: intent.async_register(hass, SetPositionIntentHandler()) intent.async_register(hass, StartTimerIntentHandler()) intent.async_register(hass, CancelTimerIntentHandler()) + intent.async_register(hass, CancelAllTimersIntentHandler()) intent.async_register(hass, IncreaseTimerIntentHandler()) intent.async_register(hass, DecreaseTimerIntentHandler()) intent.async_register(hass, PauseTimerIntentHandler()) @@ -405,7 +407,7 @@ class GetCurrentDateIntentHandler(intent.IntentHandler): async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: response = intent_obj.create_response() - response.async_set_speech_slots({"date": datetime.now().date()}) + response.async_set_speech_slots({"date": dt_util.now().date()}) return response @@ -417,7 +419,7 @@ class GetCurrentTimeIntentHandler(intent.IntentHandler): async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: response = intent_obj.create_response() - response.async_set_speech_slots({"time": datetime.now().time()}) + response.async_set_speech_slots({"time": dt_util.now().time()}) return response diff --git a/homeassistant/components/intent/timers.py b/homeassistant/components/intent/timers.py index 639744abc66..84b96492241 100644 --- a/homeassistant/components/intent/timers.py +++ b/homeassistant/components/intent/timers.py @@ -887,6 +887,36 @@ class CancelTimerIntentHandler(intent.IntentHandler): return intent_obj.create_response() +class CancelAllTimersIntentHandler(intent.IntentHandler): + """Intent handler for cancelling all timers.""" + + intent_type = intent.INTENT_CANCEL_ALL_TIMERS + description = "Cancels all timers" + slot_schema = { + vol.Optional("area"): cv.string, + } + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Handle the intent.""" + hass = intent_obj.hass + timer_manager: TimerManager = hass.data[TIMER_DATA] + slots = self.async_validate_slots(intent_obj.slots) + canceled = 0 + + for timer in _find_timers(hass, 
intent_obj.device_id, slots): + timer_manager.cancel_timer(timer.id) + canceled += 1 + + response = intent_obj.create_response() + speech_slots = {"canceled": canceled} + if "area" in slots: + speech_slots["area"] = slots["area"]["value"] + + response.async_set_speech_slots(speech_slots) + + return response + + class IncreaseTimerIntentHandler(intent.IntentHandler): """Intent handler for increasing the time of a timer.""" diff --git a/homeassistant/components/intesishome/manifest.json b/homeassistant/components/intesishome/manifest.json index 6b7a579d99f..ab306fb4773 100644 --- a/homeassistant/components/intesishome/manifest.json +++ b/homeassistant/components/intesishome/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/intesishome", "iot_class": "cloud_push", "loggers": ["pyintesishome"], + "quality_scale": "legacy", "requirements": ["pyintesishome==1.8.0"] } diff --git a/homeassistant/components/iperf3/manifest.json b/homeassistant/components/iperf3/manifest.json index a1bb26ddc1a..16e33e47331 100644 --- a/homeassistant/components/iperf3/manifest.json +++ b/homeassistant/components/iperf3/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/iperf3", "iot_class": "local_polling", "loggers": ["iperf3"], + "quality_scale": "legacy", "requirements": ["iperf3==0.1.11"] } diff --git a/homeassistant/components/ipp/manifest.json b/homeassistant/components/ipp/manifest.json index baa41cf00bd..54c26b63585 100644 --- a/homeassistant/components/ipp/manifest.json +++ b/homeassistant/components/ipp/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["deepmerge", "pyipp"], - "quality_scale": "platinum", "requirements": ["pyipp==0.17.0"], "zeroconf": ["_ipps._tcp.local.", "_ipp._tcp.local."] } diff --git a/homeassistant/components/iqvia/manifest.json b/homeassistant/components/iqvia/manifest.json index 6142fa1349e..11c99a7428f 100644 --- a/homeassistant/components/iqvia/manifest.json +++ b/homeassistant/components/iqvia/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyiqvia"], - "requirements": ["numpy==1.26.4", "pyiqvia==2022.04.0"] + "requirements": ["numpy==2.1.3", "pyiqvia==2022.04.0"] } diff --git a/homeassistant/components/irish_rail_transport/manifest.json b/homeassistant/components/irish_rail_transport/manifest.json index bb9b0d59ef0..2a118f17e2a 100644 --- a/homeassistant/components/irish_rail_transport/manifest.json +++ b/homeassistant/components/irish_rail_transport/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/irish_rail_transport", "iot_class": "cloud_polling", "loggers": ["pyirishrail"], + "quality_scale": "legacy", "requirements": ["pyirishrail==0.0.2"] } diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index da82b76f92e..699f5a01704 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -37,15 +37,14 @@ class IronOSLiveDataCoordinator(DataUpdateCoordinator[LiveDataResponse]): ) self.device = device - async def _async_setup(self) -> None: - """Set up the coordinator.""" - - self.device_info = await self.device.get_device_info() - async def _async_update_data(self) -> LiveDataResponse: """Fetch data from Device.""" try: + # device info is cached and won't be refetched on every + # coordinator refresh, only after the device has disconnected 
+ # the device info is refetched + self.device_info = await self.device.get_device_info() return await self.device.get_live_data() except CommunicationError as e: @@ -60,6 +59,7 @@ class IronOSFirmwareUpdateCoordinator(DataUpdateCoordinator[GitHubReleaseModel]) super().__init__( hass, _LOGGER, + config_entry=None, name=DOMAIN, update_interval=SCAN_INTERVAL_GITHUB, ) diff --git a/homeassistant/components/iron_os/manifest.json b/homeassistant/components/iron_os/manifest.json index 9fcb84e0f6a..4ec08a43b61 100644 --- a/homeassistant/components/iron_os/manifest.json +++ b/homeassistant/components/iron_os/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/iron_os", "iot_class": "local_polling", "loggers": ["pynecil", "aiogithubapi"], - "requirements": ["pynecil==0.2.0", "aiogithubapi==24.6.0"] + "requirements": ["pynecil==0.2.1", "aiogithubapi==24.6.0"] } diff --git a/homeassistant/components/iron_os/strings.json b/homeassistant/components/iron_os/strings.json index 75584fe191c..92441b39fc3 100644 --- a/homeassistant/components/iron_os/strings.json +++ b/homeassistant/components/iron_os/strings.json @@ -5,10 +5,13 @@ "description": "[%key:component::bluetooth::config::step::user::description%]", "data": { "address": "[%key:common::config_flow::data::device%]" + }, + "data_description": { + "address": "Ensure your device is powered on and within Bluetooth range before continuing" } }, "bluetooth_confirm": { - "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]" + "description": "Do you want to set up {name}?\n\n*Ensure your device is powered on and within Bluetooth range before continuing*" } }, "abort": { diff --git a/homeassistant/components/islamic_prayer_times/config_flow.py b/homeassistant/components/islamic_prayer_times/config_flow.py index 2db89183499..ce911ccc49d 100644 --- a/homeassistant/components/islamic_prayer_times/config_flow.py +++ b/homeassistant/components/islamic_prayer_times/config_flow.py @@ -52,7 +52,7 @@ class IslamicPrayerFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> IslamicPrayerOptionsFlowHandler: """Get the options flow for this handler.""" - return IslamicPrayerOptionsFlowHandler(config_entry) + return IslamicPrayerOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -93,10 +93,6 @@ class IslamicPrayerFlowHandler(ConfigFlow, domain=DOMAIN): class IslamicPrayerOptionsFlowHandler(OptionsFlow): """Handle Islamic Prayer client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/iss/config_flow.py b/homeassistant/components/iss/config_flow.py index 9cc533f5cc5..eaf01a6d094 100644 --- a/homeassistant/components/iss/config_flow.py +++ b/homeassistant/components/iss/config_flow.py @@ -1,5 +1,7 @@ """Config flow to configure iss component.""" +from __future__ import annotations + import voluptuous as vol from homeassistant.config_entries import ( @@ -23,9 +25,9 @@ class ISSConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user(self, user_input=None) -> ConfigFlowResult: 
"""Handle a flow initialized by the user.""" @@ -42,16 +44,10 @@ class ISSConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Config flow options handler for iss.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - self.options = dict(config_entry.options) - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: - self.options.update(user_input) - return self.async_create_entry(title="", data=self.options) + return self.async_create_entry(data=self.config_entry.options | user_input) return self.async_show_form( step_id="init", diff --git a/homeassistant/components/ista_ecotrend/strings.json b/homeassistant/components/ista_ecotrend/strings.json index f76cf5286cb..0757977a8ea 100644 --- a/homeassistant/components/ista_ecotrend/strings.json +++ b/homeassistant/components/ista_ecotrend/strings.json @@ -14,14 +14,23 @@ "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" - } + }, + "data_description": { + "email": "Enter the email address associated with your ista EcoTrend account", + "password": "Enter the password for your ista EcoTrend account" + }, + "description": "Connect your **ista EcoTrend** account to Home Assistant to access your monthly heating and water usage data." }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", - "description": "Please reenter the password for: {email}", + "description": "Re-enter your password for `{email}` to reconnect your ista EcoTrend account to Home Assistant.", "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "email": "[%key:component::ista_ecotrend::config::step::user::data_description::email%]", + "password": "[%key:component::ista_ecotrend::config::step::user::data_description::password%]" } } } diff --git a/homeassistant/components/isy994/config_flow.py b/homeassistant/components/isy994/config_flow.py index 0239926f5e3..3575fa99a55 100644 --- a/homeassistant/components/isy994/config_flow.py +++ b/homeassistant/components/isy994/config_flow.py @@ -140,7 +140,7 @@ class Isy994ConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -314,10 +314,6 @@ class Isy994ConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle a option flow for ISY/IoX.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/itach/manifest.json b/homeassistant/components/itach/manifest.json index 2928620b952..68b34b4321e 100644 --- a/homeassistant/components/itach/manifest.json +++ b/homeassistant/components/itach/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/itach", "iot_class": "assumed_state", + "quality_scale": "legacy", "requirements": ["pyitachip2ir==0.0.7"] } diff --git a/homeassistant/components/itunes/manifest.json b/homeassistant/components/itunes/manifest.json index 
f1135dbf847..a12271d04d7 100644 --- a/homeassistant/components/itunes/manifest.json +++ b/homeassistant/components/itunes/manifest.json @@ -3,5 +3,6 @@ "name": "Apple iTunes", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/itunes", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/jellyfin/config_flow.py b/homeassistant/components/jellyfin/config_flow.py index f60d96f3efa..0c170d2485f 100644 --- a/homeassistant/components/jellyfin/config_flow.py +++ b/homeassistant/components/jellyfin/config_flow.py @@ -8,11 +8,7 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import ( - ConfigFlow, - ConfigFlowResult, - OptionsFlowWithConfigEntry, -) +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import callback from homeassistant.util.uuid import random_uuid_hex @@ -143,12 +139,12 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: JellyfinConfigEntry, - ) -> OptionsFlowWithConfigEntry: + ) -> OptionsFlowHandler: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class OptionsFlowHandler(OptionsFlow): """Handle an option flow for jellyfin.""" async def async_step_init( diff --git a/homeassistant/components/jellyfin/sensor.py b/homeassistant/components/jellyfin/sensor.py index 24aeecab7e5..5c519f661ee 100644 --- a/homeassistant/components/jellyfin/sensor.py +++ b/homeassistant/components/jellyfin/sensor.py @@ -36,7 +36,6 @@ SENSOR_TYPES: tuple[JellyfinSensorEntityDescription, ...] 
= ( key="watching", translation_key="watching", value_fn=_count_now_playing, - native_unit_of_measurement="clients", ), ) diff --git a/homeassistant/components/jellyfin/strings.json b/homeassistant/components/jellyfin/strings.json index f2afa0c8ad5..a9816b1fb78 100644 --- a/homeassistant/components/jellyfin/strings.json +++ b/homeassistant/components/jellyfin/strings.json @@ -29,7 +29,8 @@ "entity": { "sensor": { "watching": { - "name": "Active clients" + "name": "Active clients", + "unit_of_measurement": "clients" } } }, diff --git a/homeassistant/components/jewish_calendar/__init__.py b/homeassistant/components/jewish_calendar/__init__.py index fd238e8d615..823e9bd59be 100644 --- a/homeassistant/components/jewish_calendar/__init__.py +++ b/homeassistant/components/jewish_calendar/__init__.py @@ -5,26 +5,17 @@ from __future__ import annotations from functools import partial from hdate import Location -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( CONF_ELEVATION, CONF_LANGUAGE, CONF_LATITUDE, - CONF_LOCATION, CONF_LONGITUDE, - CONF_NAME, CONF_TIME_ZONE, Platform, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback -import homeassistant.helpers.config_validation as cv -import homeassistant.helpers.entity_registry as er -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType +from homeassistant.core import HomeAssistant -from .binary_sensor import BINARY_SENSORS from .const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, @@ -33,94 +24,15 @@ from .const import ( DEFAULT_DIASPORA, DEFAULT_HAVDALAH_OFFSET_MINUTES, DEFAULT_LANGUAGE, - DEFAULT_NAME, - DOMAIN, ) -from .sensor import INFO_SENSORS, TIME_SENSORS +from .entity import JewishCalendarConfigEntry, JewishCalendarData PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR] -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.All( - cv.deprecated(DOMAIN), - { - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_DIASPORA, default=DEFAULT_DIASPORA): cv.boolean, - vol.Inclusive(CONF_LATITUDE, "coordinates"): cv.latitude, - vol.Inclusive(CONF_LONGITUDE, "coordinates"): cv.longitude, - vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In( - ["hebrew", "english"] - ), - vol.Optional( - CONF_CANDLE_LIGHT_MINUTES, default=DEFAULT_CANDLE_LIGHT - ): int, - # Default of 0 means use 8.5 degrees / 'three_stars' time. - vol.Optional( - CONF_HAVDALAH_OFFSET_MINUTES, - default=DEFAULT_HAVDALAH_OFFSET_MINUTES, - ): int, - }, - ) - }, - extra=vol.ALLOW_EXTRA, -) - -def get_unique_prefix( - location: Location, - language: str, - candle_lighting_offset: int | None, - havdalah_offset: int | None, -) -> str: - """Create a prefix for unique ids.""" - # location.altitude was unset before 2024.6 when this method - # was used to create the unique id. As such it would always - # use the default altitude of 754. 
- config_properties = [ - location.latitude, - location.longitude, - location.timezone, - 754, - location.diaspora, - language, - candle_lighting_offset, - havdalah_offset, - ] - prefix = "_".join(map(str, config_properties)) - return f"{prefix}" - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Jewish Calendar component.""" - if DOMAIN not in config: - return True - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - issue_domain=DOMAIN, - breaks_in_ha_version="2024.12.0", - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": DEFAULT_NAME, - }, - ) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] - ) - ) - - return True - - -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry +) -> bool: """Set up a configuration entry for Jewish calendar.""" language = config_entry.data.get(CONF_LANGUAGE, DEFAULT_LANGUAGE) diaspora = config_entry.data.get(CONF_DIASPORA, DEFAULT_DIASPORA) @@ -143,27 +55,19 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ) ) - hass.data.setdefault(DOMAIN, {})[config_entry.entry_id] = { - CONF_LANGUAGE: language, - CONF_DIASPORA: diaspora, - CONF_LOCATION: location, - CONF_CANDLE_LIGHT_MINUTES: candle_lighting_offset, - CONF_HAVDALAH_OFFSET_MINUTES: havdalah_offset, - } - - # Update unique ID to be unrelated to user defined options - old_prefix = get_unique_prefix( - location, language, candle_lighting_offset, havdalah_offset + config_entry.runtime_data = JewishCalendarData( + language, + diaspora, + location, + candle_lighting_offset, + havdalah_offset, ) - ent_reg = er.async_get(hass) - entries = er.async_entries_for_config_entry(ent_reg, config_entry.entry_id) - if not entries or any(entry.unique_id.startswith(old_prefix) for entry in entries): - async_update_unique_ids(ent_reg, config_entry.entry_id, old_prefix) - await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) - async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + async def update_listener( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry + ) -> None: # Trigger update of states for all platforms await hass.config_entries.async_reload(config_entry.entry_id) @@ -171,35 +75,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b return True -async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, config_entry: JewishCalendarConfigEntry +) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms( - config_entry, PLATFORMS - ) - - if unload_ok: - hass.data[DOMAIN].pop(config_entry.entry_id) - - return unload_ok - - -@callback -def async_update_unique_ids( - ent_reg: er.EntityRegistry, new_prefix: str, old_prefix: str -) -> None: - """Update unique ID to be unrelated to user defined options. 
- - Introduced with release 2024.6 - """ - platform_descriptions = { - Platform.BINARY_SENSOR: BINARY_SENSORS, - Platform.SENSOR: (*INFO_SENSORS, *TIME_SENSORS), - } - for platform, descriptions in platform_descriptions.items(): - for description in descriptions: - new_unique_id = f"{new_prefix}-{description.key}" - old_unique_id = f"{old_prefix}_{description.key}" - if entity_id := ent_reg.async_get_entity_id( - platform, DOMAIN, old_unique_id - ): - ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) diff --git a/homeassistant/components/jewish_calendar/binary_sensor.py b/homeassistant/components/jewish_calendar/binary_sensor.py index 060650ee25c..9fd1371f8a8 100644 --- a/homeassistant/components/jewish_calendar/binary_sensor.py +++ b/homeassistant/components/jewish_calendar/binary_sensor.py @@ -14,15 +14,13 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers import event from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from .const import DOMAIN -from .entity import JewishCalendarEntity +from .entity import JewishCalendarConfigEntry, JewishCalendarEntity @dataclass(frozen=True) @@ -63,14 +61,12 @@ BINARY_SENSORS: tuple[JewishCalendarBinarySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: JewishCalendarConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Jewish Calendar binary sensors.""" - entry = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( - JewishCalendarBinarySensor(config_entry, entry, description) + JewishCalendarBinarySensor(config_entry, description) for description in BINARY_SENSORS ) diff --git a/homeassistant/components/jewish_calendar/config_flow.py b/homeassistant/components/jewish_calendar/config_flow.py index f96699d01bd..a2eadbf57bd 100644 --- a/homeassistant/components/jewish_calendar/config_flow.py +++ b/homeassistant/components/jewish_calendar/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_ELEVATION, @@ -90,32 +90,21 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowWithConfigEntry: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> JewishCalendarOptionsFlowHandler: """Get the options flow for this handler.""" - return JewishCalendarOptionsFlowHandler(config_entry) + return JewishCalendarOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: - _options = {} - if CONF_CANDLE_LIGHT_MINUTES in user_input: - _options[CONF_CANDLE_LIGHT_MINUTES] = user_input[ - CONF_CANDLE_LIGHT_MINUTES - ] - del user_input[CONF_CANDLE_LIGHT_MINUTES] - if CONF_HAVDALAH_OFFSET_MINUTES in user_input: - _options[CONF_HAVDALAH_OFFSET_MINUTES] = user_input[ - CONF_HAVDALAH_OFFSET_MINUTES - ] - del user_input[CONF_HAVDALAH_OFFSET_MINUTES] if CONF_LOCATION in user_input: 
user_input[CONF_LATITUDE] = user_input[CONF_LOCATION][CONF_LATITUDE] user_input[CONF_LONGITUDE] = user_input[CONF_LOCATION][CONF_LONGITUDE] - return self.async_create_entry( - title=DEFAULT_NAME, data=user_input, options=_options - ) + return self.async_create_entry(title=DEFAULT_NAME, data=user_input) return self.async_show_form( step_id="user", @@ -124,10 +113,6 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_data) - async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -145,7 +130,7 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_update_reload_and_abort(reconfigure_entry, data=user_input) -class JewishCalendarOptionsFlowHandler(OptionsFlowWithConfigEntry): +class JewishCalendarOptionsFlowHandler(OptionsFlow): """Handle Jewish Calendar options.""" async def async_step_init( diff --git a/homeassistant/components/jewish_calendar/entity.py b/homeassistant/components/jewish_calendar/entity.py index c11925df954..1d2a6e45c0a 100644 --- a/homeassistant/components/jewish_calendar/entity.py +++ b/homeassistant/components/jewish_calendar/entity.py @@ -1,18 +1,27 @@ """Entity representing a Jewish Calendar sensor.""" -from typing import Any +from dataclasses import dataclass + +from hdate import Location from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_LANGUAGE, CONF_LOCATION from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity import Entity, EntityDescription -from .const import ( - CONF_CANDLE_LIGHT_MINUTES, - CONF_DIASPORA, - CONF_HAVDALAH_OFFSET_MINUTES, - DOMAIN, -) +from .const import DOMAIN + +type JewishCalendarConfigEntry = ConfigEntry[JewishCalendarData] + + +@dataclass +class JewishCalendarData: + """Jewish Calendar runtime dataclass.""" + + language: str + diaspora: bool + location: Location + candle_lighting_offset: int + havdalah_offset: int class JewishCalendarEntity(Entity): @@ -22,8 +31,7 @@ class JewishCalendarEntity(Entity): def __init__( self, - config_entry: ConfigEntry, - data: dict[str, Any], + config_entry: JewishCalendarConfigEntry, description: EntityDescription, ) -> None: """Initialize a Jewish Calendar entity.""" @@ -32,10 +40,11 @@ class JewishCalendarEntity(Entity): self._attr_device_info = DeviceInfo( entry_type=DeviceEntryType.SERVICE, identifiers={(DOMAIN, config_entry.entry_id)}, - name=config_entry.title, ) - self._location = data[CONF_LOCATION] - self._hebrew = data[CONF_LANGUAGE] == "hebrew" - self._candle_lighting_offset = data[CONF_CANDLE_LIGHT_MINUTES] - self._havdalah_offset = data[CONF_HAVDALAH_OFFSET_MINUTES] - self._diaspora = data[CONF_DIASPORA] + data = config_entry.runtime_data + self._location = data.location + self._hebrew = data.language == "hebrew" + self._language = data.language + self._candle_lighting_offset = data.candle_lighting_offset + self._havdalah_offset = data.havdalah_offset + self._diaspora = data.diaspora diff --git a/homeassistant/components/jewish_calendar/manifest.json b/homeassistant/components/jewish_calendar/manifest.json index 2642f6c81e9..aca45320002 100644 --- a/homeassistant/components/jewish_calendar/manifest.json +++ b/homeassistant/components/jewish_calendar/manifest.json @@ -6,7 +6,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/jewish_calendar", "iot_class": "calculated", "loggers": ["hdate"], - "quality_scale": "silver", - "requirements": ["hdate==0.10.9"], + "requirements": ["hdate==0.11.1"], "single_config_entry": true } diff --git a/homeassistant/components/jewish_calendar/sensor.py b/homeassistant/components/jewish_calendar/sensor.py index 87b4375b8b2..d3e70eb411c 100644 --- a/homeassistant/components/jewish_calendar/sensor.py +++ b/homeassistant/components/jewish_calendar/sensor.py @@ -14,15 +14,13 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import SUN_EVENT_SUNSET, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sun import get_astral_event_date import homeassistant.util.dt as dt_util -from .const import DOMAIN -from .entity import JewishCalendarEntity +from .entity import JewishCalendarConfigEntry, JewishCalendarEntity _LOGGER = logging.getLogger(__name__) @@ -169,17 +167,15 @@ TIME_SENSORS: tuple[SensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: JewishCalendarConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Jewish calendar sensors .""" - entry = hass.data[DOMAIN][config_entry.entry_id] sensors = [ - JewishCalendarSensor(config_entry, entry, description) - for description in INFO_SENSORS + JewishCalendarSensor(config_entry, description) for description in INFO_SENSORS ] sensors.extend( - JewishCalendarTimeSensor(config_entry, entry, description) + JewishCalendarTimeSensor(config_entry, description) for description in TIME_SENSORS ) @@ -193,12 +189,11 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): def __init__( self, - config_entry: ConfigEntry, - data: dict[str, Any], + config_entry: JewishCalendarConfigEntry, description: SensorEntityDescription, ) -> None: """Initialize the Jewish calendar sensor.""" - super().__init__(config_entry, data, description) + super().__init__(config_entry, description) self._attrs: dict[str, str] = {} async def async_update(self) -> None: @@ -280,15 +275,18 @@ class JewishCalendarSensor(JewishCalendarEntity, SensorEntity): # Compute the weekly portion based on the upcoming shabbat. 
return after_tzais_date.upcoming_shabbat.parasha if self.entity_description.key == "holiday": - self._attrs = { - "id": after_shkia_date.holiday_name, - "type": after_shkia_date.holiday_type.name, - "type_id": after_shkia_date.holiday_type.value, - } - self._attr_options = [ - h.description.hebrew.long if self._hebrew else h.description.english - for h in htables.HOLIDAYS - ] + _id = _type = _type_id = "" + _holiday_type = after_shkia_date.holiday_type + if isinstance(_holiday_type, list): + _id = ", ".join(after_shkia_date.holiday_name) + _type = ", ".join([_htype.name for _htype in _holiday_type]) + _type_id = ", ".join([str(_htype.value) for _htype in _holiday_type]) + else: + _id = after_shkia_date.holiday_name + _type = _holiday_type.name + _type_id = _holiday_type.value + self._attrs = {"id": _id, "type": _type, "type_id": _type_id} + self._attr_options = htables.get_all_holidays(self._language) return after_shkia_date.holiday_description if self.entity_description.key == "omer_count": diff --git a/homeassistant/components/jewish_calendar/strings.json b/homeassistant/components/jewish_calendar/strings.json index e5367b5819e..1b7b86c0056 100644 --- a/homeassistant/components/jewish_calendar/strings.json +++ b/homeassistant/components/jewish_calendar/strings.json @@ -27,7 +27,8 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "options": { diff --git a/homeassistant/components/joaoapps_join/manifest.json b/homeassistant/components/joaoapps_join/manifest.json index 36d54ec6d55..55a908bf090 100644 --- a/homeassistant/components/joaoapps_join/manifest.json +++ b/homeassistant/components/joaoapps_join/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/joaoapps_join", "iot_class": "cloud_push", "loggers": ["pyjoin"], + "quality_scale": "legacy", "requirements": ["python-join-api==0.0.9"] } diff --git a/homeassistant/components/kaiterra/manifest.json b/homeassistant/components/kaiterra/manifest.json index 12ac1559fd7..88651565cd0 100644 --- a/homeassistant/components/kaiterra/manifest.json +++ b/homeassistant/components/kaiterra/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kaiterra", "iot_class": "cloud_polling", "loggers": ["kaiterra_async_client"], + "quality_scale": "legacy", "requirements": ["kaiterra-async-client==1.0.0"] } diff --git a/homeassistant/components/kankun/manifest.json b/homeassistant/components/kankun/manifest.json index c15a87eacaa..473209508ac 100644 --- a/homeassistant/components/kankun/manifest.json +++ b/homeassistant/components/kankun/manifest.json @@ -3,5 +3,6 @@ "name": "Kankun", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/kankun", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/keba/manifest.json b/homeassistant/components/keba/manifest.json index 42f2762ef3d..d86ce053187 100644 --- a/homeassistant/components/keba/manifest.json +++ b/homeassistant/components/keba/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/keba", "iot_class": "local_polling", "loggers": ["keba_kecontact"], + "quality_scale": "legacy", "requirements": ["keba-kecontact==1.1.0"] } diff --git 
a/homeassistant/components/keenetic_ndms2/config_flow.py b/homeassistant/components/keenetic_ndms2/config_flow.py index 69e81bf292d..d11fedac385 100644 --- a/homeassistant/components/keenetic_ndms2/config_flow.py +++ b/homeassistant/components/keenetic_ndms2/config_flow.py @@ -55,7 +55,7 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> KeeneticOptionsFlowHandler: """Get the options flow for this handler.""" - return KeeneticOptionsFlowHandler(config_entry) + return KeeneticOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -138,9 +138,8 @@ class KeeneticFlowHandler(ConfigFlow, domain=DOMAIN): class KeeneticOptionsFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._interface_options: dict[str, str] = {} async def async_step_init( diff --git a/homeassistant/components/kef/manifest.json b/homeassistant/components/kef/manifest.json index 29e398994f4..1bbce2ff35d 100644 --- a/homeassistant/components/kef/manifest.json +++ b/homeassistant/components/kef/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kef", "iot_class": "local_polling", "loggers": ["aiokef", "tenacity"], + "quality_scale": "legacy", "requirements": ["aiokef==0.2.16", "getmac==0.9.4"] } diff --git a/homeassistant/components/keyboard/manifest.json b/homeassistant/components/keyboard/manifest.json index ea6d0aa20c2..e4a6606fb80 100644 --- a/homeassistant/components/keyboard/manifest.json +++ b/homeassistant/components/keyboard/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/keyboard", "iot_class": "local_push", "loggers": ["pykeyboard"], + "quality_scale": "legacy", "requirements": ["pyuserinput==0.1.11"] } diff --git a/homeassistant/components/keyboard_remote/manifest.json b/homeassistant/components/keyboard_remote/manifest.json index bb84b32defc..b405f36bb23 100644 --- a/homeassistant/components/keyboard_remote/manifest.json +++ b/homeassistant/components/keyboard_remote/manifest.json @@ -6,5 +6,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["aionotify", "evdev"], + "quality_scale": "legacy", "requirements": ["evdev==1.6.1", "asyncinotify==4.0.2"] } diff --git a/homeassistant/components/kira/manifest.json b/homeassistant/components/kira/manifest.json index c8a476b07c9..60901d13f4e 100644 --- a/homeassistant/components/kira/manifest.json +++ b/homeassistant/components/kira/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kira", "iot_class": "local_push", "loggers": ["pykira"], + "quality_scale": "legacy", "requirements": ["pykira==0.1.1"] } diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index 986879e3058..019d1dddcad 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.core import callback @@ -33,7 +33,7 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async 
def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" @@ -54,7 +54,7 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="reauth_successful") -class OptionsFlowHandler(OptionsFlowWithConfigEntry): +class OptionsFlowHandler(OptionsFlow): """Handle options.""" async def async_step_init( @@ -68,8 +68,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry): ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: - self.options.update(user_input) - return await self._update_options() + return self.async_create_entry(data=self.config_entry.options | user_input) return self.async_show_form( step_id="options_1", @@ -95,7 +94,3 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry): } ), ) - - async def _update_options(self) -> ConfigFlowResult: - """Update config entry options.""" - return self.async_create_entry(title="", data=self.options) diff --git a/homeassistant/components/kiwi/manifest.json b/homeassistant/components/kiwi/manifest.json index 60b0d1fd28b..74a27776128 100644 --- a/homeassistant/components/kiwi/manifest.json +++ b/homeassistant/components/kiwi/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kiwi", "iot_class": "cloud_polling", "loggers": ["kiwiki"], + "quality_scale": "legacy", "requirements": ["kiwiki-client==0.1.1"] } diff --git a/homeassistant/components/kmtronic/config_flow.py b/homeassistant/components/kmtronic/config_flow.py index 6bf0b878f72..56b1d4675bc 100644 --- a/homeassistant/components/kmtronic/config_flow.py +++ b/homeassistant/components/kmtronic/config_flow.py @@ -66,7 +66,7 @@ class KmtronicConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> KMTronicOptionsFlow: """Get the options flow for this handler.""" - return KMTronicOptionsFlow(config_entry) + return KMTronicOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -102,10 +102,6 @@ class InvalidAuth(HomeAssistantError): class KMTronicOptionsFlow(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/knocki/__init__.py b/homeassistant/components/knocki/__init__.py index 42c3956bd68..dfdf060e3b5 100644 --- a/homeassistant/components/knocki/__init__.py +++ b/homeassistant/components/knocki/__init__.py @@ -41,13 +41,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: KnockiConfigEntry) -> bo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_create_background_task( - hass, client.start_websocket(), "knocki-websocket" - ) + await client.start_websocket() return True async def async_unload_entry(hass: HomeAssistant, entry: KnockiConfigEntry) -> bool: """Unload a config entry.""" + await entry.runtime_data.client.close() return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/knocki/manifest.json b/homeassistant/components/knocki/manifest.json index d9a45b18f0e..a91119ca831 100644 --- a/homeassistant/components/knocki/manifest.json +++ b/homeassistant/components/knocki/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["knocki"], - "requirements": ["knocki==0.3.5"] + "requirements": ["knocki==0.4.2"] } diff 
--git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index fe6f3ad8892..ea654c358e7 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -29,7 +29,6 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceEntry from homeassistant.helpers.reload import async_integration_yaml_config from homeassistant.helpers.storage import STORAGE_DIR @@ -55,6 +54,7 @@ from .const import ( CONF_KNX_SECURE_USER_PASSWORD, CONF_KNX_STATE_UPDATER, CONF_KNX_TELEGRAM_LOG_SIZE, + CONF_KNX_TUNNEL_ENDPOINT_IA, CONF_KNX_TUNNELING, CONF_KNX_TUNNELING_TCP, CONF_KNX_TUNNELING_TCP_SECURE, @@ -102,20 +102,6 @@ _KNX_YAML_CONFIG: Final = "knx_yaml_config" CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.All( - # deprecated since 2021.12 - cv.deprecated(CONF_KNX_STATE_UPDATER), - cv.deprecated(CONF_KNX_RATE_LIMIT), - cv.deprecated(CONF_KNX_ROUTING), - cv.deprecated(CONF_KNX_TUNNELING), - cv.deprecated(CONF_KNX_INDIVIDUAL_ADDRESS), - cv.deprecated(CONF_KNX_MCAST_GRP), - cv.deprecated(CONF_KNX_MCAST_PORT), - cv.deprecated("event_filter"), - # deprecated since 2021.4 - cv.deprecated("config_file"), - # deprecated since 2021.2 - cv.deprecated("fire_event"), - cv.deprecated("fire_event_filter"), vol.Schema( { **EventSchema.SCHEMA, @@ -367,6 +353,7 @@ class KNXModule: if _conn_type == CONF_KNX_TUNNELING_TCP: return ConnectionConfig( connection_type=ConnectionType.TUNNELING_TCP, + individual_address=self.entry.data.get(CONF_KNX_TUNNEL_ENDPOINT_IA), gateway_ip=self.entry.data[CONF_HOST], gateway_port=self.entry.data[CONF_PORT], auto_reconnect=True, @@ -379,6 +366,7 @@ class KNXModule: if _conn_type == CONF_KNX_TUNNELING_TCP_SECURE: return ConnectionConfig( connection_type=ConnectionType.TUNNELING_TCP_SECURE, + individual_address=self.entry.data.get(CONF_KNX_TUNNEL_ENDPOINT_IA), gateway_ip=self.entry.data[CONF_HOST], gateway_port=self.entry.data[CONF_PORT], secure_config=SecureConfig( diff --git a/homeassistant/components/knx/config_flow.py b/homeassistant/components/knx/config_flow.py index 4a71c600824..feeb7626577 100644 --- a/homeassistant/components/knx/config_flow.py +++ b/homeassistant/components/knx/config_flow.py @@ -770,7 +770,6 @@ class KNXOptionsFlow(KNXCommonFlow, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize KNX options flow.""" - self.config_entry = config_entry super().__init__(initial_data=config_entry.data) # type: ignore[arg-type] @callback diff --git a/homeassistant/components/knx/const.py b/homeassistant/components/knx/const.py index e22546d3806..a946ded0359 100644 --- a/homeassistant/components/knx/const.py +++ b/homeassistant/components/knx/const.py @@ -52,8 +52,8 @@ CONF_KNX_DEFAULT_RATE_LIMIT: Final = 0 DEFAULT_ROUTING_IA: Final = "0.0.240" CONF_KNX_TELEGRAM_LOG_SIZE: Final = "telegram_log_size" -TELEGRAM_LOG_DEFAULT: Final = 200 -TELEGRAM_LOG_MAX: Final = 5000 # ~2 MB or ~5 hours of reasonable bus load +TELEGRAM_LOG_DEFAULT: Final = 1000 +TELEGRAM_LOG_MAX: Final = 25000 # ~10 MB or ~25 hours of reasonable bus load ## # Secure constants @@ -104,7 +104,7 @@ class KNXConfigEntryData(TypedDict, total=False): route_back: bool # not required host: str # only required for tunnelling port: int # only required for tunnelling - tunnel_endpoint_ia: str | None + tunnel_endpoint_ia: str | None # tunnelling 
only - not required (use get()) # KNX secure user_id: int | None # not required user_password: str | None # not required diff --git a/homeassistant/components/knx/light.py b/homeassistant/components/knx/light.py index ba1194220c2..8e64b46c890 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -4,6 +4,7 @@ from __future__ import annotations from typing import Any, cast +from propcache import cached_property from xknx import XKNX from xknx.devices.light import ColorTemperatureType, Light as XknxLight, XYYColor @@ -389,39 +390,47 @@ class _KnxLight(LightEntity): ) return None - @property - def color_mode(self) -> ColorMode: - """Return the color mode of the light.""" - if self._device.supports_xyy_color: - return ColorMode.XY - if self._device.supports_hs_color: - return ColorMode.HS - if self._device.supports_rgbw: - return ColorMode.RGBW - if self._device.supports_color: - return ColorMode.RGB + @cached_property + def supported_color_modes(self) -> set[ColorMode]: + """Get supported color modes.""" + color_mode = set() if ( self._device.supports_color_temperature or self._device.supports_tunable_white ): - return ColorMode.COLOR_TEMP - if self._device.supports_brightness: - return ColorMode.BRIGHTNESS - return ColorMode.ONOFF - - @property - def supported_color_modes(self) -> set[ColorMode]: - """Flag supported color modes.""" - return {self.color_mode} + color_mode.add(ColorMode.COLOR_TEMP) + if self._device.supports_xyy_color: + color_mode.add(ColorMode.XY) + if self._device.supports_rgbw: + color_mode.add(ColorMode.RGBW) + elif self._device.supports_color: + # one of RGB or RGBW so individual color configurations work properly + color_mode.add(ColorMode.RGB) + if self._device.supports_hs_color: + color_mode.add(ColorMode.HS) + if not color_mode: + # brightness or on/off must be the only supported mode + if self._device.supports_brightness: + color_mode.add(ColorMode.BRIGHTNESS) + else: + color_mode.add(ColorMode.ONOFF) + return color_mode async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) - color_temp = kwargs.get(ATTR_COLOR_TEMP_KELVIN) - rgb = kwargs.get(ATTR_RGB_COLOR) - rgbw = kwargs.get(ATTR_RGBW_COLOR) - hs_color = kwargs.get(ATTR_HS_COLOR) - xy_color = kwargs.get(ATTR_XY_COLOR) + # LightEntity color translation will ensure that only attributes of supported + # color modes are passed to this method - so we can't set unsupported mode here + if color_temp := kwargs.get(ATTR_COLOR_TEMP_KELVIN): + self._attr_color_mode = ColorMode.COLOR_TEMP + if rgb := kwargs.get(ATTR_RGB_COLOR): + self._attr_color_mode = ColorMode.RGB + if rgbw := kwargs.get(ATTR_RGBW_COLOR): + self._attr_color_mode = ColorMode.RGBW + if hs_color := kwargs.get(ATTR_HS_COLOR): + self._attr_color_mode = ColorMode.HS + if xy_color := kwargs.get(ATTR_XY_COLOR): + self._attr_color_mode = ColorMode.XY if ( not self.is_on @@ -500,17 +509,17 @@ class _KnxLight(LightEntity): await self._device.set_brightness(brightness) return # brightness without color in kwargs; set via color - if self.color_mode == ColorMode.XY: + if self._attr_color_mode == ColorMode.XY: await self._device.set_xyy_color(XYYColor(brightness=brightness)) return # default to white if color not known for RGB(W) - if self.color_mode == ColorMode.RGBW: + if self._attr_color_mode == ColorMode.RGBW: _rgbw = self.rgbw_color if not _rgbw or not any(_rgbw): _rgbw = (0, 0, 0, 255) await set_color(_rgbw[:3], _rgbw[3], brightness) return - if 
self.color_mode == ColorMode.RGB: + if self._attr_color_mode == ColorMode.RGB: _rgb = self.rgb_color if not _rgb or not any(_rgb): _rgb = (255, 255, 255) @@ -533,6 +542,7 @@ class KnxYamlLight(_KnxLight, KnxYamlEntity): knx_module=knx_module, device=_create_yaml_light(knx_module.xknx, config), ) + self._attr_color_mode = next(iter(self.supported_color_modes)) self._attr_max_color_temp_kelvin: int = config[LightSchema.CONF_MAX_KELVIN] self._attr_min_color_temp_kelvin: int = config[LightSchema.CONF_MIN_KELVIN] self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) @@ -566,5 +576,6 @@ class KnxUiLight(_KnxLight, KnxUiEntity): self._device = _create_ui_light( knx_module.xknx, config[DOMAIN], config[CONF_ENTITY][CONF_NAME] ) + self._attr_color_mode = next(iter(self.supported_color_modes)) self._attr_max_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MAX] self._attr_min_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MIN] diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index df895282a2b..aed7f3ed455 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -9,11 +9,10 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["xknx", "xknxproject"], - "quality_scale": "platinum", "requirements": [ "xknx==3.3.0", "xknxproject==3.8.1", - "knx-frontend==2024.9.10.221729" + "knx-frontend==2024.11.16.205004" ], "single_config_entry": true } diff --git a/homeassistant/components/knx/schema.py b/homeassistant/components/knx/schema.py index bf2fc55e5c9..9311046e410 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ -222,9 +222,6 @@ class BinarySensorSchema(KNXPlatformSchema): DEFAULT_NAME = "KNX Binary Sensor" ENTITY_SCHEMA = vol.All( - # deprecated since September 2020 - cv.deprecated("significant_bit"), - cv.deprecated("automation"), vol.Schema( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ -358,10 +355,6 @@ class ClimateSchema(KNXPlatformSchema): DEFAULT_FAN_SPEED_MODE = "percent" ENTITY_SCHEMA = vol.All( - # deprecated since September 2020 - cv.deprecated("setpoint_shift_step", replacement_key=CONF_TEMPERATURE_STEP), - # deprecated since 2021.6 - cv.deprecated("create_temperature_sensors"), vol.Schema( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ -969,8 +962,6 @@ class WeatherSchema(KNXPlatformSchema): DEFAULT_NAME = "KNX Weather Station" ENTITY_SCHEMA = vol.All( - # deprecated since 2021.6 - cv.deprecated("create_sensors"), vol.Schema( { vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index 8d8692f6b7a..08b921f316b 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -294,19 +294,24 @@ "name": "Connection type" }, "telegrams_incoming": { - "name": "Incoming telegrams" + "name": "Incoming telegrams", + "unit_of_measurement": "[%key:component::knx::entity::sensor::telegram_count::unit_of_measurement%]" }, "telegrams_incoming_error": { - "name": "Incoming telegram errors" + "name": "Incoming telegram errors", + "unit_of_measurement": "errors" }, "telegrams_outgoing": { - "name": "Outgoing telegrams" + "name": "Outgoing telegrams", + "unit_of_measurement": "[%key:component::knx::entity::sensor::telegram_count::unit_of_measurement%]" }, "telegrams_outgoing_error": { - "name": "Outgoing telegram errors" + "name": "Outgoing 
telegram errors", + "unit_of_measurement": "[%key:component::knx::entity::sensor::telegrams_incoming_error::unit_of_measurement%]" }, "telegram_count": { - "name": "Telegrams" + "name": "Telegrams", + "unit_of_measurement": "telegrams" } } }, diff --git a/homeassistant/components/knx/telegrams.py b/homeassistant/components/knx/telegrams.py index f4b31fd11f9..dcd5f477679 100644 --- a/homeassistant/components/knx/telegrams.py +++ b/homeassistant/components/knx/telegrams.py @@ -75,6 +75,7 @@ class Telegrams: ) ) self.recent_telegrams: deque[TelegramDict] = deque(maxlen=log_size) + self.last_ga_telegrams: dict[str, TelegramDict] = {} async def load_history(self) -> None: """Load history from store.""" @@ -88,6 +89,9 @@ class Telegrams: if isinstance(telegram["payload"], list): telegram["payload"] = tuple(telegram["payload"]) # type: ignore[unreachable] self.recent_telegrams.extend(telegrams) + self.last_ga_telegrams = { + t["destination"]: t for t in telegrams if t["payload"] is not None + } async def save_history(self) -> None: """Save history to store.""" @@ -98,6 +102,9 @@ class Telegrams: """Handle incoming and outgoing telegrams from xknx.""" telegram_dict = self.telegram_to_dict(telegram) self.recent_telegrams.append(telegram_dict) + if telegram_dict["payload"] is not None: + # exclude GroupValueRead telegrams + self.last_ga_telegrams[telegram_dict["destination"]] = telegram_dict async_dispatcher_send(self.hass, SIGNAL_KNX_TELEGRAM, telegram, telegram_dict) def telegram_to_dict(self, telegram: Telegram) -> TelegramDict: diff --git a/homeassistant/components/knx/websocket.py b/homeassistant/components/knx/websocket.py index 6cb2218b221..9ba3e0ccff6 100644 --- a/homeassistant/components/knx/websocket.py +++ b/homeassistant/components/knx/websocket.py @@ -47,6 +47,7 @@ async def register_panel(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, ws_project_file_process) websocket_api.async_register_command(hass, ws_project_file_remove) websocket_api.async_register_command(hass, ws_group_monitor_info) + websocket_api.async_register_command(hass, ws_group_telegrams) websocket_api.async_register_command(hass, ws_subscribe_telegram) websocket_api.async_register_command(hass, ws_get_knx_project) websocket_api.async_register_command(hass, ws_validate_entity) @@ -287,6 +288,27 @@ def ws_group_monitor_info( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "knx/group_telegrams", + } +) +@provide_knx +@callback +def ws_group_telegrams( + hass: HomeAssistant, + knx: KNXModule, + connection: websocket_api.ActiveConnection, + msg: dict, +) -> None: + """Handle get group telegrams command.""" + connection.send_result( + msg["id"], + knx.telegrams.last_ga_telegrams, + ) + + @websocket_api.require_admin @websocket_api.websocket_command( { diff --git a/homeassistant/components/kodi/config_flow.py b/homeassistant/components/kodi/config_flow.py index ef0798220dd..f87b94b23fd 100644 --- a/homeassistant/components/kodi/config_flow.py +++ b/homeassistant/components/kodi/config_flow.py @@ -145,6 +145,7 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle user-confirmation of discovered node.""" if user_input is None: + assert self._name is not None return self.async_show_form( step_id="discovery_confirm", description_placeholders={"name": self._name}, diff --git a/homeassistant/components/konnected/config_flow.py b/homeassistant/components/konnected/config_flow.py index 3f1ef99c6fb..65dd7cf39b3 100644 --- 
a/homeassistant/components/konnected/config_flow.py +++ b/homeassistant/components/konnected/config_flow.py @@ -402,9 +402,10 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.entry = config_entry - self.model = self.entry.data[CONF_MODEL] - self.current_opt = self.entry.options or self.entry.data[CONF_DEFAULT_OPTIONS] + self.model = config_entry.data[CONF_MODEL] + self.current_opt = ( + config_entry.options or config_entry.data[CONF_DEFAULT_OPTIONS] + ) # as config proceeds we'll build up new options and then replace what's in the config entry self.new_opt: dict[str, Any] = {CONF_IO: {}} @@ -475,7 +476,7 @@ class OptionsFlowHandler(OptionsFlow): ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) @@ -511,7 +512,7 @@ class OptionsFlowHandler(OptionsFlow): ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) @@ -571,7 +572,7 @@ class OptionsFlowHandler(OptionsFlow): ), description_placeholders={ "model": KONN_PANEL_MODEL_NAMES[self.model], - "host": self.entry.data[CONF_HOST], + "host": self.config_entry.data[CONF_HOST], }, errors=errors, ) diff --git a/homeassistant/components/kostal_plenticore/manifest.json b/homeassistant/components/kostal_plenticore/manifest.json index d65368e7ee4..09352fa7a80 100644 --- a/homeassistant/components/kostal_plenticore/manifest.json +++ b/homeassistant/components/kostal_plenticore/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/kostal_plenticore", "iot_class": "local_polling", "loggers": ["kostal"], - "requirements": ["pykoplenti==1.2.2"] + "requirements": ["pykoplenti==1.3.0"] } diff --git a/homeassistant/components/kostal_plenticore/sensor.py b/homeassistant/components/kostal_plenticore/sensor.py index fbbfb03fb3e..67de34f2fce 100644 --- a/homeassistant/components/kostal_plenticore/sensor.py +++ b/homeassistant/components/kostal_plenticore/sensor.py @@ -17,6 +17,7 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, + EntityCategory, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, @@ -747,6 +748,15 @@ SENSOR_PROCESS_DATA = [ state_class=SensorStateClass.TOTAL_INCREASING, formatter="format_energy", ), + PlenticoreSensorEntityDescription( + module_id="scb:event", + key="Event:ActiveErrorCnt", + name="Active Alarms", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + icon="mdi:alert", + formatter="format_round", + ), PlenticoreSensorEntityDescription( module_id="_virt_", key="pv_P", diff --git a/homeassistant/components/kraken/config_flow.py b/homeassistant/components/kraken/config_flow.py index 67778515273..54a817f0a50 100644 --- a/homeassistant/components/kraken/config_flow.py +++ b/homeassistant/components/kraken/config_flow.py @@ -33,7 +33,7 @@ class KrakenConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> KrakenOptionsFlowHandler: """Get the options flow for this handler.""" - return KrakenOptionsFlowHandler(config_entry) + return KrakenOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,10 +53,6 @@ class KrakenConfigFlow(ConfigFlow, domain=DOMAIN): class 
KrakenOptionsFlowHandler(OptionsFlow): """Handle Kraken client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Kraken options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/kwb/manifest.json b/homeassistant/components/kwb/manifest.json index 36d3a0af2d7..6a11e08555f 100644 --- a/homeassistant/components/kwb/manifest.json +++ b/homeassistant/components/kwb/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/kwb", "iot_class": "local_polling", "loggers": ["pykwb"], + "quality_scale": "legacy", "requirements": ["pykwb==0.0.8"] } diff --git a/homeassistant/components/lacrosse/manifest.json b/homeassistant/components/lacrosse/manifest.json index 0c7cf8b6dc6..b4023b533ca 100644 --- a/homeassistant/components/lacrosse/manifest.json +++ b/homeassistant/components/lacrosse/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/lacrosse", "iot_class": "local_polling", "loggers": ["pylacrosse"], + "quality_scale": "legacy", "requirements": ["pylacrosse==0.4"] } diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index 82a91c0003f..5de9a2eeed4 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -2,12 +2,12 @@ import logging -from lmcloud.client_bluetooth import LaMarzoccoBluetoothClient -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.const import BT_MODEL_PREFIXES, FirmwareType -from lmcloud.exceptions import AuthFail, RequestNotSuccessful from packaging import version +from pylamarzocco.client_bluetooth import LaMarzoccoBluetoothClient +from pylamarzocco.client_cloud import LaMarzoccoCloudClient +from pylamarzocco.client_local import LaMarzoccoLocalClient +from pylamarzocco.const import BT_MODEL_PREFIXES, FirmwareType +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful from homeassistant.components.bluetooth import async_discovered_service_info from homeassistant.config_entries import ConfigEntry @@ -23,7 +23,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.httpx_client import create_async_httpx_client from .const import CONF_USE_BLUETOOTH, DOMAIN from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator @@ -47,11 +47,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - assert entry.unique_id serial = entry.unique_id - + client = create_async_httpx_client(hass) cloud_client = LaMarzoccoCloudClient( username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], - client=get_async_client(hass), + client=client, ) # initialize local API @@ -61,7 +61,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - local_client = LaMarzoccoLocalClient( host=host, local_bearer=entry.data[CONF_TOKEN], - client=get_async_client(hass), + client=client, ) # initialize Bluetooth diff --git a/homeassistant/components/lamarzocco/binary_sensor.py b/homeassistant/components/lamarzocco/binary_sensor.py index c48453214bd..444e4d0723b 100644 --- a/homeassistant/components/lamarzocco/binary_sensor.py 
+++ b/homeassistant/components/lamarzocco/binary_sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, diff --git a/homeassistant/components/lamarzocco/button.py b/homeassistant/components/lamarzocco/button.py index 60374a85e1e..dabf01d817d 100644 --- a/homeassistant/components/lamarzocco/button.py +++ b/homeassistant/components/lamarzocco/button.py @@ -1,11 +1,11 @@ """Button platform for La Marzocco espresso machines.""" +import asyncio from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.exceptions import RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.exceptions import RequestNotSuccessful from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.core import HomeAssistant @@ -13,9 +13,12 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN -from .coordinator import LaMarzoccoConfigEntry +from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 +BACKFLUSH_ENABLED_DURATION = 15 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoButtonEntityDescription( @@ -24,14 +27,25 @@ class LaMarzoccoButtonEntityDescription( ): """Description of a La Marzocco button.""" - press_fn: Callable[[LaMarzoccoMachine], Coroutine[Any, Any, None]] + press_fn: Callable[[LaMarzoccoUpdateCoordinator], Coroutine[Any, Any, None]] + + +async def async_backflush_and_update(coordinator: LaMarzoccoUpdateCoordinator) -> None: + """Press backflush button.""" + await coordinator.device.start_backflush() + # lib will set state optimistically + coordinator.async_set_updated_data(None) + # backflush is enabled for 15 seconds + # then turns off automatically + await asyncio.sleep(BACKFLUSH_ENABLED_DURATION + 1) + await coordinator.async_request_refresh() ENTITIES: tuple[LaMarzoccoButtonEntityDescription, ...] 
= ( LaMarzoccoButtonEntityDescription( key="start_backflush", translation_key="start_backflush", - press_fn=lambda machine: machine.start_backflush(), + press_fn=async_backflush_and_update, ), ) @@ -59,7 +73,7 @@ class LaMarzoccoButtonEntity(LaMarzoccoEntity, ButtonEntity): async def async_press(self) -> None: """Press button.""" try: - await self.entity_description.press_fn(self.coordinator.device) + await self.entity_description.press_fn(self.coordinator) except RequestNotSuccessful as exc: raise HomeAssistantError( translation_domain=DOMAIN, @@ -68,4 +82,3 @@ class LaMarzoccoButtonEntity(LaMarzoccoEntity, ButtonEntity): "key": self.entity_description.key, }, ) from exc - await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lamarzocco/calendar.py b/homeassistant/components/lamarzocco/calendar.py index 3d8b2474c94..0ec9b55a9a1 100644 --- a/homeassistant/components/lamarzocco/calendar.py +++ b/homeassistant/components/lamarzocco/calendar.py @@ -3,7 +3,7 @@ from collections.abc import Iterator from datetime import datetime, timedelta -from lmcloud.models import LaMarzoccoWakeUpSleepEntry +from pylamarzocco.models import LaMarzoccoWakeUpSleepEntry from homeassistant.components.calendar import CalendarEntity, CalendarEvent from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index 438bf7fe6b9..c01b55fb885 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -1,19 +1,23 @@ """Config flow for La Marzocco integration.""" +from __future__ import annotations + from collections.abc import Mapping import logging from typing import Any -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.models import LaMarzoccoDeviceInfo +from httpx import AsyncClient +from pylamarzocco.client_cloud import LaMarzoccoCloudClient +from pylamarzocco.client_local import LaMarzoccoLocalClient +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoDeviceInfo import voluptuous as vol from homeassistant.components.bluetooth import ( BluetoothServiceInfo, async_discovered_service_info, ) +from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.config_entries import ( SOURCE_REAUTH, SOURCE_RECONFIGURE, @@ -21,9 +25,9 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( + CONF_ADDRESS, CONF_HOST, CONF_MAC, CONF_MODEL, @@ -34,7 +38,7 @@ from homeassistant.const import ( ) from homeassistant.core import callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.httpx_client import create_async_httpx_client from homeassistant.helpers.selector import ( SelectOptionDict, SelectSelector, @@ -54,6 +58,8 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 2 + _client: AsyncClient + def __init__(self) -> None: """Initialize the config flow.""" self._config: dict[str, Any] = {} @@ -76,10 +82,12 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): **user_input, **self._discovered, } + self._client = create_async_httpx_client(self.hass) cloud_client = LaMarzoccoCloudClient( username=data[CONF_USERNAME], 
password=data[CONF_PASSWORD], + client=self._client, ) try: self._fleet = await cloud_client.get_customer_fleet() @@ -103,6 +111,15 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "machine_not_found" else: self._config = data + # if DHCP discovery was used, auto fill machine selection + if CONF_HOST in self._discovered: + return await self.async_step_machine_selection( + user_input={ + CONF_HOST: self._discovered[CONF_HOST], + CONF_MACHINE: self._discovered[CONF_MACHINE], + } + ) + # if Bluetooth discovery was used, only select host return self.async_show_form( step_id="machine_selection", data_schema=vol.Schema( @@ -114,6 +131,12 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): self._config = data return await self.async_step_machine_selection() + placeholders: dict[str, str] | None = None + if self._discovered: + self.context["title_placeholders"] = placeholders = { + CONF_NAME: self._discovered[CONF_MACHINE] + } + return self.async_show_form( step_id="user", data_schema=vol.Schema( @@ -123,6 +146,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): } ), errors=errors, + description_placeholders=placeholders, ) async def async_step_machine_selection( @@ -144,7 +168,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): # validate local connection if host is provided if user_input.get(CONF_HOST): if not await LaMarzoccoLocalClient.validate_connection( - client=get_async_client(self.hass), + client=self._client, host=user_input[CONF_HOST], token=selected_device.communication_key, ): @@ -258,6 +282,34 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user() + async def async_step_dhcp( + self, discovery_info: DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle discovery via dhcp.""" + + serial = discovery_info.hostname.upper() + + await self.async_set_unique_id(serial) + self._abort_if_unique_id_configured( + updates={ + CONF_HOST: discovery_info.ip, + CONF_ADDRESS: discovery_info.macaddress, + } + ) + self._async_abort_entries_match({CONF_ADDRESS: discovery_info.macaddress}) + + _LOGGER.debug( + "Discovered La Marzocco machine %s through DHCP at address %s", + discovery_info.hostname, + discovery_info.ip, + ) + + self._discovered[CONF_MACHINE] = serial + self._discovered[CONF_HOST] = discovery_info.ip + self._discovered[CONF_ADDRESS] = discovery_info.macaddress + + return await self.async_step_user() + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: @@ -308,12 +360,12 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> LmOptionsFlowHandler: """Create the options flow.""" - return LmOptionsFlowHandler(config_entry) + return LmOptionsFlowHandler() -class LmOptionsFlowHandler(OptionsFlowWithConfigEntry): +class LmOptionsFlowHandler(OptionsFlow): """Handles options flow for the component.""" async def async_step_init( @@ -327,7 +379,7 @@ class LmOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_USE_BLUETOOTH, - default=self.options.get(CONF_USE_BLUETOOTH, True), + default=self.config_entry.options.get(CONF_USE_BLUETOOTH, True), ): cv.boolean, } ) diff --git a/homeassistant/components/lamarzocco/coordinator.py b/homeassistant/components/lamarzocco/coordinator.py index e2ff8791a05..46a8e05745e 100644 --- a/homeassistant/components/lamarzocco/coordinator.py +++ b/homeassistant/components/lamarzocco/coordinator.py @@ -8,11 +8,12 @@ import logging from time import time from typing import Any -from 
lmcloud.client_bluetooth import LaMarzoccoBluetoothClient -from lmcloud.client_cloud import LaMarzoccoCloudClient -from lmcloud.client_local import LaMarzoccoLocalClient -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.client_bluetooth import LaMarzoccoBluetoothClient +from pylamarzocco.client_cloud import LaMarzoccoCloudClient +from pylamarzocco.client_local import LaMarzoccoLocalClient +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.lm_machine import LaMarzoccoMachine +from websockets.protocol import State from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MODEL, CONF_NAME, EVENT_HOMEASSISTANT_STOP @@ -85,7 +86,7 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): if ( self._local_client is not None and self._local_client.websocket is not None - and self._local_client.websocket.open + and self._local_client.websocket.state is State.OPEN ): self._local_client.terminating = True await self._local_client.websocket.close() @@ -126,9 +127,12 @@ class LaMarzoccoUpdateCoordinator(DataUpdateCoordinator[None]): try: await func(*args, **kwargs) except AuthFail as ex: - msg = "Authentication failed." - _LOGGER.debug(msg, exc_info=True) - raise ConfigEntryAuthFailed(msg) from ex + _LOGGER.debug("Authentication failed", exc_info=True) + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, translation_key="authentication_failed" + ) from ex except RequestNotSuccessful as ex: _LOGGER.debug(ex, exc_info=True) - raise UpdateFailed(f"Querying API failed. Error: {ex}") from ex + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="api_error" + ) from ex diff --git a/homeassistant/components/lamarzocco/diagnostics.py b/homeassistant/components/lamarzocco/diagnostics.py index edce6a349aa..43ae51ee192 100644 --- a/homeassistant/components/lamarzocco/diagnostics.py +++ b/homeassistant/components/lamarzocco/diagnostics.py @@ -5,7 +5,7 @@ from __future__ import annotations from dataclasses import asdict from typing import Any, TypedDict -from lmcloud.const import FirmwareType +from pylamarzocco.const import FirmwareType from homeassistant.components.diagnostics import async_redact_data from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/lamarzocco/entity.py b/homeassistant/components/lamarzocco/entity.py index f7e6ff9e2b8..f0942f51ace 100644 --- a/homeassistant/components/lamarzocco/entity.py +++ b/homeassistant/components/lamarzocco/entity.py @@ -3,10 +3,11 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.const import FirmwareType -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.const import FirmwareType +from pylamarzocco.lm_machine import LaMarzoccoMachine -from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.const import CONF_ADDRESS +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -47,6 +48,17 @@ class LaMarzoccoBaseEntity( serial_number=device.serial_number, sw_version=device.firmware[FirmwareType.MACHINE].current_version, ) + if coordinator.config_entry.data.get(CONF_ADDRESS): + self._attr_device_info.update( + DeviceInfo( + connections={ + ( + CONNECTION_NETWORK_MAC, + coordinator.config_entry.data[CONF_ADDRESS], + ) + } + ) + ) 
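The connections block added above is what lets later DHCP matches land on the same registry device: the MAC stored in the entry under CONF_ADDRESS (filled by the DHCP step earlier in this diff) is published as a CONNECTION_NETWORK_MAC connection. A minimal standalone sketch of that pattern; the helper name and the use of format_mac for normalization are illustrative, not taken from this integration:

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.helpers.device_registry import (
    CONNECTION_NETWORK_MAC,
    DeviceInfo,
    format_mac,
)


def add_mac_connection(entry: ConfigEntry, device_info: DeviceInfo) -> DeviceInfo:
    """Attach a MAC connection when the config entry stores one (illustrative helper)."""
    if mac := entry.data.get(CONF_ADDRESS):
        # The device registry merges devices that share a (connection_type, value)
        # pair, so a later DHCP rediscovery with the same MAC updates this device
        # instead of creating a second one.
        device_info["connections"] = {(CONNECTION_NETWORK_MAC, format_mac(mac))}
    return device_info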
class LaMarzoccoEntity(LaMarzoccoBaseEntity): diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index a1da8982cd8..43b1c7deb47 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -18,9 +18,23 @@ "codeowners": ["@zweckj"], "config_flow": true, "dependencies": ["bluetooth_adapters"], + "dhcp": [ + { + "registered_devices": true + }, + { + "hostname": "gs[0-9][0-9][0-9][0-9][0-9][0-9]" + }, + { + "hostname": "lm[0-9][0-9][0-9][0-9][0-9][0-9]" + }, + { + "hostname": "mr[0-9][0-9][0-9][0-9][0-9][0-9]" + } + ], "documentation": "https://www.home-assistant.io/integrations/lamarzocco", "integration_type": "device", "iot_class": "cloud_polling", - "loggers": ["lmcloud"], - "requirements": ["lmcloud==1.2.3"] + "loggers": ["pylamarzocco"], + "requirements": ["pylamarzocco==1.2.12"] } diff --git a/homeassistant/components/lamarzocco/number.py b/homeassistant/components/lamarzocco/number.py index df75147e7e1..f32607fd73b 100644 --- a/homeassistant/components/lamarzocco/number.py +++ b/homeassistant/components/lamarzocco/number.py @@ -4,16 +4,16 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.const import ( +from pylamarzocco.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) -from lmcloud.exceptions import RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.number import ( NumberDeviceClass, @@ -35,6 +35,8 @@ from .const import DOMAIN from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoNumberEntityDescription( diff --git a/homeassistant/components/lamarzocco/quality_scale.yaml b/homeassistant/components/lamarzocco/quality_scale.yaml new file mode 100644 index 00000000000..3677bd8d6b8 --- /dev/null +++ b/homeassistant/components/lamarzocco/quality_scale.yaml @@ -0,0 +1,87 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions are defined. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions are defined. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + No explicit event subscriptions. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions are defined. + config-entry-unloading: done + docs-configuration-parameters: done + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: done + comment: | + Handled by coordinator. 
+ parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: + status: done + comment: | + DHCP & Bluetooth discovery. + docs-data-update: done + docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: + status: exempt + comment: | + Device type integration. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: done + stale-devices: + status: exempt + comment: | + Device type integration. + + # Platinum + async-dependency: done + inject-websession: + status: done + comment: | + Uses `httpx` session. + strict-typing: done diff --git a/homeassistant/components/lamarzocco/select.py b/homeassistant/components/lamarzocco/select.py index 1958fa6f210..637ef935979 100644 --- a/homeassistant/components/lamarzocco/select.py +++ b/homeassistant/components/lamarzocco/select.py @@ -4,10 +4,10 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel -from lmcloud.exceptions import RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.const import EntityCategory @@ -19,6 +19,8 @@ from .const import DOMAIN from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 + STEAM_LEVEL_HA_TO_LM = { "1": SteamLevel.LEVEL_1, "2": SteamLevel.LEVEL_2, diff --git a/homeassistant/components/lamarzocco/sensor.py b/homeassistant/components/lamarzocco/sensor.py index ca8a118c1ee..04b095e798c 100644 --- a/homeassistant/components/lamarzocco/sensor.py +++ b/homeassistant/components/lamarzocco/sensor.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from lmcloud.const import BoilerType, MachineModel, PhysicalKey -from lmcloud.lm_machine import LaMarzoccoMachine +from pylamarzocco.const import BoilerType, MachineModel, PhysicalKey +from pylamarzocco.lm_machine import LaMarzoccoMachine from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index 959dda265a9..666eb7f4a84 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -1,6 +1,5 @@ { "config": { - "flow_title": "La Marzocco Espresso {host}", "abort": { "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", @@ -26,7 +25,10 @@ "bluetooth_selection": { "description": "Select your device from available Bluetooth devices.", "data": { - "mac": "Bluetooth device" + "mac": "[%key:common::config_flow::data::device%]" + }, + 
"data_description": { + "mac": "Select the Bluetooth device that is your machine" } }, "machine_selection": { @@ -36,7 +38,8 @@ "machine": "Machine" }, "data_description": { - "host": "Local IP address of the machine" + "host": "Local IP address of the machine", + "machine": "Select the machine you want to integrate" } }, "reauth_confirm": { @@ -64,8 +67,10 @@ "step": { "init": { "data": { - "title": "Update Configuration", "use_bluetooth": "Use Bluetooth" + }, + "data_description": { + "use_bluetooth": "Should the integration try to use Bluetooth to control the machine?" } } } @@ -196,6 +201,12 @@ } }, "exceptions": { + "api_error": { + "message": "Error while communicating with the API" + }, + "authentication_failed": { + "message": "Authentication failed" + }, "auto_on_off_error": { "message": "Error while setting auto on/off to {state} for {id}" }, diff --git a/homeassistant/components/lamarzocco/switch.py b/homeassistant/components/lamarzocco/switch.py index a611424418f..4dc701c4c29 100644 --- a/homeassistant/components/lamarzocco/switch.py +++ b/homeassistant/components/lamarzocco/switch.py @@ -4,10 +4,10 @@ from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from lmcloud.const import BoilerType -from lmcloud.exceptions import RequestNotSuccessful -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoMachineConfig +from pylamarzocco.const import BoilerType +from pylamarzocco.exceptions import RequestNotSuccessful +from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoMachineConfig from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory @@ -19,6 +19,8 @@ from .const import DOMAIN from .coordinator import LaMarzoccoConfigEntry, LaMarzoccoUpdateCoordinator from .entity import LaMarzoccoBaseEntity, LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoSwitchEntityDescription( @@ -108,7 +110,7 @@ class LaMarzoccoSwitchEntity(LaMarzoccoEntity, SwitchEntity): raise HomeAssistantError( translation_domain=DOMAIN, translation_key="switch_off_error", - translation_placeholders={"name": self.entity_description.key}, + translation_placeholders={"key": self.entity_description.key}, ) from exc self.async_write_ha_state() diff --git a/homeassistant/components/lamarzocco/update.py b/homeassistant/components/lamarzocco/update.py index 61f436a7d7f..ca182909042 100644 --- a/homeassistant/components/lamarzocco/update.py +++ b/homeassistant/components/lamarzocco/update.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from typing import Any -from lmcloud.const import FirmwareType -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import RequestNotSuccessful from homeassistant.components.update import ( UpdateDeviceClass, @@ -21,6 +21,8 @@ from .const import DOMAIN from .coordinator import LaMarzoccoConfigEntry from .entity import LaMarzoccoEntity, LaMarzoccoEntityDescription +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class LaMarzoccoUpdateEntityDescription( diff --git a/homeassistant/components/lametric/manifest.json b/homeassistant/components/lametric/manifest.json index 92ccd29c916..b0c6f8fd96e 100644 --- a/homeassistant/components/lametric/manifest.json +++ b/homeassistant/components/lametric/manifest.json @@ -13,7 +13,6 @@ 
"integration_type": "device", "iot_class": "local_polling", "loggers": ["demetriek"], - "quality_scale": "platinum", "requirements": ["demetriek==0.4.0"], "ssdp": [ { diff --git a/homeassistant/components/lannouncer/manifest.json b/homeassistant/components/lannouncer/manifest.json index c04d9e87655..9d0942bd14f 100644 --- a/homeassistant/components/lannouncer/manifest.json +++ b/homeassistant/components/lannouncer/manifest.json @@ -3,5 +3,6 @@ "name": "LANnouncer", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/lannouncer", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/lastfm/config_flow.py b/homeassistant/components/lastfm/config_flow.py index c6ea120242d..0e1f680dd63 100644 --- a/homeassistant/components/lastfm/config_flow.py +++ b/homeassistant/components/lastfm/config_flow.py @@ -11,7 +11,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_KEY from homeassistant.core import callback @@ -80,7 +80,7 @@ class LastFmConfigFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> LastFmOptionsFlowHandler: """Get the options flow for this handler.""" - return LastFmOptionsFlowHandler(config_entry) + return LastFmOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -155,7 +155,7 @@ class LastFmConfigFlowHandler(ConfigFlow, domain=DOMAIN): ) -class LastFmOptionsFlowHandler(OptionsFlowWithConfigEntry): +class LastFmOptionsFlowHandler(OptionsFlow): """LastFm Options flow handler.""" async def async_step_init( @@ -163,24 +163,25 @@ class LastFmOptionsFlowHandler(OptionsFlowWithConfigEntry): ) -> ConfigFlowResult: """Initialize form.""" errors: dict[str, str] = {} + options = self.config_entry.options if user_input is not None: users, errors = validate_lastfm_users( - self.options[CONF_API_KEY], user_input[CONF_USERS] + options[CONF_API_KEY], user_input[CONF_USERS] ) user_input[CONF_USERS] = users if not errors: return self.async_create_entry( title="LastFM", data={ - **self.options, + **options, CONF_USERS: user_input[CONF_USERS], }, ) - if self.options[CONF_MAIN_USER]: + if options[CONF_MAIN_USER]: try: main_user, _ = get_lastfm_user( - self.options[CONF_API_KEY], - self.options[CONF_MAIN_USER], + options[CONF_API_KEY], + options[CONF_MAIN_USER], ) friends_response = await self.hass.async_add_executor_job( main_user.get_friends @@ -206,6 +207,6 @@ class LastFmOptionsFlowHandler(OptionsFlowWithConfigEntry): ), } ), - user_input or self.options, + user_input or options, ), ) diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index 5995e06efcc..eb26ef48e4e 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -8,7 +8,7 @@ import logging import pypck from pypck.connection import PchkConnectionManager -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_DEVICE_ID, CONF_DOMAIN, @@ -20,7 +20,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.helpers.typing import ConfigType from .const import ( @@ -39,40 +39,29 
@@ from .helpers import ( InputType, async_update_config_entry, generate_unique_id, - import_lcn_config, register_lcn_address_devices, register_lcn_host_device, ) -from .schemas import CONFIG_SCHEMA # noqa: F401 -from .services import SERVICES +from .services import register_services from .websocket import register_panel_and_ws_api _LOGGER = logging.getLogger(__name__) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the LCN component.""" - if DOMAIN not in config: - return True + hass.data.setdefault(DOMAIN, {}) - # initialize a config_flow for all LCN configurations read from - # configuration.yaml - config_entries_data = import_lcn_config(config[DOMAIN]) + await register_services(hass) + await register_panel_and_ws_api(hass) - for config_entry_data in config_entries_data: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config_entry_data, - ) - ) return True async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up a connection to PCHK host from a config entry.""" - hass.data.setdefault(DOMAIN, {}) if config_entry.entry_id in hass.data[DOMAIN]: return False @@ -132,15 +121,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ) lcn_connection.register_for_inputs(input_received) - # register service calls - for service_name, service in SERVICES: - if not hass.services.has_service(DOMAIN, service_name): - hass.services.async_register( - DOMAIN, service_name, service(hass).async_call_service, service.schema - ) - - await register_panel_and_ws_api(hass) - return True @@ -191,11 +171,6 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> host = hass.data[DOMAIN].pop(config_entry.entry_id) await host[CONNECTION].async_close() - # unregister service calls - if unload_ok and not hass.data[DOMAIN]: # check if this is the last entry to unload - for service_name, _ in SERVICES: - hass.services.async_remove(DOMAIN, service_name) - return unload_ok diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index e78378a61b1..008265e62ae 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -9,7 +9,6 @@ import pypck import voluptuous as vol from homeassistant import config_entries -from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import ( CONF_BASE, CONF_DEVICES, @@ -20,14 +19,12 @@ from homeassistant.const import ( CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType from . 
import PchkConnectionManager from .const import CONF_ACKNOWLEDGE, CONF_DIM_MODE, CONF_SK_NUM_TRIES, DIM_MODES, DOMAIN -from .helpers import purge_device_registry, purge_entity_registry _LOGGER = logging.getLogger(__name__) @@ -113,55 +110,6 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 2 MINOR_VERSION = 1 - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import existing configuration from LCN.""" - # validate the imported connection parameters - if error := await validate_connection(import_data): - async_create_issue( - self.hass, - DOMAIN, - error, - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.ERROR, - translation_key=error, - translation_placeholders={ - "url": "/config/integrations/dashboard/add?domain=lcn" - }, - ) - return self.async_abort(reason=error) - - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - is_persistent=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "LCN", - }, - ) - - # check if we already have a host with the same address configured - if entry := get_config_entry(self.hass, import_data): - entry.source = config_entries.SOURCE_IMPORT - # Cleanup entity and device registry, if we imported from configuration.yaml to - # remove orphans when entities were removed from configuration - purge_entity_registry(self.hass, entry.entry_id, import_data) - purge_device_registry(self.hass, entry.entry_id, import_data) - - self.hass.config_entries.async_update_entry(entry, data=import_data) - return self.async_abort(reason="existing_configuration_updated") - - return self.async_create_entry( - title=f"{import_data[CONF_HOST]}", data=import_data - ) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> config_entries.ConfigFlowResult: diff --git a/homeassistant/components/lcn/helpers.py b/homeassistant/components/lcn/helpers.py index 7da047682ac..6a9c63ea212 100644 --- a/homeassistant/components/lcn/helpers.py +++ b/homeassistant/components/lcn/helpers.py @@ -9,7 +9,6 @@ import re from typing import cast import pypck -import voluptuous as vol from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -19,17 +18,12 @@ from homeassistant.const import ( CONF_DEVICES, CONF_DOMAIN, CONF_ENTITIES, - CONF_HOST, - CONF_IP_ADDRESS, CONF_LIGHTS, CONF_NAME, - CONF_PASSWORD, - CONF_PORT, CONF_RESOURCE, CONF_SENSORS, CONF_SOURCE, CONF_SWITCHES, - CONF_USERNAME, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -37,19 +31,13 @@ from homeassistant.helpers.typing import ConfigType from .const import ( BINSENSOR_PORTS, - CONF_ACKNOWLEDGE, CONF_CLIMATES, - CONF_CONNECTIONS, - CONF_DIM_MODE, - CONF_DOMAIN_DATA, CONF_HARDWARE_SERIAL, CONF_HARDWARE_TYPE, CONF_OUTPUT, CONF_SCENES, - CONF_SK_NUM_TRIES, CONF_SOFTWARE_SERIAL, CONNECTION, - DEFAULT_NAME, DOMAIN, LED_PORTS, LOGICOP_PORTS, @@ -146,110 +134,6 @@ def generate_unique_id( return unique_id -def import_lcn_config(lcn_config: ConfigType) -> list[ConfigType]: - """Convert lcn settings from configuration.yaml to config_entries data. 
- - Create a list of config_entry data structures like: - - "data": { - "host": "pchk", - "ip_address": "192.168.2.41", - "port": 4114, - "username": "lcn", - "password": "lcn, - "sk_num_tries: 0, - "dim_mode: "STEPS200", - "acknowledge": False, - "devices": [ - { - "address": (0, 7, False) - "name": "", - "hardware_serial": -1, - "software_serial": -1, - "hardware_type": -1 - }, ... - ], - "entities": [ - { - "address": (0, 7, False) - "name": "Light_Output1", - "resource": "output1", - "domain": "light", - "domain_data": { - "output": "OUTPUT1", - "dimmable": True, - "transition": 5000.0 - } - }, ... - ] - } - """ - data = {} - for connection in lcn_config[CONF_CONNECTIONS]: - host = { - CONF_HOST: connection[CONF_NAME], - CONF_IP_ADDRESS: connection[CONF_HOST], - CONF_PORT: connection[CONF_PORT], - CONF_USERNAME: connection[CONF_USERNAME], - CONF_PASSWORD: connection[CONF_PASSWORD], - CONF_SK_NUM_TRIES: connection[CONF_SK_NUM_TRIES], - CONF_DIM_MODE: connection[CONF_DIM_MODE], - CONF_ACKNOWLEDGE: False, - CONF_DEVICES: [], - CONF_ENTITIES: [], - } - data[connection[CONF_NAME]] = host - - for confkey, domain_config in lcn_config.items(): - if confkey == CONF_CONNECTIONS: - continue - domain = DOMAIN_LOOKUP[confkey] - # loop over entities in configuration.yaml - for domain_data in domain_config: - # remove name and address from domain_data - entity_name = domain_data.pop(CONF_NAME) - address, host_name = domain_data.pop(CONF_ADDRESS) - - if host_name is None: - host_name = DEFAULT_NAME - - # check if we have a new device config - for device_config in data[host_name][CONF_DEVICES]: - if address == device_config[CONF_ADDRESS]: - break - else: # create new device_config - device_config = { - CONF_ADDRESS: address, - CONF_NAME: "", - CONF_HARDWARE_SERIAL: -1, - CONF_SOFTWARE_SERIAL: -1, - CONF_HARDWARE_TYPE: -1, - } - - data[host_name][CONF_DEVICES].append(device_config) - - # insert entity config - resource = get_resource(domain, domain_data).lower() - for entity_config in data[host_name][CONF_ENTITIES]: - if ( - address == entity_config[CONF_ADDRESS] - and resource == entity_config[CONF_RESOURCE] - and domain == entity_config[CONF_DOMAIN] - ): - break - else: # create new entity_config - entity_config = { - CONF_ADDRESS: address, - CONF_NAME: entity_name, - CONF_RESOURCE: resource, - CONF_DOMAIN: domain, - CONF_DOMAIN_DATA: domain_data.copy(), - } - data[host_name][CONF_ENTITIES].append(entity_config) - - return list(data.values()) - - def purge_entity_registry( hass: HomeAssistant, entry_id: str, imported_entry_data: ConfigType ) -> None: @@ -436,26 +320,6 @@ def get_device_config( return None -def has_unique_host_names(hosts: list[ConfigType]) -> list[ConfigType]: - """Validate that all connection names are unique. - - Use 'pchk' as default connection_name (or add a numeric suffix if - pchk' is already in use. - """ - suffix = 0 - for host in hosts: - if host.get(CONF_NAME) is None: - if suffix == 0: - host[CONF_NAME] = DEFAULT_NAME - else: - host[CONF_NAME] = f"{DEFAULT_NAME}{suffix:d}" - suffix += 1 - - schema = vol.Schema(vol.Unique()) - schema([host.get(CONF_NAME) for host in hosts]) - return hosts - - def is_address(value: str) -> tuple[AddressType, str]: """Validate the given address string. 
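The YAML import machinery removed here (and the YAML schemas dropped below) is replaced by the config-entry-only setup shown earlier in this diff. Condensed as a sketch, the lifecycle change is that services and the panel are now registered once at component setup instead of being added with the first config entry and removed with the last; register_services and register_panel_and_ws_api are the diff's own helpers:

from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN
from .services import register_services
from .websocket import register_panel_and_ws_api

# Flags any remaining `lcn:` YAML block instead of importing it.
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up shared LCN resources; per-host connections are made in async_setup_entry."""
    hass.data.setdefault(DOMAIN, {})
    await register_services(hass)  # registered for the lifetime of Home Assistant
    await register_panel_and_ws_api(hass)
    return True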
diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 6ce41a2d08d..695a35df871 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.24", "lcn-frontend==0.2.1"] + "requirements": ["pypck==0.7.24", "lcn-frontend==0.2.2"] } diff --git a/homeassistant/components/lcn/schemas.py b/homeassistant/components/lcn/schemas.py index 3b4d2333970..c9c91b9843d 100644 --- a/homeassistant/components/lcn/schemas.py +++ b/homeassistant/components/lcn/schemas.py @@ -4,20 +4,9 @@ import voluptuous as vol from homeassistant.components.climate import DEFAULT_MAX_TEMP, DEFAULT_MIN_TEMP from homeassistant.const import ( - CONF_ADDRESS, - CONF_BINARY_SENSORS, - CONF_COVERS, - CONF_HOST, - CONF_LIGHTS, - CONF_NAME, - CONF_PASSWORD, - CONF_PORT, CONF_SCENE, - CONF_SENSORS, CONF_SOURCE, - CONF_SWITCHES, CONF_UNIT_OF_MEASUREMENT, - CONF_USERNAME, UnitOfTemperature, ) import homeassistant.helpers.config_validation as cv @@ -25,9 +14,6 @@ from homeassistant.helpers.typing import VolDictType from .const import ( BINSENSOR_PORTS, - CONF_CLIMATES, - CONF_CONNECTIONS, - CONF_DIM_MODE, CONF_DIMMABLE, CONF_LOCKABLE, CONF_MAX_TEMP, @@ -37,12 +23,8 @@ from .const import ( CONF_OUTPUTS, CONF_REGISTER, CONF_REVERSE_TIME, - CONF_SCENES, CONF_SETPOINT, - CONF_SK_NUM_TRIES, CONF_TRANSITION, - DIM_MODES, - DOMAIN, KEYS, LED_PORTS, LOGICOP_PORTS, @@ -56,7 +38,6 @@ from .const import ( VAR_UNITS, VARIABLES, ) -from .helpers import has_unique_host_names, is_address ADDRESS_SCHEMA = vol.Coerce(tuple) @@ -130,72 +111,3 @@ DOMAIN_DATA_SWITCH: VolDictType = { vol.In(OUTPUT_PORTS + RELAY_PORTS + SETPOINTS + KEYS), ), } - - -# -# Configuration -# - -DOMAIN_DATA_BASE: VolDictType = { - vol.Required(CONF_NAME): cv.string, - vol.Required(CONF_ADDRESS): is_address, -} - -BINARY_SENSORS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_BINARY_SENSOR}) - -CLIMATES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_CLIMATE}) - -COVERS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_COVER}) - -LIGHTS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_LIGHT}) - -SCENES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SCENE}) - -SENSORS_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SENSOR}) - -SWITCHES_SCHEMA = vol.Schema({**DOMAIN_DATA_BASE, **DOMAIN_DATA_SWITCH}) - -CONNECTION_SCHEMA = vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PORT): cv.port, - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Optional(CONF_SK_NUM_TRIES, default=0): cv.positive_int, - vol.Optional(CONF_DIM_MODE, default="steps50"): vol.All( - vol.Upper, vol.In(DIM_MODES) - ), - vol.Optional(CONF_NAME): cv.string, - } -) - -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CONNECTIONS): vol.All( - cv.ensure_list, has_unique_host_names, [CONNECTION_SCHEMA] - ), - vol.Optional(CONF_BINARY_SENSORS): vol.All( - cv.ensure_list, [BINARY_SENSORS_SCHEMA] - ), - vol.Optional(CONF_CLIMATES): vol.All( - cv.ensure_list, [CLIMATES_SCHEMA] - ), - vol.Optional(CONF_COVERS): vol.All(cv.ensure_list, [COVERS_SCHEMA]), - vol.Optional(CONF_LIGHTS): vol.All(cv.ensure_list, [LIGHTS_SCHEMA]), - vol.Optional(CONF_SCENES): vol.All(cv.ensure_list, [SCENES_SCHEMA]), - 
vol.Optional(CONF_SENSORS): vol.All( - cv.ensure_list, [SENSORS_SCHEMA] - ), - vol.Optional(CONF_SWITCHES): vol.All( - cv.ensure_list, [SWITCHES_SCHEMA] - ), - }, - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) diff --git a/homeassistant/components/lcn/services.py b/homeassistant/components/lcn/services.py index 611a7353bcd..92f5863c47e 100644 --- a/homeassistant/components/lcn/services.py +++ b/homeassistant/components/lcn/services.py @@ -429,3 +429,11 @@ SERVICES = ( (LcnService.DYN_TEXT, DynText), (LcnService.PCK, Pck), ) + + +async def register_services(hass: HomeAssistant) -> None: + """Register services for LCN.""" + for service_name, service in SERVICES: + hass.services.async_register( + DOMAIN, service_name, service(hass).async_call_service, service.schema + ) diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json index ae0b1b01f9a..088a3654500 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -63,18 +63,6 @@ } }, "issues": { - "authentication_error": { - "title": "Authentication failed.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure username and password are correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "license_error": { - "title": "Maximum number of connections was reached.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure sufficient PCHK licenses are registered and restart Home Assistant.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "connection_refused": { - "title": "Unable to connect to PCHK.", - "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure the connection (IP and port) to the LCN bus coupler is correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, "deprecated_regulatorlock_sensor": { "title": "Deprecated LCN regulator lock binary sensor", "description": "Your LCN regulator lock binary sensor entity `{entity}` is beeing used in automations or scripts. A regulator lock switch entity is available and should be used going forward.\n\nPlease adjust your automations or scripts to fix this issue." diff --git a/homeassistant/components/lektrico/__init__.py b/homeassistant/components/lektrico/__init__.py index c309bb42ece..475b6132541 100644 --- a/homeassistant/components/lektrico/__init__.py +++ b/homeassistant/components/lektrico/__init__.py @@ -12,6 +12,7 @@ from .coordinator import LektricoDeviceDataUpdateCoordinator # List the platforms that charger supports. 
CHARGERS_PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, Platform.BUTTON, Platform.NUMBER, Platform.SENSOR, diff --git a/homeassistant/components/lektrico/binary_sensor.py b/homeassistant/components/lektrico/binary_sensor.py new file mode 100644 index 00000000000..d0a3e39690c --- /dev/null +++ b/homeassistant/components/lektrico/binary_sensor.py @@ -0,0 +1,139 @@ +"""Support for Lektrico binary sensors entities.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Lektrico binary sensor entity.""" + + value_fn: Callable[[dict[str, Any]], bool] + + +BINARY_SENSORS: tuple[LektricoBinarySensorEntityDescription, ...] = ( + LektricoBinarySensorEntityDescription( + key="state_e_activated", + translation_key="state_e_activated", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["state_e_activated"]), + ), + LektricoBinarySensorEntityDescription( + key="overtemp", + translation_key="overtemp", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overtemp"]), + ), + LektricoBinarySensorEntityDescription( + key="critical_temp", + translation_key="critical_temp", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["critical_temp"]), + ), + LektricoBinarySensorEntityDescription( + key="overcurrent", + translation_key="overcurrent", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overcurrent"]), + ), + LektricoBinarySensorEntityDescription( + key="meter_fault", + translation_key="meter_fault", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["meter_fault"]), + ), + LektricoBinarySensorEntityDescription( + key="undervoltage", + translation_key="undervoltage", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["undervoltage_error"]), + ), + LektricoBinarySensorEntityDescription( + key="overvoltage", + translation_key="overvoltage", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["overvoltage_error"]), + ), + LektricoBinarySensorEntityDescription( + key="rcd_error", + translation_key="rcd_error", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["rcd_error"]), + ), + LektricoBinarySensorEntityDescription( + key="cp_diode_failure", + translation_key="cp_diode_failure", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["cp_diode_failure"]), + ), + LektricoBinarySensorEntityDescription( + 
key="contactor_failure", + translation_key="contactor_failure", + entity_category=EntityCategory.DIAGNOSTIC, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda data: bool(data["contactor_failure"]), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico binary sensor entities based on a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + LektricoBinarySensor( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in BINARY_SENSORS + ) + + +class LektricoBinarySensor(LektricoEntity, BinarySensorEntity): + """Defines a Lektrico binary sensor entity.""" + + entity_description: LektricoBinarySensorEntityDescription + + def __init__( + self, + description: LektricoBinarySensorEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico binary sensor.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._coordinator = coordinator + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def is_on(self) -> bool: + """Return the state of the binary sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/lektrico/strings.json b/homeassistant/components/lektrico/strings.json index e6dc7b9eb46..e24700c9b09 100644 --- a/homeassistant/components/lektrico/strings.json +++ b/homeassistant/components/lektrico/strings.json @@ -22,6 +22,38 @@ } }, "entity": { + "binary_sensor": { + "state_e_activated": { + "name": "Ev error" + }, + "overtemp": { + "name": "Thermal throttling" + }, + "critical_temp": { + "name": "Overheating" + }, + "overcurrent": { + "name": "Overcurrent" + }, + "meter_fault": { + "name": "Metering error" + }, + "undervoltage": { + "name": "Undervoltage" + }, + "overvoltage": { + "name": "Overvoltage" + }, + "rcd_error": { + "name": "Rcd error" + }, + "cp_diode_failure": { + "name": "Ev diode short" + }, + "contactor_failure": { + "name": "Relay contacts welded" + } + }, "button": { "charge_start": { "name": "Charge start" diff --git a/homeassistant/components/lg_thinq/__init__.py b/homeassistant/components/lg_thinq/__init__.py index a8d3fe175ef..657524f0ef5 100644 --- a/homeassistant/components/lg_thinq/__init__.py +++ b/homeassistant/components/lg_thinq/__init__.py @@ -95,6 +95,7 @@ async def async_setup_coordinators( raise ConfigEntryNotReady(exc.message) from exc if not bridge_list: + _LOGGER.warning("No devices registered with the correct profile") return # Setup coordinator per device. 
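The new Lektrico binary-sensor platform above is description-driven: every diagnostic flag is one BINARY_SENSORS entry whose value_fn reads a key from the coordinator's data dict, and the entity class never changes. A self-contained sketch of how a further flag would slot into that pattern; the description class mirrors the one defined above, and "plug_locked" is a hypothetical key, not a field the Lektrico API is known to expose:

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from homeassistant.components.binary_sensor import (
    BinarySensorDeviceClass,
    BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory


@dataclass(frozen=True, kw_only=True)
class FlagEntityDescription(BinarySensorEntityDescription):
    """Describe a diagnostic flag read from the coordinator's data dict."""

    value_fn: Callable[[dict[str, Any]], bool]


# One tuple entry per flag; the generic entity keeps reading value_fn(coordinator.data).
EXTRA_FLAGS: tuple[FlagEntityDescription, ...] = (
    FlagEntityDescription(
        key="plug_locked",  # hypothetical key, for illustration only
        translation_key="plug_locked",
        entity_category=EntityCategory.DIAGNOSTIC,
        device_class=BinarySensorDeviceClass.PROBLEM,
        value_fn=lambda data: bool(data.get("plug_locked")),
    ),
)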
diff --git a/homeassistant/components/lg_thinq/climate.py b/homeassistant/components/lg_thinq/climate.py index 9ead57ab7b0..5cf9ccbd442 100644 --- a/homeassistant/components/lg_thinq/climate.py +++ b/homeassistant/components/lg_thinq/climate.py @@ -12,7 +12,6 @@ from thinqconnect.integration import ExtendedProperty from homeassistant.components.climate import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - FAN_OFF, ClimateEntity, ClimateEntityDescription, ClimateEntityFeature, @@ -37,7 +36,7 @@ class ThinQClimateEntityDescription(ClimateEntityDescription): step: float | None = None -DEVIE_TYPE_CLIMATE_MAP: dict[DeviceType, tuple[ThinQClimateEntityDescription, ...]] = { +DEVICE_TYPE_CLIMATE_MAP: dict[DeviceType, tuple[ThinQClimateEntityDescription, ...]] = { DeviceType.AIR_CONDITIONER: ( ThinQClimateEntityDescription( key=ExtendedProperty.CLIMATE_AIR_CONDITIONER, @@ -86,7 +85,7 @@ async def async_setup_entry( entities: list[ThinQClimateEntity] = [] for coordinator in entry.runtime_data.coordinators.values(): if ( - descriptions := DEVIE_TYPE_CLIMATE_MAP.get( + descriptions := DEVICE_TYPE_CLIMATE_MAP.get( coordinator.api.device.device_type ) ) is not None: @@ -149,10 +148,9 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity): super()._update_status() # Update fan, hvac and preset mode. + if self.supported_features & ClimateEntityFeature.FAN_MODE: + self._attr_fan_mode = self.data.fan_mode if self.data.is_on: - if self.supported_features & ClimateEntityFeature.FAN_MODE: - self._attr_fan_mode = self.data.fan_mode - hvac_mode = self._requested_hvac_mode or self.data.hvac_mode if hvac_mode in STR_TO_HVAC: self._attr_hvac_mode = STR_TO_HVAC.get(hvac_mode) @@ -160,9 +158,6 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity): elif hvac_mode in THINQ_PRESET_MODE: self._attr_preset_mode = hvac_mode else: - if self.supported_features & ClimateEntityFeature.FAN_MODE: - self._attr_fan_mode = FAN_OFF - self._attr_hvac_mode = HVACMode.OFF self._attr_preset_mode = None @@ -170,6 +165,7 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity): self._attr_current_humidity = self.data.humidity self._attr_current_temperature = self.data.current_temp + # Update min, max and step. if (max_temp := self.entity_description.max_temp) is not None or ( max_temp := self.data.max ) is not None: @@ -184,26 +180,18 @@ class ThinQClimateEntity(ThinQEntity, ClimateEntity): self._attr_target_temperature_step = step # Update target temperatures. 
- if ( - self.supported_features & ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - and self.hvac_mode == HVACMode.AUTO - ): - self._attr_target_temperature = None - self._attr_target_temperature_high = self.data.target_temp_high - self._attr_target_temperature_low = self.data.target_temp_low - else: - self._attr_target_temperature = self.data.target_temp - self._attr_target_temperature_high = None - self._attr_target_temperature_low = None + self._attr_target_temperature = self.data.target_temp + self._attr_target_temperature_high = self.data.target_temp_high + self._attr_target_temperature_low = self.data.target_temp_low _LOGGER.debug( - "[%s:%s] update status: %s/%s -> %s/%s, hvac:%s, unit:%s, step:%s", + "[%s:%s] update status: c:%s, t:%s, l:%s, h:%s, hvac:%s, unit:%s, step:%s", self.coordinator.device_name, self.property_id, - self.data.current_temp, - self.data.target_temp, self.current_temperature, self.target_temperature, + self.target_temperature_low, + self.target_temperature_high, self.hvac_mode, self.temperature_unit, self.target_temperature_step, diff --git a/homeassistant/components/lg_thinq/config_flow.py b/homeassistant/components/lg_thinq/config_flow.py index cdb41916688..3bbcf3cd226 100644 --- a/homeassistant/components/lg_thinq/config_flow.py +++ b/homeassistant/components/lg_thinq/config_flow.py @@ -6,7 +6,7 @@ import logging from typing import Any import uuid -from thinqconnect import ThinQApi, ThinQAPIException +from thinqconnect import ThinQApi, ThinQAPIErrorCodes, ThinQAPIException from thinqconnect.country import Country import voluptuous as vol @@ -26,6 +26,13 @@ from .const import ( ) SUPPORTED_COUNTRIES = [country.value for country in Country] +THINQ_ERRORS = { + ThinQAPIErrorCodes.INVALID_TOKEN: "invalid_token", + ThinQAPIErrorCodes.NOT_ACCEPTABLE_TERMS: "not_acceptable_terms", + ThinQAPIErrorCodes.NOT_ALLOWED_API_AGAIN: "not_allowed_api_again", + ThinQAPIErrorCodes.NOT_SUPPORTED_COUNTRY: "not_supported_country", + ThinQAPIErrorCodes.EXCEEDED_API_CALLS: "exceeded_api_calls", +} _LOGGER = logging.getLogger(__name__) @@ -83,8 +90,9 @@ class ThinQFlowHandler(ConfigFlow, domain=DOMAIN): try: return await self._validate_and_create_entry(access_token, country_code) - except ThinQAPIException: - errors["base"] = "token_unauthorized" + except ThinQAPIException as exc: + errors["base"] = THINQ_ERRORS.get(exc.code, "token_unauthorized") + _LOGGER.error("Failed to validate access_token %s", exc) return self.async_show_form( step_id="user", diff --git a/homeassistant/components/lg_thinq/coordinator.py b/homeassistant/components/lg_thinq/coordinator.py index 0ba859b1228..9f317dc21d9 100644 --- a/homeassistant/components/lg_thinq/coordinator.py +++ b/homeassistant/components/lg_thinq/coordinator.py @@ -77,5 +77,9 @@ async def async_setup_device_coordinator( coordinator = DeviceDataUpdateCoordinator(hass, ha_bridge) await coordinator.async_refresh() - _LOGGER.debug("Setup device's coordinator: %s", coordinator.device_name) + _LOGGER.debug( + "Setup device's coordinator: %s, model:%s", + coordinator.device_name, + coordinator.api.device.model_name, + ) return coordinator diff --git a/homeassistant/components/lg_thinq/entity.py b/homeassistant/components/lg_thinq/entity.py index f31b535dcaf..7856506559b 100644 --- a/homeassistant/components/lg_thinq/entity.py +++ b/homeassistant/components/lg_thinq/entity.py @@ -51,7 +51,7 @@ class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]): self._attr_device_info = dr.DeviceInfo( identifiers={(DOMAIN, coordinator.unique_id)}, 
manufacturer=COMPANY, - model=coordinator.api.device.model_name, + model=f"{coordinator.api.device.model_name} ({self.coordinator.api.device.device_type})", name=coordinator.device_name, ) self._attr_unique_id = f"{coordinator.unique_id}_{self.property_id}" diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json index 665a5a9e179..6dd60909c66 100644 --- a/homeassistant/components/lg_thinq/manifest.json +++ b/homeassistant/components/lg_thinq/manifest.json @@ -3,9 +3,8 @@ "name": "LG ThinQ", "codeowners": ["@LG-ThinQ-Integration"], "config_flow": true, - "dependencies": [], - "documentation": "https://www.home-assistant.io/integrations/lg_thinq/", + "documentation": "https://www.home-assistant.io/integrations/lg_thinq", "iot_class": "cloud_push", "loggers": ["thinqconnect"], - "requirements": ["thinqconnect==1.0.0"] + "requirements": ["thinqconnect==1.0.2"] } diff --git a/homeassistant/components/lg_thinq/mqtt.py b/homeassistant/components/lg_thinq/mqtt.py index 30d1302e458..8759869aad3 100644 --- a/homeassistant/components/lg_thinq/mqtt.py +++ b/homeassistant/components/lg_thinq/mqtt.py @@ -167,7 +167,6 @@ class ThinQMQTT: async def async_handle_device_event(self, message: dict) -> None: """Handle received mqtt message.""" - _LOGGER.debug("async_handle_device_event: message=%s", message) unique_id = ( f"{message["deviceId"]}_{list(message["report"].keys())[0]}" if message["deviceType"] == DeviceType.WASHTOWER @@ -178,6 +177,12 @@ class ThinQMQTT: _LOGGER.error("Failed to handle device event: No device") return + _LOGGER.debug( + "async_handle_device_event: %s, model:%s, message=%s", + coordinator.device_name, + coordinator.api.device.model_name, + message, + ) push_type = message.get("pushType") if push_type == DEVICE_STATUS_MESSAGE: diff --git a/homeassistant/components/lg_thinq/strings.json b/homeassistant/components/lg_thinq/strings.json index 277e3db3df0..a776dde2054 100644 --- a/homeassistant/components/lg_thinq/strings.json +++ b/homeassistant/components/lg_thinq/strings.json @@ -5,6 +5,12 @@ "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" }, "error": { + "invalid_token": "The token is not valid.", + "not_acceptable_terms": "The service terms are not accepted.", + "not_allowed_api_again": "The user does NOT have permission on the API call.", + "not_supported_country": "The country is not supported.", + "exceeded_api_calls": "The number of API calls has been exceeded.", + "exceeded_user_api_calls": "The number of User API calls has been exceeded.", "token_unauthorized": "The token is invalid or unauthorized." 
}, "step": { diff --git a/homeassistant/components/lifx_cloud/manifest.json b/homeassistant/components/lifx_cloud/manifest.json index 7799de85b8d..61e5d66c821 100644 --- a/homeassistant/components/lifx_cloud/manifest.json +++ b/homeassistant/components/lifx_cloud/manifest.json @@ -3,5 +3,6 @@ "name": "LIFX Cloud", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/lifx_cloud", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/lightwave/manifest.json b/homeassistant/components/lightwave/manifest.json index d242195a71c..75b39b18c26 100644 --- a/homeassistant/components/lightwave/manifest.json +++ b/homeassistant/components/lightwave/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/lightwave", "iot_class": "assumed_state", "loggers": ["lightwave"], + "quality_scale": "legacy", "requirements": ["lightwave==0.24"] } diff --git a/homeassistant/components/limitlessled/manifest.json b/homeassistant/components/limitlessled/manifest.json index 3495ac2c981..c2a921c6e24 100644 --- a/homeassistant/components/limitlessled/manifest.json +++ b/homeassistant/components/limitlessled/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/limitlessled", "iot_class": "assumed_state", "loggers": ["limitlessled"], + "quality_scale": "legacy", "requirements": ["limitlessled==1.1.3"] } diff --git a/homeassistant/components/linkplay/button.py b/homeassistant/components/linkplay/button.py new file mode 100644 index 00000000000..1c93ebcdc3e --- /dev/null +++ b/homeassistant/components/linkplay/button.py @@ -0,0 +1,82 @@ +"""Support for LinkPlay buttons.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +import logging +from typing import Any + +from linkplay.bridge import LinkPlayBridge + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import LinkPlayConfigEntry +from .entity import LinkPlayBaseEntity, exception_wrap + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class LinkPlayButtonEntityDescription(ButtonEntityDescription): + """Class describing LinkPlay button entities.""" + + remote_function: Callable[[LinkPlayBridge], Coroutine[Any, Any, None]] + + +BUTTON_TYPES: tuple[LinkPlayButtonEntityDescription, ...] 
= ( + LinkPlayButtonEntityDescription( + key="timesync", + translation_key="timesync", + remote_function=lambda linkplay_bridge: linkplay_bridge.device.timesync(), + entity_category=EntityCategory.CONFIG, + ), + LinkPlayButtonEntityDescription( + key="restart", + device_class=ButtonDeviceClass.RESTART, + remote_function=lambda linkplay_bridge: linkplay_bridge.device.reboot(), + entity_category=EntityCategory.CONFIG, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: LinkPlayConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the LinkPlay buttons from config entry.""" + + # add entities + async_add_entities( + LinkPlayButton(config_entry.runtime_data.bridge, description) + for description in BUTTON_TYPES + ) + + +class LinkPlayButton(LinkPlayBaseEntity, ButtonEntity): + """Representation of LinkPlay button.""" + + entity_description: LinkPlayButtonEntityDescription + + def __init__( + self, + bridge: LinkPlayBridge, + description: LinkPlayButtonEntityDescription, + ) -> None: + """Initialize LinkPlay button.""" + super().__init__(bridge) + self.entity_description = description + self._attr_unique_id = f"{bridge.device.uuid}-{description.key}" + + @exception_wrap + async def async_press(self) -> None: + """Press the button.""" + await self.entity_description.remote_function(self._bridge) diff --git a/homeassistant/components/linkplay/const.py b/homeassistant/components/linkplay/const.py index a776365e38f..e10450cf255 100644 --- a/homeassistant/components/linkplay/const.py +++ b/homeassistant/components/linkplay/const.py @@ -8,5 +8,5 @@ from homeassistant.util.hass_dict import HassKey DOMAIN = "linkplay" CONTROLLER = "controller" CONTROLLER_KEY: HassKey[LinkPlayController] = HassKey(CONTROLLER) -PLATFORMS = [Platform.MEDIA_PLAYER] +PLATFORMS = [Platform.BUTTON, Platform.MEDIA_PLAYER] DATA_SESSION = "session" diff --git a/homeassistant/components/linkplay/diagnostics.py b/homeassistant/components/linkplay/diagnostics.py new file mode 100644 index 00000000000..cfc1346aff4 --- /dev/null +++ b/homeassistant/components/linkplay/diagnostics.py @@ -0,0 +1,17 @@ +"""Diagnostics support for Linkplay.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import LinkPlayConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: LinkPlayConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = entry.runtime_data + return {"device_info": data.bridge.to_dict()} diff --git a/homeassistant/components/linkplay/entity.py b/homeassistant/components/linkplay/entity.py new file mode 100644 index 00000000000..00e2f39b233 --- /dev/null +++ b/homeassistant/components/linkplay/entity.py @@ -0,0 +1,57 @@ +"""BaseEntity to support multiple LinkPlay platforms.""" + +from collections.abc import Callable, Coroutine +from typing import Any, Concatenate + +from linkplay.bridge import LinkPlayBridge + +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity import Entity + +from . 
import DOMAIN, LinkPlayRequestException +from .utils import MANUFACTURER_GENERIC, get_info_from_project + + +def exception_wrap[_LinkPlayEntityT: LinkPlayBaseEntity, **_P, _R]( + func: Callable[Concatenate[_LinkPlayEntityT, _P], Coroutine[Any, Any, _R]], +) -> Callable[Concatenate[_LinkPlayEntityT, _P], Coroutine[Any, Any, _R]]: + """Define a wrapper to catch exceptions and raise HomeAssistant errors.""" + + async def _wrap(self: _LinkPlayEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R: + try: + return await func(self, *args, **kwargs) + except LinkPlayRequestException as err: + raise HomeAssistantError( + f"Exception occurred when communicating with API {func}: {err}" + ) from err + + return _wrap + + +class LinkPlayBaseEntity(Entity): + """Representation of a LinkPlay base entity.""" + + _attr_has_entity_name = True + + def __init__(self, bridge: LinkPlayBridge) -> None: + """Initialize the LinkPlay media player.""" + + self._bridge = bridge + + manufacturer, model = get_info_from_project(bridge.device.properties["project"]) + model_id = None + if model != MANUFACTURER_GENERIC: + model_id = bridge.device.properties["project"] + + self._attr_device_info = dr.DeviceInfo( + configuration_url=bridge.endpoint, + connections={(dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"])}, + hw_version=bridge.device.properties["hardware"], + identifiers={(DOMAIN, bridge.device.uuid)}, + manufacturer=manufacturer, + model=model, + model_id=model_id, + name=bridge.device.name, + sw_version=bridge.device.properties["firmware"], + ) diff --git a/homeassistant/components/linkplay/icons.json b/homeassistant/components/linkplay/icons.json index ee76344dc39..c0fe86d9ac7 100644 --- a/homeassistant/components/linkplay/icons.json +++ b/homeassistant/components/linkplay/icons.json @@ -1,4 +1,11 @@ { + "entity": { + "button": { + "timesync": { + "default": "mdi:clock" + } + } + }, "services": { "play_preset": { "service": "mdi:play-box-outline" diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index c29c2978522..456fbf23289 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -2,9 +2,8 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine import logging -from typing import Any, Concatenate +from typing import Any from linkplay.bridge import LinkPlayBridge from linkplay.consts import EqualizerMode, LoopMode, PlayingMode, PlayingStatus @@ -28,7 +27,6 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import ( config_validation as cv, - device_registry as dr, entity_platform, entity_registry as er, ) @@ -37,7 +35,7 @@ from homeassistant.util.dt import utcnow from . 
import LinkPlayConfigEntry, LinkPlayData from .const import CONTROLLER_KEY, DOMAIN -from .utils import MANUFACTURER_GENERIC, get_info_from_project +from .entity import LinkPlayBaseEntity, exception_wrap _LOGGER = logging.getLogger(__name__) STATE_MAP: dict[PlayingStatus, MediaPlayerState] = { @@ -145,58 +143,24 @@ async def async_setup_entry( async_add_entities([LinkPlayMediaPlayerEntity(entry.runtime_data.bridge)]) -def exception_wrap[_LinkPlayEntityT: LinkPlayMediaPlayerEntity, **_P, _R]( - func: Callable[Concatenate[_LinkPlayEntityT, _P], Coroutine[Any, Any, _R]], -) -> Callable[Concatenate[_LinkPlayEntityT, _P], Coroutine[Any, Any, _R]]: - """Define a wrapper to catch exceptions and raise HomeAssistant errors.""" - - async def _wrap(self: _LinkPlayEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R: - try: - return await func(self, *args, **kwargs) - except LinkPlayRequestException as err: - raise HomeAssistantError( - f"Exception occurred when communicating with API {func}: {err}" - ) from err - - return _wrap - - -class LinkPlayMediaPlayerEntity(MediaPlayerEntity): +class LinkPlayMediaPlayerEntity(LinkPlayBaseEntity, MediaPlayerEntity): """Representation of a LinkPlay media player.""" _attr_sound_mode_list = list(EQUALIZER_MAP.values()) _attr_device_class = MediaPlayerDeviceClass.RECEIVER _attr_media_content_type = MediaType.MUSIC - _attr_has_entity_name = True _attr_name = None def __init__(self, bridge: LinkPlayBridge) -> None: """Initialize the LinkPlay media player.""" - self._bridge = bridge + super().__init__(bridge) self._attr_unique_id = bridge.device.uuid self._attr_source_list = [ SOURCE_MAP[playing_mode] for playing_mode in bridge.device.playmode_support ] - manufacturer, model = get_info_from_project(bridge.device.properties["project"]) - model_id = None - if model != MANUFACTURER_GENERIC: - model_id = bridge.device.properties["project"] - - self._attr_device_info = dr.DeviceInfo( - configuration_url=bridge.endpoint, - connections={(dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"])}, - hw_version=bridge.device.properties["hardware"], - identifiers={(DOMAIN, bridge.device.uuid)}, - manufacturer=manufacturer, - model=model, - model_id=model_id, - name=bridge.device.name, - sw_version=bridge.device.properties["firmware"], - ) - @exception_wrap async def async_update(self) -> None: """Update the state of the media player.""" diff --git a/homeassistant/components/linkplay/strings.json b/homeassistant/components/linkplay/strings.json index f3495b293e0..31b4649e131 100644 --- a/homeassistant/components/linkplay/strings.json +++ b/homeassistant/components/linkplay/strings.json @@ -35,6 +35,13 @@ } } }, + "entity": { + "button": { + "timesync": { + "name": "Sync time" + } + } + }, "exceptions": { "invalid_grouping_entity": { "message": "Entity with id {entity_id} can't be added to the LinkPlay multiroom. Is the entity a LinkPlay mediaplayer?" 
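Illustrative sketch (not part of the changeset): the linkplay refactor above moves the shared device-info setup and the exception_wrap decorator out of media_player.py into a common entity.py, so new platforms such as the button platform can reuse them. The minimal standalone sketch below shows what that decorator does; LinkPlayRequestException and HomeAssistantError are simplified local stand-ins for the real library/Home Assistant exceptions, and DemoButton is a hypothetical entity used only for demonstration.

import asyncio
from collections.abc import Callable, Coroutine
from typing import Any, Concatenate


class LinkPlayRequestException(Exception):
    """Stand-in for the error raised by the linkplay library on a failed request."""


class HomeAssistantError(Exception):
    """Stand-in for homeassistant.exceptions.HomeAssistantError."""


def exception_wrap[_T, **_P, _R](
    func: Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]],
) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, _R]]:
    """Re-raise library errors as HomeAssistantError, mirroring entity.py above."""

    async def _wrap(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> _R:
        try:
            return await func(self, *args, **kwargs)
        except LinkPlayRequestException as err:
            raise HomeAssistantError(
                f"Exception occurred when communicating with API {func}: {err}"
            ) from err

    return _wrap


class DemoButton:
    """Hypothetical entity whose press handler is wrapped, as button.py does."""

    @exception_wrap
    async def async_press(self) -> None:
        # Simulate a failing device request so the decorator's translation is visible.
        raise LinkPlayRequestException("device unreachable")


async def main() -> None:
    try:
        await DemoButton().async_press()
    except HomeAssistantError as err:
        # The library error surfaces as a HomeAssistantError with context attached.
        print(err)


if __name__ == "__main__":
    asyncio.run(main())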
diff --git a/homeassistant/components/linkplay/utils.py b/homeassistant/components/linkplay/utils.py index 36a492f8464..00bb691362b 100644 --- a/homeassistant/components/linkplay/utils.py +++ b/homeassistant/components/linkplay/utils.py @@ -13,45 +13,68 @@ from .const import DATA_SESSION, DOMAIN MANUFACTURER_ARTSOUND: Final[str] = "ArtSound" MANUFACTURER_ARYLIC: Final[str] = "Arylic" MANUFACTURER_IEAST: Final[str] = "iEAST" +MANUFACTURER_WIIM: Final[str] = "WiiM" +MANUFACTURER_GGMM: Final[str] = "GGMM" +MANUFACTURER_MEDION: Final[str] = "Medion" MANUFACTURER_GENERIC: Final[str] = "Generic" MODELS_ARTSOUND_SMART_ZONE4: Final[str] = "Smart Zone 4 AMP" MODELS_ARTSOUND_SMART_HYDE: Final[str] = "Smart Hyde" MODELS_ARYLIC_S50: Final[str] = "S50+" MODELS_ARYLIC_S50_PRO: Final[str] = "S50 Pro" MODELS_ARYLIC_A30: Final[str] = "A30" +MODELS_ARYLIC_A50: Final[str] = "A50" MODELS_ARYLIC_A50S: Final[str] = "A50+" +MODELS_ARYLIC_UP2STREAM_AMP: Final[str] = "Up2Stream Amp 2.0" MODELS_ARYLIC_UP2STREAM_AMP_V3: Final[str] = "Up2Stream Amp v3" MODELS_ARYLIC_UP2STREAM_AMP_V4: Final[str] = "Up2Stream Amp v4" +MODELS_ARYLIC_UP2STREAM_PRO: Final[str] = "Up2Stream Pro v1" MODELS_ARYLIC_UP2STREAM_PRO_V3: Final[str] = "Up2Stream Pro v3" +MODELS_ARYLIC_UP2STREAM_PLATE_AMP: Final[str] = "Up2Stream Plate Amp" MODELS_IEAST_AUDIOCAST_M5: Final[str] = "AudioCast M5" +MODELS_WIIM_AMP: Final[str] = "WiiM Amp" +MODELS_WIIM_MINI: Final[str] = "WiiM Mini" +MODELS_GGMM_GGMM_E2: Final[str] = "GGMM E2" +MODELS_MEDION_MD_43970: Final[str] = "Life P66970 (MD 43970)" MODELS_GENERIC: Final[str] = "Generic" +PROJECTID_LOOKUP: Final[dict[str, tuple[str, str]]] = { + "SMART_ZONE4_AMP": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4), + "SMART_HYDE": (MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE), + "ARYLIC_S50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50), + "RP0016_S50PRO_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50_PRO), + "RP0011_WB60_S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30), + "X-50": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50), + "ARYLIC_A50S": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S), + "RP0011_WB60": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP), + "UP2STREAM_AMP_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3), + "UP2STREAM_AMP_V4": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4), + "UP2STREAM_PRO_V3": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3), + "ARYLIC_V20": (MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PLATE_AMP), + "UP2STREAM_MINI_V3": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "UP2STREAM_AMP_2P1": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0014_A50C_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_A30": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_SUBWOOFER": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_S50A": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0010_D5_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0001": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0013_WA31S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0010_D5": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0013_WA31S_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "RP0014_A50D_S": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_A50TE": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "ARYLIC_A50N": (MANUFACTURER_ARYLIC, MODELS_GENERIC), + "iEAST-02": (MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5), + "WiiM_Amp_4layer": (MANUFACTURER_WIIM, MODELS_WIIM_AMP), + "Muzo_Mini": (MANUFACTURER_WIIM, MODELS_WIIM_MINI), + "GGMM_E2A": (MANUFACTURER_GGMM, MODELS_GGMM_GGMM_E2), + "A16": (MANUFACTURER_MEDION, MODELS_MEDION_MD_43970), 
+} + def get_info_from_project(project: str) -> tuple[str, str]: """Get manufacturer and model info based on given project.""" - match project: - case "SMART_ZONE4_AMP": - return MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4 - case "SMART_HYDE": - return MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE - case "ARYLIC_S50": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50 - case "RP0016_S50PRO_S": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50_PRO - case "RP0011_WB60_S": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30 - case "ARYLIC_A50S": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S - case "UP2STREAM_AMP_V3": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3 - case "UP2STREAM_AMP_V4": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4 - case "UP2STREAM_PRO_V3": - return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3 - case "iEAST-02": - return MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5 - case _: - return MANUFACTURER_GENERIC, MODELS_GENERIC + return PROJECTID_LOOKUP.get(project, (MANUFACTURER_GENERIC, MODELS_GENERIC)) async def async_get_client_session(hass: HomeAssistant) -> ClientSession: diff --git a/homeassistant/components/linksys_smart/manifest.json b/homeassistant/components/linksys_smart/manifest.json index 6200da5866d..4f099f81277 100644 --- a/homeassistant/components/linksys_smart/manifest.json +++ b/homeassistant/components/linksys_smart/manifest.json @@ -3,5 +3,6 @@ "name": "Linksys Smart Wi-Fi", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/linksys_smart", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/linode/manifest.json b/homeassistant/components/linode/manifest.json index bedd6c2d172..975747de86d 100644 --- a/homeassistant/components/linode/manifest.json +++ b/homeassistant/components/linode/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/linode", "iot_class": "cloud_polling", "loggers": ["linode"], + "quality_scale": "legacy", "requirements": ["linode-api==4.1.9b1"] } diff --git a/homeassistant/components/linux_battery/manifest.json b/homeassistant/components/linux_battery/manifest.json index 12b49c18aee..39bd331e3a4 100644 --- a/homeassistant/components/linux_battery/manifest.json +++ b/homeassistant/components/linux_battery/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/linux_battery", "iot_class": "local_polling", "loggers": ["batinfo"], + "quality_scale": "legacy", "requirements": ["batinfo==0.4.2"] } diff --git a/homeassistant/components/lirc/manifest.json b/homeassistant/components/lirc/manifest.json index 3cc5d453721..64dbee06390 100644 --- a/homeassistant/components/lirc/manifest.json +++ b/homeassistant/components/lirc/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/lirc", "iot_class": "local_push", "loggers": ["lirc"], + "quality_scale": "legacy", "requirements": ["python-lirc==1.2.3"] } diff --git a/homeassistant/components/litejet/config_flow.py b/homeassistant/components/litejet/config_flow.py index b9f8a0f4b66..9aa0b19c506 100644 --- a/homeassistant/components/litejet/config_flow.py +++ b/homeassistant/components/litejet/config_flow.py @@ -24,10 +24,6 @@ from .const import CONF_DEFAULT_TRANSITION, DOMAIN class LiteJetOptionsFlow(OptionsFlow): """Handle LiteJet options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize LiteJet options flow.""" - 
self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -84,4 +80,4 @@ class LiteJetConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> LiteJetOptionsFlow: """Get the options flow for this handler.""" - return LiteJetOptionsFlow(config_entry) + return LiteJetOptionsFlow() diff --git a/homeassistant/components/litejet/manifest.json b/homeassistant/components/litejet/manifest.json index 1df907029a9..cd2e5fda11a 100644 --- a/homeassistant/components/litejet/manifest.json +++ b/homeassistant/components/litejet/manifest.json @@ -7,7 +7,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["pylitejet"], - "quality_scale": "platinum", "requirements": ["pylitejet==0.6.3"], "single_config_entry": true } diff --git a/homeassistant/components/llamalab_automate/manifest.json b/homeassistant/components/llamalab_automate/manifest.json index 861b919f24b..4343d617e93 100644 --- a/homeassistant/components/llamalab_automate/manifest.json +++ b/homeassistant/components/llamalab_automate/manifest.json @@ -3,5 +3,6 @@ "name": "LlamaLab Automate", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/llamalab_automate", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/logentries/manifest.json b/homeassistant/components/logentries/manifest.json index ecf2d8a227c..e63e83aff00 100644 --- a/homeassistant/components/logentries/manifest.json +++ b/homeassistant/components/logentries/manifest.json @@ -3,5 +3,6 @@ "name": "Logentries", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/logentries", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/london_air/manifest.json b/homeassistant/components/london_air/manifest.json index 60eed8d83bd..653a951ae56 100644 --- a/homeassistant/components/london_air/manifest.json +++ b/homeassistant/components/london_air/manifest.json @@ -3,5 +3,6 @@ "name": "London Air", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/london_air", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/london_underground/const.py b/homeassistant/components/london_underground/const.py index 532f4333ba9..447ed4461f3 100644 --- a/homeassistant/components/london_underground/const.py +++ b/homeassistant/components/london_underground/const.py @@ -24,4 +24,10 @@ TUBE_LINES = [ "Piccadilly", "Victoria", "Waterloo & City", + "Liberty", + "Lioness", + "Mildmay", + "Suffragette", + "Weaver", + "Windrush", ] diff --git a/homeassistant/components/london_underground/coordinator.py b/homeassistant/components/london_underground/coordinator.py index cf14ad14b43..29d1e8e2f54 100644 --- a/homeassistant/components/london_underground/coordinator.py +++ b/homeassistant/components/london_underground/coordinator.py @@ -24,6 +24,7 @@ class LondonTubeCoordinator(DataUpdateCoordinator[dict[str, dict[str, str]]]): super().__init__( hass, _LOGGER, + config_entry=None, name=DOMAIN, update_interval=SCAN_INTERVAL, ) diff --git a/homeassistant/components/london_underground/manifest.json b/homeassistant/components/london_underground/manifest.json index eafc63c6ae7..94b993097c0 100644 --- a/homeassistant/components/london_underground/manifest.json +++ b/homeassistant/components/london_underground/manifest.json @@ 
-5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/london_underground", "iot_class": "cloud_polling", "loggers": ["london_tube_status"], + "quality_scale": "legacy", "requirements": ["london-tube-status==0.5"] } diff --git a/homeassistant/components/lookin/config_flow.py b/homeassistant/components/lookin/config_flow.py index e2d2c3f2625..aaf98a06fa8 100644 --- a/homeassistant/components/lookin/config_flow.py +++ b/homeassistant/components/lookin/config_flow.py @@ -97,7 +97,10 @@ class LookinFlowHandler(ConfigFlow, domain=DOMAIN): if user_input is None: return self.async_show_form( step_id="discovery_confirm", - description_placeholders={"name": self._name, "host": self._host}, + description_placeholders={ + "name": self._name or "LOOKin", + "host": self._host, + }, ) return self.async_create_entry( diff --git a/homeassistant/components/luci/manifest.json b/homeassistant/components/luci/manifest.json index 597aad30648..a8df2c63df4 100644 --- a/homeassistant/components/luci/manifest.json +++ b/homeassistant/components/luci/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/luci", "iot_class": "local_polling", "loggers": ["openwrt_luci_rpc"], + "quality_scale": "legacy", "requirements": ["openwrt-luci-rpc==1.1.17"] } diff --git a/homeassistant/components/luftdaten/manifest.json b/homeassistant/components/luftdaten/manifest.json index 96927bdd4a8..bafffe4d6ae 100644 --- a/homeassistant/components/luftdaten/manifest.json +++ b/homeassistant/components/luftdaten/manifest.json @@ -7,6 +7,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["luftdaten"], - "quality_scale": "gold", "requirements": ["luftdaten==0.7.4"] } diff --git a/homeassistant/components/lutron_caseta/manifest.json b/homeassistant/components/lutron_caseta/manifest.json index e96778f0a31..ec278615743 100644 --- a/homeassistant/components/lutron_caseta/manifest.json +++ b/homeassistant/components/lutron_caseta/manifest.json @@ -9,7 +9,7 @@ }, "iot_class": "local_push", "loggers": ["pylutron_caseta"], - "requirements": ["pylutron-caseta==0.21.1"], + "requirements": ["pylutron-caseta==0.22.0"], "zeroconf": [ { "type": "_lutron._tcp.local.", diff --git a/homeassistant/components/lw12wifi/manifest.json b/homeassistant/components/lw12wifi/manifest.json index d8b2290b234..683498f2056 100644 --- a/homeassistant/components/lw12wifi/manifest.json +++ b/homeassistant/components/lw12wifi/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/lw12wifi", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["lw12==0.9.2"] } diff --git a/homeassistant/components/lyric/manifest.json b/homeassistant/components/lyric/manifest.json index 8bed909ace2..cca69969f70 100644 --- a/homeassistant/components/lyric/manifest.json +++ b/homeassistant/components/lyric/manifest.json @@ -21,6 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/lyric", "iot_class": "cloud_polling", "loggers": ["aiolyric"], - "quality_scale": "silver", "requirements": ["aiolyric==2.0.1"] } diff --git a/homeassistant/components/manual_mqtt/manifest.json b/homeassistant/components/manual_mqtt/manifest.json index d4adcaf3bc9..bf2fccb62ae 100644 --- a/homeassistant/components/manual_mqtt/manifest.json +++ b/homeassistant/components/manual_mqtt/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/manual_mqtt", - "iot_class": 
"local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/marytts/manifest.json b/homeassistant/components/marytts/manifest.json index bbf23327547..814d3c64925 100644 --- a/homeassistant/components/marytts/manifest.json +++ b/homeassistant/components/marytts/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/marytts", "iot_class": "local_push", "loggers": ["speak2mary"], + "quality_scale": "legacy", "requirements": ["speak2mary==1.4.0"] } diff --git a/homeassistant/components/mastodon/sensor.py b/homeassistant/components/mastodon/sensor.py index 12acfc04743..a7a1d40fcc4 100644 --- a/homeassistant/components/mastodon/sensor.py +++ b/homeassistant/components/mastodon/sensor.py @@ -35,21 +35,18 @@ ENTITY_DESCRIPTIONS = ( MastodonSensorEntityDescription( key="followers", translation_key="followers", - native_unit_of_measurement="accounts", state_class=SensorStateClass.TOTAL, value_fn=lambda data: data.get(ACCOUNT_FOLLOWERS_COUNT), ), MastodonSensorEntityDescription( key="following", translation_key="following", - native_unit_of_measurement="accounts", state_class=SensorStateClass.TOTAL, value_fn=lambda data: data.get(ACCOUNT_FOLLOWING_COUNT), ), MastodonSensorEntityDescription( key="posts", translation_key="posts", - native_unit_of_measurement="posts", state_class=SensorStateClass.TOTAL, value_fn=lambda data: data.get(ACCOUNT_STATUSES_COUNT), ), diff --git a/homeassistant/components/mastodon/strings.json b/homeassistant/components/mastodon/strings.json index fd4dd890b37..c6aefefca06 100644 --- a/homeassistant/components/mastodon/strings.json +++ b/homeassistant/components/mastodon/strings.json @@ -9,7 +9,10 @@ "access_token": "[%key:common::config_flow::data::access_token%]" }, "data_description": { - "base_url": "The URL of your Mastodon instance e.g. https://mastodon.social." + "base_url": "The URL of your Mastodon instance e.g. https://mastodon.social.", + "client_id": "The client key for the application created within your Mastodon account.", + "client_secret": "The client secret for the application created within your Mastodon account.", + "access_token": "The access token for the application created within your Mastodon account." 
} } }, @@ -39,13 +42,16 @@ "entity": { "sensor": { "followers": { - "name": "Followers" + "name": "Followers", + "unit_of_measurement": "accounts" }, "following": { - "name": "Following" + "name": "Following", + "unit_of_measurement": "[%key:component::mastodon::entity::sensor::followers::unit_of_measurement%]" }, "posts": { - "name": "Posts" + "name": "Posts", + "unit_of_measurement": "posts" } } } diff --git a/homeassistant/components/matrix/manifest.json b/homeassistant/components/matrix/manifest.json index 520bd0550cc..e06eed1176f 100644 --- a/homeassistant/components/matrix/manifest.json +++ b/homeassistant/components/matrix/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/matrix", "iot_class": "cloud_push", "loggers": ["matrix_client"], - "requirements": ["matrix-nio==0.25.2", "Pillow==10.4.0"] + "quality_scale": "legacy", + "requirements": ["matrix-nio==0.25.2", "Pillow==11.0.0"] } diff --git a/homeassistant/components/matter/adapter.py b/homeassistant/components/matter/adapter.py index 475e4a44538..0ccd3e065ff 100644 --- a/homeassistant/components/matter/adapter.py +++ b/homeassistant/components/matter/adapter.py @@ -45,6 +45,7 @@ class MatterAdapter: self.hass = hass self.config_entry = config_entry self.platform_handlers: dict[Platform, AddEntitiesCallback] = {} + self.discovered_entities: set[str] = set() def register_platform_handler( self, platform: Platform, add_entities: AddEntitiesCallback @@ -54,23 +55,19 @@ class MatterAdapter: async def setup_nodes(self) -> None: """Set up all existing nodes and subscribe to new nodes.""" - initialized_nodes: set[int] = set() for node in self.matter_client.get_nodes(): - initialized_nodes.add(node.node_id) self._setup_node(node) def node_added_callback(event: EventType, node: MatterNode) -> None: """Handle node added event.""" - initialized_nodes.add(node.node_id) self._setup_node(node) def node_updated_callback(event: EventType, node: MatterNode) -> None: """Handle node updated event.""" - if node.node_id in initialized_nodes: - return if not node.available: return - initialized_nodes.add(node.node_id) + # We always run the discovery logic again, + # because the firmware version could have been changed or features added. 
self._setup_node(node) def endpoint_added_callback(event: EventType, data: dict[str, int]) -> None: @@ -237,11 +234,20 @@ class MatterAdapter: self._create_device_registry(endpoint) # run platform discovery from device type instances for entity_info in async_discover_entities(endpoint): + discovery_key = ( + f"{entity_info.platform}_{endpoint.node.node_id}_{endpoint.endpoint_id}_" + f"{entity_info.primary_attribute.cluster_id}_" + f"{entity_info.primary_attribute.attribute_id}_" + f"{entity_info.entity_description.key}" + ) + if discovery_key in self.discovered_entities: + continue LOGGER.debug( "Creating %s entity for %s", entity_info.platform, entity_info.primary_attribute, ) + self.discovered_entities.add(discovery_key) new_entity = entity_info.entity_class( self.matter_client, endpoint, entity_info ) diff --git a/homeassistant/components/matter/binary_sensor.py b/homeassistant/components/matter/binary_sensor.py index 875b063dc88..6882078a712 100644 --- a/homeassistant/components/matter/binary_sensor.py +++ b/homeassistant/components/matter/binary_sensor.py @@ -159,6 +159,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterBinarySensor, required_attributes=(clusters.DoorLock.Attributes.DoorState,), + featuremap_contains=clusters.DoorLock.Bitmaps.Feature.kDoorPositionSensor, ), MatterDiscoverySchema( platform=Platform.BINARY_SENSOR, diff --git a/homeassistant/components/matter/button.py b/homeassistant/components/matter/button.py index 918b334061b..153124a4f7e 100644 --- a/homeassistant/components/matter/button.py +++ b/homeassistant/components/matter/button.py @@ -69,6 +69,7 @@ DISCOVERY_SCHEMAS = [ entity_class=MatterCommandButton, required_attributes=(clusters.Identify.Attributes.AcceptedCommandList,), value_contains=clusters.Identify.Commands.Identify.command_id, + allow_multi=True, ), MatterDiscoverySchema( platform=Platform.BUTTON, diff --git a/homeassistant/components/matter/const.py b/homeassistant/components/matter/const.py index a0e160a6c01..8018d5e09ed 100644 --- a/homeassistant/components/matter/const.py +++ b/homeassistant/components/matter/const.py @@ -13,3 +13,5 @@ LOGGER = logging.getLogger(__package__) # prefixes to identify device identifier id types ID_TYPE_DEVICE_ID = "deviceid" ID_TYPE_SERIAL = "serial" + +FEATUREMAP_ATTRIBUTE_ID = 65532 diff --git a/homeassistant/components/matter/discovery.py b/homeassistant/components/matter/discovery.py index 5b07f9a069f..3b9fb0b8a94 100644 --- a/homeassistant/components/matter/discovery.py +++ b/homeassistant/components/matter/discovery.py @@ -13,6 +13,7 @@ from homeassistant.core import callback from .binary_sensor import DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS from .button import DISCOVERY_SCHEMAS as BUTTON_SCHEMAS from .climate import DISCOVERY_SCHEMAS as CLIMATE_SENSOR_SCHEMAS +from .const import FEATUREMAP_ATTRIBUTE_ID from .cover import DISCOVERY_SCHEMAS as COVER_SCHEMAS from .event import DISCOVERY_SCHEMAS as EVENT_SCHEMAS from .fan import DISCOVERY_SCHEMAS as FAN_SCHEMAS @@ -121,12 +122,24 @@ def async_discover_entities( continue # check for required value in (primary) attribute + primary_attribute = schema.required_attributes[0] + primary_value = endpoint.get_attribute_value(None, primary_attribute) if schema.value_contains is not None and ( - (primary_attribute := next((x for x in schema.required_attributes), None)) - is None - or (value := endpoint.get_attribute_value(None, primary_attribute)) is None - or not isinstance(value, list) - or schema.value_contains not in value + isinstance(primary_value, list) + and 
schema.value_contains not in primary_value + ): + continue + + # check for required value in cluster featuremap + if schema.featuremap_contains is not None and ( + not bool( + int( + endpoint.get_attribute_value( + primary_attribute.cluster_id, FEATUREMAP_ATTRIBUTE_ID + ) + ) + & schema.featuremap_contains + ) ): continue @@ -147,6 +160,7 @@ def async_discover_entities( attributes_to_watch=attributes_to_watch, entity_description=schema.entity_description, entity_class=schema.entity_class, + discovery_schema=schema, ) # prevent re-discovery of the primary attribute if not allowed diff --git a/homeassistant/components/matter/entity.py b/homeassistant/components/matter/entity.py index 7c378fe465e..50a0f2b1fee 100644 --- a/homeassistant/components/matter/entity.py +++ b/homeassistant/components/matter/entity.py @@ -16,9 +16,10 @@ from propcache import cached_property from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity, EntityDescription +import homeassistant.helpers.entity_registry as er from homeassistant.helpers.typing import UndefinedType -from .const import DOMAIN, ID_TYPE_DEVICE_ID +from .const import DOMAIN, FEATUREMAP_ATTRIBUTE_ID, ID_TYPE_DEVICE_ID from .helpers import get_device_id if TYPE_CHECKING: @@ -140,6 +141,19 @@ class MatterEntity(Entity): node_filter=self._endpoint.node.node_id, ) ) + # subscribe to FeatureMap attribute (as that can dynamically change) + self._unsubscribes.append( + self.matter_client.subscribe_events( + callback=self._on_featuremap_update, + event_filter=EventType.ATTRIBUTE_UPDATED, + node_filter=self._endpoint.node.node_id, + attr_path_filter=create_attribute_path( + endpoint=self._endpoint.endpoint_id, + cluster_id=self._entity_info.primary_attribute.cluster_id, + attribute_id=FEATUREMAP_ATTRIBUTE_ID, + ), + ) + ) @cached_property def name(self) -> str | UndefinedType | None: @@ -159,6 +173,29 @@ class MatterEntity(Entity): self._update_from_device() self.async_write_ha_state() + @callback + def _on_featuremap_update( + self, event: EventType, data: tuple[int, str, int] | None + ) -> None: + """Handle FeatureMap attribute updates.""" + if data is None: + return + new_value = data[2] + # handle edge case where a Feature is removed from a cluster + if ( + self._entity_info.discovery_schema.featuremap_contains is not None + and not bool( + new_value & self._entity_info.discovery_schema.featuremap_contains + ) + ): + # this entity is no longer supported by the device + ent_reg = er.async_get(self.hass) + ent_reg.async_remove(self.entity_id) + + return + # all other cases, just update the entity + self._on_matter_event(event, data) + @callback def _update_from_device(self) -> None: """Update data from Matter device.""" diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index c5e10554fe7..d69d0fd3dab 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -206,6 +206,5 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterLock, required_attributes=(clusters.DoorLock.Attributes.LockState,), - optional_attributes=(clusters.DoorLock.Attributes.DoorState,), ), ] diff --git a/homeassistant/components/matter/models.py b/homeassistant/components/matter/models.py index f04c0f7e107..a00963c825a 100644 --- a/homeassistant/components/matter/models.py +++ b/homeassistant/components/matter/models.py @@ -51,6 +51,9 @@ class MatterEntityInfo: # entity class to use to instantiate the entity 
entity_class: type + # the original discovery schema used to create this entity + discovery_schema: MatterDiscoverySchema + @property def primary_attribute(self) -> type[ClusterAttributeDescriptor]: """Return Primary Attribute belonging to the entity.""" @@ -113,6 +116,10 @@ class MatterDiscoverySchema: # NOTE: only works for list values value_contains: Any | None = None + # [optional] the primary attribute's cluster featuremap must contain this value + # for example for the DoorSensor on a DoorLock Cluster + featuremap_contains: int | None = None + # [optional] bool to specify if this primary value may be discovered # by multiple platforms allow_multi: bool = False diff --git a/homeassistant/components/maxcube/manifest.json b/homeassistant/components/maxcube/manifest.json index 6421686d2cf..d57ccacc5b1 100644 --- a/homeassistant/components/maxcube/manifest.json +++ b/homeassistant/components/maxcube/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/maxcube", "iot_class": "local_polling", "loggers": ["maxcube"], + "quality_scale": "legacy", "requirements": ["maxcube-api==0.4.3"] } diff --git a/homeassistant/components/mazda/manifest.json b/homeassistant/components/mazda/manifest.json index 75a83a9f468..fcd39e11a10 100644 --- a/homeassistant/components/mazda/manifest.json +++ b/homeassistant/components/mazda/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mazda", "integration_type": "system", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": [] } diff --git a/homeassistant/components/mealie/config_flow.py b/homeassistant/components/mealie/config_flow.py index 2f90ceaf97a..2addd23284e 100644 --- a/homeassistant/components/mealie/config_flow.py +++ b/homeassistant/components/mealie/config_flow.py @@ -38,6 +38,10 @@ class MealieConfigFlow(ConfigFlow, domain=DOMAIN): ) -> tuple[dict[str, str], str | None]: """Check connection to the Mealie API.""" assert self.host is not None + + if "/hassio/ingress/" in self.host: + return {"base": "ingress_url"}, None + client = MealieClient( self.host, token=api_token, diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index f594f1398e3..c555fcbc3d6 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.9.3"] + "requirements": ["aiomealie==0.9.4"] } diff --git a/homeassistant/components/mealie/sensor.py b/homeassistant/components/mealie/sensor.py index b4baac34ebe..141a28ecdab 100644 --- a/homeassistant/components/mealie/sensor.py +++ b/homeassistant/components/mealie/sensor.py @@ -28,31 +28,26 @@ class MealieStatisticsSensorEntityDescription(SensorEntityDescription): SENSOR_TYPES: tuple[MealieStatisticsSensorEntityDescription, ...] 
= ( MealieStatisticsSensorEntityDescription( key="recipes", - native_unit_of_measurement="recipes", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_recipes, ), MealieStatisticsSensorEntityDescription( key="users", - native_unit_of_measurement="users", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_users, ), MealieStatisticsSensorEntityDescription( key="categories", - native_unit_of_measurement="categories", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_categories, ), MealieStatisticsSensorEntityDescription( key="tags", - native_unit_of_measurement="tags", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_tags, ), MealieStatisticsSensorEntityDescription( key="tools", - native_unit_of_measurement="tools", state_class=SensorStateClass.TOTAL, value_fn=lambda statistics: statistics.total_tools, ), diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index b59399815ea..830d43d8f93 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -8,7 +8,7 @@ "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, "data_description": { - "host": "The URL of your Mealie instance." + "host": "The URL of your Mealie instance, for example, http://192.168.1.123:1234" } }, "reauth_confirm": { @@ -29,6 +29,7 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "ingress_url": "Ingress URLs are only used for accessing the Mealie UI. Use your Home Assistant IP address and the network port within the configuration tab of the Mealie add-on.", "unknown": "[%key:common::config_flow::error::unknown%]", "mealie_version": "Minimum required version is v1.0.0. Please upgrade Mealie and then retry." 
}, @@ -56,19 +57,24 @@ }, "sensor": { "recipes": { - "name": "Recipes" + "name": "Recipes", + "unit_of_measurement": "recipes" }, "users": { - "name": "Users" + "name": "Users", + "unit_of_measurement": "users" }, "categories": { - "name": "Categories" + "name": "Categories", + "unit_of_measurement": "categories" }, "tags": { - "name": "Tags" + "name": "Tags", + "unit_of_measurement": "tags" }, "tools": { - "name": "Tools" + "name": "Tools", + "unit_of_measurement": "tools" } } }, diff --git a/homeassistant/components/media_extractor/__init__.py b/homeassistant/components/media_extractor/__init__.py index b8bb5f98cd0..79fa9d6fb9a 100644 --- a/homeassistant/components/media_extractor/__init__.py +++ b/homeassistant/components/media_extractor/__init__.py @@ -16,10 +16,9 @@ from homeassistant.components.media_player import ( MEDIA_PLAYER_PLAY_MEDIA_SCHEMA, SERVICE_PLAY_MEDIA, ) -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import ( - DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, ServiceCall, ServiceResponse, @@ -27,7 +26,6 @@ from homeassistant.core import ( ) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType from .const import ( @@ -43,19 +41,7 @@ _LOGGER = logging.getLogger(__name__) CONF_CUSTOMIZE_ENTITIES = "customize" CONF_DEFAULT_STREAM_QUERY = "default_query" -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Optional(CONF_DEFAULT_STREAM_QUERY): cv.string, - vol.Optional(CONF_CUSTOMIZE_ENTITIES): vol.Schema( - {cv.entity_id: vol.Schema({cv.string: cv.string})} - ), - } - ) - }, - extra=vol.ALLOW_EXTRA, -) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -67,29 +53,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the media extractor service.""" - if DOMAIN in config: - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Media extractor", - }, - ) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - ) - ) - async def extract_media_url(call: ServiceCall) -> ServiceResponse: """Extract media url.""" diff --git a/homeassistant/components/media_extractor/config_flow.py b/homeassistant/components/media_extractor/config_flow.py index b91942d7b13..cb2166c35f1 100644 --- a/homeassistant/components/media_extractor/config_flow.py +++ b/homeassistant/components/media_extractor/config_flow.py @@ -24,7 +24,3 @@ class MediaExtractorConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title="Media extractor", data={}) return self.async_show_form(step_id="user", data_schema=vol.Schema({})) - - async def async_step_import(self, import_data: None) -> ConfigFlowResult: - """Handle import.""" - return self.async_create_entry(title="Media extractor", data={}) diff --git 
a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index ebfa79d7190..f85f1561bb9 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp[default]==2024.11.04"], + "requirements": ["yt-dlp[default]==2024.12.03"], "single_config_entry": true } diff --git a/homeassistant/components/media_source/__init__.py b/homeassistant/components/media_source/__init__.py index 604f9b7cc88..3ea8f581245 100644 --- a/homeassistant/components/media_source/__init__.py +++ b/homeassistant/components/media_source/__init__.py @@ -18,7 +18,7 @@ from homeassistant.components.media_player import ( from homeassistant.components.websocket_api import ActiveConnection from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import report_usage from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) @@ -156,7 +156,7 @@ async def async_resolve_media( raise Unresolvable("Media Source not loaded") if target_media_player is UNDEFINED: - report( + report_usage( "calls media_source.async_resolve_media without passing an entity_id", exclude_integrations={DOMAIN}, ) diff --git a/homeassistant/components/mediaroom/manifest.json b/homeassistant/components/mediaroom/manifest.json index 4cd7b11c22f..060a40b036a 100644 --- a/homeassistant/components/mediaroom/manifest.json +++ b/homeassistant/components/mediaroom/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mediaroom", "iot_class": "local_polling", "loggers": ["pymediaroom"], + "quality_scale": "legacy", "requirements": ["pymediaroom==0.6.5.4"] } diff --git a/homeassistant/components/melissa/manifest.json b/homeassistant/components/melissa/manifest.json index 60d1d7f145f..a583c3b88fa 100644 --- a/homeassistant/components/melissa/manifest.json +++ b/homeassistant/components/melissa/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/melissa", "iot_class": "cloud_polling", "loggers": ["melissa"], + "quality_scale": "legacy", "requirements": ["py-melissa-climate==2.1.4"] } diff --git a/homeassistant/components/meraki/manifest.json b/homeassistant/components/meraki/manifest.json index 4fb7d27d4bb..5b8690ae52d 100644 --- a/homeassistant/components/meraki/manifest.json +++ b/homeassistant/components/meraki/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/meraki", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/message_bird/manifest.json b/homeassistant/components/message_bird/manifest.json index d5118dc3486..3b3c56029c5 100644 --- a/homeassistant/components/message_bird/manifest.json +++ b/homeassistant/components/message_bird/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/message_bird", "iot_class": "cloud_push", "loggers": ["messagebird"], + "quality_scale": "legacy", "requirements": ["messagebird==1.2.0"] } diff --git a/homeassistant/components/met/config_flow.py b/homeassistant/components/met/config_flow.py index 84a44682413..62964d22bb1 100644 --- 
a/homeassistant/components/met/config_flow.py +++ b/homeassistant/components/met/config_flow.py @@ -11,7 +11,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import ( CONF_ELEVATION, @@ -143,12 +142,12 @@ class MetConfigFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> MetOptionsFlowHandler: """Get the options flow for Met.""" - return MetOptionsFlowHandler(config_entry) + return MetOptionsFlowHandler() -class MetOptionsFlowHandler(OptionsFlowWithConfigEntry): +class MetOptionsFlowHandler(OptionsFlow): """Options flow for Met component.""" async def async_step_init( @@ -159,13 +158,13 @@ class MetOptionsFlowHandler(OptionsFlowWithConfigEntry): if user_input is not None: # Update config entry with data from user input self.hass.config_entries.async_update_entry( - self._config_entry, data=user_input + self.config_entry, data=user_input ) return self.async_create_entry( - title=self._config_entry.title, data=user_input + title=self.config_entry.title, data=user_input ) return self.async_show_form( step_id="init", - data_schema=_get_data_schema(self.hass, config_entry=self._config_entry), + data_schema=_get_data_schema(self.hass, config_entry=self.config_entry), ) diff --git a/homeassistant/components/met_eireann/manifest.json b/homeassistant/components/met_eireann/manifest.json index 72afc6977dd..7b913df4d3c 100644 --- a/homeassistant/components/met_eireann/manifest.json +++ b/homeassistant/components/met_eireann/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/met_eireann", "iot_class": "cloud_polling", "loggers": ["meteireann"], - "requirements": ["PyMetEireann==2021.8.0"] + "requirements": ["PyMetEireann==2024.11.0"] } diff --git a/homeassistant/components/meteoalarm/manifest.json b/homeassistant/components/meteoalarm/manifest.json index 4de91f6a431..58b6a63ed1d 100644 --- a/homeassistant/components/meteoalarm/manifest.json +++ b/homeassistant/components/meteoalarm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/meteoalarm", "iot_class": "cloud_polling", "loggers": ["meteoalertapi"], + "quality_scale": "legacy", "requirements": ["meteoalertapi==0.3.1"] } diff --git a/homeassistant/components/mfi/manifest.json b/homeassistant/components/mfi/manifest.json index b569009d400..3024fe145c5 100644 --- a/homeassistant/components/mfi/manifest.json +++ b/homeassistant/components/mfi/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mfi", "iot_class": "local_polling", "loggers": ["mficlient"], + "quality_scale": "legacy", "requirements": ["mficlient==0.5.0"] } diff --git a/homeassistant/components/microbees/manifest.json b/homeassistant/components/microbees/manifest.json index 91b7d66d80f..be28bf881d2 100644 --- a/homeassistant/components/microbees/manifest.json +++ b/homeassistant/components/microbees/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/microbees", "iot_class": "cloud_polling", - "requirements": ["microBeesPy==0.3.2"] + "requirements": ["microBeesPy==0.3.5"] } diff --git a/homeassistant/components/microsoft/manifest.json b/homeassistant/components/microsoft/manifest.json index dba2f58ba98..3d8f0629cec 100644 --- a/homeassistant/components/microsoft/manifest.json +++ 
b/homeassistant/components/microsoft/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/microsoft", "iot_class": "cloud_push", "loggers": ["pycsspeechtts"], + "quality_scale": "legacy", "requirements": ["pycsspeechtts==1.0.8"] } diff --git a/homeassistant/components/microsoft_face/manifest.json b/homeassistant/components/microsoft_face/manifest.json index 0ef18a12271..e13d1c76ccb 100644 --- a/homeassistant/components/microsoft_face/manifest.json +++ b/homeassistant/components/microsoft_face/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["camera"], "documentation": "https://www.home-assistant.io/integrations/microsoft_face", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/microsoft_face_detect/manifest.json b/homeassistant/components/microsoft_face_detect/manifest.json index 1b72ce92c95..f3f9f0fa095 100644 --- a/homeassistant/components/microsoft_face_detect/manifest.json +++ b/homeassistant/components/microsoft_face_detect/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["microsoft_face"], "documentation": "https://www.home-assistant.io/integrations/microsoft_face_detect", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/microsoft_face_identify/manifest.json b/homeassistant/components/microsoft_face_identify/manifest.json index 63418ac2a0b..b3964ee1254 100644 --- a/homeassistant/components/microsoft_face_identify/manifest.json +++ b/homeassistant/components/microsoft_face_identify/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["microsoft_face"], "documentation": "https://www.home-assistant.io/integrations/microsoft_face_identify", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/mikrotik/config_flow.py b/homeassistant/components/mikrotik/config_flow.py index 98303889194..bca394f0d38 100644 --- a/homeassistant/components/mikrotik/config_flow.py +++ b/homeassistant/components/mikrotik/config_flow.py @@ -46,7 +46,7 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> MikrotikOptionsFlowHandler: """Get the options flow for this handler.""" - return MikrotikOptionsFlowHandler(config_entry) + return MikrotikOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -122,10 +122,6 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): class MikrotikOptionsFlowHandler(OptionsFlow): """Handle Mikrotik options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Mikrotik options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/mill/manifest.json b/homeassistant/components/mill/manifest.json index 16e7bf552ba..6316eb72096 100644 --- a/homeassistant/components/mill/manifest.json +++ b/homeassistant/components/mill/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mill", "iot_class": "local_polling", "loggers": ["mill", "mill_local"], - "requirements": ["millheater==0.11.8", "mill-local==0.3.0"] + "requirements": ["millheater==0.12.2", "mill-local==0.3.0"] } diff --git a/homeassistant/components/mill/sensor.py b/homeassistant/components/mill/sensor.py index 64b9008a82b..c4b975ab039 100644 --- 
a/homeassistant/components/mill/sensor.py +++ b/homeassistant/components/mill/sensor.py @@ -57,6 +57,19 @@ HEATER_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, state_class=SensorStateClass.TOTAL_INCREASING, ), + SensorEntityDescription( + key="current_power", + translation_key="current_power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + ), + SensorEntityDescription( + key="control_signal", + translation_key="control_signal", + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), ) SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( @@ -118,6 +131,16 @@ LOCAL_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( ), ) +SOCKET_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( + SensorEntityDescription( + key=HUMIDITY, + device_class=SensorDeviceClass.HUMIDITY, + native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, + ), + *HEATER_SENSOR_TYPES, +) + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback @@ -145,7 +168,9 @@ async def async_setup_entry( ) for mill_device in mill_data_coordinator.data.values() for entity_description in ( - HEATER_SENSOR_TYPES + SOCKET_SENSOR_TYPES + if isinstance(mill_device, mill.Socket) + else HEATER_SENSOR_TYPES if isinstance(mill_device, mill.Heater) else SENSOR_TYPES ) diff --git a/homeassistant/components/minecraft_server/manifest.json b/homeassistant/components/minecraft_server/manifest.json index 8e098f98a15..d6ade4853c9 100644 --- a/homeassistant/components/minecraft_server/manifest.json +++ b/homeassistant/components/minecraft_server/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/minecraft_server", "iot_class": "local_polling", "loggers": ["dnspython", "mcstatus"], - "quality_scale": "platinum", "requirements": ["mcstatus==11.1.1"] } diff --git a/homeassistant/components/minio/manifest.json b/homeassistant/components/minio/manifest.json index 5fee7893841..3ab6b82bb86 100644 --- a/homeassistant/components/minio/manifest.json +++ b/homeassistant/components/minio/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/minio", "iot_class": "cloud_push", "loggers": ["minio"], + "quality_scale": "legacy", "requirements": ["minio==7.1.12"] } diff --git a/homeassistant/components/mjpeg/config_flow.py b/homeassistant/components/mjpeg/config_flow.py index 84267936788..e0150f8c461 100644 --- a/homeassistant/components/mjpeg/config_flow.py +++ b/homeassistant/components/mjpeg/config_flow.py @@ -141,7 +141,7 @@ class MJPEGFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> MJPEGOptionsFlowHandler: """Get the options flow for this handler.""" - return MJPEGOptionsFlowHandler(config_entry) + return MJPEGOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -183,10 +183,6 @@ class MJPEGFlowHandler(ConfigFlow, domain=DOMAIN): class MJPEGOptionsFlowHandler(OptionsFlow): """Handle MJPEG IP Camera options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize MJPEG IP Camera options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/mochad/manifest.json b/homeassistant/components/mochad/manifest.json index 
e4680cc6ff5..96795789c8c 100644 --- a/homeassistant/components/mochad/manifest.json +++ b/homeassistant/components/mochad/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mochad", "iot_class": "local_polling", "loggers": ["pbr", "pymochad"], + "quality_scale": "legacy", "requirements": ["pymochad==0.2.0"] } diff --git a/homeassistant/components/modbus/binary_sensor.py b/homeassistant/components/modbus/binary_sensor.py index 54ee49ed6a2..b50d21faf42 100644 --- a/homeassistant/components/modbus/binary_sensor.py +++ b/homeassistant/components/modbus/binary_sensor.py @@ -90,6 +90,7 @@ class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity): self._coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=name, ) diff --git a/homeassistant/components/modbus/manifest.json b/homeassistant/components/modbus/manifest.json index 4482801482f..7cba4692eb6 100644 --- a/homeassistant/components/modbus/manifest.json +++ b/homeassistant/components/modbus/manifest.json @@ -5,6 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/modbus", "iot_class": "local_polling", "loggers": ["pymodbus"], - "quality_scale": "silver", "requirements": ["pymodbus==3.6.9"] } diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index d85b4e0e67f..18d91f8dd3b 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -158,8 +158,6 @@ async def async_modbus_setup( async def async_stop_modbus(event: Event) -> None: """Stop Modbus service.""" - - async_dispatcher_send(hass, SIGNAL_STOP_ENTITY) for client in hub_collect.values(): await client.async_close() diff --git a/homeassistant/components/modbus/sensor.py b/homeassistant/components/modbus/sensor.py index 4b4fd5bd51a..d5a16c95cc4 100644 --- a/homeassistant/components/modbus/sensor.py +++ b/homeassistant/components/modbus/sensor.py @@ -91,6 +91,7 @@ class ModbusRegisterSensor(BaseStructPlatform, RestoreSensor, SensorEntity): self._coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name=name, ) diff --git a/homeassistant/components/modern_forms/config_flow.py b/homeassistant/components/modern_forms/config_flow.py index dee08736234..6799dbf97d3 100644 --- a/homeassistant/components/modern_forms/config_flow.py +++ b/homeassistant/components/modern_forms/config_flow.py @@ -9,11 +9,13 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME +from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +USER_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) + class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a ModernForms config flow.""" @@ -22,7 +24,7 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): host: str | None = None mac: str | None = None - name: str | None = None + name: str async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -55,17 +57,21 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None, prepare: bool = False ) -> ConfigFlowResult: """Config flow handler for ModernForms.""" - source = self.context["source"] - # Request user input, unless we are preparing discovery flow if user_input is None: user_input 
= {} if not prepare: - if source == SOURCE_ZEROCONF: - return self._show_confirm_dialog() - return self._show_setup_form() + if self.source == SOURCE_ZEROCONF: + return self.async_show_form( + step_id="zeroconf_confirm", + description_placeholders={"name": self.name}, + ) + return self.async_show_form( + step_id="user", + data_schema=USER_SCHEMA, + ) - if source == SOURCE_ZEROCONF: + if self.source == SOURCE_ZEROCONF: user_input[CONF_HOST] = self.host user_input[CONF_MAC] = self.mac @@ -75,18 +81,21 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): try: device = await device.update() except ModernFormsConnectionError: - if source == SOURCE_ZEROCONF: + if self.source == SOURCE_ZEROCONF: return self.async_abort(reason="cannot_connect") - return self._show_setup_form({"base": "cannot_connect"}) + return self.async_show_form( + step_id="user", + data_schema=USER_SCHEMA, + errors={"base": "cannot_connect"}, + ) user_input[CONF_MAC] = device.info.mac_address - user_input[CONF_NAME] = device.info.device_name # Check if already configured await self.async_set_unique_id(user_input[CONF_MAC]) self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]}) title = device.info.device_name - if source == SOURCE_ZEROCONF: + if self.source == SOURCE_ZEROCONF: title = self.name if prepare: @@ -96,19 +105,3 @@ class ModernFormsFlowHandler(ConfigFlow, domain=DOMAIN): title=title, data={CONF_HOST: user_input[CONF_HOST], CONF_MAC: user_input[CONF_MAC]}, ) - - def _show_setup_form(self, errors: dict | None = None) -> ConfigFlowResult: - """Show the setup form to the user.""" - return self.async_show_form( - step_id="user", - data_schema=vol.Schema({vol.Required(CONF_HOST): str}), - errors=errors or {}, - ) - - def _show_confirm_dialog(self, errors: dict | None = None) -> ConfigFlowResult: - """Show the confirm dialog to the user.""" - return self.async_show_form( - step_id="zeroconf_confirm", - description_placeholders={"name": self.name}, - errors=errors or {}, - ) diff --git a/homeassistant/components/mold_indicator/sensor.py b/homeassistant/components/mold_indicator/sensor.py index eb4c0bf7284..262d13ad3af 100644 --- a/homeassistant/components/mold_indicator/sensor.py +++ b/homeassistant/components/mold_indicator/sensor.py @@ -22,6 +22,7 @@ from homeassistant.const import ( CONF_NAME, CONF_UNIQUE_ID, PERCENTAGE, + STATE_UNAVAILABLE, STATE_UNKNOWN, UnitOfTemperature, ) @@ -37,7 +38,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_state_change_event -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.unit_conversion import TemperatureConverter from homeassistant.util.unit_system import METRIC_SYSTEM @@ -150,7 +151,6 @@ class MoldIndicator(SensorEntity): unique_id: str | None, ) -> None: """Initialize the sensor.""" - self._state: str | None = None self._attr_name = name self._attr_unique_id = unique_id self._indoor_temp_sensor = indoor_temp_sensor @@ -272,7 +272,7 @@ class MoldIndicator(SensorEntity): # re-calculate dewpoint and mold indicator self._calc_dewpoint() self._calc_moldindicator() - if self._state is None: + if self._attr_native_value is None: self._attr_available = False else: self._attr_available = True @@ -311,7 +311,7 @@ class 
MoldIndicator(SensorEntity): _LOGGER.debug("Updating temp sensor with value %s", state.state) # Return an error if the sensor change its state to Unknown. - if state.state == STATE_UNKNOWN: + if state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): _LOGGER.error( "Unable to parse temperature sensor %s with state: %s", state.entity_id, @@ -319,8 +319,6 @@ class MoldIndicator(SensorEntity): ) return None - unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) - if (temp := util.convert(state.state, float)) is None: _LOGGER.error( "Unable to parse temperature sensor %s with state: %s", @@ -330,12 +328,10 @@ class MoldIndicator(SensorEntity): return None # convert to celsius if necessary - if unit == UnitOfTemperature.FAHRENHEIT: - return TemperatureConverter.convert( - temp, UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS - ) - if unit == UnitOfTemperature.CELSIUS: - return temp + if ( + unit := state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + ) in UnitOfTemperature: + return TemperatureConverter.convert(temp, unit, UnitOfTemperature.CELSIUS) _LOGGER.error( "Temp sensor %s has unsupported unit: %s (allowed: %s, %s)", state.entity_id, @@ -352,7 +348,7 @@ class MoldIndicator(SensorEntity): _LOGGER.debug("Updating humidity sensor with value %s", state.state) # Return an error if the sensor change its state to Unknown. - if state.state == STATE_UNKNOWN: + if state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): _LOGGER.error( "Unable to parse humidity sensor %s, state: %s", state.entity_id, @@ -370,19 +366,18 @@ class MoldIndicator(SensorEntity): if (unit := state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)) != PERCENTAGE: _LOGGER.error( - "Humidity sensor %s has unsupported unit: %s %s", + "Humidity sensor %s has unsupported unit: %s (allowed: %s)", state.entity_id, unit, - " (allowed: %)", + PERCENTAGE, ) return None if hum > 100 or hum < 0: _LOGGER.error( - "Humidity sensor %s is out of range: %s %s", + "Humidity sensor %s is out of range: %s (allowed: 0-100)", state.entity_id, hum, - "(allowed: 0-100%)", ) return None @@ -401,7 +396,7 @@ class MoldIndicator(SensorEntity): # re-calculate dewpoint and mold indicator self._calc_dewpoint() self._calc_moldindicator() - if self._state is None: + if self._attr_native_value is None: self._attr_available = False self._dewpoint = None self._crit_temp = None @@ -437,7 +432,7 @@ class MoldIndicator(SensorEntity): self._dewpoint, self._calib_factor, ) - self._state = None + self._attr_native_value = None self._attr_available = False self._crit_temp = None return @@ -468,18 +463,13 @@ class MoldIndicator(SensorEntity): # check bounds and format if crit_humidity > 100: - self._state = "100" + self._attr_native_value = "100" elif crit_humidity < 0: - self._state = "0" + self._attr_native_value = "0" else: - self._state = f"{int(crit_humidity):d}" + self._attr_native_value = f"{int(crit_humidity):d}" - _LOGGER.debug("Mold indicator humidity: %s", self._state) - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - return self._state + _LOGGER.debug("Mold indicator humidity: %s", self.native_value) @property def extra_state_attributes(self) -> dict[str, Any]: diff --git a/homeassistant/components/mold_indicator/strings.json b/homeassistant/components/mold_indicator/strings.json index e19fed690b2..74614bba139 100644 --- a/homeassistant/components/mold_indicator/strings.json +++ b/homeassistant/components/mold_indicator/strings.json @@ -9,7 +9,7 @@ }, "step": { "user": { - "description": "Add Mold indicator helper", + 
"description": "Create Mold indicator helper", "data": { "name": "[%key:common::config_flow::data::name%]", "indoor_humidity_sensor": "Indoor humidity sensor", diff --git a/homeassistant/components/monoprice/config_flow.py b/homeassistant/components/monoprice/config_flow.py index cac673e38c1..b2619623a07 100644 --- a/homeassistant/components/monoprice/config_flow.py +++ b/homeassistant/components/monoprice/config_flow.py @@ -108,7 +108,7 @@ class MonoPriceConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> MonopriceOptionsFlowHandler: """Define the config flow to handle options.""" - return MonopriceOptionsFlowHandler(config_entry) + return MonopriceOptionsFlowHandler() @callback @@ -126,10 +126,6 @@ def _key_for_source(index, source, previous_sources): class MonopriceOptionsFlowHandler(OptionsFlow): """Handle a Monoprice options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - @callback def _previous_sources(self): if CONF_SOURCES in self.config_entry.options: diff --git a/homeassistant/components/monzo/coordinator.py b/homeassistant/components/monzo/coordinator.py index 223d7b05ffe..caac551f986 100644 --- a/homeassistant/components/monzo/coordinator.py +++ b/homeassistant/components/monzo/coordinator.py @@ -3,13 +3,14 @@ from dataclasses import dataclass from datetime import timedelta import logging +from pprint import pformat from typing import Any -from monzopy import AuthorisationExpiredError +from monzopy import AuthorisationExpiredError, InvalidMonzoAPIResponseError from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .api import AuthenticatedMonzoAPI from .const import DOMAIN @@ -45,5 +46,16 @@ class MonzoCoordinator(DataUpdateCoordinator[MonzoData]): pots = await self.api.user_account.pots() except AuthorisationExpiredError as err: raise ConfigEntryAuthFailed from err + except InvalidMonzoAPIResponseError as err: + message = "Invalid Monzo API response." + if err.missing_key: + _LOGGER.debug( + "%s\nMissing key: %s\nResponse:\n%s", + message, + err.missing_key, + pformat(err.response), + ) + message += " Enabling debug logging for details." 
+ raise UpdateFailed(message) from err return MonzoData(accounts, pots) diff --git a/homeassistant/components/monzo/manifest.json b/homeassistant/components/monzo/manifest.json index d9d17eb8abc..7038cecd7ea 100644 --- a/homeassistant/components/monzo/manifest.json +++ b/homeassistant/components/monzo/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/monzo", "iot_class": "cloud_polling", - "requirements": ["monzopy==1.3.2"] + "requirements": ["monzopy==1.4.2"] } diff --git a/homeassistant/components/mopeka/config_flow.py b/homeassistant/components/mopeka/config_flow.py index 72e9386a47f..2e35ff4283f 100644 --- a/homeassistant/components/mopeka/config_flow.py +++ b/homeassistant/components/mopeka/config_flow.py @@ -58,7 +58,7 @@ class MopekaConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: config_entries.ConfigEntry, ) -> MopekaOptionsFlow: """Return the options flow for this handler.""" - return MopekaOptionsFlow(config_entry) + return MopekaOptionsFlow() async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak @@ -139,10 +139,6 @@ class MopekaConfigFlow(ConfigFlow, domain=DOMAIN): class MopekaOptionsFlow(config_entries.OptionsFlow): """Handle options for the Mopeka component.""" - def __init__(self, config_entry: config_entries.ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/motion_blinds/config_flow.py b/homeassistant/components/motion_blinds/config_flow.py index 131299314a2..e961880375c 100644 --- a/homeassistant/components/motion_blinds/config_flow.py +++ b/homeassistant/components/motion_blinds/config_flow.py @@ -38,10 +38,6 @@ CONFIG_SCHEMA = vol.Schema( class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -83,7 +79,7 @@ class MotionBlindsFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo diff --git a/homeassistant/components/motionblinds_ble/config_flow.py b/homeassistant/components/motionblinds_ble/config_flow.py index cda673b13ac..30417c62c65 100644 --- a/homeassistant/components/motionblinds_ble/config_flow.py +++ b/homeassistant/components/motionblinds_ble/config_flow.py @@ -48,11 +48,12 @@ CONFIG_SCHEMA = vol.Schema({vol.Required(CONF_MAC_CODE): str}) class FlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Motionblinds Bluetooth.""" + _display_name: str + def __init__(self) -> None: """Initialize a ConfigFlow.""" self._discovery_info: BluetoothServiceInfoBleak | BLEDevice | None = None self._mac_code: str | None = None - self._display_name: str | None = None self._blind_type: MotionBlindType | None = None async def async_step_bluetooth( @@ -67,8 +68,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): self._discovery_info = discovery_info self._mac_code = get_mac_from_local_name(discovery_info.name) - self._display_name = display_name = DISPLAY_NAME.format(mac_code=self._mac_code) - self.context["title_placeholders"] = {"name": 
display_name} + self._display_name = DISPLAY_NAME.format(mac_code=self._mac_code) + self.context["title_placeholders"] = {"name": self._display_name} return await self.async_step_confirm() @@ -113,7 +114,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): assert self._discovery_info is not None return self.async_create_entry( - title=str(self._display_name), + title=self._display_name, data={ CONF_ADDRESS: self._discovery_info.address, CONF_LOCAL_NAME: self._discovery_info.name, @@ -187,16 +188,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an options flow for Motionblinds BLE.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/motionblinds_ble/manifest.json b/homeassistant/components/motionblinds_ble/manifest.json index ce7e7a6bb8b..70cddce30a1 100644 --- a/homeassistant/components/motionblinds_ble/manifest.json +++ b/homeassistant/components/motionblinds_ble/manifest.json @@ -14,5 +14,5 @@ "integration_type": "device", "iot_class": "assumed_state", "loggers": ["motionblindsble"], - "requirements": ["motionblindsble==0.1.2"] + "requirements": ["motionblindsble==0.1.3"] } diff --git a/homeassistant/components/motioneye/config_flow.py b/homeassistant/components/motioneye/config_flow.py index f6d947dab5f..80a6449a22d 100644 --- a/homeassistant/components/motioneye/config_flow.py +++ b/homeassistant/components/motioneye/config_flow.py @@ -179,18 +179,16 @@ class MotionEyeConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> MotionEyeOptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> MotionEyeOptionsFlow: """Get the Hyperion Options flow.""" - return MotionEyeOptionsFlow(config_entry) + return MotionEyeOptionsFlow() class MotionEyeOptionsFlow(OptionsFlow): """motionEye options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize a motionEye options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -201,14 +199,14 @@ class MotionEyeOptionsFlow(OptionsFlow): schema: dict[vol.Marker, type] = { vol.Required( CONF_WEBHOOK_SET, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_WEBHOOK_SET, DEFAULT_WEBHOOK_SET, ), ): bool, vol.Required( CONF_WEBHOOK_SET_OVERWRITE, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_WEBHOOK_SET_OVERWRITE, DEFAULT_WEBHOOK_SET_OVERWRITE, ), @@ -219,9 +217,9 @@ class MotionEyeOptionsFlow(OptionsFlow): # The input URL is not validated as being a URL, to allow for the possibility # the template input won't be a valid URL until after it's rendered description: dict[str, str] | None = None - if CONF_STREAM_URL_TEMPLATE in self._config_entry.options: + if CONF_STREAM_URL_TEMPLATE in self.config_entry.options: description = { - "suggested_value": self._config_entry.options[ + "suggested_value": self.config_entry.options[ CONF_STREAM_URL_TEMPLATE ] } diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 907b1a1dd11..bcad8747c39 
100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -225,77 +225,27 @@ async def async_check_config_schema( ) from exc -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Load a config entry.""" - conf: dict[str, Any] - mqtt_data: MqttData +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the actions and websocket API for the MQTT component.""" - async def _setup_client( - client_available: asyncio.Future[bool], - ) -> tuple[MqttData, dict[str, Any]]: - """Set up the MQTT client.""" - # Fetch configuration - conf = dict(entry.data) - hass_config = await conf_util.async_hass_config_yaml(hass) - mqtt_yaml = CONFIG_SCHEMA(hass_config).get(DOMAIN, []) - await async_create_certificate_temp_files(hass, conf) - client = MQTT(hass, entry, conf) - if DOMAIN in hass.data: - mqtt_data = hass.data[DATA_MQTT] - mqtt_data.config = mqtt_yaml - mqtt_data.client = client - else: - # Initial setup - websocket_api.async_register_command(hass, websocket_subscribe) - websocket_api.async_register_command(hass, websocket_mqtt_info) - hass.data[DATA_MQTT] = mqtt_data = MqttData(config=mqtt_yaml, client=client) - await client.async_start(mqtt_data) - - # Restore saved subscriptions - if mqtt_data.subscriptions_to_restore: - mqtt_data.client.async_restore_tracked_subscriptions( - mqtt_data.subscriptions_to_restore - ) - mqtt_data.subscriptions_to_restore = set() - mqtt_data.reload_dispatchers.append( - entry.add_update_listener(_async_config_entry_updated) - ) - - return (mqtt_data, conf) - - client_available: asyncio.Future[bool] - if DATA_MQTT_AVAILABLE not in hass.data: - client_available = hass.data[DATA_MQTT_AVAILABLE] = hass.loop.create_future() - else: - client_available = hass.data[DATA_MQTT_AVAILABLE] - - mqtt_data, conf = await _setup_client(client_available) - platforms_used = platforms_from_config(mqtt_data.config) - platforms_used.update( - entry.domain - for entry in er.async_entries_for_config_entry( - er.async_get(hass), entry.entry_id - ) - ) - integration = async_get_loaded_integration(hass, DOMAIN) - # Preload platforms we know we are going to use so - # discovery can setup each platform synchronously - # and avoid creating a flood of tasks at startup - # while waiting for the the imports to complete - if not integration.platforms_are_loaded(platforms_used): - with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PLATFORMS): - await integration.async_get_platforms(platforms_used) - - # Wait to connect until the platforms are loaded so - # we can be sure discovery does not have to wait for - # each platform to load when we get the flood of retained - # messages on connect - await mqtt_data.client.async_connect(client_available) + websocket_api.async_register_command(hass, websocket_subscribe) + websocket_api.async_register_command(hass, websocket_mqtt_info) async def async_publish_service(call: ServiceCall) -> None: """Handle MQTT publish service calls.""" msg_topic: str | None = call.data.get(ATTR_TOPIC) msg_topic_template: str | None = call.data.get(ATTR_TOPIC_TEMPLATE) + + if not mqtt_config_entry_enabled(hass): + raise ServiceValidationError( + translation_key="mqtt_not_setup_cannot_publish", + translation_domain=DOMAIN, + translation_placeholders={ + "topic": str(msg_topic or msg_topic_template) + }, + ) + + mqtt_data = hass.data[DATA_MQTT] payload: PublishPayloadType = call.data.get(ATTR_PAYLOAD) evaluate_payload: bool = call.data.get(ATTR_EVALUATE_PAYLOAD, False) 
payload_template: str | None = call.data.get(ATTR_PAYLOAD_TEMPLATE) @@ -402,6 +352,71 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: } ), ) + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Load a config entry.""" + conf: dict[str, Any] + mqtt_data: MqttData + + async def _setup_client() -> tuple[MqttData, dict[str, Any]]: + """Set up the MQTT client.""" + # Fetch configuration + conf = dict(entry.data) + hass_config = await conf_util.async_hass_config_yaml(hass) + mqtt_yaml = CONFIG_SCHEMA(hass_config).get(DOMAIN, []) + await async_create_certificate_temp_files(hass, conf) + client = MQTT(hass, entry, conf) + if DOMAIN in hass.data: + mqtt_data = hass.data[DATA_MQTT] + mqtt_data.config = mqtt_yaml + mqtt_data.client = client + else: + # Initial setup + hass.data[DATA_MQTT] = mqtt_data = MqttData(config=mqtt_yaml, client=client) + await client.async_start(mqtt_data) + + # Restore saved subscriptions + if mqtt_data.subscriptions_to_restore: + mqtt_data.client.async_restore_tracked_subscriptions( + mqtt_data.subscriptions_to_restore + ) + mqtt_data.subscriptions_to_restore = set() + mqtt_data.reload_dispatchers.append( + entry.add_update_listener(_async_config_entry_updated) + ) + + return (mqtt_data, conf) + + client_available: asyncio.Future[bool] + if DATA_MQTT_AVAILABLE not in hass.data: + client_available = hass.data[DATA_MQTT_AVAILABLE] = hass.loop.create_future() + else: + client_available = hass.data[DATA_MQTT_AVAILABLE] + + mqtt_data, conf = await _setup_client() + platforms_used = platforms_from_config(mqtt_data.config) + platforms_used.update( + entry.domain + for entry in er.async_entries_for_config_entry( + er.async_get(hass), entry.entry_id + ) + ) + integration = async_get_loaded_integration(hass, DOMAIN) + # Preload platforms we know we are going to use so + # discovery can set up each platform synchronously + # and avoid creating a flood of tasks at startup + # while waiting for the imports to complete + if not integration.platforms_are_loaded(platforms_used): + with async_pause_setup(hass, SetupPhases.WAIT_IMPORT_PLATFORMS): + await integration.async_get_platforms(platforms_used) + + # Wait to connect until the platforms are loaded so + # we can be sure discovery does not have to wait for + # each platform to load when we get the flood of retained + # messages on connect + await mqtt_data.client.async_connect(client_available) # setup platforms and discovery async def _reload_config(call: ServiceCall) -> None: @@ -557,10 +572,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: mqtt_data = hass.data[DATA_MQTT] mqtt_client = mqtt_data.client - # Unload publish and dump services.
- hass.services.async_remove(DOMAIN, SERVICE_PUBLISH) - hass.services.async_remove(DOMAIN, SERVICE_DUMP) - # Stop the discovery await discovery.async_stop(hass) # Unload the platforms diff --git a/homeassistant/components/mqtt/alarm_control_panel.py b/homeassistant/components/mqtt/alarm_control_panel.py index 76bac8540a4..613f665c302 100644 --- a/homeassistant/components/mqtt/alarm_control_panel.py +++ b/homeassistant/components/mqtt/alarm_control_panel.py @@ -35,6 +35,8 @@ from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + _SUPPORTED_FEATURES = { "arm_home": AlarmControlPanelEntityFeature.ARM_HOME, "arm_away": AlarmControlPanelEntityFeature.ARM_AWAY, diff --git a/homeassistant/components/mqtt/binary_sensor.py b/homeassistant/components/mqtt/binary_sensor.py index 7f89a78991a..b49dc7aa24c 100644 --- a/homeassistant/components/mqtt/binary_sensor.py +++ b/homeassistant/components/mqtt/binary_sensor.py @@ -43,6 +43,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Binary sensor" CONF_OFF_DELAY = "off_delay" DEFAULT_PAYLOAD_OFF = "OFF" diff --git a/homeassistant/components/mqtt/button.py b/homeassistant/components/mqtt/button.py index 2aac51890c1..8e5446b532e 100644 --- a/homeassistant/components/mqtt/button.py +++ b/homeassistant/components/mqtt/button.py @@ -20,6 +20,8 @@ from .models import MqttCommandTemplate from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic +PARALLEL_UPDATES = 0 + CONF_PAYLOAD_PRESS = "payload_press" DEFAULT_NAME = "MQTT Button" DEFAULT_PAYLOAD_PRESS = "PRESS" diff --git a/homeassistant/components/mqtt/camera.py b/homeassistant/components/mqtt/camera.py index ca622defb25..88fabad0446 100644 --- a/homeassistant/components/mqtt/camera.py +++ b/homeassistant/components/mqtt/camera.py @@ -27,6 +27,8 @@ from .util import valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_IMAGE_ENCODING = "image_encoding" DEFAULT_NAME = "MQTT Camera" diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index a626e0e5b28..ee6f02912b2 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -227,7 +227,7 @@ def async_subscribe_internal( translation_placeholders={"topic": topic}, ) from exc client = mqtt_data.client - if not client.connected and not mqtt_config_entry_enabled(hass): + if not mqtt_config_entry_enabled(hass): raise HomeAssistantError( f"Cannot subscribe to topic '{topic}', MQTT is not enabled", translation_key="mqtt_not_setup_cannot_subscribe", diff --git a/homeassistant/components/mqtt/climate.py b/homeassistant/components/mqtt/climate.py index dd3efa4054b..2419e3f32ac 100644 --- a/homeassistant/components/mqtt/climate.py +++ b/homeassistant/components/mqtt/climate.py @@ -91,6 +91,8 @@ from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT HVAC" CONF_FAN_MODE_COMMAND_TEMPLATE = "fan_mode_command_template" diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 3ed88d0d823..69306a1c383 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -220,7 +220,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> MQTTOptionsFlowHandler: """Get the options 
flow for this handler.""" - return MQTTOptionsFlowHandler(config_entry) + return MQTTOptionsFlowHandler() async def _async_install_addon(self) -> None: """Install the Mosquitto Mqtt broker add-on.""" @@ -543,11 +543,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): class MQTTOptionsFlowHandler(OptionsFlow): """Handle MQTT options.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize MQTT options flow.""" - self.config_entry = config_entry self.broker_config: dict[str, str | int] = {} - self.options = config_entry.options async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the MQTT options.""" diff --git a/homeassistant/components/mqtt/cover.py b/homeassistant/components/mqtt/cover.py index 0b495663803..c7d041848f0 100644 --- a/homeassistant/components/mqtt/cover.py +++ b/homeassistant/components/mqtt/cover.py @@ -69,6 +69,8 @@ from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_GET_POSITION_TOPIC = "position_topic" CONF_GET_POSITION_TEMPLATE = "position_template" CONF_SET_POSITION_TOPIC = "set_position_topic" diff --git a/homeassistant/components/mqtt/device_tracker.py b/homeassistant/components/mqtt/device_tracker.py index b87db40ccf7..bdf543e046a 100644 --- a/homeassistant/components/mqtt/device_tracker.py +++ b/homeassistant/components/mqtt/device_tracker.py @@ -36,6 +36,8 @@ from .util import valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_PAYLOAD_HOME = "payload_home" CONF_PAYLOAD_NOT_HOME = "payload_not_home" CONF_SOURCE_TYPE = "source_type" diff --git a/homeassistant/components/mqtt/entity.py b/homeassistant/components/mqtt/entity.py index 46b2c9e1d42..c73e1975a68 100644 --- a/homeassistant/components/mqtt/entity.py +++ b/homeassistant/components/mqtt/entity.py @@ -1185,6 +1185,33 @@ def device_info_from_specifications( return info +@callback +def ensure_via_device_exists( + hass: HomeAssistant, device_info: DeviceInfo | None, config_entry: ConfigEntry +) -> None: + """Ensure the via device is in the device registry.""" + if ( + device_info is None + or CONF_VIA_DEVICE not in device_info + or (device_registry := dr.async_get(hass)).async_get_device( + identifiers={device_info["via_device"]} + ) + ): + return + + # Ensure the via device exists in the device registry + _LOGGER.debug( + "Device identifier %s via_device reference from device_info %s " + "not found in the Device Registry, creating new entry", + device_info["via_device"], + device_info, + ) + device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={device_info["via_device"]}, + ) + + class MqttEntityDeviceInfo(Entity): """Mixin used for mqtt platforms that support the device registry.""" @@ -1203,6 +1230,7 @@ class MqttEntityDeviceInfo(Entity): device_info = self.device_info if device_info is not None: + ensure_via_device_exists(self.hass, device_info, self._config_entry) device_registry.async_get_or_create( config_entry_id=config_entry_id, **device_info ) @@ -1256,6 +1284,7 @@ class MqttEntity( self, hass, discovery_data, self.discovery_update ) MqttEntityDeviceInfo.__init__(self, config.get(CONF_DEVICE), config_entry) + ensure_via_device_exists(self.hass, self.device_info, self._config_entry) def _init_entity_id(self) -> None: """Set entity_id from object_id if defined in config.""" @@ -1490,6 +1519,8 @@ def update_device( config_entry_id = config_entry.entry_id device_info = 
device_info_from_specifications(config[CONF_DEVICE]) + ensure_via_device_exists(hass, device_info, config_entry) + if config_entry_id is not None and device_info is not None: update_device_info = cast(dict[str, Any], device_info) update_device_info["config_entry_id"] = config_entry_id diff --git a/homeassistant/components/mqtt/event.py b/homeassistant/components/mqtt/event.py index 3f67891ca5e..d9812aaaf48 100644 --- a/homeassistant/components/mqtt/event.py +++ b/homeassistant/components/mqtt/event.py @@ -38,6 +38,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_EVENT_TYPES = "event_types" MQTT_EVENT_ATTRIBUTES_BLOCKED = frozenset( diff --git a/homeassistant/components/mqtt/fan.py b/homeassistant/components/mqtt/fan.py index 70187ee9eb1..b3c0f22789c 100644 --- a/homeassistant/components/mqtt/fan.py +++ b/homeassistant/components/mqtt/fan.py @@ -57,6 +57,8 @@ from .models import ( from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic +PARALLEL_UPDATES = 0 + CONF_DIRECTION_STATE_TOPIC = "direction_state_topic" CONF_DIRECTION_COMMAND_TOPIC = "direction_command_topic" CONF_DIRECTION_VALUE_TEMPLATE = "direction_value_template" diff --git a/homeassistant/components/mqtt/humidifier.py b/homeassistant/components/mqtt/humidifier.py index 304d293de79..5d1af03ad24 100644 --- a/homeassistant/components/mqtt/humidifier.py +++ b/homeassistant/components/mqtt/humidifier.py @@ -59,6 +59,8 @@ from .models import ( from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic +PARALLEL_UPDATES = 0 + CONF_AVAILABLE_MODES_LIST = "modes" CONF_DEVICE_CLASS = "device_class" CONF_MODE_COMMAND_TEMPLATE = "mode_command_template" diff --git a/homeassistant/components/mqtt/image.py b/homeassistant/components/mqtt/image.py index 6ecdee06489..4b7b2d783d2 100644 --- a/homeassistant/components/mqtt/image.py +++ b/homeassistant/components/mqtt/image.py @@ -37,6 +37,8 @@ from .util import valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_CONTENT_TYPE = "content_type" CONF_IMAGE_ENCODING = "image_encoding" CONF_IMAGE_TOPIC = "image_topic" diff --git a/homeassistant/components/mqtt/lawn_mower.py b/homeassistant/components/mqtt/lawn_mower.py index 11afe4220c4..87577c4b4d9 100644 --- a/homeassistant/components/mqtt/lawn_mower.py +++ b/homeassistant/components/mqtt/lawn_mower.py @@ -38,6 +38,8 @@ from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_ACTIVITY_STATE_TOPIC = "activity_state_topic" CONF_ACTIVITY_VALUE_TEMPLATE = "activity_value_template" CONF_DOCK_COMMAND_TOPIC = "dock_command_topic" diff --git a/homeassistant/components/mqtt/light/__init__.py b/homeassistant/components/mqtt/light/__init__.py index a1ba955181d..328f80cb5ea 100644 --- a/homeassistant/components/mqtt/light/__init__.py +++ b/homeassistant/components/mqtt/light/__init__.py @@ -30,6 +30,8 @@ from .schema_template import ( MqttLightTemplate, ) +PARALLEL_UPDATES = 0 + def validate_mqtt_light_discovery(config_value: dict[str, Any]) -> ConfigType: """Validate MQTT light schema for discovery.""" diff --git a/homeassistant/components/mqtt/lock.py b/homeassistant/components/mqtt/lock.py index e58d15b659d..2113dbbd5ba 100644 --- a/homeassistant/components/mqtt/lock.py +++ b/homeassistant/components/mqtt/lock.py @@ -45,6 +45,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER 
= logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_CODE_FORMAT = "code_format" CONF_PAYLOAD_LOCK = "payload_lock" diff --git a/homeassistant/components/mqtt/manifest.json b/homeassistant/components/mqtt/manifest.json index 25e98c01aaf..081449b142a 100644 --- a/homeassistant/components/mqtt/manifest.json +++ b/homeassistant/components/mqtt/manifest.json @@ -7,7 +7,6 @@ "dependencies": ["file_upload", "http"], "documentation": "https://www.home-assistant.io/integrations/mqtt", "iot_class": "local_push", - "quality_scale": "platinum", "requirements": ["paho-mqtt==1.6.1"], "single_config_entry": true } diff --git a/homeassistant/components/mqtt/notify.py b/homeassistant/components/mqtt/notify.py index 4a5ccc02774..84442e75e73 100644 --- a/homeassistant/components/mqtt/notify.py +++ b/homeassistant/components/mqtt/notify.py @@ -20,6 +20,8 @@ from .models import MqttCommandTemplate from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT notify" PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend( diff --git a/homeassistant/components/mqtt/number.py b/homeassistant/components/mqtt/number.py index 895334f2e1e..a9bf1829b63 100644 --- a/homeassistant/components/mqtt/number.py +++ b/homeassistant/components/mqtt/number.py @@ -50,6 +50,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_MIN = "min" CONF_MAX = "max" CONF_STEP = "step" diff --git a/homeassistant/components/mqtt/quality_scale.yaml b/homeassistant/components/mqtt/quality_scale.yaml new file mode 100644 index 00000000000..b3084f67da3 --- /dev/null +++ b/homeassistant/components/mqtt/quality_scale.yaml @@ -0,0 +1,122 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: done + comment: > + Entities are updated through dispatchers, and these are + cleaned up when the integration unloads. + entity-unique-id: + status: exempt + comment: > + This is user-configurable, but not required. + It is required, though, when a user wants to use device-based discovery. + has-entity-name: done + runtime-data: + status: exempt + comment: > + Runtime data is not used, as the mqtt entry data is only used to set up the + MQTT broker. This happens during integration setup, + and only one config entry is allowed. + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: + status: done + comment: | + Only supported for entities the user has assigned a unique_id. + action-exceptions: done + reauthentication-flow: done + parallel-updates: done + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: done + + # Gold + entity-translations: + status: exempt + comment: > + This is not possible because the integration generates entities + based on a user-supplied config or discovery. + entity-device-class: + status: done + comment: An entity device class can be configured by the user for each entity. + devices: + status: done + comment: > + A device context can be configured by the user for each entity.
+ It is not required though, except when using device-based discovery. + entity-category: + status: done + comment: An entity category can be configured by the user for each entity. + entity-disabled-by-default: + status: done + comment: > + The user can configure this through YAML or discover + entities that are disabled by default. + discovery: + status: done + comment: > + When the Mosquitto MQTT broker add-on is installed, + an MQTT config flow allows an automatic setup from its discovered settings. + stale-devices: + status: exempt + comment: > + This is only supported for entities that are configured through MQTT discovery. + Users must manually clean up stale entities that were set up through YAML. + diagnostics: done + exception-translations: done + icon-translations: + status: exempt + comment: > + This is not possible because the integration generates entities + based on a user-supplied config or discovery. + reconfiguration-flow: done + dynamic-devices: + status: done + comment: | + MQTT allows devices to be dynamically created and removed through MQTT discovery. + discovery-update-info: + status: done + comment: > + If the Mosquitto broker add-on is used to set up MQTT from discovery, + and the broker add-on is re-installed, + MQTT will automatically update from the new broker's credentials. + repair-issues: + status: done + comment: > + This integration uses repair-issues when entities are set up through YAML. + To avoid user panic, discovery deprecation issues are logged only. + It is the responsibility of the maintainer of the service or device to + correct the discovery messages. Extra options are allowed + in MQTT messages to avoid breaking issues. + docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: done + docs-examples: done + + # Platinum + async-dependency: done + inject-websession: + status: exempt + comment: | + This integration does not use web sessions.
+ strict-typing: done diff --git a/homeassistant/components/mqtt/scene.py b/homeassistant/components/mqtt/scene.py index dad596d9c4f..314bd716ee0 100644 --- a/homeassistant/components/mqtt/scene.py +++ b/homeassistant/components/mqtt/scene.py @@ -21,6 +21,8 @@ from .entity import MqttEntity, async_setup_entity_entry_helper from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Scene" DEFAULT_RETAIN = False diff --git a/homeassistant/components/mqtt/select.py b/homeassistant/components/mqtt/select.py index 37d3287988f..55d56ecd774 100644 --- a/homeassistant/components/mqtt/select.py +++ b/homeassistant/components/mqtt/select.py @@ -37,6 +37,8 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Select" MQTT_SELECT_ATTRIBUTES_BLOCKED = frozenset( diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index 17ea0ab1f5b..bacbf4d323e 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -47,6 +47,8 @@ from .util import check_state_too_long _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_EXPIRE_AFTER = "expire_after" CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template" CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision" diff --git a/homeassistant/components/mqtt/siren.py b/homeassistant/components/mqtt/siren.py index 1937b60fde0..22f64053d23 100644 --- a/homeassistant/components/mqtt/siren.py +++ b/homeassistant/components/mqtt/siren.py @@ -55,6 +55,8 @@ from .models import ( ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Siren" DEFAULT_PAYLOAD_ON = "ON" DEFAULT_PAYLOAD_OFF = "OFF" diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 8ab31e37857..7cf35783569 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -61,6 +61,7 @@ "client_id": "The unique ID to identify the Home Assistant MQTT API as MQTT client. It is recommended to leave this option blank.", "client_cert": "The client certificate to authenticate against your MQTT broker.", "client_key": "The private key file that belongs to your client certificate.", + "keepalive": "A value less than 90 seconds is advised.", "tls_insecure": "Option to ignore validation of your MQTT broker's certificate.", "protocol": "The MQTT protocol your broker operates at. 
For example 3.1.1.", "set_ca_cert": "Select **Auto** for automatic CA validation, or **Custom** and select **Next** to set a custom CA certificate, to allow validating your MQTT brokers certificate.", @@ -172,6 +173,7 @@ "client_id": "[%key:component::mqtt::config::step::broker::data_description::client_id%]", "client_cert": "[%key:component::mqtt::config::step::broker::data_description::client_cert%]", "client_key": "[%key:component::mqtt::config::step::broker::data_description::client_key%]", + "keepalive": "[%key:component::mqtt::config::step::broker::data_description::keepalive%]", "tls_insecure": "[%key:component::mqtt::config::step::broker::data_description::tls_insecure%]", "protocol": "[%key:component::mqtt::config::step::broker::data_description::protocol%]", "set_ca_cert": "[%key:component::mqtt::config::step::broker::data_description::set_ca_cert%]", diff --git a/homeassistant/components/mqtt/subscription.py b/homeassistant/components/mqtt/subscription.py index 3f3f67970f3..08d501ede12 100644 --- a/homeassistant/components/mqtt/subscription.py +++ b/homeassistant/components/mqtt/subscription.py @@ -86,7 +86,7 @@ class EntitySubscription: @callback def async_prepare_subscribe_topics( hass: HomeAssistant, - new_state: dict[str, EntitySubscription] | None, + sub_state: dict[str, EntitySubscription] | None, topics: dict[str, dict[str, Any]], ) -> dict[str, EntitySubscription]: """Prepare (re)subscribe to a set of MQTT topics. @@ -101,8 +101,9 @@ def async_prepare_subscribe_topics( sets of topics. Every call to async_subscribe_topics must always contain _all_ the topics the subscription state should manage. """ - current_subscriptions = new_state if new_state is not None else {} - new_state = {} + current_subscriptions: dict[str, EntitySubscription] + current_subscriptions = sub_state if sub_state is not None else {} + sub_state = {} for key, value in topics.items(): # Extract the new requested subscription requested = EntitySubscription( @@ -119,7 +120,7 @@ def async_prepare_subscribe_topics( # Get the current subscription state current = current_subscriptions.pop(key, None) requested.resubscribe_if_necessary(hass, current) - new_state[key] = requested + sub_state[key] = requested # Go through all remaining subscriptions and unsubscribe them for remaining in current_subscriptions.values(): @@ -132,7 +133,7 @@ def async_prepare_subscribe_topics( remaining.entity_id, ) - return new_state + return sub_state async def async_subscribe_topics( diff --git a/homeassistant/components/mqtt/switch.py b/homeassistant/components/mqtt/switch.py index a73c4fe53f8..c90174e8a01 100644 --- a/homeassistant/components/mqtt/switch.py +++ b/homeassistant/components/mqtt/switch.py @@ -43,6 +43,8 @@ from .models import ( ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Switch" DEFAULT_PAYLOAD_ON = "ON" DEFAULT_PAYLOAD_OFF = "OFF" diff --git a/homeassistant/components/mqtt/text.py b/homeassistant/components/mqtt/text.py index edfecfbc038..b4ed33a7730 100644 --- a/homeassistant/components/mqtt/text.py +++ b/homeassistant/components/mqtt/text.py @@ -40,6 +40,8 @@ from .util import check_state_too_long _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_MAX = "max" CONF_MIN = "min" CONF_PATTERN = "pattern" diff --git a/homeassistant/components/mqtt/update.py b/homeassistant/components/mqtt/update.py index 8878ff63127..99b4e5cb821 100644 --- a/homeassistant/components/mqtt/update.py +++ b/homeassistant/components/mqtt/update.py @@ -32,6 +32,8 @@ from 
.util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Update" CONF_DISPLAY_PRECISION = "display_precision" diff --git a/homeassistant/components/mqtt/vacuum.py b/homeassistant/components/mqtt/vacuum.py index 86b32aa281b..ac6dca3cbbc 100644 --- a/homeassistant/components/mqtt/vacuum.py +++ b/homeassistant/components/mqtt/vacuum.py @@ -39,6 +39,8 @@ from .models import ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic +PARALLEL_UPDATES = 0 + BATTERY = "battery_level" FAN_SPEED = "fan_speed" STATE = "state" diff --git a/homeassistant/components/mqtt/valve.py b/homeassistant/components/mqtt/valve.py index 00d3d7d79bd..50c5960f801 100644 --- a/homeassistant/components/mqtt/valve.py +++ b/homeassistant/components/mqtt/valve.py @@ -63,6 +63,8 @@ from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + CONF_REPORTS_POSITION = "reports_position" DEFAULT_NAME = "MQTT Valve" diff --git a/homeassistant/components/mqtt/water_heater.py b/homeassistant/components/mqtt/water_heater.py index b98d73e0bfe..4c1d3fa8a53 100644 --- a/homeassistant/components/mqtt/water_heater.py +++ b/homeassistant/components/mqtt/water_heater.py @@ -72,6 +72,8 @@ from .util import valid_publish_topic, valid_subscribe_topic _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 + DEFAULT_NAME = "MQTT Water Heater" MQTT_WATER_HEATER_ATTRIBUTES_BLOCKED = frozenset( diff --git a/homeassistant/components/mqtt_eventstream/manifest.json b/homeassistant/components/mqtt_eventstream/manifest.json index 978b11de994..95e97ebb5fa 100644 --- a/homeassistant/components/mqtt_eventstream/manifest.json +++ b/homeassistant/components/mqtt_eventstream/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/mqtt_eventstream", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/mqtt_json/manifest.json b/homeassistant/components/mqtt_json/manifest.json index 24ed99979cc..ccaa4996fea 100644 --- a/homeassistant/components/mqtt_json/manifest.json +++ b/homeassistant/components/mqtt_json/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/mqtt_json", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/mqtt_room/manifest.json b/homeassistant/components/mqtt_room/manifest.json index efc5e375cfd..858a1cbb98c 100644 --- a/homeassistant/components/mqtt_room/manifest.json +++ b/homeassistant/components/mqtt_room/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/mqtt_room", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/mqtt_statestream/manifest.json b/homeassistant/components/mqtt_statestream/manifest.json index 134cd80d383..c3c278a08bb 100644 --- a/homeassistant/components/mqtt_statestream/manifest.json +++ b/homeassistant/components/mqtt_statestream/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/mqtt_statestream", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } 
diff --git a/homeassistant/components/msteams/manifest.json b/homeassistant/components/msteams/manifest.json index e4b40140441..3ded77c2176 100644 --- a/homeassistant/components/msteams/manifest.json +++ b/homeassistant/components/msteams/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/msteams", "iot_class": "cloud_push", "loggers": ["pymsteams"], + "quality_scale": "legacy", "requirements": ["pymsteams==0.1.12"] } diff --git a/homeassistant/components/music_assistant/__init__.py b/homeassistant/components/music_assistant/__init__.py new file mode 100644 index 00000000000..22de510ebe3 --- /dev/null +++ b/homeassistant/components/music_assistant/__init__.py @@ -0,0 +1,165 @@ +"""Music Assistant (music-assistant.io) integration.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from music_assistant_client import MusicAssistantClient +from music_assistant_client.exceptions import CannotConnect, InvalidServerVersion +from music_assistant_models.enums import EventType +from music_assistant_models.errors import MusicAssistantError + +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.core import Event, HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) + +from .const import DOMAIN, LOGGER + +if TYPE_CHECKING: + from music_assistant_models.event import MassEvent + +PLATFORMS = [Platform.MEDIA_PLAYER] + +CONNECT_TIMEOUT = 10 +LISTEN_READY_TIMEOUT = 30 + +type MusicAssistantConfigEntry = ConfigEntry[MusicAssistantEntryData] + + +@dataclass +class MusicAssistantEntryData: + """Hold Mass data for the config entry.""" + + mass: MusicAssistantClient + listen_task: asyncio.Task + + +async def async_setup_entry( + hass: HomeAssistant, entry: MusicAssistantConfigEntry +) -> bool: + """Set up Music Assistant from a config entry.""" + http_session = async_get_clientsession(hass, verify_ssl=False) + mass_url = entry.data[CONF_URL] + mass = MusicAssistantClient(mass_url, http_session) + + try: + async with asyncio.timeout(CONNECT_TIMEOUT): + await mass.connect() + except (TimeoutError, CannotConnect) as err: + raise ConfigEntryNotReady( + f"Failed to connect to music assistant server {mass_url}" + ) from err + except InvalidServerVersion as err: + async_create_issue( + hass, + DOMAIN, + "invalid_server_version", + is_fixable=False, + severity=IssueSeverity.ERROR, + translation_key="invalid_server_version", + ) + raise ConfigEntryNotReady(f"Invalid server version: {err}") from err + except MusicAssistantError as err: + LOGGER.exception("Failed to connect to music assistant server", exc_info=err) + raise ConfigEntryNotReady( + f"Unknown error connecting to the Music Assistant server {mass_url}" + ) from err + + async_delete_issue(hass, DOMAIN, "invalid_server_version") + + async def on_hass_stop(event: Event) -> None: + """Handle incoming stop event from Home Assistant.""" + await mass.disconnect() + + entry.async_on_unload( + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, on_hass_stop) + ) + + # launch the music assistant client listen task in the background + # use the init_ready event to wait until 
initialization is done + init_ready = asyncio.Event() + listen_task = asyncio.create_task(_client_listen(hass, entry, mass, init_ready)) + + try: + async with asyncio.timeout(LISTEN_READY_TIMEOUT): + await init_ready.wait() + except TimeoutError as err: + listen_task.cancel() + raise ConfigEntryNotReady("Music Assistant client not ready") from err + + # store the listen task and mass client in the entry data + entry.runtime_data = MusicAssistantEntryData(mass, listen_task) + + # If the listen task is already failed, we need to raise ConfigEntryNotReady + if listen_task.done() and (listen_error := listen_task.exception()) is not None: + await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + try: + await mass.disconnect() + finally: + raise ConfigEntryNotReady(listen_error) from listen_error + + # initialize platforms + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + # register listener for removed players + async def handle_player_removed(event: MassEvent) -> None: + """Handle Mass Player Removed event.""" + if event.object_id is None: + return + dev_reg = dr.async_get(hass) + if hass_device := dev_reg.async_get_device({(DOMAIN, event.object_id)}): + dev_reg.async_update_device( + hass_device.id, remove_config_entry_id=entry.entry_id + ) + + entry.async_on_unload( + mass.subscribe(handle_player_removed, EventType.PLAYER_REMOVED) + ) + + return True + + +async def _client_listen( + hass: HomeAssistant, + entry: ConfigEntry, + mass: MusicAssistantClient, + init_ready: asyncio.Event, +) -> None: + """Listen with the client.""" + try: + await mass.start_listening(init_ready) + except MusicAssistantError as err: + if entry.state != ConfigEntryState.LOADED: + raise + LOGGER.error("Failed to listen: %s", err) + except Exception as err: # pylint: disable=broad-except + # We need to guard against unknown exceptions to not crash this task. + if entry.state != ConfigEntryState.LOADED: + raise + LOGGER.exception("Unexpected exception: %s", err) + + if not hass.is_stopping: + LOGGER.debug("Disconnected from server. 
Reloading integration") + hass.async_create_task(hass.config_entries.async_reload(entry.entry_id)) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + if unload_ok: + mass_entry_data: MusicAssistantEntryData = entry.runtime_data + mass_entry_data.listen_task.cancel() + await mass_entry_data.mass.disconnect() + + return unload_ok diff --git a/homeassistant/components/music_assistant/config_flow.py b/homeassistant/components/music_assistant/config_flow.py new file mode 100644 index 00000000000..fc50a2d654b --- /dev/null +++ b/homeassistant/components/music_assistant/config_flow.py @@ -0,0 +1,137 @@ +"""Config flow for MusicAssistant integration.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from music_assistant_client import MusicAssistantClient +from music_assistant_client.exceptions import ( + CannotConnect, + InvalidServerVersion, + MusicAssistantClientException, +) +from music_assistant_models.api import ServerInfoMessage +import voluptuous as vol + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers import aiohttp_client + +from .const import DOMAIN, LOGGER + +DEFAULT_URL = "http://mass.local:8095" +DEFAULT_TITLE = "Music Assistant" + + +def get_manual_schema(user_input: dict[str, Any]) -> vol.Schema: + """Return a schema for the manual step.""" + default_url = user_input.get(CONF_URL, DEFAULT_URL) + return vol.Schema( + { + vol.Required(CONF_URL, default=default_url): str, + } + ) + + +async def get_server_info(hass: HomeAssistant, url: str) -> ServerInfoMessage: + """Validate the user input allows us to connect.""" + async with MusicAssistantClient( + url, aiohttp_client.async_get_clientsession(hass) + ) as client: + if TYPE_CHECKING: + assert client.server_info is not None + return client.server_info + + +class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for MusicAssistant.""" + + VERSION = 1 + + def __init__(self) -> None: + """Set up flow instance.""" + self.server_info: ServerInfoMessage | None = None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a manual configuration.""" + errors: dict[str, str] = {} + if user_input is not None: + try: + self.server_info = await get_server_info( + self.hass, user_input[CONF_URL] + ) + await self.async_set_unique_id( + self.server_info.server_id, raise_on_progress=False + ) + self._abort_if_unique_id_configured( + updates={CONF_URL: self.server_info.base_url}, + reload_on_update=True, + ) + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidServerVersion: + errors["base"] = "invalid_server_version" + except MusicAssistantClientException: + LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title=DEFAULT_TITLE, + data={ + CONF_URL: self.server_info.base_url, + }, + ) + + return self.async_show_form( + step_id="user", data_schema=get_manual_schema(user_input), errors=errors + ) + + return self.async_show_form(step_id="user", data_schema=get_manual_schema({})) + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle a discovered 
Mass server. + + This flow is triggered by the Zeroconf component. It will check if the + host is already configured and delegate to the import step if not. + """ + # abort if discovery info is not what we expect + if "server_id" not in discovery_info.properties: + return self.async_abort(reason="missing_server_id") + # abort if we already have exactly this server_id + # reload the integration if the host got updated + self.server_info = ServerInfoMessage.from_dict(discovery_info.properties) + await self.async_set_unique_id(self.server_info.server_id) + self._abort_if_unique_id_configured( + updates={CONF_URL: self.server_info.base_url}, + reload_on_update=True, + ) + try: + await get_server_info(self.hass, self.server_info.base_url) + except CannotConnect: + return self.async_abort(reason="cannot_connect") + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle user-confirmation of discovered server.""" + if TYPE_CHECKING: + assert self.server_info is not None + if user_input is not None: + return self.async_create_entry( + title=DEFAULT_TITLE, + data={ + CONF_URL: self.server_info.base_url, + }, + ) + self._set_confirm_only() + return self.async_show_form( + step_id="discovery_confirm", + description_placeholders={"url": self.server_info.base_url}, + ) diff --git a/homeassistant/components/music_assistant/const.py b/homeassistant/components/music_assistant/const.py new file mode 100644 index 00000000000..6512f58b96c --- /dev/null +++ b/homeassistant/components/music_assistant/const.py @@ -0,0 +1,18 @@ +"""Constants for Music Assistant Component.""" + +import logging + +DOMAIN = "music_assistant" +DOMAIN_EVENT = f"{DOMAIN}_event" + +DEFAULT_NAME = "Music Assistant" + +ATTR_IS_GROUP = "is_group" +ATTR_GROUP_MEMBERS = "group_members" +ATTR_GROUP_PARENTS = "group_parents" + +ATTR_MASS_PLAYER_TYPE = "mass_player_type" +ATTR_ACTIVE_QUEUE = "active_queue" +ATTR_STREAM_TITLE = "stream_title" + +LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/music_assistant/entity.py b/homeassistant/components/music_assistant/entity.py new file mode 100644 index 00000000000..f5b6d92b0cf --- /dev/null +++ b/homeassistant/components/music_assistant/entity.py @@ -0,0 +1,86 @@ +"""Base entity model.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from music_assistant_models.enums import EventType +from music_assistant_models.event import MassEvent +from music_assistant_models.player import Player + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + + +class MusicAssistantEntity(Entity): + """Base Entity from Music Assistant Player.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__(self, mass: MusicAssistantClient, player_id: str) -> None: + """Initialize MediaPlayer entity.""" + self.mass = mass + self.player_id = player_id + provider = self.mass.get_provider(self.player.provider) + if TYPE_CHECKING: + assert provider is not None + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, player_id)}, + manufacturer=self.player.device_info.manufacturer or provider.name, + model=self.player.device_info.model or self.player.name, + name=self.player.display_name, + 
configuration_url=f"{mass.server_url}/#/settings/editplayer/{player_id}", + ) + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await self.async_on_update() + self.async_on_remove( + self.mass.subscribe( + self.__on_mass_update, EventType.PLAYER_UPDATED, self.player_id + ) + ) + self.async_on_remove( + self.mass.subscribe( + self.__on_mass_update, + EventType.QUEUE_UPDATED, + ) + ) + + @property + def player(self) -> Player: + """Return the Mass Player attached to this HA entity.""" + return self.mass.players[self.player_id] + + @property + def unique_id(self) -> str | None: + """Return unique id for entity.""" + _base = self.player_id + if hasattr(self, "entity_description"): + return f"{_base}_{self.entity_description.key}" + return _base + + @property + def available(self) -> bool: + """Return availability of entity.""" + return self.player.available and bool(self.mass.connection.connected) + + async def __on_mass_update(self, event: MassEvent) -> None: + """Call when we receive an event from MusicAssistant.""" + if event.event == EventType.QUEUE_UPDATED and event.object_id not in ( + self.player.active_source, + self.player.active_group, + self.player.player_id, + ): + return + await self.async_on_update() + self.async_write_ha_state() + + async def async_on_update(self) -> None: + """Handle player updates.""" diff --git a/homeassistant/components/music_assistant/icons.json b/homeassistant/components/music_assistant/icons.json new file mode 100644 index 00000000000..7533dbb6dad --- /dev/null +++ b/homeassistant/components/music_assistant/icons.json @@ -0,0 +1,7 @@ +{ + "services": { + "play_media": { "service": "mdi:play" }, + "play_announcement": { "service": "mdi:bullhorn" }, + "transfer_queue": { "service": "mdi:transfer" } + } +} diff --git a/homeassistant/components/music_assistant/manifest.json b/homeassistant/components/music_assistant/manifest.json new file mode 100644 index 00000000000..f5cdcf50673 --- /dev/null +++ b/homeassistant/components/music_assistant/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "music_assistant", + "name": "Music Assistant", + "after_dependencies": ["media_source", "media_player"], + "codeowners": ["@music-assistant"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/music_assistant", + "iot_class": "local_push", + "loggers": ["music_assistant"], + "requirements": ["music-assistant-client==1.0.8"], + "zeroconf": ["_mass._tcp.local."] +} diff --git a/homeassistant/components/music_assistant/media_browser.py b/homeassistant/components/music_assistant/media_browser.py new file mode 100644 index 00000000000..e65d6d4a975 --- /dev/null +++ b/homeassistant/components/music_assistant/media_browser.py @@ -0,0 +1,351 @@ +"""Media Source Implementation.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from music_assistant_models.media_items import MediaItemType + +from homeassistant.components import media_source +from homeassistant.components.media_player import ( + BrowseError, + BrowseMedia, + MediaClass, + MediaType, +) +from homeassistant.core import HomeAssistant + +from .const import DEFAULT_NAME, DOMAIN + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + +MEDIA_TYPE_RADIO = "radio" + +PLAYABLE_MEDIA_TYPES = [ + MediaType.PLAYLIST, + MediaType.ALBUM, + MediaType.ARTIST, + MEDIA_TYPE_RADIO, + MediaType.TRACK, +] + +LIBRARY_ARTISTS = "artists" +LIBRARY_ALBUMS = "albums" +LIBRARY_TRACKS = "tracks" +LIBRARY_PLAYLISTS = "playlists" 
+LIBRARY_RADIO = "radio" + + +LIBRARY_TITLE_MAP = { + LIBRARY_ARTISTS: "Artists", + LIBRARY_ALBUMS: "Albums", + LIBRARY_TRACKS: "Tracks", + LIBRARY_PLAYLISTS: "Playlists", + LIBRARY_RADIO: "Radio stations", +} + +LIBRARY_MEDIA_CLASS_MAP = { + LIBRARY_ARTISTS: MediaClass.ARTIST, + LIBRARY_ALBUMS: MediaClass.ALBUM, + LIBRARY_TRACKS: MediaClass.TRACK, + LIBRARY_PLAYLISTS: MediaClass.PLAYLIST, + LIBRARY_RADIO: MediaClass.MUSIC, # radio is not accepted by HA +} + +MEDIA_CONTENT_TYPE_FLAC = "audio/flac" +THUMB_SIZE = 200 + + +def media_source_filter(item: BrowseMedia) -> bool: + """Filter media sources.""" + return item.media_content_type.startswith("audio/") + + +async def async_browse_media( + hass: HomeAssistant, + mass: MusicAssistantClient, + media_content_id: str | None, + media_content_type: str | None, +) -> BrowseMedia: + """Browse media.""" + if media_content_id is None: + return await build_main_listing(hass) + + assert media_content_type is not None + + if media_source.is_media_source_id(media_content_id): + return await media_source.async_browse_media( + hass, media_content_id, content_filter=media_source_filter + ) + + if media_content_id == LIBRARY_ARTISTS: + return await build_artists_listing(mass) + if media_content_id == LIBRARY_ALBUMS: + return await build_albums_listing(mass) + if media_content_id == LIBRARY_TRACKS: + return await build_tracks_listing(mass) + if media_content_id == LIBRARY_PLAYLISTS: + return await build_playlists_listing(mass) + if media_content_id == LIBRARY_RADIO: + return await build_radio_listing(mass) + if "artist" in media_content_id: + return await build_artist_items_listing(mass, media_content_id) + if "album" in media_content_id: + return await build_album_items_listing(mass, media_content_id) + if "playlist" in media_content_id: + return await build_playlist_items_listing(mass, media_content_id) + + raise BrowseError(f"Media not found: {media_content_type} / {media_content_id}") + + +async def build_main_listing(hass: HomeAssistant) -> BrowseMedia: + """Build main browse listing.""" + children: list[BrowseMedia] = [] + for library, media_class in LIBRARY_MEDIA_CLASS_MAP.items(): + child_source = BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=library, + media_content_type=DOMAIN, + title=LIBRARY_TITLE_MAP[library], + children_media_class=media_class, + can_play=False, + can_expand=True, + ) + children.append(child_source) + + try: + item = await media_source.async_browse_media( + hass, None, content_filter=media_source_filter + ) + # If domain is None, it's overview of available sources + if item.domain is None and item.children is not None: + children.extend(item.children) + else: + children.append(item) + except media_source.BrowseError: + pass + + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id="", + media_content_type=DOMAIN, + title=DEFAULT_NAME, + can_play=False, + can_expand=True, + children=children, + ) + + +async def build_playlists_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Playlists browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_PLAYLISTS] + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_PLAYLISTS, + media_content_type=MediaType.PLAYLIST, + title=LIBRARY_TITLE_MAP[LIBRARY_PLAYLISTS], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=sorted( + [ + build_item(mass, item, can_expand=True) + # we only grab the first page here because the + # HA media browser does not support paging + 
for item in await mass.music.get_library_playlists(limit=500) + if item.available + ], + key=lambda x: x.title, + ), + ) + + +async def build_playlist_items_listing( + mass: MusicAssistantClient, identifier: str +) -> BrowseMedia: + """Build Playlist items browse listing.""" + playlist = await mass.music.get_item_by_uri(identifier) + + return BrowseMedia( + media_class=MediaClass.PLAYLIST, + media_content_id=playlist.uri, + media_content_type=MediaType.PLAYLIST, + title=playlist.name, + can_play=True, + can_expand=True, + children_media_class=MediaClass.TRACK, + children=[ + build_item(mass, item, can_expand=False) + # we only grab the first page here because the + # HA media browser does not support paging + for item in await mass.music.get_playlist_tracks( + playlist.item_id, playlist.provider + ) + if item.available + ], + ) + + +async def build_artists_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Albums browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_ARTISTS] + + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_ARTISTS, + media_content_type=MediaType.ARTIST, + title=LIBRARY_TITLE_MAP[LIBRARY_ARTISTS], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=sorted( + [ + build_item(mass, artist, can_expand=True) + # we only grab the first page here because the + # HA media browser does not support paging + for artist in await mass.music.get_library_artists(limit=500) + if artist.available + ], + key=lambda x: x.title, + ), + ) + + +async def build_artist_items_listing( + mass: MusicAssistantClient, identifier: str +) -> BrowseMedia: + """Build Artist items browse listing.""" + artist = await mass.music.get_item_by_uri(identifier) + albums = await mass.music.get_artist_albums(artist.item_id, artist.provider) + + return BrowseMedia( + media_class=MediaType.ARTIST, + media_content_id=artist.uri, + media_content_type=MediaType.ARTIST, + title=artist.name, + can_play=True, + can_expand=True, + children_media_class=MediaClass.ALBUM, + children=[ + build_item(mass, album, can_expand=True) + for album in albums + if album.available + ], + ) + + +async def build_albums_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Albums browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_ALBUMS] + + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_ALBUMS, + media_content_type=MediaType.ALBUM, + title=LIBRARY_TITLE_MAP[LIBRARY_ALBUMS], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=sorted( + [ + build_item(mass, album, can_expand=True) + # we only grab the first page here because the + # HA media browser does not support paging + for album in await mass.music.get_library_albums(limit=500) + if album.available + ], + key=lambda x: x.title, + ), + ) + + +async def build_album_items_listing( + mass: MusicAssistantClient, identifier: str +) -> BrowseMedia: + """Build Album items browse listing.""" + album = await mass.music.get_item_by_uri(identifier) + tracks = await mass.music.get_album_tracks(album.item_id, album.provider) + + return BrowseMedia( + media_class=MediaType.ALBUM, + media_content_id=album.uri, + media_content_type=MediaType.ALBUM, + title=album.name, + can_play=True, + can_expand=True, + children_media_class=MediaClass.TRACK, + children=[ + build_item(mass, track, False) for track in tracks if track.available + ], + ) + + +async def build_tracks_listing(mass: MusicAssistantClient) -> BrowseMedia: + 
"""Build Tracks browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_TRACKS] + + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_TRACKS, + media_content_type=MediaType.TRACK, + title=LIBRARY_TITLE_MAP[LIBRARY_TRACKS], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=sorted( + [ + build_item(mass, track, can_expand=False) + # we only grab the first page here because the + # HA media browser does not support paging + for track in await mass.music.get_library_tracks(limit=500) + if track.available + ], + key=lambda x: x.title, + ), + ) + + +async def build_radio_listing(mass: MusicAssistantClient) -> BrowseMedia: + """Build Radio browse listing.""" + media_class = LIBRARY_MEDIA_CLASS_MAP[LIBRARY_RADIO] + return BrowseMedia( + media_class=MediaClass.DIRECTORY, + media_content_id=LIBRARY_RADIO, + media_content_type=DOMAIN, + title=LIBRARY_TITLE_MAP[LIBRARY_RADIO], + can_play=False, + can_expand=True, + children_media_class=media_class, + children=[ + build_item(mass, track, can_expand=False, media_class=media_class) + # we only grab the first page here because the + # HA media browser does not support paging + for track in await mass.music.get_library_radios(limit=500) + if track.available + ], + ) + + +def build_item( + mass: MusicAssistantClient, + item: MediaItemType, + can_expand: bool = True, + media_class: Any = None, +) -> BrowseMedia: + """Return BrowseMedia for MediaItem.""" + if artists := getattr(item, "artists", None): + title = f"{artists[0].name} - {item.name}" + else: + title = item.name + img_url = mass.get_media_item_image_url(item) + + return BrowseMedia( + media_class=media_class or item.media_type.value, + media_content_id=item.uri, + media_content_type=MediaType.MUSIC, + title=title, + can_play=True, + can_expand=can_expand, + thumbnail=img_url, + ) diff --git a/homeassistant/components/music_assistant/media_player.py b/homeassistant/components/music_assistant/media_player.py new file mode 100644 index 00000000000..fdf3a0c0c48 --- /dev/null +++ b/homeassistant/components/music_assistant/media_player.py @@ -0,0 +1,641 @@ +"""MediaPlayer platform for Music Assistant integration.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Awaitable, Callable, Coroutine, Mapping +from contextlib import suppress +import functools +import os +from typing import TYPE_CHECKING, Any + +from music_assistant_models.enums import ( + EventType, + MediaType, + PlayerFeature, + PlayerState as MassPlayerState, + QueueOption, + RepeatMode as MassRepeatMode, +) +from music_assistant_models.errors import MediaNotFoundError, MusicAssistantError +from music_assistant_models.event import MassEvent +from music_assistant_models.media_items import ItemMapping, MediaItemType, Track +import voluptuous as vol + +from homeassistant.components import media_source +from homeassistant.components.media_player import ( + ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_EXTRA, + BrowseMedia, + MediaPlayerDeviceClass, + MediaPlayerEnqueue, + MediaPlayerEntity, + MediaPlayerEntityFeature, + MediaPlayerState, + MediaType as HAMediaType, + RepeatMode, + async_process_play_media_url, +) +from homeassistant.const import STATE_OFF +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + 
async_get_current_platform, +) +from homeassistant.util.dt import utc_from_timestamp + +from . import MusicAssistantConfigEntry +from .const import ATTR_ACTIVE_QUEUE, ATTR_MASS_PLAYER_TYPE, DOMAIN +from .entity import MusicAssistantEntity +from .media_browser import async_browse_media + +if TYPE_CHECKING: + from music_assistant_client import MusicAssistantClient + from music_assistant_models.player import Player + from music_assistant_models.player_queue import PlayerQueue + +SUPPORTED_FEATURES = ( + MediaPlayerEntityFeature.PAUSE + | MediaPlayerEntityFeature.VOLUME_SET + | MediaPlayerEntityFeature.STOP + | MediaPlayerEntityFeature.PREVIOUS_TRACK + | MediaPlayerEntityFeature.NEXT_TRACK + | MediaPlayerEntityFeature.SHUFFLE_SET + | MediaPlayerEntityFeature.REPEAT_SET + | MediaPlayerEntityFeature.TURN_ON + | MediaPlayerEntityFeature.TURN_OFF + | MediaPlayerEntityFeature.PLAY + | MediaPlayerEntityFeature.PLAY_MEDIA + | MediaPlayerEntityFeature.VOLUME_STEP + | MediaPlayerEntityFeature.CLEAR_PLAYLIST + | MediaPlayerEntityFeature.BROWSE_MEDIA + | MediaPlayerEntityFeature.MEDIA_ENQUEUE + | MediaPlayerEntityFeature.MEDIA_ANNOUNCE + | MediaPlayerEntityFeature.SEEK +) + +QUEUE_OPTION_MAP = { + # map from HA enqueue options to MA enqueue options + # which are the same but just in case + MediaPlayerEnqueue.ADD: QueueOption.ADD, + MediaPlayerEnqueue.NEXT: QueueOption.NEXT, + MediaPlayerEnqueue.PLAY: QueueOption.PLAY, + MediaPlayerEnqueue.REPLACE: QueueOption.REPLACE, +} + +SERVICE_PLAY_MEDIA_ADVANCED = "play_media" +SERVICE_PLAY_ANNOUNCEMENT = "play_announcement" +SERVICE_TRANSFER_QUEUE = "transfer_queue" +ATTR_RADIO_MODE = "radio_mode" +ATTR_MEDIA_ID = "media_id" +ATTR_MEDIA_TYPE = "media_type" +ATTR_ARTIST = "artist" +ATTR_ALBUM = "album" +ATTR_URL = "url" +ATTR_USE_PRE_ANNOUNCE = "use_pre_announce" +ATTR_ANNOUNCE_VOLUME = "announce_volume" +ATTR_SOURCE_PLAYER = "source_player" +ATTR_AUTO_PLAY = "auto_play" + + +def catch_musicassistant_error[_R, **P]( + func: Callable[..., Awaitable[_R]], +) -> Callable[..., Coroutine[Any, Any, _R | None]]: + """Check and log commands to players.""" + + @functools.wraps(func) + async def wrapper( + self: MusicAssistantPlayer, *args: P.args, **kwargs: P.kwargs + ) -> _R | None: + """Catch Music Assistant errors and convert to Home Assistant error.""" + try: + return await func(self, *args, **kwargs) + except MusicAssistantError as err: + error_msg = str(err) or err.__class__.__name__ + raise HomeAssistantError(error_msg) from err + + return wrapper + + +async def async_setup_entry( + hass: HomeAssistant, + entry: MusicAssistantConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Music Assistant MediaPlayer(s) from Config Entry.""" + mass = entry.runtime_data.mass + added_ids = set() + + async def handle_player_added(event: MassEvent) -> None: + """Handle Mass Player Added event.""" + if TYPE_CHECKING: + assert event.object_id is not None + if event.object_id in added_ids: + return + added_ids.add(event.object_id) + async_add_entities([MusicAssistantPlayer(mass, event.object_id)]) + + # register listener for new players + entry.async_on_unload(mass.subscribe(handle_player_added, EventType.PLAYER_ADDED)) + mass_players = [] + # add all current players + for player in mass.players: + added_ids.add(player.player_id) + mass_players.append(MusicAssistantPlayer(mass, player.player_id)) + + async_add_entities(mass_players) + + # add platform service for play_media with advanced options + platform = async_get_current_platform() + 
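# --- Reviewer sketch (not part of this PR) -----------------------------------
# The three entity services registered just below (play_media, play_announcement,
# transfer_queue) take the fields described in services.yaml/strings.json.
# A hedged example of invoking the advanced play_media service from Python code
# (e.g. a script or custom component); the entity_id and playlist URI are made up:
from homeassistant.core import HomeAssistant


async def play_some_jazz(hass: HomeAssistant) -> None:
    """Queue a playlist on a Music Assistant player via the custom service."""
    await hass.services.async_call(
        "music_assistant",
        "play_media",
        {
            "entity_id": "media_player.kitchen_speaker",  # hypothetical player entity
            "media_id": ["spotify://playlist/aabbccddeeff"],  # example URI from services.yaml
            "media_type": "playlist",
            "enqueue": "replace",
            "radio_mode": False,
        },
        blocking=True,
    )
# ------------------------------------------------------------------------------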
platform.async_register_entity_service( + SERVICE_PLAY_MEDIA_ADVANCED, + { + vol.Required(ATTR_MEDIA_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(ATTR_MEDIA_TYPE): vol.Coerce(MediaType), + vol.Optional(ATTR_MEDIA_ENQUEUE): vol.Coerce(QueueOption), + vol.Optional(ATTR_ARTIST): cv.string, + vol.Optional(ATTR_ALBUM): cv.string, + vol.Optional(ATTR_RADIO_MODE): vol.Coerce(bool), + }, + "_async_handle_play_media", + ) + platform.async_register_entity_service( + SERVICE_PLAY_ANNOUNCEMENT, + { + vol.Required(ATTR_URL): cv.string, + vol.Optional(ATTR_USE_PRE_ANNOUNCE): vol.Coerce(bool), + vol.Optional(ATTR_ANNOUNCE_VOLUME): vol.Coerce(int), + }, + "_async_handle_play_announcement", + ) + platform.async_register_entity_service( + SERVICE_TRANSFER_QUEUE, + { + vol.Optional(ATTR_SOURCE_PLAYER): cv.entity_id, + vol.Optional(ATTR_AUTO_PLAY): vol.Coerce(bool), + }, + "_async_handle_transfer_queue", + ) + + +class MusicAssistantPlayer(MusicAssistantEntity, MediaPlayerEntity): + """Representation of MediaPlayerEntity from Music Assistant Player.""" + + _attr_name = None + _attr_media_image_remotely_accessible = True + _attr_media_content_type = HAMediaType.MUSIC + + def __init__(self, mass: MusicAssistantClient, player_id: str) -> None: + """Initialize MediaPlayer entity.""" + super().__init__(mass, player_id) + self._attr_icon = self.player.icon.replace("mdi-", "mdi:") + self._attr_supported_features = SUPPORTED_FEATURES + if PlayerFeature.SET_MEMBERS in self.player.supported_features: + self._attr_supported_features |= MediaPlayerEntityFeature.GROUPING + if PlayerFeature.VOLUME_MUTE in self.player.supported_features: + self._attr_supported_features |= MediaPlayerEntityFeature.VOLUME_MUTE + self._attr_device_class = MediaPlayerDeviceClass.SPEAKER + self._prev_time: float = 0 + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + await super().async_added_to_hass() + + # we subscribe to player queue time update but we only + # accept a state change on big time jumps (e.g. 
seeking) + async def queue_time_updated(event: MassEvent) -> None: + if event.object_id != self.player.active_source: + return + if abs((self._prev_time or 0) - event.data) > 5: + await self.async_on_update() + self.async_write_ha_state() + self._prev_time = event.data + + self.async_on_remove( + self.mass.subscribe( + queue_time_updated, + EventType.QUEUE_TIME_UPDATED, + ) + ) + + @property + def active_queue(self) -> PlayerQueue | None: + """Return the active queue for this player (if any).""" + if not self.player.active_source: + return None + return self.mass.player_queues.get(self.player.active_source) + + @property + def extra_state_attributes(self) -> Mapping[str, Any]: + """Return additional state attributes.""" + return { + ATTR_MASS_PLAYER_TYPE: self.player.type.value, + ATTR_ACTIVE_QUEUE: ( + self.active_queue.queue_id if self.active_queue else None + ), + } + + async def async_on_update(self) -> None: + """Handle player updates.""" + if not self.available: + return + player = self.player + active_queue = self.active_queue + # update generic attributes + if player.powered and active_queue is not None: + self._attr_state = MediaPlayerState(active_queue.state.value) + if player.powered and player.state is not None: + self._attr_state = MediaPlayerState(player.state.value) + else: + self._attr_state = MediaPlayerState(STATE_OFF) + group_members_entity_ids: list[str] = [] + if player.group_childs: + # translate MA group_childs to HA group_members as entity id's + entity_registry = er.async_get(self.hass) + group_members_entity_ids = [ + entity_id + for child_id in player.group_childs + if ( + entity_id := entity_registry.async_get_entity_id( + self.platform.domain, DOMAIN, child_id + ) + ) + ] + # NOTE: we sort the group_members for now, + # until the MA API returns them sorted (group_childs is now a set) + self._attr_group_members = sorted(group_members_entity_ids) + self._attr_volume_level = ( + player.volume_level / 100 if player.volume_level is not None else None + ) + self._attr_is_volume_muted = player.volume_muted + self._update_media_attributes(player, active_queue) + self._update_media_image_url(player, active_queue) + + @catch_musicassistant_error + async def async_media_play(self) -> None: + """Send play command to device.""" + await self.mass.players.player_command_play(self.player_id) + + @catch_musicassistant_error + async def async_media_pause(self) -> None: + """Send pause command to device.""" + await self.mass.players.player_command_pause(self.player_id) + + @catch_musicassistant_error + async def async_media_stop(self) -> None: + """Send stop command to device.""" + await self.mass.players.player_command_stop(self.player_id) + + @catch_musicassistant_error + async def async_media_next_track(self) -> None: + """Send next track command to device.""" + await self.mass.players.player_command_next_track(self.player_id) + + @catch_musicassistant_error + async def async_media_previous_track(self) -> None: + """Send previous track command to device.""" + await self.mass.players.player_command_previous_track(self.player_id) + + @catch_musicassistant_error + async def async_media_seek(self, position: float) -> None: + """Send seek command.""" + position = int(position) + await self.mass.players.player_command_seek(self.player_id, position) + + @catch_musicassistant_error + async def async_mute_volume(self, mute: bool) -> None: + """Mute the volume.""" + await self.mass.players.player_command_volume_mute(self.player_id, mute) + + @catch_musicassistant_error + async def 
async_set_volume_level(self, volume: float) -> None: + """Send new volume_level to device.""" + volume = int(volume * 100) + await self.mass.players.player_command_volume_set(self.player_id, volume) + + @catch_musicassistant_error + async def async_volume_up(self) -> None: + """Send new volume_level to device.""" + await self.mass.players.player_command_volume_up(self.player_id) + + @catch_musicassistant_error + async def async_volume_down(self) -> None: + """Send new volume_level to device.""" + await self.mass.players.player_command_volume_down(self.player_id) + + @catch_musicassistant_error + async def async_turn_on(self) -> None: + """Turn on device.""" + await self.mass.players.player_command_power(self.player_id, True) + + @catch_musicassistant_error + async def async_turn_off(self) -> None: + """Turn off device.""" + await self.mass.players.player_command_power(self.player_id, False) + + @catch_musicassistant_error + async def async_set_shuffle(self, shuffle: bool) -> None: + """Set shuffle state.""" + if not self.active_queue: + return + await self.mass.player_queues.queue_command_shuffle( + self.active_queue.queue_id, shuffle + ) + + @catch_musicassistant_error + async def async_set_repeat(self, repeat: RepeatMode) -> None: + """Set repeat state.""" + if not self.active_queue: + return + await self.mass.player_queues.queue_command_repeat( + self.active_queue.queue_id, MassRepeatMode(repeat) + ) + + @catch_musicassistant_error + async def async_clear_playlist(self) -> None: + """Clear players playlist.""" + if TYPE_CHECKING: + assert self.player.active_source is not None + if queue := self.mass.player_queues.get(self.player.active_source): + await self.mass.player_queues.queue_command_clear(queue.queue_id) + + @catch_musicassistant_error + async def async_play_media( + self, + media_type: MediaType | str, + media_id: str, + enqueue: MediaPlayerEnqueue | None = None, + announce: bool | None = None, + **kwargs: Any, + ) -> None: + """Send the play_media command to the media player.""" + if media_source.is_media_source_id(media_id): + # Handle media_source + sourced_media = await media_source.async_resolve_media( + self.hass, media_id, self.entity_id + ) + media_id = sourced_media.url + media_id = async_process_play_media_url(self.hass, media_id) + + if announce: + await self._async_handle_play_announcement( + media_id, + use_pre_announce=kwargs[ATTR_MEDIA_EXTRA].get("use_pre_announce"), + announce_volume=kwargs[ATTR_MEDIA_EXTRA].get("announce_volume"), + ) + return + + # forward to our advanced play_media handler + await self._async_handle_play_media( + media_id=[media_id], + enqueue=enqueue, + media_type=media_type, + radio_mode=kwargs[ATTR_MEDIA_EXTRA].get(ATTR_RADIO_MODE), + ) + + @catch_musicassistant_error + async def async_join_players(self, group_members: list[str]) -> None: + """Join `group_members` as a player group with the current player.""" + player_ids: list[str] = [] + entity_registry = er.async_get(self.hass) + for child_entity_id in group_members: + # resolve HA entity_id to MA player_id + if not (entity_reg_entry := entity_registry.async_get(child_entity_id)): + raise HomeAssistantError(f"Entity {child_entity_id} not found") + # unique id is the MA player_id + player_ids.append(entity_reg_entry.unique_id) + await self.mass.players.player_command_group_many(self.player_id, player_ids) + + @catch_musicassistant_error + async def async_unjoin_player(self) -> None: + """Remove this player from any group.""" + await self.mass.players.player_command_ungroup(self.player_id) 
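# --- Reviewer sketch (not part of this PR) -----------------------------------
# Besides the custom services, the standard media_player.play_media service with
# announce=True is routed by async_play_media() above into
# _async_handle_play_announcement() below, and the "extra" dict carries the
# Music Assistant specific options. A hedged usage example; the entity_id and
# sound URL are made up:
from homeassistant.core import HomeAssistant


async def ring_doorbell(hass: HomeAssistant) -> None:
    """Play an announcement through the standard media_player service."""
    await hass.services.async_call(
        "media_player",
        "play_media",
        {
            "entity_id": "media_player.kitchen_speaker",  # hypothetical player entity
            "media_content_id": "http://example.com/doorbell.mp3",
            "media_content_type": "music",
            "announce": True,
            "extra": {"use_pre_announce": True, "announce_volume": 60},
        },
        blocking=True,
    )
# ------------------------------------------------------------------------------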
+ + @catch_musicassistant_error + async def _async_handle_play_media( + self, + media_id: list[str], + artist: str | None = None, + album: str | None = None, + enqueue: MediaPlayerEnqueue | QueueOption | None = None, + radio_mode: bool | None = None, + media_type: str | None = None, + ) -> None: + """Send the play_media command to the media player.""" + media_uris: list[str] = [] + item: MediaItemType | ItemMapping | None = None + # work out (all) uri(s) to play + for media_id_str in media_id: + # URL or URI string + if "://" in media_id_str: + media_uris.append(media_id_str) + continue + # try content id as library id + if media_type and media_id_str.isnumeric(): + with suppress(MediaNotFoundError): + item = await self.mass.music.get_item( + MediaType(media_type), media_id_str, "library" + ) + if isinstance(item, MediaItemType | ItemMapping) and item.uri: + media_uris.append(item.uri) + continue + # try local accessible filename + elif await asyncio.to_thread(os.path.isfile, media_id_str): + media_uris.append(media_id_str) + continue + # last resort: search for media item by name/search + if item := await self.mass.music.get_item_by_name( + name=media_id_str, + artist=artist, + album=album, + media_type=MediaType(media_type) if media_type else None, + ): + media_uris.append(item.uri) + + if not media_uris: + raise HomeAssistantError( + f"Could not resolve {media_id} to playable media item" + ) + + # determine active queue to send the play request to + if TYPE_CHECKING: + assert self.player.active_source is not None + if queue := self.mass.player_queues.get(self.player.active_source): + queue_id = queue.queue_id + else: + queue_id = self.player_id + + await self.mass.player_queues.play_media( + queue_id, + media=media_uris, + option=self._convert_queueoption_to_media_player_enqueue(enqueue), + radio_mode=radio_mode if radio_mode else False, + ) + + @catch_musicassistant_error + async def _async_handle_play_announcement( + self, + url: str, + use_pre_announce: bool | None = None, + announce_volume: int | None = None, + ) -> None: + """Send the play_announcement command to the media player.""" + await self.mass.players.play_announcement( + self.player_id, url, use_pre_announce, announce_volume + ) + + @catch_musicassistant_error + async def _async_handle_transfer_queue( + self, source_player: str | None = None, auto_play: bool | None = None + ) -> None: + """Transfer the current queue to another player.""" + if not source_player: + # no source player given; try to find a playing player(queue) + for queue in self.mass.player_queues: + if queue.state == MassPlayerState.PLAYING: + source_queue_id = queue.queue_id + break + else: + raise HomeAssistantError( + "Source player not specified and no playing player found." 
+ ) + else: + # resolve HA entity_id to MA player_id + entity_registry = er.async_get(self.hass) + if (entity := entity_registry.async_get(source_player)) is None: + raise HomeAssistantError("Source player not available.") + source_queue_id = entity.unique_id # unique_id is the MA player_id + target_queue_id = self.player_id + await self.mass.player_queues.transfer_queue( + source_queue_id, target_queue_id, auto_play + ) + + async def async_browse_media( + self, + media_content_type: MediaType | str | None = None, + media_content_id: str | None = None, + ) -> BrowseMedia: + """Implement the websocket media browsing helper.""" + return await async_browse_media( + self.hass, + self.mass, + media_content_id, + media_content_type, + ) + + def _update_media_image_url( + self, player: Player, queue: PlayerQueue | None + ) -> None: + """Update image URL for the active queue item.""" + if queue is None or queue.current_item is None: + self._attr_media_image_url = None + return + if image_url := self.mass.get_media_item_image_url(queue.current_item): + self._attr_media_image_remotely_accessible = ( + self.mass.server_url not in image_url + ) + self._attr_media_image_url = image_url + return + self._attr_media_image_url = None + + def _update_media_attributes( + self, player: Player, queue: PlayerQueue | None + ) -> None: + """Update media attributes for the active queue item.""" + # pylint: disable=too-many-statements + self._attr_media_artist = None + self._attr_media_album_artist = None + self._attr_media_album_name = None + self._attr_media_title = None + self._attr_media_content_id = None + self._attr_media_duration = None + self._attr_media_position = None + self._attr_media_position_updated_at = None + + if queue is None and player.current_media: + # player has some external source active + self._attr_media_content_id = player.current_media.uri + self._attr_app_id = player.active_source + self._attr_media_title = player.current_media.title + self._attr_media_artist = player.current_media.artist + self._attr_media_album_name = player.current_media.album + self._attr_media_duration = player.current_media.duration + # shuffle and repeat are not (yet) supported for external sources + self._attr_shuffle = None + self._attr_repeat = None + if TYPE_CHECKING: + assert player.elapsed_time is not None + self._attr_media_position = int(player.elapsed_time) + self._attr_media_position_updated_at = ( + utc_from_timestamp(player.elapsed_time_last_updated) + if player.elapsed_time_last_updated + else None + ) + if TYPE_CHECKING: + assert player.elapsed_time is not None + self._prev_time = player.elapsed_time + return + + if queue is None: + # player has no MA queue active + self._attr_source = player.active_source + self._attr_app_id = player.active_source + return + + # player has an MA queue active (either its own queue or some group queue) + self._attr_app_id = DOMAIN + self._attr_shuffle = queue.shuffle_enabled + self._attr_repeat = queue.repeat_mode.value + if not (cur_item := queue.current_item): + # queue is empty + return + + self._attr_media_content_id = queue.current_item.uri + self._attr_media_duration = queue.current_item.duration + self._attr_media_position = int(queue.elapsed_time) + self._attr_media_position_updated_at = utc_from_timestamp( + queue.elapsed_time_last_updated + ) + self._prev_time = queue.elapsed_time + + # handle stream title (radio station icy metadata) + if (stream_details := cur_item.streamdetails) and stream_details.stream_title: + self._attr_media_album_name = 
cur_item.name + if " - " in stream_details.stream_title: + stream_title_parts = stream_details.stream_title.split(" - ", 1) + self._attr_media_title = stream_title_parts[1] + self._attr_media_artist = stream_title_parts[0] + else: + self._attr_media_title = stream_details.stream_title + return + + if not (media_item := cur_item.media_item): + # queue is not playing a regular media item (edge case?!) + self._attr_media_title = cur_item.name + return + + # queue is playing regular media item + self._attr_media_title = media_item.name + # for tracks we can extract more info + if media_item.media_type == MediaType.TRACK: + if TYPE_CHECKING: + assert isinstance(media_item, Track) + self._attr_media_artist = media_item.artist_str + if media_item.version: + self._attr_media_title += f" ({media_item.version})" + if media_item.album: + self._attr_media_album_name = media_item.album.name + self._attr_media_album_artist = getattr( + media_item.album, "artist_str", None + ) + + def _convert_queueoption_to_media_player_enqueue( + self, queue_option: MediaPlayerEnqueue | QueueOption | None + ) -> QueueOption | None: + """Convert a QueueOption to a MediaPlayerEnqueue.""" + if isinstance(queue_option, MediaPlayerEnqueue): + queue_option = QUEUE_OPTION_MAP.get(queue_option) + return queue_option diff --git a/homeassistant/components/music_assistant/services.yaml b/homeassistant/components/music_assistant/services.yaml new file mode 100644 index 00000000000..00f895c4ef6 --- /dev/null +++ b/homeassistant/components/music_assistant/services.yaml @@ -0,0 +1,90 @@ +# Descriptions for Music Assistant custom services + +play_media: + target: + entity: + domain: media_player + integration: music_assistant + supported_features: + - media_player.MediaPlayerEntityFeature.PLAY_MEDIA + fields: + media_id: + required: true + example: "spotify://playlist/aabbccddeeff" + selector: + object: + media_type: + example: "playlist" + selector: + select: + translation_key: media_type + options: + - artist + - album + - playlist + - track + - radio + artist: + example: "Queen" + selector: + text: + album: + example: "News of the world" + selector: + text: + enqueue: + selector: + select: + options: + - "play" + - "replace" + - "next" + - "replace_next" + - "add" + translation_key: enqueue + radio_mode: + advanced: true + selector: + boolean: + +play_announcement: + target: + entity: + domain: media_player + integration: music_assistant + supported_features: + - media_player.MediaPlayerEntityFeature.PLAY_MEDIA + - media_player.MediaPlayerEntityFeature.MEDIA_ANNOUNCE + fields: + url: + required: true + example: "http://someremotesite.com/doorbell.mp3" + selector: + text: + use_pre_announce: + example: "true" + selector: + boolean: + announce_volume: + example: 75 + selector: + number: + min: 1 + max: 100 + step: 1 + +transfer_queue: + target: + entity: + domain: media_player + integration: music_assistant + fields: + source_player: + selector: + entity: + domain: media_player + integration: music_assistant + auto_play: + example: "true" + selector: + boolean: diff --git a/homeassistant/components/music_assistant/strings.json b/homeassistant/components/music_assistant/strings.json new file mode 100644 index 00000000000..cce7f9607c2 --- /dev/null +++ b/homeassistant/components/music_assistant/strings.json @@ -0,0 +1,124 @@ +{ + "config": { + "step": { + "init": { + "data": { + "url": "URL of the Music Assistant server" + } + }, + "manual": { + "title": "Manually add Music Assistant Server", + "description": "Enter the URL to your 
already running Music Assistant Server. If you do not have the Music Assistant Server running, you should install it first.", + "data": { + "url": "URL of the Music Assistant server" + } + }, + "discovery_confirm": { + "description": "Do you want to add the Music Assistant Server `{url}` to Home Assistant?", + "title": "Discovered Music Assistant Server" + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_server_version": "The Music Assistant server is not the correct version", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "Configuration flow is already in progress", + "reconfiguration_successful": "Successfully reconfigured the Music Assistant integration.", + "cannot_connect": "Failed to connect", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "issues": { + "invalid_server_version": { + "title": "The Music Assistant server is not the correct version", + "description": "Check if there are updates available for the Music Assistant Server and/or integration." + } + }, + "services": { + "play_media": { + "name": "Play media", + "description": "Play media on a Music Assistant player with more fine-grained control options.", + "fields": { + "media_id": { + "name": "Media ID(s)", + "description": "URI or name of the item you want to play. Specify a list if you want to play/enqueue multiple items." + }, + "media_type": { + "name": "Media type", + "description": "The type of the content to play. Such as artist, album, track or playlist. Will be auto-determined if omitted." + }, + "enqueue": { + "name": "Enqueue", + "description": "If the content should be played now or added to the queue." + }, + "artist": { + "name": "Artist name", + "description": "When specifying a track or album by name in the Media ID field, you can optionally restrict results by this artist name." + }, + "album": { + "name": "Album name", + "description": "When specifying a track by name in the Media ID field, you can optionally restrict results by this album name." + }, + "radio_mode": { + "name": "Enable radio mode", + "description": "Enable radio mode to auto-generate a playlist based on the selection." + } + } + }, + "play_announcement": { + "name": "Play announcement", + "description": "Play announcement on a Music Assistant player with more fine-grained control options.", + "fields": { + "url": { + "name": "URL", + "description": "URL to the notification sound." + }, + "use_pre_announce": { + "name": "Use pre-announce", + "description": "Use pre-announcement sound for the announcement. Omit to use the player default." + }, + "announce_volume": { + "name": "Announce volume", + "description": "Use a forced volume level for the announcement. Omit to use player default." + } + } + }, + "transfer_queue": { + "name": "Transfer queue", + "description": "Transfer the player's queue to another player.", + "fields": { + "source_player": { + "name": "Source media player", + "description": "The source media player which has the queue you want to transfer. When omitted, the first playing player will be used." + }, + "auto_play": { + "name": "Auto play", + "description": "Start playing the queue on the target player. Omit to use the default behavior." 
+ } + } + } + }, + "selector": { + "enqueue": { + "options": { + "play": "Play", + "next": "Play next", + "add": "Add to queue", + "replace": "Play now and clear queue", + "replace_next": "Play next and clear queue" + } + }, + "media_type": { + "options": { + "artist": "Artist", + "album": "Album", + "track": "Track", + "playlist": "Playlist", + "radio": "Radio" + } + } + } +} diff --git a/homeassistant/components/mvglive/manifest.json b/homeassistant/components/mvglive/manifest.json index f73d4612c2e..2c4e6a7e735 100644 --- a/homeassistant/components/mvglive/manifest.json +++ b/homeassistant/components/mvglive/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/mvglive", "iot_class": "cloud_polling", "loggers": ["MVGLive"], + "quality_scale": "legacy", "requirements": ["PyMVGLive==1.1.4"] } diff --git a/homeassistant/components/mycroft/manifest.json b/homeassistant/components/mycroft/manifest.json index 9b8731f0701..568bb8b1784 100644 --- a/homeassistant/components/mycroft/manifest.json +++ b/homeassistant/components/mycroft/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/mycroft", "iot_class": "local_push", "loggers": ["mycroftapi"], + "quality_scale": "legacy", "requirements": ["mycroftapi==2.0"] } diff --git a/homeassistant/components/mythicbeastsdns/manifest.json b/homeassistant/components/mythicbeastsdns/manifest.json index ed0b96575c9..a4381c312bc 100644 --- a/homeassistant/components/mythicbeastsdns/manifest.json +++ b/homeassistant/components/mythicbeastsdns/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/mythicbeastsdns", "iot_class": "cloud_push", "loggers": ["mbddns"], + "quality_scale": "legacy", "requirements": ["mbddns==0.1.2"] } diff --git a/homeassistant/components/nad/manifest.json b/homeassistant/components/nad/manifest.json index 2e2d44341af..64c7855af2d 100644 --- a/homeassistant/components/nad/manifest.json +++ b/homeassistant/components/nad/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nad", "iot_class": "local_polling", "loggers": ["nad_receiver"], + "quality_scale": "legacy", "requirements": ["nad-receiver==0.3.0"] } diff --git a/homeassistant/components/nam/manifest.json b/homeassistant/components/nam/manifest.json index 7b37d1f7ede..d837aa69b9d 100644 --- a/homeassistant/components/nam/manifest.json +++ b/homeassistant/components/nam/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["nettigo_air_monitor"], - "quality_scale": "platinum", "requirements": ["nettigo-air-monitor==3.3.0"], "zeroconf": [ { diff --git a/homeassistant/components/namecheapdns/manifest.json b/homeassistant/components/namecheapdns/manifest.json index fc9aa3cc033..f97f6568192 100644 --- a/homeassistant/components/namecheapdns/manifest.json +++ b/homeassistant/components/namecheapdns/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/namecheapdns", "iot_class": "cloud_push", + "quality_scale": "legacy", "requirements": ["defusedxml==0.7.1"] } diff --git a/homeassistant/components/nasweb/__init__.py b/homeassistant/components/nasweb/__init__.py new file mode 100644 index 00000000000..1992cc41c75 --- /dev/null +++ b/homeassistant/components/nasweb/__init__.py @@ -0,0 +1,125 @@ +"""The NASweb integration.""" + +from __future__ import annotations + +import logging + +from webio_api import WebioAPI +from 
webio_api.api_client import AuthError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.network import NoURLAvailableError +from homeassistant.util.hass_dict import HassKey + +from .const import DOMAIN, MANUFACTURER, SUPPORT_EMAIL +from .coordinator import NASwebCoordinator +from .nasweb_data import NASwebData + +PLATFORMS: list[Platform] = [Platform.SWITCH] + +NASWEB_CONFIG_URL = "https://{host}/page" + +_LOGGER = logging.getLogger(__name__) +type NASwebConfigEntry = ConfigEntry[NASwebCoordinator] +DATA_NASWEB: HassKey[NASwebData] = HassKey(DOMAIN) + + +async def async_setup_entry(hass: HomeAssistant, entry: NASwebConfigEntry) -> bool: + """Set up NASweb from a config entry.""" + + if DATA_NASWEB not in hass.data: + data = NASwebData() + data.initialize(hass) + hass.data[DATA_NASWEB] = data + nasweb_data = hass.data[DATA_NASWEB] + + webio_api = WebioAPI( + entry.data[CONF_HOST], entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD] + ) + try: + if not await webio_api.check_connection(): + raise ConfigEntryNotReady( + f"[{entry.data[CONF_HOST]}] Check connection failed" + ) + if not await webio_api.refresh_device_info(): + _LOGGER.error("[%s] Refresh device info failed", entry.data[CONF_HOST]) + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + webio_serial = webio_api.get_serial_number() + if webio_serial is None: + _LOGGER.error("[%s] Serial number not available", entry.data[CONF_HOST]) + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + if entry.unique_id != webio_serial: + _LOGGER.error( + "[%s] Serial number doesn't match config entry", entry.data[CONF_HOST] + ) + raise ConfigEntryError(translation_key="config_entry_error_serial_mismatch") + + coordinator = NASwebCoordinator( + hass, webio_api, name=f"NASweb[{webio_api.get_name()}]" + ) + entry.runtime_data = coordinator + nasweb_data.notify_coordinator.add_coordinator(webio_serial, entry.runtime_data) + + webhook_url = nasweb_data.get_webhook_url(hass) + if not await webio_api.status_subscription(webhook_url, True): + _LOGGER.error("Failed to subscribe for status updates from webio") + raise ConfigEntryError( + translation_key="config_entry_error_internal_error", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + if not await nasweb_data.notify_coordinator.check_connection(webio_serial): + _LOGGER.error("Did not receive status from device") + raise ConfigEntryError( + translation_key="config_entry_error_no_status_update", + translation_placeholders={"support_email": SUPPORT_EMAIL}, + ) + except TimeoutError as error: + raise ConfigEntryNotReady( + f"[{entry.data[CONF_HOST]}] Check connection reached timeout" + ) from error + except AuthError as error: + raise ConfigEntryError( + translation_key="config_entry_error_invalid_authentication" + ) from error + except NoURLAvailableError as error: + raise ConfigEntryError( + translation_key="config_entry_error_missing_internal_url" + ) from error + + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, webio_serial)}, 
+ manufacturer=MANUFACTURER, + name=webio_api.get_name(), + configuration_url=NASWEB_CONFIG_URL.format(host=entry.data[CONF_HOST]), + ) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NASwebConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + nasweb_data = hass.data[DATA_NASWEB] + coordinator = entry.runtime_data + serial = entry.unique_id + if serial is not None: + nasweb_data.notify_coordinator.remove_coordinator(serial) + if nasweb_data.can_be_deinitialized(): + nasweb_data.deinitialize(hass) + hass.data.pop(DATA_NASWEB) + webhook_url = nasweb_data.get_webhook_url(hass) + await coordinator.webio_api.status_subscription(webhook_url, False) + + return unload_ok diff --git a/homeassistant/components/nasweb/config_flow.py b/homeassistant/components/nasweb/config_flow.py new file mode 100644 index 00000000000..3a9ad3f7d49 --- /dev/null +++ b/homeassistant/components/nasweb/config_flow.py @@ -0,0 +1,137 @@ +"""Config flow for NASweb integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +import voluptuous as vol +from webio_api import WebioAPI +from webio_api.api_client import AuthError + +from homeassistant import config_entries +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_UNIQUE_ID, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import AbortFlow +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.network import NoURLAvailableError + +from .const import DOMAIN +from .coordinator import NASwebCoordinator +from .nasweb_data import NASwebData + +NASWEB_SCHEMA_IMG_URL = ( + "https://home-assistant.io/images/integrations/nasweb/nasweb_scheme.png" +) + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: + """Validate user-provided data.""" + webio_api = WebioAPI(data[CONF_HOST], data[CONF_USERNAME], data[CONF_PASSWORD]) + if not await webio_api.check_connection(): + raise CannotConnect + try: + await webio_api.refresh_device_info() + except AuthError as e: + raise InvalidAuth from e + + nasweb_data = NASwebData() + nasweb_data.initialize(hass) + try: + webio_serial = webio_api.get_serial_number() + if webio_serial is None: + raise MissingNASwebData("Device serial number is not available") + + coordinator = NASwebCoordinator(hass, webio_api) + webhook_url = nasweb_data.get_webhook_url(hass) + nasweb_data.notify_coordinator.add_coordinator(webio_serial, coordinator) + subscription = await webio_api.status_subscription(webhook_url, True) + if not subscription: + nasweb_data.notify_coordinator.remove_coordinator(webio_serial) + raise MissingNASwebData( + "Failed to subscribe for status updates from device" + ) + + result = await nasweb_data.notify_coordinator.check_connection(webio_serial) + nasweb_data.notify_coordinator.remove_coordinator(webio_serial) + if not result: + if subscription: + await webio_api.status_subscription(webhook_url, False) + raise MissingNASwebStatus("Did not receive status from device") + + name = webio_api.get_name() + finally: + nasweb_data.deinitialize(hass) + 
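# --- Reviewer sketch (not part of this PR) -----------------------------------
# NASwebData and its get_webhook_url() helper live in nasweb_data.py, which is
# outside this hunk. Based on the WEBHOOK_URL template in const.py
# ("{internal_url}/api/webhook/{webhook_id}"), one plausible way to compose the
# push-notification URL is sketched below; this is an assumption for
# illustration, not the actual implementation.
from homeassistant.components import webhook
from homeassistant.core import HomeAssistant
from homeassistant.helpers.network import get_url

WEBHOOK_URL = "{internal_url}/api/webhook/{webhook_id}"  # copied from const.py


def build_webhook_url(hass: HomeAssistant, webhook_id: str) -> str:
    """Build the URL the NASweb device should push status updates to."""
    # get_url raises NoURLAvailableError when no internal URL is configured,
    # which matches the missing_internal_url error handled in the config flow.
    internal_url = get_url(hass, allow_external=False)
    return WEBHOOK_URL.format(internal_url=internal_url, webhook_id=webhook_id)


# usage sketch: webhook_id = webhook.async_generate_id()
# ------------------------------------------------------------------------------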
return {"title": name, CONF_UNIQUE_ID: webio_serial} + + +class NASwebConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): + """Handle a config flow for NASweb.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + try: + info = await validate_input(self.hass, user_input) + await self.async_set_unique_id(info[CONF_UNIQUE_ID]) + self._abort_if_unique_id_configured() + except CannotConnect: + errors["base"] = "cannot_connect" + except InvalidAuth: + errors["base"] = "invalid_auth" + except NoURLAvailableError: + errors["base"] = "missing_internal_url" + except MissingNASwebData: + errors["base"] = "missing_nasweb_data" + except MissingNASwebStatus: + errors["base"] = "missing_status" + except AbortFlow: + raise + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry(title=info["title"], data=user_input) + + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, + description_placeholders={ + "nasweb_schema_img": '
', + }, + ) + + +class CannotConnect(HomeAssistantError): + """Error to indicate we cannot connect.""" + + +class InvalidAuth(HomeAssistantError): + """Error to indicate there is invalid auth.""" + + +class MissingNASwebData(HomeAssistantError): + """Error to indicate missing information from NASweb.""" + + +class MissingNASwebStatus(HomeAssistantError): + """Error to indicate there was no status received from NASweb.""" diff --git a/homeassistant/components/nasweb/const.py b/homeassistant/components/nasweb/const.py new file mode 100644 index 00000000000..ec750c90c8c --- /dev/null +++ b/homeassistant/components/nasweb/const.py @@ -0,0 +1,7 @@ +"""Constants for the NASweb integration.""" + +DOMAIN = "nasweb" +MANUFACTURER = "chomtech.pl" +STATUS_UPDATE_MAX_TIME_INTERVAL = 60 +SUPPORT_EMAIL = "support@chomtech.eu" +WEBHOOK_URL = "{internal_url}/api/webhook/{webhook_id}" diff --git a/homeassistant/components/nasweb/coordinator.py b/homeassistant/components/nasweb/coordinator.py new file mode 100644 index 00000000000..90dca0f3022 --- /dev/null +++ b/homeassistant/components/nasweb/coordinator.py @@ -0,0 +1,191 @@ +"""Message routing coordinators for handling NASweb push notifications.""" + +from __future__ import annotations + +import asyncio +from collections.abc import Callable +from datetime import datetime, timedelta +import logging +import time +from typing import Any + +from aiohttp.web import Request, Response +from webio_api import WebioAPI +from webio_api.const import KEY_DEVICE_SERIAL, KEY_OUTPUTS, KEY_TYPE, TYPE_STATUS_UPDATE + +from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback +from homeassistant.helpers import event +from homeassistant.helpers.update_coordinator import BaseDataUpdateCoordinatorProtocol + +from .const import STATUS_UPDATE_MAX_TIME_INTERVAL + +_LOGGER = logging.getLogger(__name__) + + +class NotificationCoordinator: + """Coordinator redirecting push notifications for this integration to appropriate NASwebCoordinator.""" + + def __init__(self) -> None: + """Initialize coordinator.""" + self._coordinators: dict[str, NASwebCoordinator] = {} + + def add_coordinator(self, serial: str, coordinator: NASwebCoordinator) -> None: + """Add NASwebCoordinator to possible notification targets.""" + self._coordinators[serial] = coordinator + _LOGGER.debug("Added NASwebCoordinator for NASweb[%s]", serial) + + def remove_coordinator(self, serial: str) -> None: + """Remove NASwebCoordinator from possible notification targets.""" + self._coordinators.pop(serial) + _LOGGER.debug("Removed NASwebCoordinator for NASweb[%s]", serial) + + def has_coordinators(self) -> bool: + """Check if there is any registered coordinator for push notifications.""" + return len(self._coordinators) > 0 + + async def check_connection(self, serial: str) -> bool: + """Wait for first status update to confirm connection with NASweb.""" + nasweb_coordinator = self._coordinators.get(serial) + if nasweb_coordinator is None: + _LOGGER.error("Cannot check connection. 
No device matches this serial number") + return False + for counter in range(10): + _LOGGER.debug("Checking connection with: %s (%s)", serial, counter) + if nasweb_coordinator.is_connection_confirmed(): + return True + await asyncio.sleep(1) + return False + + async def handle_webhook_request( + self, hass: HomeAssistant, webhook_id: str, request: Request + ) -> Response | None: + """Handle webhook request from Push API.""" + if not self.has_coordinators(): + return None + notification = await request.json() + serial = notification.get(KEY_DEVICE_SERIAL, None) + _LOGGER.debug("Received push: %s", notification) + if serial is None: + _LOGGER.warning("Received notification without a NASweb identifier") + return None + nasweb_coordinator = self._coordinators.get(serial) + if nasweb_coordinator is None: + _LOGGER.warning("Received notification for an unregistered NASweb device") + return None + await nasweb_coordinator.handle_push_notification(notification) + return Response(body='{"response": "ok"}', content_type="application/json") + + +class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol): + """Coordinator managing the status of a single NASweb device. + + Since status updates are managed through push notifications, this class schedules + periodic checks to ensure that devices are marked unavailable if updates + haven't been received for a prolonged period. + """ + + def __init__( + self, hass: HomeAssistant, webio_api: WebioAPI, name: str = "NASweb[default]" + ) -> None: + """Initialize NASweb coordinator.""" + self._hass = hass + self.name = name + self.webio_api = webio_api + self._last_update: float | None = None + job_name = f"NASwebCoordinator[{name}]" + self._job = HassJob(self._handle_max_update_interval, job_name) + self._unsub_last_update_check: CALLBACK_TYPE | None = None + self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {} + data: dict[str, Any] = {} + data[KEY_OUTPUTS] = self.webio_api.outputs + self.async_set_updated_data(data) + + def is_connection_confirmed(self) -> bool: + """Check whether the coordinator has received a status update from NASweb.""" + return self._last_update is not None + + @callback + def async_add_listener( + self, update_callback: CALLBACK_TYPE, context: Any = None + ) -> Callable[[], None]: + """Listen for data updates.""" + schedule_update_check = not self._listeners + + @callback + def remove_listener() -> None: + """Remove update listener.""" + self._listeners.pop(remove_listener) + if not self._listeners: + self._async_unsub_last_update_check() + + self._listeners[remove_listener] = (update_callback, context) + # This is the first listener, set up interval. + if schedule_update_check: + self._schedule_last_update_check() + return remove_listener + + @callback + def async_set_updated_data(self, data: dict[str, Any]) -> None: + """Update data and notify listeners.""" + self.data = data + self.last_update = self._hass.loop.time() + _LOGGER.debug("Updated %s data", self.name) + if self._listeners: + self._schedule_last_update_check() + self.async_update_listeners() + + @callback + def async_update_listeners(self) -> None: + """Update all registered listeners.""" + for update_callback, _ in list(self._listeners.values()): + update_callback() + + async def _handle_max_update_interval(self, now: datetime) -> None: + """Handle max update interval occurrence. + + This method is called when `STATUS_UPDATE_MAX_TIME_INTERVAL` has passed without + receiving a status update.
It only needs to trigger state update of entities + which then change their state accordingly. + """ + self._unsub_last_update_check = None + if self._listeners: + self.async_update_listeners() + + def _schedule_last_update_check(self) -> None: + """Schedule a task to trigger entities state update after `STATUS_UPDATE_MAX_TIME_INTERVAL`. + + This method schedules a task (`_handle_max_update_interval`) to be executed after + `STATUS_UPDATE_MAX_TIME_INTERVAL` seconds without status update, which enables entities + to change their state to unavailable. After each status update this task is rescheduled. + """ + self._async_unsub_last_update_check() + now = self._hass.loop.time() + next_check = ( + now + timedelta(seconds=STATUS_UPDATE_MAX_TIME_INTERVAL).total_seconds() + ) + self._unsub_last_update_check = event.async_call_at( + self._hass, + self._job, + next_check, + ) + + def _async_unsub_last_update_check(self) -> None: + """Cancel any scheduled update check call.""" + if self._unsub_last_update_check: + self._unsub_last_update_check() + self._unsub_last_update_check = None + + async def handle_push_notification(self, notification: dict) -> None: + """Handle incoming push notification from NASweb.""" + msg_type = notification.get(KEY_TYPE) + _LOGGER.debug("Received push notification: %s", msg_type) + + if msg_type == TYPE_STATUS_UPDATE: + await self.process_status_update(notification) + self._last_update = time.time() + + async def process_status_update(self, new_status: dict) -> None: + """Process status update from NASweb.""" + self.webio_api.update_device_status(new_status) + new_data = {KEY_OUTPUTS: self.webio_api.outputs} + self.async_set_updated_data(new_data) diff --git a/homeassistant/components/nasweb/manifest.json b/homeassistant/components/nasweb/manifest.json new file mode 100644 index 00000000000..69efdafbc82 --- /dev/null +++ b/homeassistant/components/nasweb/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "nasweb", + "name": "NASweb", + "codeowners": ["@nasWebio"], + "config_flow": true, + "dependencies": ["webhook"], + "documentation": "https://www.home-assistant.io/integrations/nasweb", + "integration_type": "hub", + "iot_class": "local_push", + "requirements": ["webio-api==0.1.8"] +} diff --git a/homeassistant/components/nasweb/nasweb_data.py b/homeassistant/components/nasweb/nasweb_data.py new file mode 100644 index 00000000000..4f6a37e6cc7 --- /dev/null +++ b/homeassistant/components/nasweb/nasweb_data.py @@ -0,0 +1,64 @@ +"""Dataclass storing integration data in hass.data[DOMAIN].""" + +from dataclasses import dataclass, field +import logging + +from aiohttp.hdrs import METH_POST + +from homeassistant.components.webhook import ( + async_generate_id, + async_register as webhook_register, + async_unregister as webhook_unregister, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.network import get_url + +from .const import DOMAIN, WEBHOOK_URL +from .coordinator import NotificationCoordinator + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class NASwebData: + """Class storing integration data.""" + + notify_coordinator: NotificationCoordinator = field( + default_factory=NotificationCoordinator + ) + webhook_id = "" + + def is_initialized(self) -> bool: + """Return True if instance was initialized and is ready for use.""" + return bool(self.webhook_id) + + def can_be_deinitialized(self) -> bool: + """Return whether this instance can be deinitialized.""" + return not self.notify_coordinator.has_coordinators() + + def initialize(self, hass: 
HomeAssistant) -> None: + """Initialize NASwebData instance.""" + if self.is_initialized(): + return + new_webhook_id = async_generate_id() + webhook_register( + hass, + DOMAIN, + "NASweb", + new_webhook_id, + self.notify_coordinator.handle_webhook_request, + allowed_methods=[METH_POST], + ) + self.webhook_id = new_webhook_id + _LOGGER.debug("Registered webhook: %s", self.webhook_id) + + def deinitialize(self, hass: HomeAssistant) -> None: + """Deinitialize NASwebData instance.""" + if not self.is_initialized(): + return + webhook_unregister(hass, self.webhook_id) + + def get_webhook_url(self, hass: HomeAssistant) -> str: + """Return webhook URL for Push API.""" + hass_url = get_url(hass, allow_external=False) + return WEBHOOK_URL.format(internal_url=hass_url, webhook_id=self.webhook_id) diff --git a/homeassistant/components/nasweb/strings.json b/homeassistant/components/nasweb/strings.json new file mode 100644 index 00000000000..b8af8cd54db --- /dev/null +++ b/homeassistant/components/nasweb/strings.json @@ -0,0 +1,50 @@ +{ + "config": { + "step": { + "user": { + "title": "Add NASweb device", + "description": "{nasweb_schema_img}NASweb combines the functions of a control panel and the ability to manage building automation. The device monitors the flow of information from sensors and programmable switches and stores settings, definitions and configured actions.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "missing_internal_url": "Make sure Home Assistant has a valid internal URL", + "missing_nasweb_data": "Something isn't right with the device's internal configuration. Try restarting the device and Home Assistant.", + "missing_status": "Did not receive any status updates within the expected time window. Make sure the Home Assistant internal URL is reachable from the NASweb device.", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "exceptions": { + "config_entry_error_invalid_authentication": { + "message": "Invalid username/password. Most likely the password was changed or the user was removed. Delete this entry and create a new one with the correct username/password." + }, + "config_entry_error_internal_error": { + "message": "Something isn't right with the device's internal configuration. Try restarting the device and Home Assistant. If the issue persists, contact support at {support_email}" + }, + "config_entry_error_no_status_update": { + "message": "Did not receive any status updates within the expected time window. Make sure the Home Assistant internal URL is reachable from the NASweb device. If the issue persists, contact support at {support_email}" + }, + "config_entry_error_missing_internal_url": { + "message": "[%key:component::nasweb::config::error::missing_internal_url%]" + }, + "serial_mismatch": { + "message": "Connected to a different NASweb device (serial number mismatch)."
+ } + }, + "entity": { + "switch": { + "switch_output": { + "name": "Relay Switch {index}" + } + } + } +} diff --git a/homeassistant/components/nasweb/switch.py b/homeassistant/components/nasweb/switch.py new file mode 100644 index 00000000000..00e5a21da18 --- /dev/null +++ b/homeassistant/components/nasweb/switch.py @@ -0,0 +1,133 @@ +"""Platform for NASweb output.""" + +from __future__ import annotations + +import logging +import time +from typing import Any + +from webio_api import Output as NASwebOutput + +from homeassistant.components.switch import DOMAIN as DOMAIN_SWITCH, SwitchEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +import homeassistant.helpers.entity_registry as er +from homeassistant.helpers.typing import DiscoveryInfoType +from homeassistant.helpers.update_coordinator import ( + BaseCoordinatorEntity, + BaseDataUpdateCoordinatorProtocol, +) + +from . import NASwebConfigEntry +from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL +from .coordinator import NASwebCoordinator + +OUTPUT_TRANSLATION_KEY = "switch_output" + +_LOGGER = logging.getLogger(__name__) + + +def _get_output(coordinator: NASwebCoordinator, index: int) -> NASwebOutput | None: + for out in coordinator.webio_api.outputs: + if out.index == index: + return out + return None + + +async def async_setup_entry( + hass: HomeAssistant, + config: NASwebConfigEntry, + async_add_entities: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Set up switch platform.""" + coordinator = config.runtime_data + current_outputs: set[int] = set() + + @callback + def _check_entities() -> None: + received_outputs = {out.index for out in coordinator.webio_api.outputs} + added = {i for i in received_outputs if i not in current_outputs} + removed = {i for i in current_outputs if i not in received_outputs} + entities_to_add: list[RelaySwitch] = [] + for index in added: + webio_output = _get_output(coordinator, index) + if not isinstance(webio_output, NASwebOutput): + _LOGGER.error("Cannot create RelaySwitch entity without NASwebOutput") + continue + new_output = RelaySwitch(coordinator, webio_output) + entities_to_add.append(new_output) + current_outputs.add(index) + async_add_entities(entities_to_add) + entity_registry = er.async_get(hass) + for index in removed: + unique_id = f"{DOMAIN}.{config.unique_id}.relay_switch.{index}" + if entity_id := entity_registry.async_get_entity_id( + DOMAIN_SWITCH, DOMAIN, unique_id + ): + entity_registry.async_remove(entity_id) + current_outputs.remove(index) + else: + _LOGGER.warning("Failed to remove old output: no entity_id") + + coordinator.async_add_listener(_check_entities) + _check_entities() + + +class RelaySwitch(SwitchEntity, BaseCoordinatorEntity): + """Entity representing NASweb Output.""" + + def __init__( + self, + coordinator: BaseDataUpdateCoordinatorProtocol, + nasweb_output: NASwebOutput, + ) -> None: + """Initialize RelaySwitch.""" + super().__init__(coordinator) + self._output = nasweb_output + self._attr_icon = "mdi:export" + self._attr_has_entity_name = True + self._attr_translation_key = OUTPUT_TRANSLATION_KEY + self._attr_translation_placeholders = {"index": f"{nasweb_output.index:2d}"} + self._attr_unique_id = ( + f"{DOMAIN}.{self._output.webio_serial}.relay_switch.{self._output.index}" + ) + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._output.webio_serial)}, + ) + 
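The `_check_entities` callback in `async_setup_entry` above reconciles entities against the outputs currently reported by the device: indices that newly appear get a `RelaySwitch` entity, and indices that disappear have their registry entry removed via the composed unique ID. Below is a minimal, framework-free sketch of that set-difference reconciliation; `FakeOutput` and the in-memory `tracked` set are illustrative stand-ins, not Home Assistant or webio_api APIs.

from dataclasses import dataclass


@dataclass
class FakeOutput:
    """Stand-in for webio_api's Output; only the index matters for reconciliation."""

    index: int


def reconcile(tracked: set[int], reported: list[FakeOutput]) -> tuple[set[int], set[int]]:
    """Return (added, removed) output indices and update the tracked set in place."""
    received = {out.index for out in reported}
    added = received - tracked
    removed = tracked - received
    tracked |= added
    tracked -= removed
    return added, removed


tracked: set[int] = set()
# First push: outputs 1 and 2 are reported, so both are added.
print(reconcile(tracked, [FakeOutput(1), FakeOutput(2)]))  # ({1, 2}, set())
# Next push: output 2 disappeared and output 3 appeared.
print(reconcile(tracked, [FakeOutput(1), FakeOutput(3)]))  # ({3}, {2})

In the integration itself, "add" corresponds to `async_add_entities(...)` and "remove" to `entity_registry.async_remove(...)`, driven on every coordinator update via `coordinator.async_add_listener(_check_entities)`.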
+ async def async_added_to_hass(self) -> None: + """When entity is added to hass.""" + await super().async_added_to_hass() + self._handle_coordinator_update() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._attr_is_on = self._output.state + if ( + self.coordinator.last_update is None + or time.time() - self._output.last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL + ): + self._attr_available = False + else: + self._attr_available = ( + self._output.available if self._output.available is not None else False + ) + self.async_write_ha_state() + + async def async_update(self) -> None: + """Update the entity. + + Only used by the generic entity update service. + Scheduling updates is not necessary, the coordinator takes care of updates via push notifications. + """ + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn On RelaySwitch.""" + await self._output.turn_on() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn Off RelaySwitch.""" + await self._output.turn_off() diff --git a/homeassistant/components/neato/manifest.json b/homeassistant/components/neato/manifest.json index d6eff486b05..e4b471cb5ac 100644 --- a/homeassistant/components/neato/manifest.json +++ b/homeassistant/components/neato/manifest.json @@ -1,7 +1,7 @@ { "domain": "neato", "name": "Neato Botvac", - "codeowners": ["@Santobert"], + "codeowners": [], "config_flow": true, "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/neato", diff --git a/homeassistant/components/nederlandse_spoorwegen/manifest.json b/homeassistant/components/nederlandse_spoorwegen/manifest.json index aa8d0f4adf4..8a8a20c453b 100644 --- a/homeassistant/components/nederlandse_spoorwegen/manifest.json +++ b/homeassistant/components/nederlandse_spoorwegen/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@YarmoM"], "documentation": "https://www.home-assistant.io/integrations/nederlandse_spoorwegen", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["nsapi==3.0.5"] } diff --git a/homeassistant/components/ness_alarm/manifest.json b/homeassistant/components/ness_alarm/manifest.json index c3bb4239048..3d97e3290e0 100644 --- a/homeassistant/components/ness_alarm/manifest.json +++ b/homeassistant/components/ness_alarm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ness_alarm", "iot_class": "local_push", "loggers": ["nessclient"], + "quality_scale": "legacy", "requirements": ["nessclient==1.1.2"] } diff --git a/homeassistant/components/nest/__init__.py b/homeassistant/components/nest/__init__.py index 6b094c68cb0..e89969cbe16 100644 --- a/homeassistant/components/nest/__init__.py +++ b/homeassistant/components/nest/__init__.py @@ -49,7 +49,6 @@ from homeassistant.helpers import ( config_validation as cv, device_registry as dr, entity_registry as er, - issue_registry as ir, ) from homeassistant.helpers.entity_registry import async_entries_for_device from homeassistant.helpers.typing import ConfigType @@ -119,20 +118,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.http.register_view(NestEventMediaView(hass)) hass.http.register_view(NestEventMediaThumbnailView(hass)) - if DOMAIN in config and CONF_PROJECT_ID not in config[DOMAIN]: - ir.async_create_issue( - hass, - DOMAIN, - "legacy_nest_deprecated", - breaks_in_ha_version="2023.8.0", - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - 
translation_key="legacy_nest_removed", - translation_placeholders={ - "documentation_url": "https://www.home-assistant.io/integrations/nest/", - }, - ) - return False return True diff --git a/homeassistant/components/nest/camera.py b/homeassistant/components/nest/camera.py index 0a46d67a3ad..b7e0f210741 100644 --- a/homeassistant/components/nest/camera.py +++ b/homeassistant/components/nest/camera.py @@ -19,12 +19,11 @@ from google_nest_sdm.camera_traits import ( from google_nest_sdm.device import Device from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.exceptions import ApiException -from webrtc_models import RTCIceCandidate +from webrtc_models import RTCIceCandidateInit from homeassistant.components.camera import ( Camera, CameraEntityFeature, - StreamType, WebRTCAnswer, WebRTCClientConfiguration, WebRTCSendMessage, @@ -254,11 +253,6 @@ class NestWebRTCEntity(NestCameraBaseEntity): self._webrtc_sessions: dict[str, WebRtcStream] = {} self._refresh_unsub: dict[str, Callable[[], None]] = {} - @property - def frontend_stream_type(self) -> StreamType | None: - """Return the type of stream supported by this camera.""" - return StreamType.WEB_RTC - async def _async_refresh_stream(self, session_id: str) -> datetime.datetime | None: """Refresh stream to extend expiration time.""" if not (webrtc_stream := self._webrtc_sessions.get(session_id)): @@ -304,7 +298,7 @@ class NestWebRTCEntity(NestCameraBaseEntity): self._refresh_unsub[session_id] = refresh.unsub async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Ignore WebRTC candidates for Nest cloud based cameras.""" return diff --git a/homeassistant/components/nest/manifest.json b/homeassistant/components/nest/manifest.json index 44eaeeaf62d..07c34c51568 100644 --- a/homeassistant/components/nest/manifest.json +++ b/homeassistant/components/nest/manifest.json @@ -19,6 +19,5 @@ "documentation": "https://www.home-assistant.io/integrations/nest", "iot_class": "cloud_push", "loggers": ["google_nest_sdm"], - "quality_scale": "platinum", "requirements": ["google-nest-sdm==6.1.5"] } diff --git a/homeassistant/components/nest/strings.json b/homeassistant/components/nest/strings.json index f6a64dd66e6..a31a2856544 100644 --- a/homeassistant/components/nest/strings.json +++ b/homeassistant/components/nest/strings.json @@ -84,12 +84,6 @@ "doorbell_chime": "Doorbell pressed" } }, - "issues": { - "legacy_nest_removed": { - "title": "Legacy Works With Nest has been removed", - "description": "Legacy Works With Nest has been removed from Home Assistant, and the API shuts down as of September 2023.\n\nYou must take action to use the SDM API. Remove all `nest` configuration from `configuration.yaml` and restart Home Assistant, then see the Nest [integration instructions]({documentation_url}) for set up instructions and supported devices." 
- } - }, "entity": { "event": { "chime": { diff --git a/homeassistant/components/netatmo/config_flow.py b/homeassistant/components/netatmo/config_flow.py index 0da4d6f16b7..d853694ffea 100644 --- a/homeassistant/components/netatmo/config_flow.py +++ b/homeassistant/components/netatmo/config_flow.py @@ -101,7 +101,6 @@ class NetatmoOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Netatmo options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) self.options.setdefault(CONF_WEATHER_AREAS, {}) diff --git a/homeassistant/components/netdata/manifest.json b/homeassistant/components/netdata/manifest.json index 99410ce033d..8901a271de2 100644 --- a/homeassistant/components/netdata/manifest.json +++ b/homeassistant/components/netdata/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/netdata", "iot_class": "local_polling", "loggers": ["netdata"], - "requirements": ["netdata==1.1.0"] + "quality_scale": "legacy", + "requirements": ["netdata==1.3.0"] } diff --git a/homeassistant/components/netdata/sensor.py b/homeassistant/components/netdata/sensor.py index b77a4392ef4..f33349c56ce 100644 --- a/homeassistant/components/netdata/sensor.py +++ b/homeassistant/components/netdata/sensor.py @@ -24,6 +24,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import PlatformNotReady import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType _LOGGER = logging.getLogger(__name__) @@ -70,7 +71,9 @@ async def async_setup_platform( port = config[CONF_PORT] resources = config[CONF_RESOURCES] - netdata = NetdataData(Netdata(host, port=port, timeout=20.0)) + netdata = NetdataData( + Netdata(host, port=port, timeout=20.0, httpx_client=get_async_client(hass)) + ) await netdata.async_update() if netdata.api.metrics is None: diff --git a/homeassistant/components/netgear/config_flow.py b/homeassistant/components/netgear/config_flow.py index fba934af38d..965e3618645 100644 --- a/homeassistant/components/netgear/config_flow.py +++ b/homeassistant/components/netgear/config_flow.py @@ -63,10 +63,6 @@ def _ordered_shared_schema(schema_input): class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: @@ -109,7 +105,7 @@ class NetgearFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def _show_setup_form( self, diff --git a/homeassistant/components/netio/manifest.json b/homeassistant/components/netio/manifest.json index 683df22e1ff..f2914b17dec 100644 --- a/homeassistant/components/netio/manifest.json +++ b/homeassistant/components/netio/manifest.json @@ -5,5 +5,6 @@ "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/netio", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pynetio==0.1.9.1"] } diff --git a/homeassistant/components/neurio_energy/manifest.json b/homeassistant/components/neurio_energy/manifest.json index 467825da012..3a524ac4b5f 100644 --- 
a/homeassistant/components/neurio_energy/manifest.json +++ b/homeassistant/components/neurio_energy/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/neurio_energy", "iot_class": "cloud_polling", "loggers": ["neurio"], + "quality_scale": "legacy", "requirements": ["neurio==0.3.1"] } diff --git a/homeassistant/components/nexia/strings.json b/homeassistant/components/nexia/strings.json index aec145b8806..d88ce0b898d 100644 --- a/homeassistant/components/nexia/strings.json +++ b/homeassistant/components/nexia/strings.json @@ -64,7 +64,7 @@ "services": { "set_aircleaner_mode": { "name": "Set air cleaner mode", - "description": "The air cleaner mode.", + "description": "Sets the air cleaner mode.", "fields": { "aircleaner_mode": { "name": "Air cleaner mode", @@ -74,17 +74,17 @@ }, "set_humidify_setpoint": { "name": "Set humidify set point", - "description": "The humidification set point.", + "description": "Sets the target humidity.", "fields": { "humidity": { - "name": "Humidify", + "name": "Humidity", "description": "The humidification setpoint." } } }, "set_hvac_run_mode": { "name": "Set hvac run mode", - "description": "The HVAC run mode.", + "description": "Sets the HVAC operation mode.", "fields": { "run_mode": { "name": "Run mode", diff --git a/homeassistant/components/nextdns/manifest.json b/homeassistant/components/nextdns/manifest.json index f3ed62a2f0c..d10a1728a94 100644 --- a/homeassistant/components/nextdns/manifest.json +++ b/homeassistant/components/nextdns/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["nextdns"], - "quality_scale": "platinum", - "requirements": ["nextdns==3.3.0"] + "requirements": ["nextdns==4.0.0"] } diff --git a/homeassistant/components/nextdns/sensor.py b/homeassistant/components/nextdns/sensor.py index b390ac93e06..ef2b5140fa1 100644 --- a/homeassistant/components/nextdns/sensor.py +++ b/homeassistant/components/nextdns/sensor.py @@ -54,7 +54,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( coordinator_type=ATTR_STATUS, entity_category=EntityCategory.DIAGNOSTIC, translation_key="all_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.all_queries, ), @@ -63,7 +62,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( coordinator_type=ATTR_STATUS, entity_category=EntityCategory.DIAGNOSTIC, translation_key="blocked_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.blocked_queries, ), @@ -72,7 +70,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( coordinator_type=ATTR_STATUS, entity_category=EntityCategory.DIAGNOSTIC, translation_key="relayed_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.relayed_queries, ), @@ -91,7 +88,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="doh_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.doh_queries, ), @@ -101,7 +97,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] 
= ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="doh3_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.doh3_queries, ), @@ -111,7 +106,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="dot_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.dot_queries, ), @@ -121,7 +115,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="doq_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.doq_queries, ), @@ -131,7 +124,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="tcp_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.tcp_queries, ), @@ -141,7 +133,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="udp_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.udp_queries, ), @@ -211,7 +202,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="encrypted_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.encrypted_queries, ), @@ -221,7 +211,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="unencrypted_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.unencrypted_queries, ), @@ -241,7 +230,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="ipv4_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.ipv4_queries, ), @@ -251,7 +239,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="ipv6_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.ipv6_queries, ), @@ -271,7 +258,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="validated_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.validated_queries, ), @@ -281,7 +267,6 @@ SENSORS: tuple[NextDnsSensorEntityDescription, ...] 
= ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, translation_key="not_validated_queries", - native_unit_of_measurement="queries", state_class=SensorStateClass.TOTAL, value=lambda data: data.not_validated_queries, ), diff --git a/homeassistant/components/nextdns/strings.json b/homeassistant/components/nextdns/strings.json index 9dbc8061849..f2a5fa2816d 100644 --- a/homeassistant/components/nextdns/strings.json +++ b/homeassistant/components/nextdns/strings.json @@ -48,76 +48,91 @@ }, "sensor": { "all_queries": { - "name": "DNS queries" + "name": "DNS queries", + "unit_of_measurement": "queries" }, "blocked_queries": { - "name": "DNS queries blocked" + "name": "DNS queries blocked", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "blocked_queries_ratio": { "name": "DNS queries blocked ratio" }, "doh3_queries": { - "name": "DNS-over-HTTP/3 queries" + "name": "DNS-over-HTTP/3 queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "doh3_queries_ratio": { "name": "DNS-over-HTTP/3 queries ratio" }, "doh_queries": { - "name": "DNS-over-HTTPS queries" + "name": "DNS-over-HTTPS queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "doh_queries_ratio": { "name": "DNS-over-HTTPS queries ratio" }, "doq_queries": { - "name": "DNS-over-QUIC queries" + "name": "DNS-over-QUIC queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "doq_queries_ratio": { "name": "DNS-over-QUIC queries ratio" }, "dot_queries": { - "name": "DNS-over-TLS queries" + "name": "DNS-over-TLS queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "dot_queries_ratio": { "name": "DNS-over-TLS queries ratio" }, "encrypted_queries": { - "name": "Encrypted queries" + "name": "Encrypted queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "encrypted_queries_ratio": { "name": "Encrypted queries ratio" }, "ipv4_queries": { - "name": "IPv4 queries" + "name": "IPv4 queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "ipv6_queries": { - "name": "IPv6 queries" + "name": "IPv6 queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "ipv6_queries_ratio": { "name": "IPv6 queries ratio" }, "not_validated_queries": { - "name": "DNSSEC not validated queries" + "name": "DNSSEC not validated queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "relayed_queries": { - "name": "DNS queries relayed" + "name": "DNS queries relayed", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "tcp_queries": { - "name": "TCP queries" + "name": "TCP queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "tcp_queries_ratio": { "name": "TCP queries ratio" }, "udp_queries": { - "name": "UDP queries" + "name": "UDP queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "udp_queries_ratio": { "name": "UDP queries ratio" }, "unencrypted_queries": { - "name": "Unencrypted queries" + "name": "Unencrypted queries", 
+ "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "validated_queries": { - "name": "DNSSEC validated queries" + "name": "DNSSEC validated queries", + "unit_of_measurement": "[%key:component::nextdns::entity::sensor::all_queries::unit_of_measurement%]" }, "validated_queries_ratio": { "name": "DNSSEC validated queries ratio" diff --git a/homeassistant/components/nibe_heatpump/manifest.json b/homeassistant/components/nibe_heatpump/manifest.json index b3e5597da73..407cdfcfd57 100644 --- a/homeassistant/components/nibe_heatpump/manifest.json +++ b/homeassistant/components/nibe_heatpump/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nibe_heatpump", "iot_class": "local_polling", - "requirements": ["nibe==2.11.0"] + "requirements": ["nibe==2.13.0"] } diff --git a/homeassistant/components/nightscout/manifest.json b/homeassistant/components/nightscout/manifest.json index 3551b29ee0b..9b075a6df87 100644 --- a/homeassistant/components/nightscout/manifest.json +++ b/homeassistant/components/nightscout/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nightscout", "iot_class": "cloud_polling", "loggers": ["py_nightscout"], - "quality_scale": "platinum", "requirements": ["py-nightscout==1.2.2"] } diff --git a/homeassistant/components/nightscout/sensor.py b/homeassistant/components/nightscout/sensor.py index 92291bdc4f9..620349ec3c3 100644 --- a/homeassistant/components/nightscout/sensor.py +++ b/homeassistant/components/nightscout/sensor.py @@ -9,9 +9,9 @@ from typing import Any from aiohttp import ClientError from py_nightscout import Api as NightscoutAPI -from homeassistant.components.sensor import SensorEntity +from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_DATE +from homeassistant.const import ATTR_DATE, UnitOfBloodGlucoseConcentration from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -37,7 +37,10 @@ async def async_setup_entry( class NightscoutSensor(SensorEntity): """Implementation of a Nightscout sensor.""" - _attr_native_unit_of_measurement = "mg/dL" + _attr_device_class = SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION + _attr_native_unit_of_measurement = ( + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER + ) _attr_icon = "mdi:cloud-question" def __init__(self, api: NightscoutAPI, name: str, unique_id: str | None) -> None: diff --git a/homeassistant/components/niko_home_control/manifest.json b/homeassistant/components/niko_home_control/manifest.json index 72f9dd2f6b3..316dc1dc958 100644 --- a/homeassistant/components/niko_home_control/manifest.json +++ b/homeassistant/components/niko_home_control/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/niko_home_control", "iot_class": "local_polling", "loggers": ["nikohomecontrol"], + "quality_scale": "legacy", "requirements": ["niko-home-control==0.2.1"] } diff --git a/homeassistant/components/nilu/manifest.json b/homeassistant/components/nilu/manifest.json index 1eabf9e726e..d99a918ef4f 100644 --- a/homeassistant/components/nilu/manifest.json +++ b/homeassistant/components/nilu/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nilu", "iot_class": "cloud_polling", "loggers": ["niluclient"], + "quality_scale": "legacy", 
"requirements": ["niluclient==0.1.2"] } diff --git a/homeassistant/components/nina/binary_sensor.py b/homeassistant/components/nina/binary_sensor.py index 397ced0f5d3..10d3008fd82 100644 --- a/homeassistant/components/nina/binary_sensor.py +++ b/homeassistant/components/nina/binary_sensor.py @@ -25,6 +25,7 @@ from .const import ( ATTR_SENT, ATTR_SEVERITY, ATTR_START, + ATTR_WEB, CONF_MESSAGE_SLOTS, CONF_REGIONS, DOMAIN, @@ -103,6 +104,7 @@ class NINAMessage(CoordinatorEntity[NINADataUpdateCoordinator], BinarySensorEnti ATTR_SEVERITY: data.severity, ATTR_RECOMMENDED_ACTIONS: data.recommended_actions, ATTR_AFFECTED_AREAS: data.affected_areas, + ATTR_WEB: data.web, ATTR_ID: data.id, ATTR_SENT: data.sent, ATTR_START: data.start, diff --git a/homeassistant/components/nina/config_flow.py b/homeassistant/components/nina/config_flow.py index dd4319d566b..a1ba9ae0c61 100644 --- a/homeassistant/components/nina/config_flow.py +++ b/homeassistant/components/nina/config_flow.py @@ -171,8 +171,7 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry - self.data = dict(self.config_entry.data) + self.data = dict(config_entry.data) self._all_region_codes_sorted: dict[str, str] = {} self.regions: dict[str, dict[str, Any]] = {} diff --git a/homeassistant/components/nina/const.py b/homeassistant/components/nina/const.py index 1e755056079..47194c4c2de 100644 --- a/homeassistant/components/nina/const.py +++ b/homeassistant/components/nina/const.py @@ -27,6 +27,7 @@ ATTR_SENDER: str = "sender" ATTR_SEVERITY: str = "severity" ATTR_RECOMMENDED_ACTIONS: str = "recommended_actions" ATTR_AFFECTED_AREAS: str = "affected_areas" +ATTR_WEB: str = "web" ATTR_ID: str = "id" ATTR_SENT: str = "sent" ATTR_START: str = "start" diff --git a/homeassistant/components/nina/coordinator.py b/homeassistant/components/nina/coordinator.py index c731c7a62d7..2d9548f3d12 100644 --- a/homeassistant/components/nina/coordinator.py +++ b/homeassistant/components/nina/coordinator.py @@ -27,6 +27,7 @@ class NinaWarningData: severity: str recommended_actions: str affected_areas: str + web: str sent: str start: str expires: str @@ -127,6 +128,7 @@ class NINADataUpdateCoordinator( raw_warn.severity, " ".join([str(action) for action in raw_warn.recommended_actions]), affected_areas_string, + raw_warn.web or "", raw_warn.sent or "", raw_warn.start or "", raw_warn.expires or "", diff --git a/homeassistant/components/nina/manifest.json b/homeassistant/components/nina/manifest.json index 53a54f26dcf..45212c0220b 100644 --- a/homeassistant/components/nina/manifest.json +++ b/homeassistant/components/nina/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/nina", "iot_class": "cloud_polling", "loggers": ["pynina"], - "requirements": ["PyNINA==0.3.3"], + "requirements": ["PyNINA==0.3.4"], "single_config_entry": true } diff --git a/homeassistant/components/nina/strings.json b/homeassistant/components/nina/strings.json index 9747feaddb7..98ea88d8798 100644 --- a/homeassistant/components/nina/strings.json +++ b/homeassistant/components/nina/strings.json @@ -38,12 +38,10 @@ } } }, - "abort": { - "unknown": "[%key:common::config_flow::error::unknown%]" - }, "error": { "no_selection": "[%key:component::nina::config::error::no_selection%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": 
"[%key:common::config_flow::error::unknown%]" } } } diff --git a/homeassistant/components/nissan_leaf/manifest.json b/homeassistant/components/nissan_leaf/manifest.json index 9c3df39c69f..9ad8773ee44 100644 --- a/homeassistant/components/nissan_leaf/manifest.json +++ b/homeassistant/components/nissan_leaf/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nissan_leaf", "iot_class": "cloud_polling", "loggers": ["pycarwings2"], + "quality_scale": "legacy", "requirements": ["pycarwings2==2.14"] } diff --git a/homeassistant/components/nmap_tracker/config_flow.py b/homeassistant/components/nmap_tracker/config_flow.py index b724dca1a81..e05150995aa 100644 --- a/homeassistant/components/nmap_tracker/config_flow.py +++ b/homeassistant/components/nmap_tracker/config_flow.py @@ -213,6 +213,6 @@ class NmapTrackerConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) diff --git a/homeassistant/components/nmbs/manifest.json b/homeassistant/components/nmbs/manifest.json index 24aadb6b4f0..e17d1227bed 100644 --- a/homeassistant/components/nmbs/manifest.json +++ b/homeassistant/components/nmbs/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nmbs", "iot_class": "cloud_polling", "loggers": ["pyrail"], + "quality_scale": "legacy", "requirements": ["pyrail==0.0.3"] } diff --git a/homeassistant/components/no_ip/manifest.json b/homeassistant/components/no_ip/manifest.json index cf995e34b47..8e1e247143e 100644 --- a/homeassistant/components/no_ip/manifest.json +++ b/homeassistant/components/no_ip/manifest.json @@ -3,5 +3,6 @@ "name": "No-IP.com", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/no_ip", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/noaa_tides/manifest.json b/homeassistant/components/noaa_tides/manifest.json index 85c6fbcb788..8cc81857770 100644 --- a/homeassistant/components/noaa_tides/manifest.json +++ b/homeassistant/components/noaa_tides/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/noaa_tides", "iot_class": "cloud_polling", "loggers": ["noaa_coops"], + "quality_scale": "legacy", "requirements": ["noaa-coops==0.1.9"] } diff --git a/homeassistant/components/nobo_hub/config_flow.py b/homeassistant/components/nobo_hub/config_flow.py index 8aed520f21e..7e1ae4c1d9b 100644 --- a/homeassistant/components/nobo_hub/config_flow.py +++ b/homeassistant/components/nobo_hub/config_flow.py @@ -175,7 +175,7 @@ class NoboHubConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class NoboHubConnectError(HomeAssistantError): @@ -190,10 +190,6 @@ class NoboHubConnectError(HomeAssistantError): class OptionsFlowHandler(OptionsFlow): """Handles options flow for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" diff --git a/homeassistant/components/nordpool/__init__.py 
b/homeassistant/components/nordpool/__init__.py new file mode 100644 index 00000000000..82db98e2148 --- /dev/null +++ b/homeassistant/components/nordpool/__init__.py @@ -0,0 +1,36 @@ +"""The Nord Pool component.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.util import dt as dt_util + +from .const import DOMAIN, PLATFORMS +from .coordinator import NordPoolDataUpdateCoordinator + +type NordPoolConfigEntry = ConfigEntry[NordPoolDataUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: + """Set up Nord Pool from a config entry.""" + + coordinator = NordPoolDataUpdateCoordinator(hass, entry) + await coordinator.fetch_data(dt_util.utcnow()) + if not coordinator.last_update_success: + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="initial_update_failed", + translation_placeholders={"error": str(coordinator.last_exception)}, + ) + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NordPoolConfigEntry) -> bool: + """Unload Nord Pool config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/nordpool/config_flow.py b/homeassistant/components/nordpool/config_flow.py new file mode 100644 index 00000000000..1d75d825e47 --- /dev/null +++ b/homeassistant/components/nordpool/config_flow.py @@ -0,0 +1,115 @@ +"""Adds config flow for Nord Pool integration.""" + +from __future__ import annotations + +from typing import Any + +from pynordpool import ( + Currency, + NordPoolClient, + NordPoolEmptyResponseError, + NordPoolError, +) +from pynordpool.const import AREAS +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) +from homeassistant.util import dt as dt_util + +from .const import CONF_AREAS, DEFAULT_NAME, DOMAIN + +SELECT_AREAS = [ + SelectOptionDict(value=area, label=name) for area, name in AREAS.items() +] +SELECT_CURRENCY = [currency.value for currency in Currency] + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_AREAS, default=[]): SelectSelector( + SelectSelectorConfig( + options=SELECT_AREAS, + multiple=True, + mode=SelectSelectorMode.DROPDOWN, + sort=True, + ) + ), + vol.Required(CONF_CURRENCY, default="SEK"): SelectSelector( + SelectSelectorConfig( + options=SELECT_CURRENCY, + multiple=False, + mode=SelectSelectorMode.DROPDOWN, + sort=True, + ) + ), + } +) + + +async def test_api(hass: HomeAssistant, user_input: dict[str, Any]) -> dict[str, str]: + """Test fetch data from Nord Pool.""" + client = NordPoolClient(async_get_clientsession(hass)) + try: + await client.async_get_delivery_period( + dt_util.now(), + Currency(user_input[CONF_CURRENCY]), + user_input[CONF_AREAS], + ) + except NordPoolEmptyResponseError: + return {"base": "no_data"} + except NordPoolError: + return {"base": "cannot_connect"} + + return {} + + +class NordpoolConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow 
for Nord Pool integration.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input: + errors = await test_api(self.hass, user_input) + if not errors: + return self.async_create_entry( + title=DEFAULT_NAME, + data=user_input, + ) + + return self.async_show_form( + step_id="user", + data_schema=DATA_SCHEMA, + errors=errors, + ) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the reconfiguration step.""" + errors: dict[str, str] = {} + if user_input: + errors = await test_api(self.hass, user_input) + reconfigure_entry = self._get_reconfigure_entry() + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, data_updates=user_input + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/nordpool/const.py b/homeassistant/components/nordpool/const.py new file mode 100644 index 00000000000..19a978d946c --- /dev/null +++ b/homeassistant/components/nordpool/const.py @@ -0,0 +1,14 @@ +"""Constants for Nord Pool.""" + +import logging + +from homeassistant.const import Platform + +LOGGER = logging.getLogger(__package__) + +DEFAULT_SCAN_INTERVAL = 60 +DOMAIN = "nordpool" +PLATFORMS = [Platform.SENSOR] +DEFAULT_NAME = "Nord Pool" + +CONF_AREAS = "areas" diff --git a/homeassistant/components/nordpool/coordinator.py b/homeassistant/components/nordpool/coordinator.py new file mode 100644 index 00000000000..fa4e9ca2548 --- /dev/null +++ b/homeassistant/components/nordpool/coordinator.py @@ -0,0 +1,91 @@ +"""DataUpdateCoordinator for the Nord Pool integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from datetime import datetime, timedelta +from typing import TYPE_CHECKING + +from pynordpool import ( + Currency, + DeliveryPeriodData, + NordPoolClient, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) + +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.event import async_track_point_in_utc_time +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.util import dt as dt_util + +from .const import CONF_AREAS, DOMAIN, LOGGER + +if TYPE_CHECKING: + from . 
import NordPoolConfigEntry + + +class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodData]): + """A Nord Pool Data Update Coordinator.""" + + config_entry: NordPoolConfigEntry + + def __init__(self, hass: HomeAssistant, config_entry: NordPoolConfigEntry) -> None: + """Initialize the Nord Pool coordinator.""" + super().__init__( + hass, + LOGGER, + config_entry=config_entry, + name=DOMAIN, + ) + self.client = NordPoolClient(session=async_get_clientsession(hass)) + self.unsub: Callable[[], None] | None = None + + def get_next_interval(self, now: datetime) -> datetime: + """Compute next time an update should occur.""" + next_hour = dt_util.utcnow() + timedelta(hours=1) + next_run = datetime( + next_hour.year, + next_hour.month, + next_hour.day, + next_hour.hour, + tzinfo=dt_util.UTC, + ) + LOGGER.debug("Next update at %s", next_run) + return next_run + + async def async_shutdown(self) -> None: + """Cancel any scheduled call, and ignore new runs.""" + await super().async_shutdown() + if self.unsub: + self.unsub() + self.unsub = None + + async def fetch_data(self, now: datetime) -> None: + """Fetch data from Nord Pool.""" + self.unsub = async_track_point_in_utc_time( + self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow()) + ) + try: + data = await self.client.async_get_delivery_period( + dt_util.now(), + Currency(self.config_entry.data[CONF_CURRENCY]), + self.config_entry.data[CONF_AREAS], + ) + except NordPoolEmptyResponseError as error: + LOGGER.debug("Empty response error: %s", error) + self.async_set_update_error(error) + return + except NordPoolResponseError as error: + LOGGER.debug("Response error: %s", error) + self.async_set_update_error(error) + return + except NordPoolError as error: + LOGGER.debug("Connection error: %s", error) + self.async_set_update_error(error) + return + + self.async_set_updated_data(data) diff --git a/homeassistant/components/nordpool/diagnostics.py b/homeassistant/components/nordpool/diagnostics.py new file mode 100644 index 00000000000..3160c2bfa6d --- /dev/null +++ b/homeassistant/components/nordpool/diagnostics.py @@ -0,0 +1,16 @@ +"""Diagnostics support for Nord Pool.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import NordPoolConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: NordPoolConfigEntry +) -> dict[str, Any]: + """Return diagnostics for Nord Pool config entry.""" + return {"raw": entry.runtime_data.data.raw} diff --git a/homeassistant/components/nordpool/entity.py b/homeassistant/components/nordpool/entity.py new file mode 100644 index 00000000000..32240aad12c --- /dev/null +++ b/homeassistant/components/nordpool/entity.py @@ -0,0 +1,32 @@ +"""Base entity for Nord Pool.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import NordPoolDataUpdateCoordinator + + +class NordpoolBaseEntity(CoordinatorEntity[NordPoolDataUpdateCoordinator]): + """Representation of a Nord Pool base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: EntityDescription, + area: str, + ) -> None: + """Initiate Nord Pool base entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = f"{area}-{entity_description.key}" + self.area = area + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, area)}, + name=f"Nord Pool {area}", + ) diff --git a/homeassistant/components/nordpool/icons.json b/homeassistant/components/nordpool/icons.json new file mode 100644 index 00000000000..85434a2d09b --- /dev/null +++ b/homeassistant/components/nordpool/icons.json @@ -0,0 +1,42 @@ +{ + "entity": { + "sensor": { + "updated_at": { + "default": "mdi:clock-outline" + }, + "currency": { + "default": "mdi:currency-usd" + }, + "exchange_rate": { + "default": "mdi:currency-usd" + }, + "current_price": { + "default": "mdi:cash" + }, + "last_price": { + "default": "mdi:cash" + }, + "next_price": { + "default": "mdi:cash" + }, + "block_average": { + "default": "mdi:cash-multiple" + }, + "block_min": { + "default": "mdi:cash-multiple" + }, + "block_max": { + "default": "mdi:cash-multiple" + }, + "block_start_time": { + "default": "mdi:clock-time-twelve-outline" + }, + "block_end_time": { + "default": "mdi:clock-time-two-outline" + }, + "daily_average": { + "default": "mdi:cash-multiple" + } + } + } +} diff --git a/homeassistant/components/nordpool/manifest.json b/homeassistant/components/nordpool/manifest.json new file mode 100644 index 00000000000..bf093eb3ee9 --- /dev/null +++ b/homeassistant/components/nordpool/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "nordpool", + "name": "Nord Pool", + "codeowners": ["@gjohansson-ST"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/nordpool", + "integration_type": "hub", + "iot_class": "cloud_polling", + "loggers": ["pynordpool"], + "requirements": ["pynordpool==0.2.2"], + "single_config_entry": true +} diff --git a/homeassistant/components/nordpool/sensor.py b/homeassistant/components/nordpool/sensor.py new file mode 100644 index 00000000000..e7e655a6657 --- /dev/null +++ b/homeassistant/components/nordpool/sensor.py @@ -0,0 +1,328 @@ +"""Sensor platform for Nord Pool integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta + +from pynordpool import DeliveryPeriodData + +from homeassistant.components.sensor import ( + 
EntityCategory, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import dt as dt_util, slugify + +from . import NordPoolConfigEntry +from .const import LOGGER +from .coordinator import NordPoolDataUpdateCoordinator +from .entity import NordpoolBaseEntity + +PARALLEL_UPDATES = 0 + + +def get_prices(data: DeliveryPeriodData) -> dict[str, tuple[float, float, float]]: + """Return previous, current and next prices. + + Output: {"SE3": (10.0, 10.5, 12.1)} + """ + last_price_entries: dict[str, float] = {} + current_price_entries: dict[str, float] = {} + next_price_entries: dict[str, float] = {} + current_time = dt_util.utcnow() + previous_time = current_time - timedelta(hours=1) + next_time = current_time + timedelta(hours=1) + price_data = data.entries + for entry in price_data: + if entry.start <= current_time <= entry.end: + current_price_entries = entry.entry + if entry.start <= previous_time <= entry.end: + last_price_entries = entry.entry + if entry.start <= next_time <= entry.end: + next_price_entries = entry.entry + + result = {} + for area, price in current_price_entries.items(): + result[area] = (last_price_entries[area], price, next_price_entries[area]) + LOGGER.debug("Prices: %s", result) + return result + + +def get_blockprices( + data: DeliveryPeriodData, +) -> dict[str, dict[str, tuple[datetime, datetime, float, float, float]]]: + """Return average, min and max for block prices. + + Output: {"SE3": {"Off-peak 1": (_datetime_, _datetime_, 9.3, 10.5, 12.1)}} + """ + result: dict[str, dict[str, tuple[datetime, datetime, float, float, float]]] = {} + block_prices = data.block_prices + for entry in block_prices: + for _area in entry.average: + if _area not in result: + result[_area] = {} + result[_area][entry.name] = ( + entry.start, + entry.end, + entry.average[_area]["average"], + entry.average[_area]["min"], + entry.average[_area]["max"], + ) + + LOGGER.debug("Block prices: %s", result) + return result + + +@dataclass(frozen=True, kw_only=True) +class NordpoolDefaultSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool default sensor entity.""" + + value_fn: Callable[[DeliveryPeriodData], str | float | datetime | None] + + +@dataclass(frozen=True, kw_only=True) +class NordpoolPricesSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool prices sensor entity.""" + + value_fn: Callable[[tuple[float, float, float]], float | None] + + +@dataclass(frozen=True, kw_only=True) +class NordpoolBlockPricesSensorEntityDescription(SensorEntityDescription): + """Describes Nord Pool block prices sensor entity.""" + + value_fn: Callable[ + [tuple[datetime, datetime, float, float, float]], float | datetime | None + ] + + +DEFAULT_SENSOR_TYPES: tuple[NordpoolDefaultSensorEntityDescription, ...] 
= ( + NordpoolDefaultSensorEntityDescription( + key="updated_at", + translation_key="updated_at", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.updated_at, + entity_category=EntityCategory.DIAGNOSTIC, + ), + NordpoolDefaultSensorEntityDescription( + key="currency", + translation_key="currency", + value_fn=lambda data: data.currency, + entity_category=EntityCategory.DIAGNOSTIC, + ), + NordpoolDefaultSensorEntityDescription( + key="exchange_rate", + translation_key="exchange_rate", + value_fn=lambda data: data.exchange_rate, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) +PRICES_SENSOR_TYPES: tuple[NordpoolPricesSensorEntityDescription, ...] = ( + NordpoolPricesSensorEntityDescription( + key="current_price", + translation_key="current_price", + value_fn=lambda data: data[1] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="last_price", + translation_key="last_price", + value_fn=lambda data: data[0] / 1000, + suggested_display_precision=2, + ), + NordpoolPricesSensorEntityDescription( + key="next_price", + translation_key="next_price", + value_fn=lambda data: data[2] / 1000, + suggested_display_precision=2, + ), +) +BLOCK_PRICES_SENSOR_TYPES: tuple[NordpoolBlockPricesSensorEntityDescription, ...] = ( + NordpoolBlockPricesSensorEntityDescription( + key="block_average", + translation_key="block_average", + value_fn=lambda data: data[2] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_min", + translation_key="block_min", + value_fn=lambda data: data[3] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_max", + translation_key="block_max", + value_fn=lambda data: data[4] / 1000, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_start_time", + translation_key="block_start_time", + value_fn=lambda data: data[0], + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + ), + NordpoolBlockPricesSensorEntityDescription( + key="block_end_time", + translation_key="block_end_time", + value_fn=lambda data: data[1], + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + ), +) +DAILY_AVERAGE_PRICES_SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( + SensorEntityDescription( + key="daily_average", + translation_key="daily_average", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: NordPoolConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Nord Pool sensor platform.""" + + coordinator = entry.runtime_data + + entities: list[NordpoolBaseEntity] = [] + currency = entry.runtime_data.data.currency + + for area in get_prices(entry.runtime_data.data): + LOGGER.debug("Setting up base sensors for area %s", area) + entities.extend( + NordpoolSensor(coordinator, description, area) + for description in DEFAULT_SENSOR_TYPES + ) + LOGGER.debug( + "Setting up price sensors for area %s with currency %s", area, currency + ) + entities.extend( + NordpoolPriceSensor(coordinator, description, area, currency) + for description in PRICES_SENSOR_TYPES + ) + entities.extend( + NordpoolDailyAveragePriceSensor(coordinator, description, area, currency) + for description in DAILY_AVERAGE_PRICES_SENSOR_TYPES + ) + for block_name in get_blockprices(coordinator.data)[area]: + LOGGER.debug( + "Setting up block price sensors for area %s with currency %s in block %s", + area, + currency, + block_name, + ) + entities.extend( + NordpoolBlockPriceSensor( + coordinator, description, area, currency, block_name + ) + for description in BLOCK_PRICES_SENSOR_TYPES + ) + async_add_entities(entities) + + +class NordpoolSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool sensor.""" + + entity_description: NordpoolDefaultSensorEntityDescription + + @property + def native_value(self) -> str | float | datetime | None: + """Return value of sensor.""" + return self.entity_description.value_fn(self.coordinator.data) + + +class NordpoolPriceSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool price sensor.""" + + entity_description: NordpoolPricesSensorEntityDescription + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: NordpoolPricesSensorEntityDescription, + area: str, + currency: str, + ) -> None: + """Initiate Nord Pool sensor.""" + super().__init__(coordinator, entity_description, area) + self._attr_native_unit_of_measurement = f"{currency}/kWh" + + @property + def native_value(self) -> float | None: + """Return value of sensor.""" + return self.entity_description.value_fn( + get_prices(self.coordinator.data)[self.area] + ) + + +class NordpoolBlockPriceSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool block price sensor.""" + + entity_description: NordpoolBlockPricesSensorEntityDescription + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: NordpoolBlockPricesSensorEntityDescription, + area: str, + currency: str, + block_name: str, + ) -> None: + """Initiate Nord Pool sensor.""" + super().__init__(coordinator, entity_description, area) + if entity_description.device_class is not SensorDeviceClass.TIMESTAMP: + self._attr_native_unit_of_measurement = f"{currency}/kWh" + self._attr_unique_id = f"{slugify(block_name)}-{area}-{entity_description.key}" + self.block_name = block_name + self._attr_translation_placeholders = {"block": block_name} + + @property + def native_value(self) -> float | datetime | None: + """Return value of sensor.""" + return self.entity_description.value_fn( + get_blockprices(self.coordinator.data)[self.area][self.block_name] + ) + 
+ +class NordpoolDailyAveragePriceSensor(NordpoolBaseEntity, SensorEntity): + """Representation of a Nord Pool daily average price sensor.""" + + entity_description: SensorEntityDescription + + def __init__( + self, + coordinator: NordPoolDataUpdateCoordinator, + entity_description: SensorEntityDescription, + area: str, + currency: str, + ) -> None: + """Initiate Nord Pool sensor.""" + super().__init__(coordinator, entity_description, area) + self._attr_native_unit_of_measurement = f"{currency}/kWh" + + @property + def native_value(self) -> float | None: + """Return value of sensor.""" + return self.coordinator.data.area_average[self.area] / 1000 diff --git a/homeassistant/components/nordpool/strings.json b/homeassistant/components/nordpool/strings.json new file mode 100644 index 00000000000..1a4551fe61a --- /dev/null +++ b/homeassistant/components/nordpool/strings.json @@ -0,0 +1,70 @@ +{ + "config": { + "abort": { + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "no_data": "API connected but the response was empty" + }, + "step": { + "user": { + "data": { + "currency": "Currency", + "areas": "Areas" + } + }, + "reconfigure": { + "data": { + "currency": "[%key:component::nordpool::config::step::user::data::currency%]", + "areas": "[%key:component::nordpool::config::step::user::data::areas%]" + } + } + } + }, + "entity": { + "sensor": { + "updated_at": { + "name": "Last updated" + }, + "currency": { + "name": "Currency" + }, + "exchange_rate": { + "name": "Exchange rate" + }, + "current_price": { + "name": "Current price" + }, + "last_price": { + "name": "Previous price" + }, + "next_price": { + "name": "Next price" + }, + "block_average": { + "name": "{block} average" + }, + "block_min": { + "name": "{block} lowest price" + }, + "block_max": { + "name": "{block} highest price" + }, + "block_start_time": { + "name": "{block} time from" + }, + "block_end_time": { + "name": "{block} time until" + }, + "daily_average": { + "name": "Daily average" + } + } + }, + "exceptions": { + "initial_update_failed": { + "message": "Initial update failed on startup with error {error}" + } + } +} diff --git a/homeassistant/components/norway_air/manifest.json b/homeassistant/components/norway_air/manifest.json index 0c8f15b9b78..5ce6efd944c 100644 --- a/homeassistant/components/norway_air/manifest.json +++ b/homeassistant/components/norway_air/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/norway_air", "iot_class": "cloud_polling", "loggers": ["metno"], + "quality_scale": "legacy", "requirements": ["PyMetno==0.13.0"] } diff --git a/homeassistant/components/notify/strings.json b/homeassistant/components/notify/strings.json index b7d4ec1ad25..e832bfc248a 100644 --- a/homeassistant/components/notify/strings.json +++ b/homeassistant/components/notify/strings.json @@ -67,7 +67,7 @@ "fix_flow": { "step": { "confirm": { - "description": "The {integration_title} `notify` actions(s) are migrated. A new `notify` entity is available now to replace each legacy `notify` action.\n\nUpdate any automations to use the new `notify.send_message` action exposed with this new entity. When this is done, fix this issue and restart Home Assistant.", + "description": "The {integration_title} `notify` action(s) are migrated. 
A new `notify` entity is available now to replace each legacy `notify` action.\n\nUpdate any automations to use the new `notify.send_message` action exposed with this new entity. When this is done, fix this issue and restart Home Assistant.", "title": "Migrate legacy {integration_title} notify action for domain `{domain}`" } } diff --git a/homeassistant/components/notify_events/manifest.json b/homeassistant/components/notify_events/manifest.json index a2c01e1d718..e154ab85cae 100644 --- a/homeassistant/components/notify_events/manifest.json +++ b/homeassistant/components/notify_events/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/notify_events", "iot_class": "cloud_push", "loggers": ["notify_events"], + "quality_scale": "legacy", "requirements": ["notify-events==1.0.4"] } diff --git a/homeassistant/components/nsw_fuel_station/__init__.py b/homeassistant/components/nsw_fuel_station/__init__.py index 76dc9d4c6ff..85e204b6f51 100644 --- a/homeassistant/components/nsw_fuel_station/__init__.py +++ b/homeassistant/components/nsw_fuel_station/__init__.py @@ -33,6 +33,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: coordinator = DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name="sensor", update_interval=SCAN_INTERVAL, update_method=async_update_data, diff --git a/homeassistant/components/nsw_fuel_station/manifest.json b/homeassistant/components/nsw_fuel_station/manifest.json index 5c105fd0281..3fccab39189 100644 --- a/homeassistant/components/nsw_fuel_station/manifest.json +++ b/homeassistant/components/nsw_fuel_station/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nsw_fuel_station", "iot_class": "cloud_polling", "loggers": ["nsw_fuel"], + "quality_scale": "legacy", "requirements": ["nsw-fuel-api-client==1.1.0"] } diff --git a/homeassistant/components/nsw_rural_fire_service_feed/manifest.json b/homeassistant/components/nsw_rural_fire_service_feed/manifest.json index 9d1f60e33d1..802f4c89b72 100644 --- a/homeassistant/components/nsw_rural_fire_service_feed/manifest.json +++ b/homeassistant/components/nsw_rural_fire_service_feed/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_geojson_nsw_rfs_incidents"], + "quality_scale": "legacy", "requirements": ["aio-geojson-nsw-rfs-incidents==0.7"] } diff --git a/homeassistant/components/numato/manifest.json b/homeassistant/components/numato/manifest.json index f7bcf0527c2..81f3793fa6c 100644 --- a/homeassistant/components/numato/manifest.json +++ b/homeassistant/components/numato/manifest.json @@ -6,5 +6,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["numato_gpio"], + "quality_scale": "legacy", "requirements": ["numato-gpio==0.13.0"] } diff --git a/homeassistant/components/number/__init__.py b/homeassistant/components/number/__init__.py index dc169fcb348..9f4aef08aa9 100644 --- a/homeassistant/components/number/__init__.py +++ b/homeassistant/components/number/__init__.py @@ -384,6 +384,18 @@ class NumberEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ): return self.hass.config.units.temperature_unit + if (translation_key := self._unit_of_measurement_translation_key) and ( + unit_of_measurement + := self.platform.default_language_platform_translations.get(translation_key) + ): + if native_unit_of_measurement is not None: + raise ValueError( + f"Number entity {type(self)} from integration '{self.platform.platform_name}' " + f"has a 
translation key for unit_of_measurement '{unit_of_measurement}', " + f"but also has a native_unit_of_measurement '{native_unit_of_measurement}'" + ) + return unit_of_measurement + return native_unit_of_measurement @cached_property diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index ad95c9b5358..7330b781e75 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -17,6 +17,8 @@ from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, + UnitOfArea, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -97,6 +99,12 @@ class NumberDeviceClass(StrEnum): Unit of measurement: `None` """ + AREA = "area" + """Area + + Unit of measurement: `UnitOfArea` units + """ + ATMOSPHERIC_PRESSURE = "atmospheric_pressure" """Atmospheric pressure. @@ -109,6 +117,12 @@ class NumberDeviceClass(StrEnum): Unit of measurement: `%` """ + BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" + """Blood glucose concentration. + + Unit of measurement: `mg/dL`, `mmol/L` + """ + CO = "carbon_monoxide" """Carbon Monoxide gas concentration. @@ -162,7 +176,7 @@ class NumberDeviceClass(StrEnum): ENERGY = "energy" """Energy. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` """ ENERGY_STORAGE = "energy_storage" @@ -171,7 +185,7 @@ class NumberDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` """ FREQUENCY = "frequency" @@ -279,7 +293,7 @@ class NumberDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW` + Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` """ PRECIPITATION = "precipitation" @@ -362,7 +376,7 @@ class NumberDeviceClass(StrEnum): VOLTAGE = "voltage" """Voltage. 
- Unit of measurement: `V`, `mV` + Unit of measurement: `V`, `mV`, `µV` """ VOLUME = "volume" @@ -390,7 +404,7 @@ class NumberDeviceClass(StrEnum): """Generic flow rate Unit of measurement: UnitOfVolumeFlowRate - - SI / metric: `m³/h`, `L/min` + - SI / metric: `m³/h`, `L/min`, `mL/s` - USCS / imperial: `ft³/min`, `gal/min` """ @@ -427,8 +441,10 @@ DEVICE_CLASSES_SCHEMA: Final = vol.All(vol.Lower, vol.Coerce(NumberDeviceClass)) DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.APPARENT_POWER: set(UnitOfApparentPower), NumberDeviceClass.AQI: {None}, + NumberDeviceClass.AREA: set(UnitOfArea), NumberDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure), NumberDeviceClass.BATTERY: {PERCENTAGE}, + NumberDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration), NumberDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION}, NumberDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION}, NumberDeviceClass.CONDUCTIVITY: set(UnitOfConductivity), diff --git a/homeassistant/components/number/icons.json b/homeassistant/components/number/icons.json index a122aaecb09..636fa0a7751 100644 --- a/homeassistant/components/number/icons.json +++ b/homeassistant/components/number/icons.json @@ -9,12 +9,18 @@ "aqi": { "default": "mdi:air-filter" }, + "area": { + "default": "mdi:texture-box" + }, "atmospheric_pressure": { "default": "mdi:thermometer-lines" }, "battery": { "default": "mdi:battery" }, + "blood_glucose_concentration": { + "default": "mdi:spoon-sugar" + }, "carbon_dioxide": { "default": "mdi:molecule-co2" }, diff --git a/homeassistant/components/number/strings.json b/homeassistant/components/number/strings.json index 580385172e3..cc77d224d72 100644 --- a/homeassistant/components/number/strings.json +++ b/homeassistant/components/number/strings.json @@ -37,12 +37,18 @@ "aqi": { "name": "[%key:component::sensor::entity_component::aqi::name%]" }, + "area": { + "name": "[%key:component::sensor::entity_component::area::name%]" + }, "atmospheric_pressure": { "name": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]" }, "battery": { "name": "[%key:component::sensor::entity_component::battery::name%]" }, + "blood_glucose_concentration": { + "name": "[%key:component::sensor::entity_component::blood_glucose_concentration::name%]" + }, "carbon_dioxide": { "name": "[%key:component::sensor::entity_component::carbon_dioxide::name%]" }, diff --git a/homeassistant/components/nut/__init__.py b/homeassistant/components/nut/__init__.py index c9b2bcc13b2..169dbbbff5d 100644 --- a/homeassistant/components/nut/__init__.py +++ b/homeassistant/components/nut/__init__.py @@ -130,7 +130,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: NutConfigEntry) -> bool: name=data.name.title(), manufacturer=data.device_info.manufacturer, model=data.device_info.model, + model_id=data.device_info.model_id, sw_version=data.device_info.firmware, + serial_number=data.device_info.serial, + suggested_area=data.device_info.device_location, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -208,7 +211,10 @@ class NUTDeviceInfo: manufacturer: str | None = None model: str | None = None + model_id: str | None = None firmware: str | None = None + serial: str | None = None + device_location: str | None = None class PyNUTData: @@ -267,8 +273,13 @@ class PyNUTData: manufacturer = _manufacturer_from_status(self._status) model = _model_from_status(self._status) + model_id: str | None = self._status.get("device.part") firmware = 
_firmware_from_status(self._status) - return NUTDeviceInfo(manufacturer, model, firmware) + serial = _serial_from_status(self._status) + device_location: str | None = self._status.get("device.location") + return NUTDeviceInfo( + manufacturer, model, model_id, firmware, serial, device_location + ) async def _async_get_status(self) -> dict[str, str]: """Get the ups status from NUT.""" diff --git a/homeassistant/components/nut/config_flow.py b/homeassistant/components/nut/config_flow.py index d0a2da124a6..966c51e98e9 100644 --- a/homeassistant/components/nut/config_flow.py +++ b/homeassistant/components/nut/config_flow.py @@ -235,16 +235,12 @@ class NutConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for nut.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/nut/sensor.py b/homeassistant/components/nut/sensor.py index 7f211d5452b..bb702873052 100644 --- a/homeassistant/components/nut/sensor.py +++ b/homeassistant/components/nut/sensor.py @@ -15,6 +15,7 @@ from homeassistant.components.sensor import ( from homeassistant.const import ( ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_SERIAL_NUMBER, ATTR_SW_VERSION, PERCENTAGE, STATE_UNKNOWN, @@ -42,6 +43,7 @@ NUT_DEV_INFO_TO_DEV_INFO: dict[str, str] = { "manufacturer": ATTR_MANUFACTURER, "model": ATTR_MODEL, "firmware": ATTR_SW_VERSION, + "serial": ATTR_SERIAL_NUMBER, } _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/nws/manifest.json b/homeassistant/components/nws/manifest.json index d11a0e62bcf..0e02e652b49 100644 --- a/homeassistant/components/nws/manifest.json +++ b/homeassistant/components/nws/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nws", "iot_class": "cloud_polling", "loggers": ["metar", "pynws"], - "quality_scale": "platinum", "requirements": ["pynws[retry]==1.8.2"] } diff --git a/homeassistant/components/nx584/manifest.json b/homeassistant/components/nx584/manifest.json index 84ead05d083..9ac469224d0 100644 --- a/homeassistant/components/nx584/manifest.json +++ b/homeassistant/components/nx584/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/nx584", "iot_class": "local_push", "loggers": ["nx584"], + "quality_scale": "legacy", "requirements": ["pynx584==0.8.2"] } diff --git a/homeassistant/components/oasa_telematics/manifest.json b/homeassistant/components/oasa_telematics/manifest.json index d3dbaad98e3..7365081a959 100644 --- a/homeassistant/components/oasa_telematics/manifest.json +++ b/homeassistant/components/oasa_telematics/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/oasa_telematics", "iot_class": "cloud_polling", "loggers": ["oasatelematics"], + "quality_scale": "legacy", "requirements": ["oasatelematics==0.3"] } diff --git a/homeassistant/components/oem/manifest.json b/homeassistant/components/oem/manifest.json index a8ce99b9372..f7ab34adbd9 100644 --- a/homeassistant/components/oem/manifest.json +++ b/homeassistant/components/oem/manifest.json @@ -5,5 +5,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/oem", "iot_class": "local_polling", "loggers": ["oemthermostat"], + "quality_scale": "legacy", "requirements": ["oemthermostat==1.1.1"] } diff --git a/homeassistant/components/ohmconnect/manifest.json b/homeassistant/components/ohmconnect/manifest.json index 74754485ea0..e2f02add22d 100644 --- a/homeassistant/components/ohmconnect/manifest.json +++ b/homeassistant/components/ohmconnect/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@robbiet480"], "documentation": "https://www.home-assistant.io/integrations/ohmconnect", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["defusedxml==0.7.1"] } diff --git a/homeassistant/components/ollama/config_flow.py b/homeassistant/components/ollama/config_flow.py index 65b8efaf525..1024a824c25 100644 --- a/homeassistant/components/ollama/config_flow.py +++ b/homeassistant/components/ollama/config_flow.py @@ -207,9 +207,8 @@ class OllamaOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry - self.url: str = self.config_entry.data[CONF_URL] - self.model: str = self.config_entry.data[CONF_MODEL] + self.url: str = config_entry.data[CONF_URL] + self.model: str = config_entry.data[CONF_MODEL] async def async_step_init( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/ombi/manifest.json b/homeassistant/components/ombi/manifest.json index d9da13d2381..1afc385a5a7 100644 --- a/homeassistant/components/ombi/manifest.json +++ b/homeassistant/components/ombi/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@larssont"], "documentation": "https://www.home-assistant.io/integrations/ombi", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pyombi==0.1.10"] } diff --git a/homeassistant/components/omnilogic/config_flow.py b/homeassistant/components/omnilogic/config_flow.py index 489c8e6f601..dfbd010ea98 100644 --- a/homeassistant/components/omnilogic/config_flow.py +++ b/homeassistant/components/omnilogic/config_flow.py @@ -34,7 +34,7 @@ class OmniLogicConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -78,10 +78,6 @@ class OmniLogicConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle Omnilogic client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/onewire/config_flow.py b/homeassistant/components/onewire/config_flow.py index ab8b44f895d..3889db2a069 100644 --- a/homeassistant/components/onewire/config_flow.py +++ b/homeassistant/components/onewire/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from copy import deepcopy from typing import Any import voluptuous as vol @@ -10,7 +11,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant, callback @@ -100,12 +101,14 @@ class OneWireFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def 
async_get_options_flow(config_entry: ConfigEntry) -> OnewireOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OnewireOptionsFlowHandler: """Get the options flow for this handler.""" return OnewireOptionsFlowHandler(config_entry) -class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry): +class OnewireOptionsFlowHandler(OptionsFlow): """Handle OneWire Config options.""" configurable_devices: dict[str, str] @@ -123,6 +126,10 @@ class OnewireOptionsFlowHandler(OptionsFlowWithConfigEntry): current_device: str """Friendly name of the currently selected device.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.options = deepcopy(dict(config_entry.options)) + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/onewire/manifest.json b/homeassistant/components/onewire/manifest.json index 32a08223075..4f3cb5d04ab 100644 --- a/homeassistant/components/onewire/manifest.json +++ b/homeassistant/components/onewire/manifest.json @@ -7,6 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["pyownet"], - "quality_scale": "gold", "requirements": ["pyownet==0.10.0.post1"] } diff --git a/homeassistant/components/onkyo/config_flow.py b/homeassistant/components/onkyo/config_flow.py index 4c5de362172..a8ced6fae64 100644 --- a/homeassistant/components/onkyo/config_flow.py +++ b/homeassistant/components/onkyo/config_flow.py @@ -11,7 +11,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import callback @@ -323,16 +322,13 @@ class OnkyoConfigFlow(ConfigFlow, domain=DOMAIN): return OnkyoOptionsFlowHandler(config_entry) -class OnkyoOptionsFlowHandler(OptionsFlowWithConfigEntry): +class OnkyoOptionsFlowHandler(OptionsFlow): """Handle an options flow for Onkyo.""" def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - super().__init__(config_entry) - - sources_store: dict[str, str] = self.options[OPTION_INPUT_SOURCES] - sources = {InputSource(k): v for k, v in sources_store.items()} - self.options[OPTION_INPUT_SOURCES] = sources + sources_store: dict[str, str] = config_entry.options[OPTION_INPUT_SOURCES] + self._input_sources = {InputSource(k): v for k, v in sources_store.items()} async def async_step_init( self, user_input: dict[str, Any] | None = None @@ -347,7 +343,9 @@ class OnkyoOptionsFlowHandler(OptionsFlowWithConfigEntry): return self.async_create_entry( data={ - OPTION_VOLUME_RESOLUTION: self.options[OPTION_VOLUME_RESOLUTION], + OPTION_VOLUME_RESOLUTION: self.config_entry.options[ + OPTION_VOLUME_RESOLUTION + ], OPTION_MAX_VOLUME: user_input[OPTION_MAX_VOLUME], OPTION_INPUT_SOURCES: sources_store, } @@ -355,22 +353,19 @@ class OnkyoOptionsFlowHandler(OptionsFlowWithConfigEntry): schema_dict: dict[Any, Selector] = {} - max_volume: float = self.options[OPTION_MAX_VOLUME] + max_volume: float = self.config_entry.options[OPTION_MAX_VOLUME] schema_dict[vol.Required(OPTION_MAX_VOLUME, default=max_volume)] = ( NumberSelector( NumberSelectorConfig(min=1, max=100, mode=NumberSelectorMode.BOX) ) ) - sources: dict[InputSource, str] = self.options[OPTION_INPUT_SOURCES] - for source in sources: - schema_dict[vol.Required(source.value_meaning, default=sources[source])] = ( + for source, source_name in self._input_sources.items(): + 
schema_dict[vol.Required(source.value_meaning, default=source_name)] = ( TextSelector() ) - schema = vol.Schema(schema_dict) - return self.async_show_form( step_id="init", - data_schema=schema, + data_schema=vol.Schema(schema_dict), ) diff --git a/homeassistant/components/onvif/config_flow.py b/homeassistant/components/onvif/config_flow.py index 34f322b9f75..66e566af0bf 100644 --- a/homeassistant/components/onvif/config_flow.py +++ b/homeassistant/components/onvif/config_flow.py @@ -391,7 +391,6 @@ class OnvifOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize ONVIF options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: diff --git a/homeassistant/components/openai_conversation/config_flow.py b/homeassistant/components/openai_conversation/config_flow.py index c6b8487ad0d..2a1764e6b5e 100644 --- a/homeassistant/components/openai_conversation/config_flow.py +++ b/homeassistant/components/openai_conversation/config_flow.py @@ -115,7 +115,6 @@ class OpenAIOptionsFlow(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.last_rendered_recommended = config_entry.options.get( CONF_RECOMMENDED, False ) diff --git a/homeassistant/components/openalpr_cloud/manifest.json b/homeassistant/components/openalpr_cloud/manifest.json index 45bce5c7345..5148cb396b6 100644 --- a/homeassistant/components/openalpr_cloud/manifest.json +++ b/homeassistant/components/openalpr_cloud/manifest.json @@ -3,5 +3,6 @@ "name": "OpenALPR Cloud", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/openalpr_cloud", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/openerz/manifest.json b/homeassistant/components/openerz/manifest.json index c7a5a202568..f75e3e492a8 100644 --- a/homeassistant/components/openerz/manifest.json +++ b/homeassistant/components/openerz/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/openerz", "iot_class": "cloud_polling", "loggers": ["openerz_api"], + "quality_scale": "legacy", "requirements": ["openerz-api==0.3.0"] } diff --git a/homeassistant/components/openevse/manifest.json b/homeassistant/components/openevse/manifest.json index 066eb5ee384..45452fe325b 100644 --- a/homeassistant/components/openevse/manifest.json +++ b/homeassistant/components/openevse/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/openevse", "iot_class": "local_polling", "loggers": ["openevsewifi"], + "quality_scale": "legacy", "requirements": ["openevsewifi==1.1.2"] } diff --git a/homeassistant/components/openhardwaremonitor/manifest.json b/homeassistant/components/openhardwaremonitor/manifest.json index 562a2433eab..901424eebc1 100644 --- a/homeassistant/components/openhardwaremonitor/manifest.json +++ b/homeassistant/components/openhardwaremonitor/manifest.json @@ -3,5 +3,6 @@ "name": "Open Hardware Monitor", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/openhardwaremonitor", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/opensensemap/manifest.json b/homeassistant/components/opensensemap/manifest.json index 8fed7ec906e..0256ae42a3a 100644 --- 
a/homeassistant/components/opensensemap/manifest.json +++ b/homeassistant/components/opensensemap/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/opensensemap", "iot_class": "cloud_polling", "loggers": ["opensensemap_api"], + "quality_scale": "legacy", "requirements": ["opensensemap-api==0.2.0"] } diff --git a/homeassistant/components/opensky/config_flow.py b/homeassistant/components/opensky/config_flow.py index 3cfd1ad30a0..867a4781265 100644 --- a/homeassistant/components/opensky/config_flow.py +++ b/homeassistant/components/opensky/config_flow.py @@ -13,12 +13,11 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, - CONF_NAME, CONF_PASSWORD, CONF_RADIUS, CONF_USERNAME, @@ -45,7 +44,7 @@ class OpenSkyConfigFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OpenSkyOptionsFlowHandler: """Get the options flow for this handler.""" - return OpenSkyOptionsFlowHandler(config_entry) + return OpenSkyOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -83,7 +82,7 @@ class OpenSkyConfigFlowHandler(ConfigFlow, domain=DOMAIN): ) -class OpenSkyOptionsFlowHandler(OptionsFlowWithConfigEntry): +class OpenSkyOptionsFlowHandler(OptionsFlow): """OpenSky Options flow handler.""" async def async_step_init( @@ -112,10 +111,7 @@ class OpenSkyOptionsFlowHandler(OptionsFlowWithConfigEntry): except OpenSkyUnauthenticatedError: errors["base"] = "invalid_auth" if not errors: - return self.async_create_entry( - title=self.options.get(CONF_NAME, "OpenSky"), - data=user_input, - ) + return self.async_create_entry(data=user_input) return self.async_show_form( step_id="init", @@ -130,6 +126,6 @@ class OpenSkyOptionsFlowHandler(OptionsFlowWithConfigEntry): vol.Optional(CONF_CONTRIBUTING_USER, default=False): bool, } ), - user_input or self.options, + user_input or self.config_entry.options, ), ) diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index 1f52b47cbad..80c16ee88e1 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -49,7 +49,7 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OpenThermGwOptionsFlow: """Get the options flow for this handler.""" - return OpenThermGwOptionsFlow(config_entry) + return OpenThermGwOptionsFlow() async def async_step_init( self, info: dict[str, Any] | None = None @@ -132,10 +132,6 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): class OpenThermGwOptionsFlow(OptionsFlow): """Handle opentherm_gw options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/openweathermap/config_flow.py b/homeassistant/components/openweathermap/config_flow.py index 5fe06ea2dcd..8d33e117287 100644 --- a/homeassistant/components/openweathermap/config_flow.py +++ b/homeassistant/components/openweathermap/config_flow.py @@ -44,7 +44,7 @@ class OpenWeatherMapConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OpenWeatherMapOptionsFlow: """Get the options flow for this handler.""" - return OpenWeatherMapOptionsFlow(config_entry) + 
return OpenWeatherMapOptionsFlow() async def async_step_user(self, user_input=None) -> ConfigFlowResult: """Handle a flow initialized by the user.""" @@ -97,10 +97,6 @@ class OpenWeatherMapConfigFlow(ConfigFlow, domain=DOMAIN): class OpenWeatherMapOptionsFlow(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/opnsense/manifest.json b/homeassistant/components/opnsense/manifest.json index bf8a41d1785..4dd82216f1a 100644 --- a/homeassistant/components/opnsense/manifest.json +++ b/homeassistant/components/opnsense/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/opnsense", "iot_class": "local_polling", "loggers": ["pbr", "pyopnsense"], + "quality_scale": "legacy", "requirements": ["pyopnsense==0.4.0"] } diff --git a/homeassistant/components/opple/manifest.json b/homeassistant/components/opple/manifest.json index 174907dfd0f..dc28d1f0f33 100644 --- a/homeassistant/components/opple/manifest.json +++ b/homeassistant/components/opple/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/opple", "iot_class": "local_polling", "loggers": ["pyoppleio"], + "quality_scale": "legacy", "requirements": ["pyoppleio-legacy==1.0.8"] } diff --git a/homeassistant/components/oru/manifest.json b/homeassistant/components/oru/manifest.json index 23c43e32306..347388b6f15 100644 --- a/homeassistant/components/oru/manifest.json +++ b/homeassistant/components/oru/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/oru", "iot_class": "cloud_polling", "loggers": ["oru"], + "quality_scale": "legacy", "requirements": ["oru==0.1.11"] } diff --git a/homeassistant/components/orvibo/manifest.json b/homeassistant/components/orvibo/manifest.json index 05ce5edd8bd..e3a6676b2f2 100644 --- a/homeassistant/components/orvibo/manifest.json +++ b/homeassistant/components/orvibo/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/orvibo", "iot_class": "local_push", "loggers": ["orvibo"], + "quality_scale": "legacy", "requirements": ["orvibo==1.1.2"] } diff --git a/homeassistant/components/osramlightify/manifest.json b/homeassistant/components/osramlightify/manifest.json index f6a922a09ec..3b11200f1e5 100644 --- a/homeassistant/components/osramlightify/manifest.json +++ b/homeassistant/components/osramlightify/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/osramlightify", "iot_class": "local_polling", "loggers": ["lightify"], + "quality_scale": "legacy", "requirements": ["lightify==1.0.7.3"] } diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 52fd1dfc669..8c750aec6bd 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -20,7 +20,7 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["boto3", "botocore", "pyhumps", "pyoverkiz", "s3transfer"], - "requirements": ["pyoverkiz==1.14.1"], + "requirements": ["pyoverkiz==1.15.0"], "zeroconf": [ { "type": "_kizbox._tcp.local.", diff --git a/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_mlb_component.py 
b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_mlb_component.py index 1b2a1e218d4..8ba2c1678c2 100644 --- a/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_mlb_component.py +++ b/homeassistant/components/overkiz/water_heater/atlantic_domestic_hot_water_production_mlb_component.py @@ -13,6 +13,7 @@ from homeassistant.components.water_heater import ( WaterHeaterEntityFeature, ) from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.util import dt as dt_util from .. import OverkizDataUpdateCoordinator from ..entity import OverkizEntity @@ -153,11 +154,11 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE async def async_turn_away_mode_on(self) -> None: """Turn away mode on. - This requires the start date and the end date to be also set. + This requires the start date and the end date to be also set, and those dates have to match the device datetime. The API accepts setting dates in the format of the core:DateTimeState state for the DHW - {'day': 11, 'hour': 21, 'minute': 12, 'month': 7, 'second': 53, 'weekday': 3, 'year': 2024}) - The dict is then passed as an away mode start date, and then as an end date, but with the year incremented by 1, - so the away mode is getting turned on for the next year. + {'day': 11, 'hour': 21, 'minute': 12, 'month': 7, 'second': 53, 'weekday': 3, 'year': 2024} + The dict is then passed as an actual device date, the away mode start date, and then as an end date, + but with the year incremented by 1, so the away mode is getting turned on for the next year. The weekday number seems to have no effect so the calculation of the future date's weekday number is redundant, but possible via homeassistant dt_util to form both start and end dates dictionaries from scratch based on datetime.now() and datetime.timedelta into the future. @@ -167,13 +168,19 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE With `refresh_afterwards=False` on the first commands, and `refresh_afterwards=True` only the last command, the API is not choking and the transition is smooth without the unavailability state. 
""" - now_date = cast( - dict, - self.executor.select_state(OverkizState.CORE_DATETIME), - ) + now = dt_util.now() + now_date = { + "month": now.month, + "hour": now.hour, + "year": now.year, + "weekday": now.weekday(), + "day": now.day, + "minute": now.minute, + "second": now.second, + } await self.executor.async_execute_command( - OverkizCommand.SET_ABSENCE_MODE, - OverkizCommandParam.PROG, + OverkizCommand.SET_DATE_TIME, + now_date, refresh_afterwards=False, ) await self.executor.async_execute_command( @@ -183,7 +190,11 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE await self.executor.async_execute_command( OverkizCommand.SET_ABSENCE_END_DATE, now_date, refresh_afterwards=False ) - + await self.executor.async_execute_command( + OverkizCommand.SET_ABSENCE_MODE, + OverkizCommandParam.PROG, + refresh_afterwards=False, + ) await self.coordinator.async_refresh() async def async_turn_away_mode_off(self) -> None: diff --git a/homeassistant/components/p1_monitor/__init__.py b/homeassistant/components/p1_monitor/__init__.py index 3361506dafb..d2ccc83972a 100644 --- a/homeassistant/components/p1_monitor/__init__.py +++ b/homeassistant/components/p1_monitor/__init__.py @@ -7,10 +7,12 @@ from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DOMAIN, LOGGER +from .const import LOGGER from .coordinator import P1MonitorDataUpdateCoordinator -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type P1MonitorConfigEntry = ConfigEntry[P1MonitorDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -23,8 +25,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.p1monitor.close() raise - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -55,7 +56,4 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload P1 Monitor config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - del hass.data[DOMAIN][entry.entry_id] - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/p1_monitor/diagnostics.py b/homeassistant/components/p1_monitor/diagnostics.py index c8b4e99099e..d2e2ec5c24e 100644 --- a/homeassistant/components/p1_monitor/diagnostics.py +++ b/homeassistant/components/p1_monitor/diagnostics.py @@ -11,13 +11,11 @@ from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from .const import ( - DOMAIN, SERVICE_PHASES, SERVICE_SETTINGS, SERVICE_SMARTMETER, SERVICE_WATERMETER, ) -from .coordinator import P1MonitorDataUpdateCoordinator if TYPE_CHECKING: from _typeshed import DataclassInstance @@ -29,23 +27,21 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: P1MonitorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - data = { "entry": { "title": entry.title, "data": async_redact_data(entry.data, TO_REDACT), }, "data": { - "smartmeter": 
asdict(coordinator.data[SERVICE_SMARTMETER]), - "phases": asdict(coordinator.data[SERVICE_PHASES]), - "settings": asdict(coordinator.data[SERVICE_SETTINGS]), + "smartmeter": asdict(entry.runtime_data.data[SERVICE_SMARTMETER]), + "phases": asdict(entry.runtime_data.data[SERVICE_PHASES]), + "settings": asdict(entry.runtime_data.data[SERVICE_SETTINGS]), }, } - if coordinator.has_water_meter: + if entry.runtime_data.has_water_meter: data["data"]["watermeter"] = asdict( - cast("DataclassInstance", coordinator.data[SERVICE_WATERMETER]) + cast("DataclassInstance", entry.runtime_data.data[SERVICE_WATERMETER]) ) return data diff --git a/homeassistant/components/p1_monitor/manifest.json b/homeassistant/components/p1_monitor/manifest.json index dfc681977a5..28016242a6a 100644 --- a/homeassistant/components/p1_monitor/manifest.json +++ b/homeassistant/components/p1_monitor/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/p1_monitor", "iot_class": "local_polling", "loggers": ["p1monitor"], - "quality_scale": "platinum", "requirements": ["p1monitor==3.1.0"] } diff --git a/homeassistant/components/p1_monitor/sensor.py b/homeassistant/components/p1_monitor/sensor.py index 88f6d165f14..771ef0e19af 100644 --- a/homeassistant/components/p1_monitor/sensor.py +++ b/homeassistant/components/p1_monitor/sensor.py @@ -239,11 +239,10 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up P1 Monitor Sensors based on a config entry.""" - coordinator: P1MonitorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] entities: list[P1MonitorSensorEntity] = [] entities.extend( P1MonitorSensorEntity( - coordinator=coordinator, + entry=entry, description=description, name="SmartMeter", service=SERVICE_SMARTMETER, @@ -252,7 +251,7 @@ async def async_setup_entry( ) entities.extend( P1MonitorSensorEntity( - coordinator=coordinator, + entry=entry, description=description, name="Phases", service=SERVICE_PHASES, @@ -261,17 +260,17 @@ async def async_setup_entry( ) entities.extend( P1MonitorSensorEntity( - coordinator=coordinator, + entry=entry, description=description, name="Settings", service=SERVICE_SETTINGS, ) for description in SENSORS_SETTINGS ) - if coordinator.has_water_meter: + if entry.runtime_data.has_water_meter: entities.extend( P1MonitorSensorEntity( - coordinator=coordinator, + entry=entry, description=description, name="WaterMeter", service=SERVICE_WATERMETER, @@ -291,24 +290,26 @@ class P1MonitorSensorEntity( def __init__( self, *, - coordinator: P1MonitorDataUpdateCoordinator, + entry: ConfigEntry, description: SensorEntityDescription, name: str, service: Literal["smartmeter", "watermeter", "phases", "settings"], ) -> None: """Initialize P1 Monitor sensor.""" - super().__init__(coordinator=coordinator) + super().__init__(coordinator=entry.runtime_data) self._service_key = service self.entity_description = description self._attr_unique_id = ( - f"{coordinator.config_entry.entry_id}_{service}_{description.key}" + f"{entry.runtime_data.config_entry.entry_id}_{service}_{description.key}" ) self._attr_device_info = DeviceInfo( entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, f"{coordinator.config_entry.entry_id}_{service}")}, - configuration_url=f"http://{coordinator.config_entry.data[CONF_HOST]}", + identifiers={ + (DOMAIN, f"{entry.runtime_data.config_entry.entry_id}_{service}") + }, + configuration_url=f"http://{entry.runtime_data.config_entry.data[CONF_HOST]}", manufacturer="P1 
Monitor", name=name, ) diff --git a/homeassistant/components/palazzetti/__init__.py b/homeassistant/components/palazzetti/__init__.py index ecaa8089097..4bea4434496 100644 --- a/homeassistant/components/palazzetti/__init__.py +++ b/homeassistant/components/palazzetti/__init__.py @@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant from .coordinator import PalazzettiConfigEntry, PalazzettiDataUpdateCoordinator -PLATFORMS: list[Platform] = [Platform.CLIMATE] +PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: PalazzettiConfigEntry) -> bool: diff --git a/homeassistant/components/palazzetti/climate.py b/homeassistant/components/palazzetti/climate.py index aff988051f3..356f3a7306f 100644 --- a/homeassistant/components/palazzetti/climate.py +++ b/homeassistant/components/palazzetti/climate.py @@ -7,18 +7,18 @@ from pypalazzetti.exceptions import CommunicationError, ValidationError from homeassistant.components.climate import ( ClimateEntity, ClimateEntityFeature, + HVACAction, HVACMode, ) from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import PalazzettiConfigEntry -from .const import DOMAIN, FAN_AUTO, FAN_HIGH, FAN_MODES, FAN_SILENT, PALAZZETTI +from .const import DOMAIN, FAN_AUTO, FAN_HIGH, FAN_MODES, FAN_SILENT from .coordinator import PalazzettiDataUpdateCoordinator +from .entity import PalazzettiEntity async def async_setup_entry( @@ -30,9 +30,7 @@ async def async_setup_entry( async_add_entities([PalazzettiClimateEntity(entry.runtime_data)]) -class PalazzettiClimateEntity( - CoordinatorEntity[PalazzettiDataUpdateCoordinator], ClimateEntity -): +class PalazzettiClimateEntity(PalazzettiEntity, ClimateEntity): """Defines a Palazzetti climate.""" _attr_has_entity_name = True @@ -52,15 +50,7 @@ class PalazzettiClimateEntity( super().__init__(coordinator) client = coordinator.client mac = coordinator.config_entry.unique_id - assert mac is not None self._attr_unique_id = mac - self._attr_device_info = dr.DeviceInfo( - connections={(dr.CONNECTION_NETWORK_MAC, mac)}, - name=client.name, - manufacturer=PALAZZETTI, - sw_version=client.sw_version, - hw_version=client.hw_version, - ) self._attr_hvac_modes = [HVACMode.HEAT, HVACMode.OFF] self._attr_min_temp = client.target_temperature_min self._attr_max_temp = client.target_temperature_max @@ -74,16 +64,19 @@ class PalazzettiClimateEntity( if client.has_fan_auto: self._attr_fan_modes.append(FAN_AUTO) - @property - def available(self) -> bool: - """Is the entity available.""" - return super().available and self.coordinator.client.connected - @property def hvac_mode(self) -> HVACMode: """Return hvac operation ie. heat or off mode.""" - is_heating = bool(self.coordinator.client.is_heating) - return HVACMode.HEAT if is_heating else HVACMode.OFF + return HVACMode.HEAT if self.coordinator.client.is_on else HVACMode.OFF + + @property + def hvac_action(self) -> HVACAction: + """Return hvac action ie. 
heating or idle.""" + return ( + HVACAction.HEATING + if self.coordinator.client.is_heating + else HVACAction.IDLE + ) async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target hvac mode.""" diff --git a/homeassistant/components/palazzetti/config_flow.py b/homeassistant/components/palazzetti/config_flow.py index a58461b9ca7..fe892b6624d 100644 --- a/homeassistant/components/palazzetti/config_flow.py +++ b/homeassistant/components/palazzetti/config_flow.py @@ -6,6 +6,7 @@ from pypalazzetti.client import PalazzettiClient from pypalazzetti.exceptions import CommunicationError import voluptuous as vol +from homeassistant.components import dhcp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from homeassistant.helpers import device_registry as dr @@ -16,6 +17,8 @@ from .const import DOMAIN, LOGGER class PalazzettiConfigFlow(ConfigFlow, domain=DOMAIN): """Palazzetti config flow.""" + _discovered_device: PalazzettiClient + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -48,3 +51,41 @@ class PalazzettiConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_HOST): str}), errors=errors, ) + + async def async_step_dhcp( + self, discovery_info: dhcp.DhcpServiceInfo + ) -> ConfigFlowResult: + """Handle DHCP discovery.""" + + LOGGER.debug( + "DHCP discovery detected Palazzetti: %s", discovery_info.macaddress + ) + + await self.async_set_unique_id(dr.format_mac(discovery_info.macaddress)) + self._abort_if_unique_id_configured() + self._discovered_device = PalazzettiClient(hostname=discovery_info.ip) + try: + await self._discovered_device.connect() + except CommunicationError: + return self.async_abort(reason="cannot_connect") + + return await self.async_step_discovery_confirm() + + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm discovery.""" + if user_input is not None: + return self.async_create_entry( + title=self._discovered_device.name, + data={CONF_HOST: self._discovered_device.host}, + ) + + self._set_confirm_only() + return self.async_show_form( + step_id="discovery_confirm", + description_placeholders={ + "name": self._discovered_device.name, + "host": self._discovered_device.host, + }, + ) diff --git a/homeassistant/components/palazzetti/diagnostics.py b/homeassistant/components/palazzetti/diagnostics.py new file mode 100644 index 00000000000..3843f0ec111 --- /dev/null +++ b/homeassistant/components/palazzetti/diagnostics.py @@ -0,0 +1,20 @@ +"""Provides diagnostics for Palazzetti.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . 
import PalazzettiConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: PalazzettiConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + client = entry.runtime_data.client + + return { + "api_data": client.to_dict(redact=True), + } diff --git a/homeassistant/components/palazzetti/entity.py b/homeassistant/components/palazzetti/entity.py new file mode 100644 index 00000000000..677c6ccbdc4 --- /dev/null +++ b/homeassistant/components/palazzetti/entity.py @@ -0,0 +1,32 @@ +"""Base class for Palazzetti entities.""" + +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import PALAZZETTI +from .coordinator import PalazzettiDataUpdateCoordinator + + +class PalazzettiEntity(CoordinatorEntity[PalazzettiDataUpdateCoordinator]): + """Defines a base Palazzetti entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: PalazzettiDataUpdateCoordinator) -> None: + """Initialize Palazzetti entity.""" + super().__init__(coordinator) + client = coordinator.client + mac = coordinator.config_entry.unique_id + assert mac is not None + self._attr_device_info = dr.DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, mac)}, + name=client.name, + manufacturer=PALAZZETTI, + sw_version=client.sw_version, + hw_version=client.hw_version, + ) + + @property + def available(self) -> bool: + """Is the entity available.""" + return super().available and self.coordinator.client.connected diff --git a/homeassistant/components/palazzetti/manifest.json b/homeassistant/components/palazzetti/manifest.json index a1b25f563bf..05a5d260b50 100644 --- a/homeassistant/components/palazzetti/manifest.json +++ b/homeassistant/components/palazzetti/manifest.json @@ -3,8 +3,17 @@ "name": "Palazzetti", "codeowners": ["@dotvav"], "config_flow": true, + "dhcp": [ + { + "hostname": "connbox*", + "macaddress": "40F3857*" + }, + { + "registered_devices": true + } + ], "documentation": "https://www.home-assistant.io/integrations/palazzetti", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["pypalazzetti==0.1.10"] + "requirements": ["pypalazzetti==0.1.14"] } diff --git a/homeassistant/components/palazzetti/quality_scale.yaml b/homeassistant/components/palazzetti/quality_scale.yaml new file mode 100644 index 00000000000..493b2595117 --- /dev/null +++ b/homeassistant/components/palazzetti/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not register actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not register actions. + docs-high-level-description: done + docs-installation-instructions: todo + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + This integration does not subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have configuration. 
+ docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + test-coverage: todo + # Gold + devices: done + diagnostics: done + discovery-update-info: done + discovery: done + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: done + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration connects to a single device. + entity-category: todo + entity-device-class: done + entity-disabled-by-default: todo + entity-translations: done + exception-translations: done + icon-translations: + status: exempt + comment: | + This integration does not have custom icons. + reconfiguration-flow: todo + repair-issues: + status: exempt + comment: | + This integration does not raise any repairable issues. + stale-devices: + status: exempt + comment: | + This integration connects to a single device. + + # Platinum + async-dependency: done + inject-websession: todo + strict-typing: todo diff --git a/homeassistant/components/palazzetti/sensor.py b/homeassistant/components/palazzetti/sensor.py new file mode 100644 index 00000000000..ead2b236b17 --- /dev/null +++ b/homeassistant/components/palazzetti/sensor.py @@ -0,0 +1,106 @@ +"""Support for Palazzetti sensors.""" + +from dataclasses import dataclass + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfLength, UnitOfMass, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . 
import PalazzettiConfigEntry +from .coordinator import PalazzettiDataUpdateCoordinator +from .entity import PalazzettiEntity + + +@dataclass(frozen=True, kw_only=True) +class PropertySensorEntityDescription(SensorEntityDescription): + """Describes a Palazzetti sensor entity that is read from a `PalazzettiClient` property.""" + + client_property: str + presence_flag: None | str = None + + +PROPERTY_SENSOR_DESCRIPTIONS: list[PropertySensorEntityDescription] = [ + PropertySensorEntityDescription( + key="pellet_quantity", + device_class=SensorDeviceClass.WEIGHT, + native_unit_of_measurement=UnitOfMass.KILOGRAMS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="pellet_quantity", + client_property="pellet_quantity", + ), + PropertySensorEntityDescription( + key="pellet_level", + device_class=SensorDeviceClass.DISTANCE, + native_unit_of_measurement=UnitOfLength.CENTIMETERS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="pellet_level", + presence_flag="has_pellet_level", + client_property="pellet_level", + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: PalazzettiConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Palazzetti sensor entities based on a config entry.""" + + coordinator = entry.runtime_data + + sensors = [ + PalazzettiSensor( + coordinator, + PropertySensorEntityDescription( + key=sensor.description_key.value, + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key=sensor.description_key.value, + client_property=sensor.state_property, + ), + ) + for sensor in coordinator.client.list_temperatures() + ] + + sensors.extend( + [ + PalazzettiSensor(coordinator, description) + for description in PROPERTY_SENSOR_DESCRIPTIONS + if not description.presence_flag + or getattr(coordinator.client, description.presence_flag) + ] + ) + + if sensors: + async_add_entities(sensors) + + +class PalazzettiSensor(PalazzettiEntity, SensorEntity): + """Define a Palazzetti sensor.""" + + entity_description: PropertySensorEntityDescription + + def __init__( + self, + coordinator: PalazzettiDataUpdateCoordinator, + description: PropertySensorEntityDescription, + ) -> None: + """Initialize Palazzetti sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.config_entry.unique_id}-{description.key}" + + @property + def native_value(self) -> StateType: + """Return the state value of the sensor.""" + + return getattr(self.coordinator.client, self.entity_description.client_property) diff --git a/homeassistant/components/palazzetti/strings.json b/homeassistant/components/palazzetti/strings.json index fdf50f29f0d..435ec0aab85 100644 --- a/homeassistant/components/palazzetti/strings.json +++ b/homeassistant/components/palazzetti/strings.json @@ -8,6 +8,9 @@ "data_description": { "host": "The host name or the IP address of the Palazzetti CBox" } + }, + "discovery_confirm": { + "description": "Do you want to add {name} ({host}) to Home Assistant?" } }, "abort": { @@ -24,7 +27,7 @@ "invalid_fan_mode": { "message": "Fan mode {value} is invalid." }, - "invalid_target_temperatures": { + "invalid_target_temperature": { "message": "Target temperature {value} is invalid." 
}, "cannot_connect": { @@ -44,6 +47,35 @@ } } } + }, + "sensor": { + "pellet_quantity": { + "name": "Pellet quantity" + }, + "pellet_level": { + "name": "Pellet level" + }, + "air_outlet_temperature": { + "name": "Air outlet temperature" + }, + "wood_combustion_temperature": { + "name": "Wood combustion temperature" + }, + "room_temperature": { + "name": "Room temperature" + }, + "return_water_temperature": { + "name": "Return water temperature" + }, + "tank_water_temperature": { + "name": "Tank water temperature" + }, + "t1_hydro": { + "name": "Hydro temperature 1" + }, + "t2_hydro": { + "name": "Hydro temperature 2" + } } } } diff --git a/homeassistant/components/panasonic_bluray/manifest.json b/homeassistant/components/panasonic_bluray/manifest.json index fa0202c0871..3de12b051e5 100644 --- a/homeassistant/components/panasonic_bluray/manifest.json +++ b/homeassistant/components/panasonic_bluray/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/panasonic_bluray", "iot_class": "local_polling", "loggers": ["panacotta"], + "quality_scale": "legacy", "requirements": ["panacotta==0.2"] } diff --git a/homeassistant/components/pandora/manifest.json b/homeassistant/components/pandora/manifest.json index b86f0754af3..e7d8946fb38 100644 --- a/homeassistant/components/pandora/manifest.json +++ b/homeassistant/components/pandora/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pandora", "iot_class": "local_polling", "loggers": ["pexpect", "ptyprocess"], + "quality_scale": "legacy", "requirements": ["pexpect==4.6.0"] } diff --git a/homeassistant/components/pegel_online/coordinator.py b/homeassistant/components/pegel_online/coordinator.py index 1802af8e05c..c8233673fde 100644 --- a/homeassistant/components/pegel_online/coordinator.py +++ b/homeassistant/components/pegel_online/coordinator.py @@ -7,7 +7,7 @@ from aiopegelonline import CONNECT_ERRORS, PegelOnline, Station, StationMeasurem from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import MIN_TIME_BETWEEN_UPDATES +from .const import DOMAIN, MIN_TIME_BETWEEN_UPDATES _LOGGER = logging.getLogger(__name__) @@ -33,4 +33,8 @@ class PegelOnlineDataUpdateCoordinator(DataUpdateCoordinator[StationMeasurements try: return await self.api.async_get_station_measurements(self.station.uuid) except CONNECT_ERRORS as err: - raise UpdateFailed(f"Failed to communicate with API: {err}") from err + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="communication_error", + translation_placeholders={"error": str(err)}, + ) from err diff --git a/homeassistant/components/pegel_online/manifest.json b/homeassistant/components/pegel_online/manifest.json index d51278d0c1b..443e8c58467 100644 --- a/homeassistant/components/pegel_online/manifest.json +++ b/homeassistant/components/pegel_online/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aiopegelonline"], - "requirements": ["aiopegelonline==0.0.10"] + "requirements": ["aiopegelonline==0.1.0"] } diff --git a/homeassistant/components/pegel_online/strings.json b/homeassistant/components/pegel_online/strings.json index e777f6169ba..b8d18e63a4f 100644 --- a/homeassistant/components/pegel_online/strings.json +++ b/homeassistant/components/pegel_online/strings.json @@ -48,5 +48,10 @@ "name": "Water temperature" } } + }, + "exceptions": { + "communication_error": { + "message": "Failed 
to communicate with API: {error}" + } } } diff --git a/homeassistant/components/pencom/manifest.json b/homeassistant/components/pencom/manifest.json index 34ebe315972..306b2e7be49 100644 --- a/homeassistant/components/pencom/manifest.json +++ b/homeassistant/components/pencom/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pencom", "iot_class": "local_polling", "loggers": ["pencompy"], + "quality_scale": "legacy", "requirements": ["pencompy==0.0.3"] } diff --git a/homeassistant/components/pi_hole/sensor.py b/homeassistant/components/pi_hole/sensor.py index 503883e9326..4cf5133e700 100644 --- a/homeassistant/components/pi_hole/sensor.py +++ b/homeassistant/components/pi_hole/sensor.py @@ -18,7 +18,6 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key="ads_blocked_today", translation_key="ads_blocked_today", - native_unit_of_measurement="ads", ), SensorEntityDescription( key="ads_percentage_today", @@ -28,38 +27,20 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key="clients_ever_seen", translation_key="clients_ever_seen", - native_unit_of_measurement="clients", ), SensorEntityDescription( - key="dns_queries_today", - translation_key="dns_queries_today", - native_unit_of_measurement="queries", + key="dns_queries_today", translation_key="dns_queries_today" ), SensorEntityDescription( key="domains_being_blocked", translation_key="domains_being_blocked", - native_unit_of_measurement="domains", ), + SensorEntityDescription(key="queries_cached", translation_key="queries_cached"), SensorEntityDescription( - key="queries_cached", - translation_key="queries_cached", - native_unit_of_measurement="queries", - ), - SensorEntityDescription( - key="queries_forwarded", - translation_key="queries_forwarded", - native_unit_of_measurement="queries", - ), - SensorEntityDescription( - key="unique_clients", - translation_key="unique_clients", - native_unit_of_measurement="clients", - ), - SensorEntityDescription( - key="unique_domains", - translation_key="unique_domains", - native_unit_of_measurement="domains", + key="queries_forwarded", translation_key="queries_forwarded" ), + SensorEntityDescription(key="unique_clients", translation_key="unique_clients"), + SensorEntityDescription(key="unique_domains", translation_key="unique_domains"), ) diff --git a/homeassistant/components/pi_hole/strings.json b/homeassistant/components/pi_hole/strings.json index b76b61f1903..9e1d5948a09 100644 --- a/homeassistant/components/pi_hole/strings.json +++ b/homeassistant/components/pi_hole/strings.json @@ -41,31 +41,39 @@ }, "sensor": { "ads_blocked_today": { - "name": "Ads blocked today" + "name": "Ads blocked today", + "unit_of_measurement": "ads" }, "ads_percentage_today": { "name": "Ads percentage blocked today" }, "clients_ever_seen": { - "name": "Seen clients" + "name": "Seen clients", + "unit_of_measurement": "clients" }, "dns_queries_today": { - "name": "DNS queries today" + "name": "DNS queries today", + "unit_of_measurement": "queries" }, "domains_being_blocked": { - "name": "Domains blocked" + "name": "Domains blocked", + "unit_of_measurement": "domains" }, "queries_cached": { - "name": "DNS queries cached" + "name": "DNS queries cached", + "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::dns_queries_today::unit_of_measurement%]" }, "queries_forwarded": { - "name": "DNS queries forwarded" + "name": "DNS queries forwarded", + "unit_of_measurement": 
"[%key:component::pi_hole::entity::sensor::dns_queries_today::unit_of_measurement%]" }, "unique_clients": { - "name": "DNS unique clients" + "name": "DNS unique clients", + "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::clients_ever_seen::unit_of_measurement%]" }, "unique_domains": { - "name": "DNS unique domains" + "name": "DNS unique domains", + "unit_of_measurement": "[%key:component::pi_hole::entity::sensor::domains_being_blocked::unit_of_measurement%]" } }, "update": { diff --git a/homeassistant/components/picotts/manifest.json b/homeassistant/components/picotts/manifest.json index 74b91e187ba..6e8c346a3c9 100644 --- a/homeassistant/components/picotts/manifest.json +++ b/homeassistant/components/picotts/manifest.json @@ -3,5 +3,6 @@ "name": "Pico TTS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/picotts", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/pilight/manifest.json b/homeassistant/components/pilight/manifest.json index 341d0abdf67..da07c4ee645 100644 --- a/homeassistant/components/pilight/manifest.json +++ b/homeassistant/components/pilight/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pilight", "iot_class": "local_push", "loggers": ["pilight"], + "quality_scale": "legacy", "requirements": ["pilight==0.1.1"] } diff --git a/homeassistant/components/ping/__init__.py b/homeassistant/components/ping/__init__.py index f4a04caae5b..4b03e5e4407 100644 --- a/homeassistant/components/ping/__init__.py +++ b/homeassistant/components/ping/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -from dataclasses import dataclass import logging from icmplib import SocketPermissionError, async_ping @@ -12,6 +11,7 @@ from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey from .const import CONF_PING_COUNT, DOMAIN from .coordinator import PingUpdateCoordinator @@ -21,13 +21,7 @@ _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) PLATFORMS = [Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, Platform.SENSOR] - - -@dataclass(slots=True) -class PingDomainData: - """Dataclass to store privileged status.""" - - privileged: bool | None +DATA_PRIVILEGED_KEY: HassKey[bool | None] = HassKey(DOMAIN) type PingConfigEntry = ConfigEntry[PingUpdateCoordinator] @@ -35,29 +29,25 @@ type PingConfigEntry = ConfigEntry[PingUpdateCoordinator] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the ping integration.""" - - hass.data[DOMAIN] = PingDomainData( - privileged=await _can_use_icmp_lib_with_privilege(), - ) + hass.data[DATA_PRIVILEGED_KEY] = await _can_use_icmp_lib_with_privilege() return True async def async_setup_entry(hass: HomeAssistant, entry: PingConfigEntry) -> bool: """Set up Ping (ICMP) from a config entry.""" - - data: PingDomainData = hass.data[DOMAIN] + privileged = hass.data[DATA_PRIVILEGED_KEY] host: str = entry.options[CONF_HOST] count: int = int(entry.options[CONF_PING_COUNT]) ping_cls: type[PingDataICMPLib | PingDataSubProcess] - if data.privileged is None: + if privileged is None: ping_cls = PingDataSubProcess else: ping_cls = PingDataICMPLib coordinator = PingUpdateCoordinator( - hass=hass, ping=ping_cls(hass, host, count, 
data.privileged) + hass=hass, ping=ping_cls(hass, host, count, privileged) ) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/ping/config_flow.py b/homeassistant/components/ping/config_flow.py index 505e0a370a0..27cb3f62bcd 100644 --- a/homeassistant/components/ping/config_flow.py +++ b/homeassistant/components/ping/config_flow.py @@ -73,16 +73,12 @@ class PingConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an options flow for Ping.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/pioneer/manifest.json b/homeassistant/components/pioneer/manifest.json index c8aa3a79789..019b7680e09 100644 --- a/homeassistant/components/pioneer/manifest.json +++ b/homeassistant/components/pioneer/manifest.json @@ -3,5 +3,6 @@ "name": "Pioneer", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/pioneer", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/pjlink/manifest.json b/homeassistant/components/pjlink/manifest.json index 553ed185241..787311b250a 100644 --- a/homeassistant/components/pjlink/manifest.json +++ b/homeassistant/components/pjlink/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pjlink", "iot_class": "local_polling", "loggers": ["pypjlink"], + "quality_scale": "legacy", "requirements": ["pypjlink2==1.2.1"] } diff --git a/homeassistant/components/plaato/config_flow.py b/homeassistant/components/plaato/config_flow.py index 74967c417a4..f398a733cd6 100644 --- a/homeassistant/components/plaato/config_flow.py +++ b/homeassistant/components/plaato/config_flow.py @@ -176,23 +176,19 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> PlaatoOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> PlaatoOptionsFlowHandler: """Get the options flow for this handler.""" - return PlaatoOptionsFlowHandler(config_entry) + return PlaatoOptionsFlowHandler() class PlaatoOptionsFlowHandler(OptionsFlow): """Handle Plaato options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize domain options flow.""" - super().__init__() - - self._config_entry = config_entry - async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the options.""" - use_webhook = self._config_entry.data.get(CONF_USE_WEBHOOK, False) + use_webhook = self.config_entry.data.get(CONF_USE_WEBHOOK, False) if use_webhook: return await self.async_step_webhook() @@ -211,7 +207,7 @@ class PlaatoOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_SCAN_INTERVAL, - default=self._config_entry.options.get( + default=self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ), ): cv.positive_int @@ -226,7 +222,7 @@ class PlaatoOptionsFlowHandler(OptionsFlow): if user_input is not None: return self.async_create_entry(title="", data=user_input) - webhook_id = self._config_entry.data.get(CONF_WEBHOOK_ID, None) + webhook_id = self.config_entry.data.get(CONF_WEBHOOK_ID, None) webhook_url = ( "" 
if webhook_id is None diff --git a/homeassistant/components/plex/config_flow.py b/homeassistant/components/plex/config_flow.py index fcd5751effb..ae7cbb12574 100644 --- a/homeassistant/components/plex/config_flow.py +++ b/homeassistant/components/plex/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -import copy +from copy import deepcopy import logging from typing import TYPE_CHECKING, Any @@ -385,7 +385,7 @@ class PlexOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize Plex options flow.""" - self.options = copy.deepcopy(dict(config_entry.options)) + self.options = deepcopy(dict(config_entry.options)) self.server_id = config_entry.data[CONF_SERVER_IDENTIFIER] async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: diff --git a/homeassistant/components/plugwise/__init__.py b/homeassistant/components/plugwise/__init__.py index 7d1b9ceac8a..a100103b029 100644 --- a/homeassistant/components/plugwise/__init__.py +++ b/homeassistant/components/plugwise/__init__.py @@ -83,7 +83,7 @@ def migrate_sensor_entities( # Migrating opentherm_outdoor_temperature # to opentherm_outdoor_air_temperature sensor for device_id, device in coordinator.data.devices.items(): - if device.get("dev_class") != "heater_central": + if device["dev_class"] != "heater_central": continue old_unique_id = f"{device_id}-outdoor_temperature" diff --git a/homeassistant/components/plugwise/climate.py b/homeassistant/components/plugwise/climate.py index 7b0fe35835d..242b0944782 100644 --- a/homeassistant/components/plugwise/climate.py +++ b/homeassistant/components/plugwise/climate.py @@ -39,11 +39,19 @@ async def async_setup_entry( if not coordinator.new_devices: return - async_add_entities( - PlugwiseClimateEntity(coordinator, device_id) - for device_id in coordinator.new_devices - if coordinator.data.devices[device_id]["dev_class"] in MASTER_THERMOSTATS - ) + if coordinator.data.gateway["smile_name"] == "Adam": + async_add_entities( + PlugwiseClimateEntity(coordinator, device_id) + for device_id in coordinator.new_devices + if coordinator.data.devices[device_id]["dev_class"] == "climate" + ) + else: + async_add_entities( + PlugwiseClimateEntity(coordinator, device_id) + for device_id in coordinator.new_devices + if coordinator.data.devices[device_id]["dev_class"] + in MASTER_THERMOSTATS + ) _add_entities() entry.async_on_unload(coordinator.async_add_listener(_add_entities)) @@ -69,15 +77,19 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): super().__init__(coordinator, device_id) self._attr_extra_state_attributes = {} self._attr_unique_id = f"{device_id}-climate" - self.cdr_gateway = coordinator.data.gateway - gateway_id: str = coordinator.data.gateway["gateway_id"] - self.gateway_data = coordinator.data.devices[gateway_id] + + self._devices = coordinator.data.devices + self._gateway = coordinator.data.gateway + gateway_id: str = self._gateway["gateway_id"] + self._gateway_data = self._devices[gateway_id] + + self._location = device_id + if (location := self.device.get("location")) is not None: + self._location = location + # Determine supported features self._attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE - if ( - self.cdr_gateway["cooling_present"] - and self.cdr_gateway["smile_name"] != "Adam" - ): + if self._gateway["cooling_present"] and self._gateway["smile_name"] != "Adam": self._attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE_RANGE ) @@ -103,10 
+115,10 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): """ # When no cooling available, _previous_mode is always heating if ( - "regulation_modes" in self.gateway_data - and "cooling" in self.gateway_data["regulation_modes"] + "regulation_modes" in self._gateway_data + and "cooling" in self._gateway_data["regulation_modes"] ): - mode = self.gateway_data["select_regulation_mode"] + mode = self._gateway_data["select_regulation_mode"] if mode in ("cooling", "heating"): self._previous_mode = mode @@ -143,7 +155,9 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): @property def hvac_mode(self) -> HVACMode: """Return HVAC operation ie. auto, cool, heat, heat_cool, or off mode.""" - if (mode := self.device.get("mode")) is None or mode not in self.hvac_modes: + if ( + mode := self.device.get("climate_mode") + ) is None or mode not in self.hvac_modes: return HVACMode.HEAT return HVACMode(mode) @@ -151,17 +165,17 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): def hvac_modes(self) -> list[HVACMode]: """Return a list of available HVACModes.""" hvac_modes: list[HVACMode] = [] - if "regulation_modes" in self.gateway_data: + if "regulation_modes" in self._gateway_data: hvac_modes.append(HVACMode.OFF) if "available_schedules" in self.device: hvac_modes.append(HVACMode.AUTO) - if self.cdr_gateway["cooling_present"]: - if "regulation_modes" in self.gateway_data: - if self.gateway_data["select_regulation_mode"] == "cooling": + if self._gateway["cooling_present"]: + if "regulation_modes" in self._gateway_data: + if self._gateway_data["select_regulation_mode"] == "cooling": hvac_modes.append(HVACMode.COOL) - if self.gateway_data["select_regulation_mode"] == "heating": + if self._gateway_data["select_regulation_mode"] == "heating": hvac_modes.append(HVACMode.HEAT) else: hvac_modes.append(HVACMode.HEAT_COOL) @@ -177,17 +191,21 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): self._previous_action_mode(self.coordinator) # Adam provides the hvac_action for each thermostat - if (control_state := self.device.get("control_state")) == "cooling": - return HVACAction.COOLING - if control_state == "heating": - return HVACAction.HEATING - if control_state == "preheating": - return HVACAction.PREHEATING - if control_state == "off": + if self._gateway["smile_name"] == "Adam": + if (control_state := self.device.get("control_state")) == "cooling": + return HVACAction.COOLING + if control_state == "heating": + return HVACAction.HEATING + if control_state == "preheating": + return HVACAction.PREHEATING + if control_state == "off": + return HVACAction.IDLE + return HVACAction.IDLE - heater: str = self.coordinator.data.gateway["heater_id"] - heater_data = self.coordinator.data.devices[heater] + # Anna + heater: str = self._gateway["heater_id"] + heater_data = self._devices[heater] if heater_data["binary_sensors"]["heating_state"]: return HVACAction.HEATING if heater_data["binary_sensors"].get("cooling_state", False): @@ -220,7 +238,7 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): if mode := kwargs.get(ATTR_HVAC_MODE): await self.async_set_hvac_mode(mode) - await self.coordinator.api.set_temperature(self.device["location"], data) + await self.coordinator.api.set_temperature(self._location, data) @plugwise_command async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: @@ -235,7 +253,7 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): await self.coordinator.api.set_regulation_mode(hvac_mode) else: await 
self.coordinator.api.set_schedule_state( - self.device["location"], + self._location, "on" if hvac_mode == HVACMode.AUTO else "off", ) if self.hvac_mode == HVACMode.OFF: @@ -244,4 +262,4 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity): @plugwise_command async def async_set_preset_mode(self, preset_mode: str) -> None: """Set the preset mode.""" - await self.coordinator.api.set_preset(self.device["location"], preset_mode) + await self.coordinator.api.set_preset(self._location, preset_mode) diff --git a/homeassistant/components/plugwise/config_flow.py b/homeassistant/components/plugwise/config_flow.py index b0d68aaa33b..57abb1ccb86 100644 --- a/homeassistant/components/plugwise/config_flow.py +++ b/homeassistant/components/plugwise/config_flow.py @@ -71,7 +71,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> Smile: password=data[CONF_PASSWORD], port=data[CONF_PORT], username=data[CONF_USERNAME], - timeout=30, websession=websession, ) await api.connect() diff --git a/homeassistant/components/plugwise/coordinator.py b/homeassistant/components/plugwise/coordinator.py index da2ef810d35..6ce6855e7d6 100644 --- a/homeassistant/components/plugwise/coordinator.py +++ b/homeassistant/components/plugwise/coordinator.py @@ -54,7 +54,6 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): username=self.config_entry.data.get(CONF_USERNAME, DEFAULT_USERNAME), password=self.config_entry.data[CONF_PASSWORD], port=self.config_entry.data.get(CONF_PORT, DEFAULT_PORT), - timeout=30, websession=async_get_clientsession(hass, verify_ssl=False), ) self._current_devices: set[str] = set() @@ -65,11 +64,11 @@ class PlugwiseDataUpdateCoordinator(DataUpdateCoordinator[PlugwiseData]): version = await self.api.connect() self._connected = isinstance(version, Version) if self._connected: - self.api.get_all_devices() + self.api.get_all_gateway_entities() async def _async_update_data(self) -> PlugwiseData: """Fetch data from Plugwise.""" - data = PlugwiseData({}, {}) + data = PlugwiseData(devices={}, gateway={}) try: if not self._connected: await self._connect() diff --git a/homeassistant/components/plugwise/diagnostics.py b/homeassistant/components/plugwise/diagnostics.py index 9d15ea4fe28..47ff7d1a9fb 100644 --- a/homeassistant/components/plugwise/diagnostics.py +++ b/homeassistant/components/plugwise/diagnostics.py @@ -15,6 +15,6 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" coordinator = entry.runtime_data return { - "gateway": coordinator.data.gateway, "devices": coordinator.data.devices, + "gateway": coordinator.data.gateway, } diff --git a/homeassistant/components/plugwise/entity.py b/homeassistant/components/plugwise/entity.py index e24f3d1e1bb..7b28bf78342 100644 --- a/homeassistant/components/plugwise/entity.py +++ b/homeassistant/components/plugwise/entity.py @@ -2,7 +2,7 @@ from __future__ import annotations -from plugwise.constants import DeviceData +from plugwise.constants import GwEntityData from homeassistant.const import ATTR_NAME, ATTR_VIA_DEVICE, CONF_HOST from homeassistant.helpers.device_registry import ( @@ -74,7 +74,7 @@ class PlugwiseEntity(CoordinatorEntity[PlugwiseDataUpdateCoordinator]): ) @property - def device(self) -> DeviceData: + def device(self) -> GwEntityData: """Return data for this device.""" return self.coordinator.data.devices[self._dev_id] diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index a4253a30cb5..d4d80749a8d 
100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==1.4.4"], + "requirements": ["plugwise==1.6.0"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/homeassistant/components/plugwise/number.py b/homeassistant/components/plugwise/number.py index 06db5faa55b..833ea3ec761 100644 --- a/homeassistant/components/plugwise/number.py +++ b/homeassistant/components/plugwise/number.py @@ -91,12 +91,12 @@ class PlugwiseNumberEntity(PlugwiseEntity, NumberEntity): ) -> None: """Initiate Plugwise Number.""" super().__init__(coordinator, device_id) - self.device_id = device_id - self.entity_description = description - self._attr_unique_id = f"{device_id}-{description.key}" self._attr_mode = NumberMode.BOX self._attr_native_max_value = self.device[description.key]["upper_bound"] self._attr_native_min_value = self.device[description.key]["lower_bound"] + self._attr_unique_id = f"{device_id}-{description.key}" + self.device_id = device_id + self.entity_description = description native_step = self.device[description.key]["resolution"] if description.key != "temperature_offset": diff --git a/homeassistant/components/plugwise/select.py b/homeassistant/components/plugwise/select.py index b7d4a0a1ded..46b27ca6225 100644 --- a/homeassistant/components/plugwise/select.py +++ b/homeassistant/components/plugwise/select.py @@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import PlugwiseConfigEntry -from .const import LOCATION, SelectOptionsType, SelectType +from .const import SelectOptionsType, SelectType from .coordinator import PlugwiseDataUpdateCoordinator from .entity import PlugwiseEntity from .util import plugwise_command @@ -89,8 +89,12 @@ class PlugwiseSelectEntity(PlugwiseEntity, SelectEntity): ) -> None: """Initialise the selector.""" super().__init__(coordinator, device_id) - self.entity_description = entity_description self._attr_unique_id = f"{device_id}-{entity_description.key}" + self.entity_description = entity_description + + self._location = device_id + if (location := self.device.get("location")) is not None: + self._location = location @property def current_option(self) -> str: @@ -106,8 +110,8 @@ class PlugwiseSelectEntity(PlugwiseEntity, SelectEntity): async def async_select_option(self, option: str) -> None: """Change to the selected entity option. - self.device[LOCATION] and STATE_ON are required for the thermostat-schedule select. + self._location and STATE_ON are required for the thermostat-schedule select. 
""" await self.coordinator.api.set_select( - self.entity_description.key, self.device[LOCATION], option, STATE_ON + self.entity_description.key, self._location, option, STATE_ON ) diff --git a/homeassistant/components/plugwise/sensor.py b/homeassistant/components/plugwise/sensor.py index ae5b4e6ed91..41ca439451a 100644 --- a/homeassistant/components/plugwise/sensor.py +++ b/homeassistant/components/plugwise/sensor.py @@ -439,8 +439,8 @@ class PlugwiseSensorEntity(PlugwiseEntity, SensorEntity): ) -> None: """Initialise the sensor.""" super().__init__(coordinator, device_id) - self.entity_description = description self._attr_unique_id = f"{device_id}-{description.key}" + self.entity_description = description @property def native_value(self) -> int | float: diff --git a/homeassistant/components/plugwise/switch.py b/homeassistant/components/plugwise/switch.py index a134ab5b044..744fc0a2b72 100644 --- a/homeassistant/components/plugwise/switch.py +++ b/homeassistant/components/plugwise/switch.py @@ -93,8 +93,8 @@ class PlugwiseSwitchEntity(PlugwiseEntity, SwitchEntity): ) -> None: """Set up the Plugwise API.""" super().__init__(coordinator, device_id) - self.entity_description = description self._attr_unique_id = f"{device_id}-{description.key}" + self.entity_description = description @property def is_on(self) -> bool: diff --git a/homeassistant/components/pocketcasts/manifest.json b/homeassistant/components/pocketcasts/manifest.json index 3cb6f52995e..f2a85ecac0d 100644 --- a/homeassistant/components/pocketcasts/manifest.json +++ b/homeassistant/components/pocketcasts/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/pocketcasts", "iot_class": "cloud_polling", "loggers": ["pycketcasts"], + "quality_scale": "legacy", "requirements": ["pycketcasts==1.0.1"] } diff --git a/homeassistant/components/point/manifest.json b/homeassistant/components/point/manifest.json index 7b0a2f0e01e..5aa733b510f 100644 --- a/homeassistant/components/point/manifest.json +++ b/homeassistant/components/point/manifest.json @@ -7,6 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/point", "iot_class": "cloud_polling", "loggers": ["pypoint"], - "quality_scale": "silver", "requirements": ["pypoint==3.0.0"] } diff --git a/homeassistant/components/profiler/__init__.py b/homeassistant/components/profiler/__init__.py index 9b2b9736574..389e3384ad9 100644 --- a/homeassistant/components/profiler/__init__.py +++ b/homeassistant/components/profiler/__init__.py @@ -436,6 +436,10 @@ async def _async_generate_memory_profile(hass: HomeAssistant, call: ServiceCall) # Imports deferred to avoid loading modules # in memory since usually only one part of this # integration is used at a time + if sys.version_info >= (3, 13): + raise HomeAssistantError( + "Memory profiling is not supported on Python 3.13. Please use Python 3.12." 
+ ) from guppy import hpy # pylint: disable=import-outside-toplevel start_time = int(time.time() * 1000000) diff --git a/homeassistant/components/profiler/manifest.json b/homeassistant/components/profiler/manifest.json index 9f27ee7f7d0..8d2814c8c7f 100644 --- a/homeassistant/components/profiler/manifest.json +++ b/homeassistant/components/profiler/manifest.json @@ -7,7 +7,7 @@ "quality_scale": "internal", "requirements": [ "pyprof2calltree==1.4.5", - "guppy3==3.1.4.post1", + "guppy3==3.1.4.post1;python_version<'3.13'", "objgraph==3.5.0" ], "single_config_entry": true diff --git a/homeassistant/components/proliphix/manifest.json b/homeassistant/components/proliphix/manifest.json index 2b01d5deb46..9cf0b9b0950 100644 --- a/homeassistant/components/proliphix/manifest.json +++ b/homeassistant/components/proliphix/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/proliphix", "iot_class": "local_polling", "loggers": ["proliphix"], + "quality_scale": "legacy", "requirements": ["proliphix==0.4.1"] } diff --git a/homeassistant/components/prometheus/manifest.json b/homeassistant/components/prometheus/manifest.json index 8c43be8539d..e747226074c 100644 --- a/homeassistant/components/prometheus/manifest.json +++ b/homeassistant/components/prometheus/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/prometheus", "iot_class": "assumed_state", "loggers": ["prometheus_client"], + "quality_scale": "legacy", "requirements": ["prometheus-client==0.21.0"] } diff --git a/homeassistant/components/prowl/manifest.json b/homeassistant/components/prowl/manifest.json index 50decb3f046..049d95fb94c 100644 --- a/homeassistant/components/prowl/manifest.json +++ b/homeassistant/components/prowl/manifest.json @@ -3,5 +3,6 @@ "name": "Prowl", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/prowl", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/proximity/config_flow.py b/homeassistant/components/proximity/config_flow.py index 1758b182ad7..5818ec2979b 100644 --- a/homeassistant/components/proximity/config_flow.py +++ b/homeassistant/components/proximity/config_flow.py @@ -89,7 +89,7 @@ class ProximityConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return ProximityOptionsFlow(config_entry) + return ProximityOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -121,10 +121,6 @@ class ProximityConfigFlow(ConfigFlow, domain=DOMAIN): class ProximityOptionsFlow(OptionsFlow): """Handle a option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - def _user_form_schema(self, user_input: dict[str, Any]) -> vol.Schema: return vol.Schema(_base_schema(user_input)) diff --git a/homeassistant/components/proxmoxve/manifest.json b/homeassistant/components/proxmoxve/manifest.json index 8cf3bc7932d..45ead1330e2 100644 --- a/homeassistant/components/proxmoxve/manifest.json +++ b/homeassistant/components/proxmoxve/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/proxmoxve", "iot_class": "local_polling", "loggers": ["proxmoxer"], + "quality_scale": "legacy", "requirements": ["proxmoxer==2.0.1"] } diff --git a/homeassistant/components/proxy/manifest.json 
b/homeassistant/components/proxy/manifest.json index 1e70c4d3e10..e73eddf3cdd 100644 --- a/homeassistant/components/proxy/manifest.json +++ b/homeassistant/components/proxy/manifest.json @@ -3,5 +3,6 @@ "name": "Camera Proxy", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/proxy", - "requirements": ["Pillow==10.4.0"] + "quality_scale": "legacy", + "requirements": ["Pillow==11.0.0"] } diff --git a/homeassistant/components/pulseaudio_loopback/manifest.json b/homeassistant/components/pulseaudio_loopback/manifest.json index a67dc614c50..90666d18997 100644 --- a/homeassistant/components/pulseaudio_loopback/manifest.json +++ b/homeassistant/components/pulseaudio_loopback/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/pulseaudio_loopback", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pulsectl==23.5.2"] } diff --git a/homeassistant/components/pure_energie/__init__.py b/homeassistant/components/pure_energie/__init__.py index 459dc5c055c..4de1ce02810 100644 --- a/homeassistant/components/pure_energie/__init__.py +++ b/homeassistant/components/pure_energie/__init__.py @@ -7,13 +7,14 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import DOMAIN from .coordinator import PureEnergieDataUpdateCoordinator -PLATFORMS = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type PureEnergieConfigEntry = ConfigEntry[PureEnergieDataUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: PureEnergieConfigEntry) -> bool: """Set up Pure Energie from a config entry.""" coordinator = PureEnergieDataUpdateCoordinator(hass) @@ -23,14 +24,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await coordinator.gridnet.close() raise - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: PureEnergieConfigEntry +) -> bool: """Unload Pure Energie config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - del hass.data[DOMAIN][entry.entry_id] - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/pure_energie/diagnostics.py b/homeassistant/components/pure_energie/diagnostics.py index 6e2b8ee7a35..de9134129ed 100644 --- a/homeassistant/components/pure_energie/diagnostics.py +++ b/homeassistant/components/pure_energie/diagnostics.py @@ -6,12 +6,10 @@ from dataclasses import asdict from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import PureEnergieDataUpdateCoordinator +from . 
import PureEnergieConfigEntry TO_REDACT = { CONF_HOST, @@ -20,18 +18,18 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: PureEnergieConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: PureEnergieDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] - return { "entry": { "title": entry.title, "data": async_redact_data(entry.data, TO_REDACT), }, "data": { - "device": async_redact_data(asdict(coordinator.data.device), TO_REDACT), - "smartbridge": asdict(coordinator.data.smartbridge), + "device": async_redact_data( + asdict(entry.runtime_data.data.device), TO_REDACT + ), + "smartbridge": asdict(entry.runtime_data.data.smartbridge), }, } diff --git a/homeassistant/components/pure_energie/manifest.json b/homeassistant/components/pure_energie/manifest.json index ff52ec0ecf9..9efb1734f84 100644 --- a/homeassistant/components/pure_energie/manifest.json +++ b/homeassistant/components/pure_energie/manifest.json @@ -5,7 +5,6 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/pure_energie", "iot_class": "local_polling", - "quality_scale": "platinum", "requirements": ["gridnet==5.0.1"], "zeroconf": [ { diff --git a/homeassistant/components/pure_energie/sensor.py b/homeassistant/components/pure_energie/sensor.py index 85f4672a618..468858f117f 100644 --- a/homeassistant/components/pure_energie/sensor.py +++ b/homeassistant/components/pure_energie/sensor.py @@ -12,13 +12,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, UnitOfEnergy, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import PureEnergieConfigEntry from .const import DOMAIN from .coordinator import PureEnergieData, PureEnergieDataUpdateCoordinator @@ -59,12 +59,13 @@ SENSORS: tuple[PureEnergieSensorEntityDescription, ...] 
= ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: PureEnergieConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up Pure Energie Sensors based on a config entry.""" async_add_entities( PureEnergieSensorEntity( - coordinator=hass.data[DOMAIN][entry.entry_id], description=description, entry=entry, ) @@ -83,21 +84,22 @@ class PureEnergieSensorEntity( def __init__( self, *, - coordinator: PureEnergieDataUpdateCoordinator, description: PureEnergieSensorEntityDescription, - entry: ConfigEntry, + entry: PureEnergieConfigEntry, ) -> None: """Initialize Pure Energie sensor.""" - super().__init__(coordinator=coordinator) + super().__init__(coordinator=entry.runtime_data) self.entity_id = f"{SENSOR_DOMAIN}.pem_{description.key}" self.entity_description = description - self._attr_unique_id = f"{coordinator.data.device.n2g_id}_{description.key}" + self._attr_unique_id = ( + f"{entry.runtime_data.data.device.n2g_id}_{description.key}" + ) self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.data.device.n2g_id)}, - configuration_url=f"http://{coordinator.config_entry.data[CONF_HOST]}", - sw_version=coordinator.data.device.firmware, - manufacturer=coordinator.data.device.manufacturer, - model=coordinator.data.device.model, + identifiers={(DOMAIN, entry.runtime_data.data.device.n2g_id)}, + configuration_url=f"http://{entry.runtime_data.config_entry.data[CONF_HOST]}", + sw_version=entry.runtime_data.data.device.firmware, + manufacturer=entry.runtime_data.data.device.manufacturer, + model=entry.runtime_data.data.device.model, name=entry.title, ) diff --git a/homeassistant/components/purpleair/config_flow.py b/homeassistant/components/purpleair/config_flow.py index 6337431ecea..3ca7870b3cb 100644 --- a/homeassistant/components/purpleair/config_flow.py +++ b/homeassistant/components/purpleair/config_flow.py @@ -209,7 +209,7 @@ class PurpleAirConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> PurpleAirOptionsFlowHandler: """Define the config flow to handle options.""" - return PurpleAirOptionsFlowHandler(config_entry) + return PurpleAirOptionsFlowHandler() async def async_step_by_coordinates( self, user_input: dict[str, Any] | None = None @@ -315,10 +315,9 @@ class PurpleAirConfigFlow(ConfigFlow, domain=DOMAIN): class PurpleAirOptionsFlowHandler(OptionsFlow): """Handle a PurpleAir options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize.""" self._flow_data: dict[str, Any] = {} - self.config_entry = config_entry @property def settings_schema(self) -> vol.Schema: diff --git a/homeassistant/components/push/manifest.json b/homeassistant/components/push/manifest.json index 900ac25edbf..81cb2dce00c 100644 --- a/homeassistant/components/push/manifest.json +++ b/homeassistant/components/push/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@dgomes"], "dependencies": ["webhook"], "documentation": "https://www.home-assistant.io/integrations/push", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/pushsafer/manifest.json b/homeassistant/components/pushsafer/manifest.json index e9018e2a2ba..8b4ec94b9a5 100644 --- a/homeassistant/components/pushsafer/manifest.json +++ b/homeassistant/components/pushsafer/manifest.json @@ -3,5 +3,6 @@ "name": "Pushsafer", "codeowners": [], "documentation": 
"https://www.home-assistant.io/integrations/pushsafer", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/pvoutput/manifest.json b/homeassistant/components/pvoutput/manifest.json index 61bd6fd6164..bc96bc5061d 100644 --- a/homeassistant/components/pvoutput/manifest.json +++ b/homeassistant/components/pvoutput/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/pvoutput", "integration_type": "device", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["pvo==2.1.1"] } diff --git a/homeassistant/components/pvpc_hourly_pricing/config_flow.py b/homeassistant/components/pvpc_hourly_pricing/config_flow.py index 67f9de458d0..3c6b510004a 100644 --- a/homeassistant/components/pvpc_hourly_pricing/config_flow.py +++ b/homeassistant/components/pvpc_hourly_pricing/config_flow.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import callback @@ -56,7 +56,7 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> PVPCOptionsFlowHandler: """Get the options flow for this handler.""" - return PVPCOptionsFlowHandler(config_entry) + return PVPCOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -178,7 +178,7 @@ class TariffSelectorConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="reauth_confirm", data_schema=data_schema) -class PVPCOptionsFlowHandler(OptionsFlowWithConfigEntry): +class PVPCOptionsFlowHandler(OptionsFlow): """Handle PVPC options.""" _power: float | None = None @@ -199,7 +199,7 @@ class PVPCOptionsFlowHandler(OptionsFlowWithConfigEntry): ) # Fill options with entry data - api_token = self.options.get( + api_token = self.config_entry.options.get( CONF_API_TOKEN, self.config_entry.data.get(CONF_API_TOKEN) ) return self.async_show_form( @@ -229,13 +229,11 @@ class PVPCOptionsFlowHandler(OptionsFlowWithConfigEntry): ) # Fill options with entry data - power = self.options.get(ATTR_POWER, self.config_entry.data[ATTR_POWER]) - power_valley = self.options.get( - ATTR_POWER_P3, self.config_entry.data[ATTR_POWER_P3] - ) - api_token = self.options.get( - CONF_API_TOKEN, self.config_entry.data.get(CONF_API_TOKEN) - ) + options = self.config_entry.options + data = self.config_entry.data + power = options.get(ATTR_POWER, data[ATTR_POWER]) + power_valley = options.get(ATTR_POWER_P3, data[ATTR_POWER_P3]) + api_token = options.get(CONF_API_TOKEN, data.get(CONF_API_TOKEN)) use_api_token = api_token is not None schema = vol.Schema( { diff --git a/homeassistant/components/pvpc_hourly_pricing/manifest.json b/homeassistant/components/pvpc_hourly_pricing/manifest.json index 8db978135f6..ccddbece7e4 100644 --- a/homeassistant/components/pvpc_hourly_pricing/manifest.json +++ b/homeassistant/components/pvpc_hourly_pricing/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/pvpc_hourly_pricing", "iot_class": "cloud_polling", "loggers": ["aiopvpc"], - "quality_scale": "platinum", "requirements": ["aiopvpc==4.2.2"] } diff --git a/homeassistant/components/pyload/manifest.json b/homeassistant/components/pyload/manifest.json index 788cdd1eb05..e21167cf10b 100644 --- a/homeassistant/components/pyload/manifest.json +++ 
b/homeassistant/components/pyload/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "local_polling", "loggers": ["pyloadapi"], - "quality_scale": "platinum", "requirements": ["PyLoadAPI==1.3.2"] } diff --git a/homeassistant/components/qbittorrent/sensor.py b/homeassistant/components/qbittorrent/sensor.py index abc23f39975..67eb856bb83 100644 --- a/homeassistant/components/qbittorrent/sensor.py +++ b/homeassistant/components/qbittorrent/sensor.py @@ -100,13 +100,11 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_ALL_TORRENTS, translation_key="all_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states(coordinator, []), ), QBittorrentSensorEntityDescription( key=SENSOR_TYPE_ACTIVE_TORRENTS, translation_key="active_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["downloading", "uploading"] ), @@ -114,7 +112,6 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_INACTIVE_TORRENTS, translation_key="inactive_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["stalledDL", "stalledUP"] ), @@ -122,7 +119,6 @@ SENSOR_TYPES: tuple[QBittorrentSensorEntityDescription, ...] = ( QBittorrentSensorEntityDescription( key=SENSOR_TYPE_PAUSED_TORRENTS, translation_key="paused_torrents", - native_unit_of_measurement="torrents", value_fn=lambda coordinator: count_torrents_in_states( coordinator, ["pausedDL", "pausedUP"] ), diff --git a/homeassistant/components/qbittorrent/strings.json b/homeassistant/components/qbittorrent/strings.json index 88015dad5c3..9c9ee371737 100644 --- a/homeassistant/components/qbittorrent/strings.json +++ b/homeassistant/components/qbittorrent/strings.json @@ -36,16 +36,20 @@ } }, "active_torrents": { - "name": "Active torrents" + "name": "Active torrents", + "unit_of_measurement": "torrents" }, "inactive_torrents": { - "name": "Inactive torrents" + "name": "Inactive torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" }, "paused_torrents": { - "name": "Paused torrents" + "name": "Paused torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" }, "all_torrents": { - "name": "All torrents" + "name": "All torrents", + "unit_of_measurement": "[%key:component::qbittorrent::entity::sensor::active_torrents::unit_of_measurement%]" } }, "switch": { diff --git a/homeassistant/components/qld_bushfire/manifest.json b/homeassistant/components/qld_bushfire/manifest.json index 282a931bf05..79a29e6fddb 100644 --- a/homeassistant/components/qld_bushfire/manifest.json +++ b/homeassistant/components/qld_bushfire/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["georss_qld_bushfire_alert_client"], + "quality_scale": "legacy", "requirements": ["georss-qld-bushfire-alert-client==0.8"] } diff --git a/homeassistant/components/qnap/sensor.py b/homeassistant/components/qnap/sensor.py index 526516bfcdd..383a4e5f572 100644 --- a/homeassistant/components/qnap/sensor.py +++ b/homeassistant/components/qnap/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( - ATTR_NAME, PERCENTAGE, EntityCategory, 
UnitOfDataRate, @@ -375,17 +374,6 @@ class QNAPMemorySensor(QNAPSensor): return None - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"]["memory"] - size = round(float(data["total"]) / 1024, 2) - return {ATTR_MEMORY_SIZE: f"{size} {UnitOfInformation.GIBIBYTES}"} - return None - class QNAPNetworkSensor(QNAPSensor): """A QNAP sensor that monitors network stats.""" @@ -414,22 +402,6 @@ class QNAPNetworkSensor(QNAPSensor): return None - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"]["nics"][self.monitor_device] - return { - ATTR_IP: data["ip"], - ATTR_MASK: data["mask"], - ATTR_MAC: data["mac"], - ATTR_MAX_SPEED: data["max_speed"], - ATTR_PACKETS_ERR: data["err_packets"], - } - return None - class QNAPSystemSensor(QNAPSensor): """A QNAP sensor that monitors overall system health.""" @@ -455,25 +427,6 @@ class QNAPSystemSensor(QNAPSensor): return None - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["system_stats"] - days = int(data["uptime"]["days"]) - hours = int(data["uptime"]["hours"]) - minutes = int(data["uptime"]["minutes"]) - - return { - ATTR_NAME: data["system"]["name"], - ATTR_MODEL: data["system"]["model"], - ATTR_SERIAL: data["system"]["serial_number"], - ATTR_UPTIME: f"{days:0>2d}d {hours:0>2d}h {minutes:0>2d}m", - } - return None - class QNAPDriveSensor(QNAPSensor): """A QNAP sensor that monitors HDD/SSD drive stats.""" @@ -533,17 +486,3 @@ class QNAPVolumeSensor(QNAPSensor): return used_gb / total_gb * 100 return None - - # Deprecated since Home Assistant 2024.6.0 - # Can be removed completely in 2024.12.0 - @property - def extra_state_attributes(self) -> dict[str, Any] | None: - """Return the state attributes.""" - if self.coordinator.data: - data = self.coordinator.data["volumes"][self.monitor_device] - total_gb = int(data["total_size"]) / 1024 / 1024 / 1024 - - return { - ATTR_VOLUME_SIZE: f"{round(total_gb, 1)} {UnitOfInformation.GIBIBYTES}" - } - return None diff --git a/homeassistant/components/qrcode/manifest.json b/homeassistant/components/qrcode/manifest.json index 14f2d093f37..9634d45b069 100644 --- a/homeassistant/components/qrcode/manifest.json +++ b/homeassistant/components/qrcode/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/qrcode", "iot_class": "calculated", "loggers": ["pyzbar"], - "requirements": ["Pillow==10.4.0", "pyzbar==0.1.7"] + "quality_scale": "legacy", + "requirements": ["Pillow==11.0.0", "pyzbar==0.1.7"] } diff --git a/homeassistant/components/quantum_gateway/manifest.json b/homeassistant/components/quantum_gateway/manifest.json index 4494e5a2576..98c6c715417 100644 --- a/homeassistant/components/quantum_gateway/manifest.json +++ b/homeassistant/components/quantum_gateway/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@cisasteelersfan"], "documentation": "https://www.home-assistant.io/integrations/quantum_gateway", "iot_class": "local_polling", + "quality_scale": 
"legacy", "requirements": ["quantum-gateway==0.0.8"] } diff --git a/homeassistant/components/qvr_pro/manifest.json b/homeassistant/components/qvr_pro/manifest.json index 9c0e92698df..2553e1d27c4 100644 --- a/homeassistant/components/qvr_pro/manifest.json +++ b/homeassistant/components/qvr_pro/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/qvr_pro", "iot_class": "local_polling", "loggers": ["pyqvrpro"], + "quality_scale": "legacy", "requirements": ["pyqvrpro==0.52"] } diff --git a/homeassistant/components/qwikswitch/manifest.json b/homeassistant/components/qwikswitch/manifest.json index e30ebffbf2f..750e104d1a3 100644 --- a/homeassistant/components/qwikswitch/manifest.json +++ b/homeassistant/components/qwikswitch/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/qwikswitch", "iot_class": "local_push", "loggers": ["pyqwikswitch"], + "quality_scale": "legacy", "requirements": ["pyqwikswitch==0.93"] } diff --git a/homeassistant/components/rachio/config_flow.py b/homeassistant/components/rachio/config_flow.py index 66811091820..fac93952b35 100644 --- a/homeassistant/components/rachio/config_flow.py +++ b/homeassistant/components/rachio/config_flow.py @@ -108,16 +108,12 @@ class RachioConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Rachio.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/rainbird/__init__.py b/homeassistant/components/rainbird/__init__.py index da2a0e4b475..97dec9a681e 100644 --- a/homeassistant/components/rainbird/__init__.py +++ b/homeassistant/components/rainbird/__init__.py @@ -7,7 +7,7 @@ from typing import Any import aiohttp from pyrainbird.async_client import AsyncRainbirdClient, AsyncRainbirdController -from pyrainbird.exceptions import RainbirdApiException +from pyrainbird.exceptions import RainbirdApiException, RainbirdAuthException from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -18,12 +18,17 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import format_mac from .const import CONF_SERIAL_NUMBER -from .coordinator import RainbirdData, async_create_clientsession +from .coordinator import ( + RainbirdScheduleUpdateCoordinator, + RainbirdUpdateCoordinator, + async_create_clientsession, +) +from .types import RainbirdConfigEntry, RainbirdData _LOGGER = logging.getLogger(__name__) @@ -40,7 +45,9 @@ DOMAIN = "rainbird" def _async_register_clientsession_shutdown( - hass: HomeAssistant, entry: ConfigEntry, clientsession: aiohttp.ClientSession + hass: HomeAssistant, + entry: ConfigEntry, + clientsession: aiohttp.ClientSession, ) -> None: """Register cleanup hooks for the clientsession.""" @@ -55,7 +62,7 @@ def _async_register_clientsession_shutdown( entry.async_on_unload(_async_close_websession) -async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: RainbirdConfigEntry) -> bool: """Set up the config entry for Rain Bird.""" hass.data.setdefault(DOMAIN, {}) @@ -91,14 +98,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: model_info = await controller.get_model_and_version() + except RainbirdAuthException as err: + raise ConfigEntryAuthFailed from err except RainbirdApiException as err: raise ConfigEntryNotReady from err - data = RainbirdData(hass, entry, controller, model_info) + data = RainbirdData( + controller, + model_info, + coordinator=RainbirdUpdateCoordinator( + hass, + name=entry.title, + controller=controller, + unique_id=entry.unique_id, + model_info=model_info, + ), + schedule_coordinator=RainbirdScheduleUpdateCoordinator( + hass, + name=f"{entry.title} Schedule", + controller=controller, + ), + ) await data.coordinator.async_config_entry_first_refresh() - hass.data[DOMAIN][entry.entry_id] = data - + entry.runtime_data = data await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -236,8 +259,4 @@ def _async_fix_device_id( async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/rainbird/binary_sensor.py b/homeassistant/components/rainbird/binary_sensor.py index d44022b0a2d..5722b8852dd 100644 --- a/homeassistant/components/rainbird/binary_sensor.py +++ b/homeassistant/components/rainbird/binary_sensor.py @@ -8,13 +8,12 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import RainbirdUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) @@ -27,11 +26,11 @@ RAIN_SENSOR_ENTITY_DESCRIPTION = BinarySensorEntityDescription( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird binary_sensor.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id].coordinator + coordinator = config_entry.runtime_data.coordinator async_add_entities([RainBirdSensor(coordinator, RAIN_SENSOR_ENTITY_DESCRIPTION)]) diff --git a/homeassistant/components/rainbird/calendar.py b/homeassistant/components/rainbird/calendar.py index 42c1cce69d3..160fe70c61e 100644 --- a/homeassistant/components/rainbird/calendar.py +++ b/homeassistant/components/rainbird/calendar.py @@ -6,7 +6,6 @@ from datetime import datetime import logging from homeassistant.components.calendar import CalendarEntity, CalendarEvent -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo @@ -14,19 +13,19 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from 
homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util -from .const import DOMAIN from .coordinator import RainbirdScheduleUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird irrigation calendar.""" - data = hass.data[DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data if not data.model_info.model_info.max_programs: return diff --git a/homeassistant/components/rainbird/config_flow.py b/homeassistant/components/rainbird/config_flow.py index c1c814b05c4..86a3c5d5d1c 100644 --- a/homeassistant/components/rainbird/config_flow.py +++ b/homeassistant/components/rainbird/config_flow.py @@ -3,15 +3,13 @@ from __future__ import annotations import asyncio +from collections.abc import Mapping import logging from typing import Any -from pyrainbird.async_client import ( - AsyncRainbirdClient, - AsyncRainbirdController, - RainbirdApiException, -) +from pyrainbird.async_client import AsyncRainbirdClient, AsyncRainbirdController from pyrainbird.data import WifiParams +from pyrainbird.exceptions import RainbirdApiException, RainbirdAuthException import voluptuous as vol from homeassistant.config_entries import ( @@ -45,6 +43,13 @@ DATA_SCHEMA = vol.Schema( ), } ) +REAUTH_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): selector.TextSelector( + selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD) + ), + } +) class ConfigFlowError(Exception): @@ -59,13 +64,44 @@ class ConfigFlowError(Exception): class RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow for Rain Bird.""" + host: str + @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, ) -> RainBirdOptionsFlowHandler: """Define the config flow to handle options.""" - return RainBirdOptionsFlowHandler(config_entry) + return RainBirdOptionsFlowHandler() + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauthentication upon an API authentication error.""" + self.host = entry_data[CONF_HOST] + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauthentication dialog.""" + errors: dict[str, str] = {} + if user_input: + try: + await self._test_connection(self.host, user_input[CONF_PASSWORD]) + except ConfigFlowError as err: + _LOGGER.error("Error during config flow: %s", err) + errors["base"] = err.error_code + else: + return self.async_update_reload_and_abort( + self._get_reauth_entry(), + data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, + ) + return self.async_show_form( + step_id="reauth_confirm", + data_schema=REAUTH_SCHEMA, + errors=errors, + ) async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -123,6 +159,11 @@ class RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN): f"Timeout connecting to Rain Bird controller: {err!s}", "timeout_connect", ) from err + except RainbirdAuthException as err: + raise ConfigFlowError( + f"Authentication error connecting from Rain Bird controller: {err!s}", + "invalid_auth", + ) from err except RainbirdApiException as err: raise ConfigFlowError( f"Error connecting to Rain Bird controller: {err!s}", @@ -165,10 +206,6 @@ class 
RainbirdConfigFlowHandler(ConfigFlow, domain=DOMAIN): class RainBirdOptionsFlowHandler(OptionsFlow): """Handle a RainBird options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize RainBirdOptionsFlowHandler.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/rainbird/coordinator.py b/homeassistant/components/rainbird/coordinator.py index 2657fd6433e..437aa7ddbd4 100644 --- a/homeassistant/components/rainbird/coordinator.py +++ b/homeassistant/components/rainbird/coordinator.py @@ -8,7 +8,6 @@ import datetime import logging import aiohttp -from propcache import cached_property from pyrainbird.async_client import ( AsyncRainbirdController, RainbirdApiException, @@ -166,36 +165,3 @@ class RainbirdScheduleUpdateCoordinator(DataUpdateCoordinator[Schedule]): return await self._controller.get_schedule() except RainbirdApiException as err: raise UpdateFailed(f"Error communicating with Device: {err}") from err - - -@dataclass -class RainbirdData: - """Holder for shared integration data. - - The coordinators are lazy since they may only be used by some platforms when needed. - """ - - hass: HomeAssistant - entry: ConfigEntry - controller: AsyncRainbirdController - model_info: ModelAndVersion - - @cached_property - def coordinator(self) -> RainbirdUpdateCoordinator: - """Return RainbirdUpdateCoordinator.""" - return RainbirdUpdateCoordinator( - self.hass, - name=self.entry.title, - controller=self.controller, - unique_id=self.entry.unique_id, - model_info=self.model_info, - ) - - @cached_property - def schedule_coordinator(self) -> RainbirdScheduleUpdateCoordinator: - """Return RainbirdScheduleUpdateCoordinator.""" - return RainbirdScheduleUpdateCoordinator( - self.hass, - name=f"{self.entry.title} Schedule", - controller=self.controller, - ) diff --git a/homeassistant/components/rainbird/number.py b/homeassistant/components/rainbird/number.py index 507a31e59a4..d8081a796b9 100644 --- a/homeassistant/components/rainbird/number.py +++ b/homeassistant/components/rainbird/number.py @@ -7,29 +7,28 @@ import logging from pyrainbird.exceptions import RainbirdApiException, RainbirdDeviceBusyException from homeassistant.components.number import NumberEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import RainbirdUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird number platform.""" async_add_entities( [ RainDelayNumber( - hass.data[DOMAIN][config_entry.entry_id].coordinator, + config_entry.runtime_data.coordinator, ) ] ) diff --git a/homeassistant/components/rainbird/quality_scale.yaml b/homeassistant/components/rainbird/quality_scale.yaml new file mode 100644 index 00000000000..cd000c63fad --- /dev/null +++ b/homeassistant/components/rainbird/quality_scale.yaml @@ -0,0 +1,79 @@ +rules: + # Bronze + config-flow: done + brands: done + dependency-transparency: done + 
common-modules: done + has-entity-name: done + action-setup: + status: done + comment: | + The integration only has an entity service, registered in the platform. + appropriate-polling: + status: done + comment: | + Rainbird devices are local. Irrigation valve/controller status is polled + once per minute to get fast updates when turning on/off the valves. + The irrigation schedule uses a 15 minute poll interval since it rarely + changes. + + Rainbird devices can only accept a single http connection, so this uses + an aiohttp.ClientSession with a connection limit, and also uses a request + debouncer. + test-before-configure: done + entity-event-setup: + status: exempt + comment: Integration is polling and does not subscribe to events. + unique-config-entry: done + entity-unique-id: done + docs-installation-instructions: + status: todo + comment: | + The introduction can be improved and is missing pre-requisites such as + installing the app. + docs-removal-instructions: todo + test-before-setup: done + docs-high-level-description: done + config-flow-test-coverage: done + docs-actions: done + runtime-data: done + + # Silver + log-when-unavailable: todo + config-entry-unloading: todo + reauthentication-flow: done + action-exceptions: todo + docs-installation-parameters: todo + integration-owner: todo + parallel-updates: todo + test-coverage: todo + docs-configuration-parameters: todo + entity-unavailable: todo + + # Gold + docs-examples: todo + discovery-update-info: todo + entity-device-class: todo + entity-translations: todo + docs-data-update: todo + entity-disabled-by-default: todo + discovery: todo + exception-translations: todo + devices: todo + docs-supported-devices: todo + icon-translations: todo + docs-known-limitations: todo + stale-devices: todo + docs-supported-functions: todo + repair-issues: todo + reconfiguration-flow: todo + entity-category: todo + dynamic-devices: todo + docs-troubleshooting: todo + diagnostics: todo + docs-use-cases: todo + + # Platinum + async-dependency: todo + strict-typing: todo + inject-websession: todo diff --git a/homeassistant/components/rainbird/sensor.py b/homeassistant/components/rainbird/sensor.py index 649d643a20c..4725a33bc9a 100644 --- a/homeassistant/components/rainbird/sensor.py +++ b/homeassistant/components/rainbird/sensor.py @@ -5,14 +5,13 @@ from __future__ import annotations import logging from homeassistant.components.sensor import SensorEntity, SensorEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN from .coordinator import RainbirdUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) @@ -25,14 +24,14 @@ RAIN_DELAY_ENTITY_DESCRIPTION = SensorEntityDescription( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird sensor.""" async_add_entities( [ RainBirdSensor( - hass.data[DOMAIN][config_entry.entry_id].coordinator, + config_entry.runtime_data.coordinator, RAIN_DELAY_ENTITY_DESCRIPTION, ) ] ) diff --git a/homeassistant/components/rainbird/strings.json b/homeassistant/components/rainbird/strings.json index ea0d64f6208..6f92b1bdb97 100644 ---
a/homeassistant/components/rainbird/strings.json +++ b/homeassistant/components/rainbird/strings.json @@ -9,16 +9,29 @@ "password": "[%key:common::config_flow::data::password%]" }, "data_description": { - "host": "The hostname or IP address of your Rain Bird device." + "host": "The hostname or IP address of your Rain Bird device.", + "password": "The password used to authenticate with the Rain Bird device." + } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "The Rain Bird integration needs to re-authenticate with the device.", + "data": { + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "The password to authenticate with your Rain Bird device." } } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]" + "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]" } }, "options": { @@ -27,6 +40,9 @@ "title": "[%key:component::rainbird::config::step::user::title%]", "data": { "duration": "Default irrigation time in minutes" + }, + "data_description": { + "duration": "The default duration the sprinkler will run when turned on." } } } diff --git a/homeassistant/components/rainbird/switch.py b/homeassistant/components/rainbird/switch.py index 62a2a7c4a32..f622a1b9b2c 100644 --- a/homeassistant/components/rainbird/switch.py +++ b/homeassistant/components/rainbird/switch.py @@ -8,7 +8,6 @@ from pyrainbird.exceptions import RainbirdApiException, RainbirdDeviceBusyExcept import voluptuous as vol from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, entity_platform @@ -19,6 +18,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ATTR_DURATION, CONF_IMPORTED_NAMES, DOMAIN, MANUFACTURER from .coordinator import RainbirdUpdateCoordinator +from .types import RainbirdConfigEntry _LOGGER = logging.getLogger(__name__) @@ -31,11 +31,11 @@ SERVICE_SCHEMA_IRRIGATION: VolDictType = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: RainbirdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry for a Rain Bird irrigation switches.""" - coordinator = hass.data[DOMAIN][config_entry.entry_id].coordinator + coordinator = config_entry.runtime_data.coordinator async_add_entities( RainBirdSwitch( coordinator, diff --git a/homeassistant/components/rainbird/types.py b/homeassistant/components/rainbird/types.py new file mode 100644 index 00000000000..b452712d971 --- /dev/null +++ b/homeassistant/components/rainbird/types.py @@ -0,0 +1,26 @@ +"""Types for Rain Bird integration.""" + +from dataclasses import dataclass + +from pyrainbird.async_client import AsyncRainbirdController +from pyrainbird.data import ModelAndVersion + +from homeassistant.config_entries import ConfigEntry + +from .coordinator import RainbirdScheduleUpdateCoordinator, 
RainbirdUpdateCoordinator + + +@dataclass +class RainbirdData: + """Holder for shared integration data. + + The coordinators are lazy since they may only be used by some platforms when needed. + """ + + controller: AsyncRainbirdController + model_info: ModelAndVersion + coordinator: RainbirdUpdateCoordinator + schedule_coordinator: RainbirdScheduleUpdateCoordinator + + +type RainbirdConfigEntry = ConfigEntry[RainbirdData] diff --git a/homeassistant/components/raincloud/manifest.json b/homeassistant/components/raincloud/manifest.json index 70f62d2beee..b5179622441 100644 --- a/homeassistant/components/raincloud/manifest.json +++ b/homeassistant/components/raincloud/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/raincloud", "iot_class": "cloud_polling", "loggers": ["raincloudy"], + "quality_scale": "legacy", "requirements": ["raincloudy==0.0.7"] } diff --git a/homeassistant/components/rainmachine/config_flow.py b/homeassistant/components/rainmachine/config_flow.py index 5c07f04c163..0b40d506566 100644 --- a/homeassistant/components/rainmachine/config_flow.py +++ b/homeassistant/components/rainmachine/config_flow.py @@ -63,7 +63,7 @@ class RainMachineFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> RainMachineOptionsFlowHandler: """Define the config flow to handle options.""" - return RainMachineOptionsFlowHandler(config_entry) + return RainMachineOptionsFlowHandler() async def async_step_homekit( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -168,10 +168,6 @@ class RainMachineFlowHandler(ConfigFlow, domain=DOMAIN): class RainMachineOptionsFlowHandler(OptionsFlow): """Handle a RainMachine options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/random/binary_sensor.py b/homeassistant/components/random/binary_sensor.py index 9d33ad52692..ae9a5886d59 100644 --- a/homeassistant/components/random/binary_sensor.py +++ b/homeassistant/components/random/binary_sensor.py @@ -59,10 +59,9 @@ class RandomBinarySensor(BinarySensorEntity): def __init__(self, config: Mapping[str, Any], entry_id: str | None = None) -> None: """Initialize the Random binary sensor.""" - self._attr_name = config.get(CONF_NAME) + self._attr_name = config[CONF_NAME] self._attr_device_class = config.get(CONF_DEVICE_CLASS) - if entry_id: - self._attr_unique_id = entry_id + self._attr_unique_id = entry_id async def async_update(self) -> None: """Get new state and update the sensor's state.""" diff --git a/homeassistant/components/random/config_flow.py b/homeassistant/components/random/config_flow.py index fcbd77916a9..00314169260 100644 --- a/homeassistant/components/random/config_flow.py +++ b/homeassistant/components/random/config_flow.py @@ -95,7 +95,7 @@ def _generate_schema(domain: str, flow_type: _FlowType) -> vol.Schema: async def choose_options_step(options: dict[str, Any]) -> str: - """Return next step_id for options flow according to template_type.""" + """Return next step_id for options flow according to entity_type.""" return cast(str, options["entity_type"]) @@ -122,7 +122,7 @@ def _validate_unit(options: dict[str, Any]) -> None: def validate_user_input( - template_type: str, + entity_type: str, ) -> Callable[ [SchemaCommonFlowHandler, dict[str, Any]], Coroutine[Any, Any, dict[str, Any]], @@ -136,10 +136,10 @@ def validate_user_input( _: 
SchemaCommonFlowHandler, user_input: dict[str, Any], ) -> dict[str, Any]: - """Add template type to user input.""" - if template_type == Platform.SENSOR: + """Add entity type to user input.""" + if entity_type == Platform.SENSOR: _validate_unit(user_input) - return {"entity_type": template_type} | user_input + return {"entity_type": entity_type} | user_input return _validate_user_input diff --git a/homeassistant/components/random/sensor.py b/homeassistant/components/random/sensor.py index 3c6e67c9918..aad4fcb851c 100644 --- a/homeassistant/components/random/sensor.py +++ b/homeassistant/components/random/sensor.py @@ -70,22 +70,22 @@ class RandomSensor(SensorEntity): """Representation of a Random number sensor.""" _attr_translation_key = "random" + _unrecorded_attributes = frozenset({ATTR_MAXIMUM, ATTR_MINIMUM}) def __init__(self, config: Mapping[str, Any], entry_id: str | None = None) -> None: """Initialize the Random sensor.""" - self._attr_name = config.get(CONF_NAME) - self._minimum = config.get(CONF_MINIMUM, DEFAULT_MIN) - self._maximum = config.get(CONF_MAXIMUM, DEFAULT_MAX) + self._attr_name = config[CONF_NAME] + self._minimum = config[CONF_MINIMUM] + self._maximum = config[CONF_MAXIMUM] self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) self._attr_device_class = config.get(CONF_DEVICE_CLASS) self._attr_extra_state_attributes = { ATTR_MAXIMUM: self._maximum, ATTR_MINIMUM: self._minimum, } - if entry_id: - self._attr_unique_id = entry_id + self._attr_unique_id = entry_id async def async_update(self) -> None: - """Get a new number and updates the states.""" + """Get a new number and update the state.""" self._attr_native_value = randrange(self._minimum, self._maximum + 1) diff --git a/homeassistant/components/random/strings.json b/homeassistant/components/random/strings.json index ef19dd6dd67..e5c5543e39f 100644 --- a/homeassistant/components/random/strings.json +++ b/homeassistant/components/random/strings.json @@ -20,12 +20,12 @@ "title": "Random sensor" }, "user": { - "description": "This helper allows you to create a helper that emits a random value.", + "description": "This helper allows you to create an entity that emits a random value.", "menu_options": { "binary_sensor": "Random binary sensor", "sensor": "Random sensor" }, - "title": "Random helper" + "title": "Create Random helper" } } }, diff --git a/homeassistant/components/raspberry_pi/manifest.json b/homeassistant/components/raspberry_pi/manifest.json index 5ed68154ce1..c8317f7ef1e 100644 --- a/homeassistant/components/raspberry_pi/manifest.json +++ b/homeassistant/components/raspberry_pi/manifest.json @@ -6,5 +6,6 @@ "config_flow": false, "dependencies": ["hardware"], "documentation": "https://www.home-assistant.io/integrations/raspberry_pi", - "integration_type": "hardware" + "integration_type": "hardware", + "quality_scale": "legacy" } diff --git a/homeassistant/components/raspyrfm/manifest.json b/homeassistant/components/raspyrfm/manifest.json index 0fa4ce77200..d001e2b1118 100644 --- a/homeassistant/components/raspyrfm/manifest.json +++ b/homeassistant/components/raspyrfm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/raspyrfm", "iot_class": "assumed_state", "loggers": ["raspyrfm_client"], + "quality_scale": "legacy", "requirements": ["raspyrfm-client==1.2.8"] } diff --git a/homeassistant/components/rdw/manifest.json b/homeassistant/components/rdw/manifest.json index 7af3e861347..2ab90e55ef0 100644 --- a/homeassistant/components/rdw/manifest.json 
+++ b/homeassistant/components/rdw/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/rdw", "integration_type": "service", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["vehicle==2.2.2"] } diff --git a/homeassistant/components/recollect_waste/config_flow.py b/homeassistant/components/recollect_waste/config_flow.py index 882eb6a00d2..299af2609e3 100644 --- a/homeassistant/components/recollect_waste/config_flow.py +++ b/homeassistant/components/recollect_waste/config_flow.py @@ -34,9 +34,9 @@ class RecollectWasteConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> RecollectWasteOptionsFlowHandler: """Define the config flow to handle options.""" - return RecollectWasteOptionsFlowHandler(config_entry) + return RecollectWasteOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -79,10 +79,6 @@ class RecollectWasteConfigFlow(ConfigFlow, domain=DOMAIN): class RecollectWasteOptionsFlowHandler(OptionsFlow): """Handle a Recollect Waste options flow.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize.""" - self._entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -96,7 +92,7 @@ class RecollectWasteOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_FRIENDLY_NAME, - default=self._entry.options.get(CONF_FRIENDLY_NAME), + default=self.config_entry.options.get(CONF_FRIENDLY_NAME), ): bool } ), diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 6ba64d4a571..0c61f8a955e 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -740,7 +740,7 @@ class Recorder(threading.Thread): self.schema_version = schema_status.current_version # Do non-live data migration - migration.migrate_data_non_live(self, self.get_session, schema_status) + self._migrate_data_offline(schema_status) # Non-live migration is now completed, remaining steps are live self.migration_is_live = True @@ -916,6 +916,13 @@ class Recorder(threading.Thread): return False + def _migrate_data_offline( + self, schema_status: migration.SchemaValidationStatus + ) -> None: + """Migrate data.""" + with self.hass.timeout.freeze(DOMAIN): + migration.migrate_data_non_live(self, self.get_session, schema_status) + def _migrate_schema_offline( self, schema_status: migration.SchemaValidationStatus ) -> tuple[bool, migration.SchemaValidationStatus]: @@ -1424,6 +1431,7 @@ class Recorder(threading.Thread): with session_scope(session=self.get_session()) as session: end_incomplete_runs(session, self.recorder_runs_manager.recording_start) self.recorder_runs_manager.start(session) + self.states_manager.load_from_db(session) self._open_event_session() diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index 7e8343321c3..dbe2b775297 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -162,14 +162,14 @@ class Unused(CHAR): """An unused column type that behaves like a string.""" -@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] -@compiles(Unused, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") +@compiles(Unused, "mysql", "mariadb", "sqlite") def 
compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite.""" return "CHAR(0)" # Uses 1 byte on MySQL (no change on sqlite) -@compiles(Unused, "postgresql") # type: ignore[misc,no-untyped-call] +@compiles(Unused, "postgresql") def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: """Compile Unused as CHAR(1) on postgresql.""" return "CHAR(1)" # Uses 1 byte diff --git a/homeassistant/components/recorder/history/legacy.py b/homeassistant/components/recorder/history/legacy.py index b59fc43c3d0..3a0fe79455b 100644 --- a/homeassistant/components/recorder/history/legacy.py +++ b/homeassistant/components/recorder/history/legacy.py @@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.helpers.recorder import get_instance import homeassistant.util.dt as dt_util -from ..db_schema import RecorderRuns, StateAttributes, States +from ..db_schema import StateAttributes, States from ..filters import Filters -from ..models import process_timestamp, process_timestamp_to_utc_isoformat +from ..models import process_timestamp_to_utc_isoformat from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state from ..util import execute_stmt_lambda_element, session_scope from .const import ( @@ -436,7 +436,7 @@ def get_last_state_changes( def _get_states_for_entities_stmt( - run_start: datetime, + run_start_ts: float, utc_point_in_time: datetime, entity_ids: list[str], no_attributes: bool, @@ -447,7 +447,6 @@ def _get_states_for_entities_stmt( ) # We got an include-list of entities, accelerate the query by filtering already # in the inner query. - run_start_ts = process_timestamp(run_start).timestamp() utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time) stmt += lambda q: q.join( ( @@ -483,7 +482,7 @@ def _get_rows_with_session( session: Session, utc_point_in_time: datetime, entity_ids: list[str], - run: RecorderRuns | None = None, + *, no_attributes: bool = False, ) -> Iterable[Row]: """Return the states at a specific point in time.""" @@ -495,17 +494,16 @@ def _get_rows_with_session( ), ) - if run is None: - run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) + oldest_ts = get_instance(hass).states_manager.oldest_ts - if run is None or process_timestamp(run.start) > utc_point_in_time: - # History did not run before utc_point_in_time + if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp(): + # We don't have any states for the requested time return [] # We have more than one entity to look at so we need to do a query on states # since the last recorder run started. 
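    # Note: oldest_ts below is the cached minimum last_updated_ts from the states
    # table (StatesManager.oldest_ts), not a RecorderRuns start time.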
stmt = _get_states_for_entities_stmt( - run.start, utc_point_in_time, entity_ids, no_attributes + oldest_ts, utc_point_in_time, entity_ids, no_attributes ) return execute_stmt_lambda_element(session, stmt) diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py index b44bec0d0ee..902f1b5dc24 100644 --- a/homeassistant/components/recorder/history/modern.py +++ b/homeassistant/components/recorder/history/modern.py @@ -34,7 +34,6 @@ from ..models import ( LazyState, datetime_to_timestamp_or_none, extract_metadata_ids, - process_timestamp, row_to_compressed_state, ) from ..util import execute_stmt_lambda_element, session_scope @@ -246,9 +245,9 @@ def get_significant_states_with_session( if metadata_id is not None and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS ] - run_start_ts: float | None = None + oldest_ts: float | None = None if include_start_time_state and not ( - run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) + oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False start_time_ts = dt_util.utc_to_timestamp(start_time) @@ -264,7 +263,7 @@ def get_significant_states_with_session( significant_changes_only, no_attributes, include_start_time_state, - run_start_ts, + oldest_ts, ), track_on=[ bool(single_metadata_id), @@ -411,9 +410,9 @@ def state_changes_during_period( entity_id_to_metadata_id: dict[str, int | None] = { entity_id: single_metadata_id } - run_start_ts: float | None = None + oldest_ts: float | None = None if include_start_time_state and not ( - run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time) + oldest_ts := _get_oldest_possible_ts(hass, start_time) ): include_start_time_state = False start_time_ts = dt_util.utc_to_timestamp(start_time) @@ -426,7 +425,7 @@ def state_changes_during_period( no_attributes, limit, include_start_time_state, - run_start_ts, + oldest_ts, has_last_reported, ), track_on=[ @@ -600,17 +599,17 @@ def _get_start_time_state_for_entities_stmt( ) -def _get_run_start_ts_for_utc_point_in_time( +def _get_oldest_possible_ts( hass: HomeAssistant, utc_point_in_time: datetime ) -> float | None: - """Return the start time of a run.""" - run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time) - if ( - run is not None - and (run_start := process_timestamp(run.start)) < utc_point_in_time - ): - return run_start.timestamp() - # History did not run before utc_point_in_time but we still + """Return the oldest possible timestamp. + + Returns None if there are no states as old as utc_point_in_time. 
+ """ + + oldest_ts = get_instance(hass).states_manager.oldest_ts + if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp(): + return oldest_ts return None diff --git a/homeassistant/components/recorder/manifest.json b/homeassistant/components/recorder/manifest.json index 2be4b6862ba..93ffb12d18c 100644 --- a/homeassistant/components/recorder/manifest.json +++ b/homeassistant/components/recorder/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_push", "quality_scale": "internal", "requirements": [ - "SQLAlchemy==2.0.31", + "SQLAlchemy==2.0.36", "fnv-hash-fast==1.0.2", "psutil-home-assistant==0.0.1" ] diff --git a/homeassistant/components/recorder/pool.py b/homeassistant/components/recorder/pool.py index 30f8fa8d07a..fc2a8ccb1cc 100644 --- a/homeassistant/components/recorder/pool.py +++ b/homeassistant/components/recorder/pool.py @@ -16,7 +16,7 @@ from sqlalchemy.pool import ( StaticPool, ) -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import ReportBehavior, report_usage from homeassistant.util.loop import raise_for_blocking_call _LOGGER = logging.getLogger(__name__) @@ -108,14 +108,14 @@ class RecorderPool(SingletonThreadPool, NullPool): # raise_for_blocking_call will raise an exception def _do_get_db_connection_protected(self) -> ConnectionPoolEntry: - report( + report_usage( ( "accesses the database without the database executor; " f"{ADVISE_MSG} " "for faster database operations" ), exclude_integrations={"recorder"}, - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) return NullPool._create_connection(self) # noqa: SLF001 diff --git a/homeassistant/components/recorder/purge.py b/homeassistant/components/recorder/purge.py index 329f48e5455..28a5a2ed32d 100644 --- a/homeassistant/components/recorder/purge.py +++ b/homeassistant/components/recorder/purge.py @@ -123,6 +123,9 @@ def purge_old_data( _purge_old_entity_ids(instance, session) _purge_old_recorder_runs(instance, session, purge_before) + with session_scope(session=instance.get_session(), read_only=True) as session: + instance.recorder_runs_manager.load_from_db(session) + instance.states_manager.load_from_db(session) if repack: repack_database(instance) return True diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 4acf43a491e..8ca7bef2691 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -608,7 +608,8 @@ def delete_recorder_runs_rows( """Delete recorder_runs rows.""" return lambda_stmt( lambda: delete(RecorderRuns) - .filter(RecorderRuns.start < purge_before) + .filter(RecorderRuns.end.is_not(None)) + .filter(RecorderRuns.end < purge_before) .filter(RecorderRuns.run_id != current_run_id) .execution_options(synchronize_session=False) ) @@ -636,6 +637,15 @@ def find_states_to_purge( ) +def find_oldest_state() -> StatementLambdaElement: + """Find the last_updated_ts of the oldest state.""" + return lambda_stmt( + lambda: select(States.last_updated_ts).where( + States.state_id.in_(select(func.min(States.state_id))) + ) + ) + + def find_short_term_statistics_to_purge( purge_before: datetime, max_bind_vars: int ) -> StatementLambdaElement: diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 4ffe7c72971..9f01fd0399c 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -27,7 +27,9 @@ from homeassistant.helpers.singleton import 
singleton from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( + AreaConverter, BaseUnitConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -128,6 +130,11 @@ QUERY_STATISTICS_SUMMARY_SUM = ( STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = { + **{unit: AreaConverter for unit in AreaConverter.VALID_UNITS}, + **{ + unit: BloodGlucoseConcentrationConverter + for unit in BloodGlucoseConcentrationConverter.VALID_UNITS + }, **{unit: ConductivityConverter for unit in ConductivityConverter.VALID_UNITS}, **{unit: DataRateConverter for unit in DataRateConverter.VALID_UNITS}, **{unit: DistanceConverter for unit in DistanceConverter.VALID_UNITS}, diff --git a/homeassistant/components/recorder/table_managers/states.py b/homeassistant/components/recorder/table_managers/states.py index d5cef759c54..fafcfa0ea61 100644 --- a/homeassistant/components/recorder/table_managers/states.py +++ b/homeassistant/components/recorder/table_managers/states.py @@ -2,7 +2,15 @@ from __future__ import annotations +from collections.abc import Sequence +from typing import Any, cast + +from sqlalchemy.engine.row import Row +from sqlalchemy.orm.session import Session + from ..db_schema import States +from ..queries import find_oldest_state +from ..util import execute_stmt_lambda_element class StatesManager: @@ -13,6 +21,12 @@ class StatesManager: self._pending: dict[str, States] = {} self._last_committed_id: dict[str, int] = {} self._last_reported: dict[int, float] = {} + self._oldest_ts: float | None = None + + @property + def oldest_ts(self) -> float | None: + """Return the oldest timestamp.""" + return self._oldest_ts def pop_pending(self, entity_id: str) -> States | None: """Pop a pending state. @@ -44,6 +58,8 @@ class StatesManager: recorder thread. """ self._pending[entity_id] = state + if self._oldest_ts is None: + self._oldest_ts = state.last_updated_ts def update_pending_last_reported( self, state_id: int, last_reported_timestamp: float @@ -74,6 +90,22 @@ class StatesManager: """ self._last_committed_id.clear() self._pending.clear() + self._oldest_ts = None + + def load_from_db(self, session: Session) -> None: + """Update the cache. + + Must run in the recorder thread. + """ + result = cast( + Sequence[Row[Any]], + execute_stmt_lambda_element(session, find_oldest_state()), + ) + if not result: + ts = None + else: + ts = result[0].last_updated_ts + self._oldest_ts = ts def evict_purged_state_ids(self, purged_state_ids: set[int]) -> None: """Evict purged states from the committed states. diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index 783f0a80b8e..fa10c12aa68 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -120,8 +120,6 @@ class PurgeTask(RecorderTask): if purge.purge_old_data( instance, self.purge_before, self.repack, self.apply_filter ): - with instance.get_session() as session: - instance.recorder_runs_manager.load_from_db(session) # We always need to do the db cleanups after a purge # is finished to ensure the WAL checkpoint and other # tasks happen after a vacuum. 
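The recorder changes above replace the RecorderRuns-based start-time lookup with a cached oldest-state timestamp: StatesManager keeps the last_updated_ts of the oldest row in the states table (seeded when the first pending state is written, reloaded from the database at startup and again after a purge), and the history helpers can tell immediately that nothing existed at a requested point in time when that time predates the oldest stored state. What follows is a minimal, self-contained sketch of that general pattern only; the names (OldestTimestampCache, states_exist_before) and the raw sqlite3 schema are illustrative assumptions, not the actual recorder API.

from __future__ import annotations

import sqlite3
import time


class OldestTimestampCache:
    """Track the oldest last_updated_ts present in a states table."""

    def __init__(self) -> None:
        self._oldest_ts: float | None = None

    @property
    def oldest_ts(self) -> float | None:
        """Return the cached oldest timestamp, or None if no states exist."""
        return self._oldest_ts

    def note_new_state(self, last_updated_ts: float) -> None:
        """Seed the cache when the first state is written after a reset."""
        if self._oldest_ts is None:
            self._oldest_ts = last_updated_ts

    def load_from_db(self, conn: sqlite3.Connection) -> None:
        """Re-read the oldest row, e.g. after a purge removed old states."""
        row = conn.execute("SELECT MIN(last_updated_ts) FROM states").fetchone()
        self._oldest_ts = row[0] if row and row[0] is not None else None


def states_exist_before(cache: OldestTimestampCache, point_in_time: float) -> bool:
    """Return True if the table may hold states at or before point_in_time."""
    return cache.oldest_ts is not None and cache.oldest_ts <= point_in_time


if __name__ == "__main__":
    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE states (state_id INTEGER PRIMARY KEY, last_updated_ts REAL)"
    )
    conn.execute("INSERT INTO states (last_updated_ts) VALUES (?)", (time.time(),))
    conn.commit()

    cache = OldestTimestampCache()
    cache.load_from_db(conn)
    # A query for "one hour ago" can bail out early: no stored state is that old.
    print(states_exist_before(cache, time.time() - 3600))  # False

Reloading the cache right after a purge, as purge.py now does via states_manager.load_from_db, keeps the cached lower bound from pointing at rows that no longer exist.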
diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index a59519ef38d..125b354211e 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -902,7 +902,7 @@ def resolve_period( start_time = (start_time + timedelta(days=cal_offset * 366)).replace( month=1, day=1 ) - end_time = (start_time + timedelta(days=365)).replace(day=1) + end_time = (start_time + timedelta(days=366)).replace(day=1) start_time = dt_util.as_utc(start_time) end_time = dt_util.as_utc(end_time) diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index ac917e903df..ee5c5dd6d75 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -16,6 +16,8 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( + AreaConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -54,6 +56,10 @@ UPDATE_STATISTICS_METADATA_TIME_OUT = 10 UNIT_SCHEMA = vol.Schema( { + vol.Optional("area"): vol.In(AreaConverter.VALID_UNITS), + vol.Optional("blood_glucose_concentration"): vol.In( + BloodGlucoseConcentrationConverter.VALID_UNITS + ), vol.Optional("conductivity"): vol.In(ConductivityConverter.VALID_UNITS), vol.Optional("data_rate"): vol.In(DataRateConverter.VALID_UNITS), vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS), diff --git a/homeassistant/components/recswitch/manifest.json b/homeassistant/components/recswitch/manifest.json index 3e243d8f0d2..1273d498efd 100644 --- a/homeassistant/components/recswitch/manifest.json +++ b/homeassistant/components/recswitch/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/recswitch", "iot_class": "local_polling", "loggers": ["pyrecswitch"], + "quality_scale": "legacy", "requirements": ["pyrecswitch==1.0.2"] } diff --git a/homeassistant/components/reddit/manifest.json b/homeassistant/components/reddit/manifest.json index beb2b168e88..a2e20329be0 100644 --- a/homeassistant/components/reddit/manifest.json +++ b/homeassistant/components/reddit/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/reddit", "iot_class": "cloud_polling", "loggers": ["praw", "prawcore"], + "quality_scale": "legacy", "requirements": ["praw==7.5.0"] } diff --git a/homeassistant/components/refoss/manifest.json b/homeassistant/components/refoss/manifest.json index bf046e954d1..da7050433f3 100644 --- a/homeassistant/components/refoss/manifest.json +++ b/homeassistant/components/refoss/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/refoss", "iot_class": "local_polling", - "requirements": ["refoss-ha==1.2.4"] + "requirements": ["refoss-ha==1.2.5"] } diff --git a/homeassistant/components/rejseplanen/manifest.json b/homeassistant/components/rejseplanen/manifest.json index 72da7a65f45..6d0642cc996 100644 --- a/homeassistant/components/rejseplanen/manifest.json +++ b/homeassistant/components/rejseplanen/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/rejseplanen", "iot_class": "cloud_polling", "loggers": ["rjpl"], + "quality_scale": "legacy", "requirements": ["rjpl==0.3.6"] } diff --git 
a/homeassistant/components/remember_the_milk/manifest.json b/homeassistant/components/remember_the_milk/manifest.json index ab309c765fc..13c37d56dba 100644 --- a/homeassistant/components/remember_the_milk/manifest.json +++ b/homeassistant/components/remember_the_milk/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/remember_the_milk", "iot_class": "cloud_push", "loggers": ["rtmapi"], + "quality_scale": "legacy", "requirements": ["RtmAPI==0.7.2", "httplib2==0.20.4"] } diff --git a/homeassistant/components/remote/strings.json b/homeassistant/components/remote/strings.json index e3df487a57b..09b270b9687 100644 --- a/homeassistant/components/remote/strings.json +++ b/homeassistant/components/remote/strings.json @@ -28,7 +28,7 @@ "services": { "turn_on": { "name": "[%key:common::action::turn_on%]", - "description": "Sends the power on command.", + "description": "Sends the turn on command.", "fields": { "activity": { "name": "Activity", @@ -38,11 +38,11 @@ }, "toggle": { "name": "[%key:common::action::toggle%]", - "description": "Toggles a device on/off." + "description": "Sends the toggle command." }, "turn_off": { "name": "[%key:common::action::turn_off%]", - "description": "Turns the device off." + "description": "Sends the turn off command." }, "send_command": { "name": "Send command", diff --git a/homeassistant/components/remote_rpi_gpio/manifest.json b/homeassistant/components/remote_rpi_gpio/manifest.json index 3a369d859f8..b7e3b55d564 100644 --- a/homeassistant/components/remote_rpi_gpio/manifest.json +++ b/homeassistant/components/remote_rpi_gpio/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/remote_rpi_gpio", "iot_class": "local_push", "loggers": ["gpiozero", "pigpio"], + "quality_scale": "legacy", "requirements": ["gpiozero==1.6.2", "pigpio==1.78"] } diff --git a/homeassistant/components/renault/config_flow.py b/homeassistant/components/renault/config_flow.py index 82429dd146c..70544a5637f 100644 --- a/homeassistant/components/renault/config_flow.py +++ b/homeassistant/components/renault/config_flow.py @@ -3,9 +3,11 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any +from typing import Any +import aiohttp from renault_api.const import AVAILABLE_LOCALES +from renault_api.gigya.exceptions import GigyaException import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -14,17 +16,24 @@ from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import CONF_KAMEREON_ACCOUNT_ID, CONF_LOCALE, DOMAIN from .renault_hub import RenaultHub +USER_SCHEMA = vol.Schema( + { + vol.Required(CONF_LOCALE): vol.In(AVAILABLE_LOCALES.keys()), + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) +REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) + class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a Renault config flow.""" - VERSION = 1 + renault_hub: RenaultHub def __init__(self) -> None: """Initialize the Renault config flow.""" - self._original_data: Mapping[str, Any] | None = None self.renault_config: dict[str, Any] = {} - self.renault_hub: RenaultHub | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -33,30 +42,28 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): Ask the user for API keys. 
""" + errors: dict[str, str] = {} if user_input: locale = user_input[CONF_LOCALE] self.renault_config.update(user_input) self.renault_config.update(AVAILABLE_LOCALES[locale]) self.renault_hub = RenaultHub(self.hass, locale) - if not await self.renault_hub.attempt_login( - user_input[CONF_USERNAME], user_input[CONF_PASSWORD] - ): - return self._show_user_form({"base": "invalid_credentials"}) - return await self.async_step_kamereon() - return self._show_user_form() - - def _show_user_form(self, errors: dict[str, Any] | None = None) -> ConfigFlowResult: - """Show the API keys form.""" + try: + login_success = await self.renault_hub.attempt_login( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD] + ) + except (aiohttp.ClientConnectionError, GigyaException): + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + errors["base"] = "unknown" + else: + if login_success: + return await self.async_step_kamereon() + errors["base"] = "invalid_credentials" return self.async_show_form( step_id="user", - data_schema=vol.Schema( - { - vol.Required(CONF_LOCALE): vol.In(AVAILABLE_LOCALES.keys()), - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } - ), - errors=errors or {}, + data_schema=USER_SCHEMA, + errors=errors, ) async def async_step_kamereon( @@ -72,18 +79,12 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): title=user_input[CONF_KAMEREON_ACCOUNT_ID], data=self.renault_config ) - assert self.renault_hub accounts = await self.renault_hub.get_account_ids() if len(accounts) == 0: return self.async_abort(reason="kamereon_no_account") if len(accounts) == 1: - await self.async_set_unique_id(accounts[0]) - self._abort_if_unique_id_configured() - - self.renault_config[CONF_KAMEREON_ACCOUNT_ID] = accounts[0] - return self.async_create_entry( - title=self.renault_config[CONF_KAMEREON_ACCOUNT_ID], - data=self.renault_config, + return await self.async_step_kamereon( + user_input={CONF_KAMEREON_ACCOUNT_ID: accounts[0]} ) return self.async_show_form( @@ -97,48 +98,29 @@ class RenaultFlowHandler(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._original_data = entry_data return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - if not user_input: - return self._show_reauth_confirm_form() + errors: dict[str, str] = {} + reauth_entry = self._get_reauth_entry() + if user_input: + # Check credentials + self.renault_hub = RenaultHub(self.hass, reauth_entry.data[CONF_LOCALE]) + if await self.renault_hub.attempt_login( + reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD] + ): + return self.async_update_reload_and_abort( + reauth_entry, + data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]}, + ) + errors = {"base": "invalid_credentials"} - if TYPE_CHECKING: - assert self._original_data - - # Check credentials - self.renault_hub = RenaultHub(self.hass, self._original_data[CONF_LOCALE]) - if not await self.renault_hub.attempt_login( - self._original_data[CONF_USERNAME], user_input[CONF_PASSWORD] - ): - return self._show_reauth_confirm_form({"base": "invalid_credentials"}) - - # Update existing entry - data = {**self._original_data, CONF_PASSWORD: user_input[CONF_PASSWORD]} - existing_entry = await self.async_set_unique_id( - self._original_data[CONF_KAMEREON_ACCOUNT_ID] - ) - if TYPE_CHECKING: - assert 
existing_entry - self.hass.config_entries.async_update_entry(existing_entry, data=data) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") - - def _show_reauth_confirm_form( - self, errors: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Show the API keys form.""" - if TYPE_CHECKING: - assert self._original_data return self.async_show_form( step_id="reauth_confirm", - data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}), - errors=errors or {}, - description_placeholders={ - CONF_USERNAME: self._original_data[CONF_USERNAME] - }, + data_schema=REAUTH_SCHEMA, + errors=errors, + description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]}, ) diff --git a/homeassistant/components/renault/manifest.json b/homeassistant/components/renault/manifest.json index 716f2086bf1..396410dfc20 100644 --- a/homeassistant/components/renault/manifest.json +++ b/homeassistant/components/renault/manifest.json @@ -7,6 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["renault_api"], - "quality_scale": "platinum", "requirements": ["renault-api==0.2.7"] } diff --git a/homeassistant/components/renault/quality_scale.yaml b/homeassistant/components/renault/quality_scale.yaml new file mode 100644 index 00000000000..aa693e8e86d --- /dev/null +++ b/homeassistant/components/renault/quality_scale.yaml @@ -0,0 +1,66 @@ +rules: + # Bronze + action-setup: done + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: + status: todo + comment: Tests are not asserting the unique id + config-flow: done + dependency-transparency: done + docs-actions: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: done + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + # Silver + action-exceptions: done + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: No options flow + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: todo + reauthentication-flow: done + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: Discovery not possible + discovery: + status: exempt + comment: Discovery not possible + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: todo + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: todo + icon-translations: done + reconfiguration-flow: todo + repair-issues: done + stale-devices: done + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/renault/services.py b/homeassistant/components/renault/services.py index 4409d9f284b..80fb2363b1e 100644 --- a/homeassistant/components/renault/services.py +++ b/homeassistant/components/renault/services.py @@ -11,6 +11,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.exceptions import ServiceValidationError from 
homeassistant.helpers import config_validation as cv, device_registry as dr from .const import DOMAIN @@ -169,18 +170,27 @@ def setup_services(hass: HomeAssistant) -> None: device_id = service_call_data[ATTR_VEHICLE] device_entry = device_registry.async_get(device_id) if device_entry is None: - raise ValueError(f"Unable to find device with id: {device_id}") + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_device_id", + translation_placeholders={"device_id": device_id}, + ) loaded_entries: list[RenaultConfigEntry] = [ entry for entry in hass.config_entries.async_entries(DOMAIN) if entry.state == ConfigEntryState.LOADED + and entry.entry_id in device_entry.config_entries ] for entry in loaded_entries: for vin, vehicle in entry.runtime_data.vehicles.items(): if (DOMAIN, vin) in device_entry.identifiers: return vehicle - raise ValueError(f"Unable to find vehicle with VIN: {device_entry.identifiers}") + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_config_entry_for_device", + translation_placeholders={"device_id": device_entry.name or device_id}, + ) hass.services.async_register( DOMAIN, diff --git a/homeassistant/components/renault/strings.json b/homeassistant/components/renault/strings.json index 9cc34edb82f..a6487772bb6 100644 --- a/homeassistant/components/renault/strings.json +++ b/homeassistant/components/renault/strings.json @@ -6,7 +6,9 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" }, "error": { - "invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "step": { "kamereon": { @@ -211,5 +213,13 @@ } } } + }, + "exceptions": { + "invalid_device_id": { + "message": "No device with id {device_id} was found" + }, + "no_config_entry_for_device": { + "message": "No loaded config entry was found for device with id {device_id}" + } } } diff --git a/homeassistant/components/reolink/binary_sensor.py b/homeassistant/components/reolink/binary_sensor.py index f6c64d0b060..c168c97e809 100644 --- a/homeassistant/components/reolink/binary_sensor.py +++ b/homeassistant/components/reolink/binary_sensor.py @@ -28,6 +28,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class ReolinkBinarySensorEntityDescription( @@ -103,6 +105,7 @@ BINARY_PUSH_SENSORS = ( BINARY_SENSORS = ( ReolinkBinarySensorEntityDescription( key="sleep", + cmd_id=145, cmd_key="GetChannelstatus", translation_key="sleep", entity_category=EntityCategory.DIAGNOSTIC, @@ -173,14 +176,14 @@ class ReolinkPushBinarySensorEntity(ReolinkBinarySensorEntity): self.async_on_remove( async_dispatcher_connect( self.hass, - f"{self._host.webhook_id}_{self._channel}", + f"{self._host.unique_id}_{self._channel}", self._async_handle_event, ) ) self.async_on_remove( async_dispatcher_connect( self.hass, - f"{self._host.webhook_id}_all", + f"{self._host.unique_id}_all", self._async_handle_event, ) ) diff --git a/homeassistant/components/reolink/button.py b/homeassistant/components/reolink/button.py index 986ac9d872c..cd1e1b05fae 100644 --- a/homeassistant/components/reolink/button.py +++ 
b/homeassistant/components/reolink/button.py @@ -33,6 +33,7 @@ from .entity import ( ) from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 ATTR_SPEED = "speed" SUPPORT_PTZ_SPEED = CameraEntityFeature.STREAM SERVICE_PTZ_MOVE = "ptz_move" @@ -211,7 +212,7 @@ class ReolinkButtonEntity(ReolinkChannelCoordinatorEntity, ButtonEntity): except ReolinkError as err: raise HomeAssistantError(err) from err - async def async_ptz_move(self, **kwargs) -> None: + async def async_ptz_move(self, **kwargs: Any) -> None: """PTZ move with speed.""" speed = kwargs[ATTR_SPEED] try: diff --git a/homeassistant/components/reolink/camera.py b/homeassistant/components/reolink/camera.py index 600286be9a2..26ef0b0f4fc 100644 --- a/homeassistant/components/reolink/camera.py +++ b/homeassistant/components/reolink/camera.py @@ -21,6 +21,7 @@ from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescrip from .util import ReolinkConfigEntry, ReolinkData _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) diff --git a/homeassistant/components/reolink/config_flow.py b/homeassistant/components/reolink/config_flow.py index 102aeae575e..0b1ed7b4b15 100644 --- a/homeassistant/components/reolink/config_flow.py +++ b/homeassistant/components/reolink/config_flow.py @@ -54,10 +54,6 @@ DEFAULT_OPTIONS = {CONF_PROTOCOL: DEFAULT_PROTOCOL} class ReolinkOptionsFlowHandler(OptionsFlow): """Handle Reolink options.""" - def __init__(self, config_entry: ReolinkConfigEntry) -> None: - """Initialize ReolinkOptionsFlowHandler.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -112,7 +108,7 @@ class ReolinkFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ReolinkConfigEntry, ) -> ReolinkOptionsFlowHandler: """Options callback for Reolink.""" - return ReolinkOptionsFlowHandler(config_entry) + return ReolinkOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index 6101eee8a4c..dc2366e8f56 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -179,7 +179,7 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): """Return True if entity is available.""" return super().available and self._host.api.camera_online(self._channel) - def register_callback(self, unique_id: str, cmd_id) -> None: + def register_callback(self, unique_id: str, cmd_id: int) -> None: """Register callback for TCP push events.""" self._host.api.baichuan.register_callback( unique_id, self._push_callback, cmd_id, self._channel diff --git a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index 68a44bf0aae..97d888c0323 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -262,7 +262,7 @@ class ReolinkHost: else: ir.async_delete_issue(self._hass, DOMAIN, f"firmware_update_{key}") - async def _async_check_tcp_push(self, *_) -> None: + async def _async_check_tcp_push(self, *_: Any) -> None: """Check the TCP push subscription.""" if self._api.baichuan.events_active: ir.async_delete_issue(self._hass, DOMAIN, "webhook_url") @@ -323,7 +323,7 @@ class ReolinkHost: self._cancel_tcp_push_check = None - async def _async_check_onvif(self, *_) -> None: + async def _async_check_onvif(self, *_: Any) -> None: """Check the ONVIF 
subscription.""" if self._webhook_reachable: ir.async_delete_issue(self._hass, DOMAIN, "webhook_url") @@ -344,7 +344,7 @@ class ReolinkHost: self._cancel_onvif_check = None - async def _async_check_onvif_long_poll(self, *_) -> None: + async def _async_check_onvif_long_poll(self, *_: Any) -> None: """Check if ONVIF long polling is working.""" if not self._long_poll_received: _LOGGER.debug( @@ -450,7 +450,7 @@ class ReolinkHost: err, ) - async def _async_start_long_polling(self, initial=False) -> None: + async def _async_start_long_polling(self, initial: bool = False) -> None: """Start ONVIF long polling task.""" if self._long_poll_task is None: try: @@ -495,7 +495,7 @@ class ReolinkHost: err, ) - async def stop(self, event=None) -> None: + async def stop(self, *_: Any) -> None: """Disconnect the API.""" if self._cancel_poll is not None: self._cancel_poll() @@ -536,6 +536,8 @@ class ReolinkHost: async def renew(self) -> None: """Renew the subscription of motion events (lease time is 15 minutes).""" + await self._api.baichuan.check_subscribe_events() + if self._api.baichuan.events_active and self._api.subscribed(SubType.push): # TCP push active, unsubscribe from ONVIF push because not needed self.unregister_webhook() @@ -651,7 +653,7 @@ class ReolinkHost: webhook.async_unregister(self._hass, self.webhook_id) self.webhook_id = None - async def _async_long_polling(self, *_) -> None: + async def _async_long_polling(self, *_: Any) -> None: """Use ONVIF long polling to immediately receive events.""" # This task will be cancelled once _async_stop_long_polling is called while True: @@ -688,7 +690,7 @@ class ReolinkHost: # Cooldown to prevent CPU over usage on camera freezes await asyncio.sleep(LONG_POLL_COOLDOWN) - async def _async_poll_all_motion(self, *_) -> None: + async def _async_poll_all_motion(self, *_: Any) -> None: """Poll motion and AI states until the first ONVIF push is received.""" if ( self._api.baichuan.events_active @@ -721,7 +723,7 @@ class ReolinkHost: self._hass, POLL_INTERVAL_NO_PUSH, self._poll_job ) - self._signal_write_ha_state(None) + self._signal_write_ha_state() async def handle_webhook( self, hass: HomeAssistant, webhook_id: str, request: Request @@ -780,7 +782,7 @@ class ReolinkHost: "Could not poll motion state after losing connection during receiving ONVIF event" ) return - async_dispatcher_send(hass, f"{webhook_id}_all", {}) + self._signal_write_ha_state() return message = data.decode("utf-8") @@ -793,14 +795,14 @@ class ReolinkHost: self._signal_write_ha_state(channels) - def _signal_write_ha_state(self, channels: list[int] | None) -> None: + def _signal_write_ha_state(self, channels: list[int] | None = None) -> None: """Update the binary sensors with async_write_ha_state.""" if channels is None: - async_dispatcher_send(self._hass, f"{self.webhook_id}_all", {}) + async_dispatcher_send(self._hass, f"{self.unique_id}_all", {}) return for channel in channels: - async_dispatcher_send(self._hass, f"{self.webhook_id}_{channel}", {}) + async_dispatcher_send(self._hass, f"{self.unique_id}_{channel}", {}) @property def event_connection(self) -> str: diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index 5815e165607..cee044189ea 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -222,6 +222,9 @@ "hdr": { "default": "mdi:hdr" }, + "binning_mode": { + "default": "mdi:code-block-brackets" + }, "hub_alarm_ringtone": { "default": "mdi:music-note", "state": { @@ -246,6 
+249,12 @@ "off": "mdi:music-note-off" } }, + "vehicle_tone": { + "default": "mdi:music-note", + "state": { + "off": "mdi:music-note-off" + } + }, "visitor_tone": { "default": "mdi:music-note", "state": { @@ -257,11 +266,26 @@ "state": { "off": "mdi:music-note-off" } + }, + "main_frame_rate": { + "default": "mdi:play-speed" + }, + "sub_frame_rate": { + "default": "mdi:play-speed" + }, + "main_bit_rate": { + "default": "mdi:play-speed" + }, + "sub_bit_rate": { + "default": "mdi:play-speed" } }, "sensor": { "ptz_pan_position": { - "default": "mdi:pan" + "default": "mdi:pan-horizontal" + }, + "ptz_tilt_position": { + "default": "mdi:pan-vertical" }, "battery_temperature": { "default": "mdi:thermometer" diff --git a/homeassistant/components/reolink/light.py b/homeassistant/components/reolink/light.py index 0f239a30813..3bd9a120798 100644 --- a/homeassistant/components/reolink/light.py +++ b/homeassistant/components/reolink/light.py @@ -28,6 +28,8 @@ from .entity import ( ) from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class ReolinkLightEntityDescription( diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 0e2c918acc9..913864a92fa 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.11.2"] + "requirements": ["reolink-aio==0.11.4"] } diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index 9280df0f5bd..0c23bed7e2f 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -24,6 +24,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from .const import DOMAIN from .host import ReolinkHost +from .util import ReolinkConfigEntry _LOGGER = logging.getLogger(__name__) @@ -48,7 +49,9 @@ def res_name(stream: str) -> str: def get_host(hass: HomeAssistant, config_entry_id: str) -> ReolinkHost: """Return the Reolink host from the config entry id.""" - config_entry = hass.config_entries.async_get_entry(config_entry_id) + config_entry: ReolinkConfigEntry | None = hass.config_entries.async_get_entry( + config_entry_id + ) assert config_entry is not None return config_entry.runtime_data.host @@ -65,7 +68,9 @@ class ReolinkVODMediaSource(MediaSource): async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: """Resolve media to a url.""" - identifier = item.identifier.split("|", 5) + identifier = ["UNKNOWN"] + if item.identifier is not None: + identifier = item.identifier.split("|", 5) if identifier[0] != "FILE": raise Unresolvable(f"Unknown media item '{item.identifier}'.") @@ -110,7 +115,7 @@ class ReolinkVODMediaSource(MediaSource): item: MediaSourceItem, ) -> BrowseMediaSource: """Return media.""" - if item.identifier is None: + if not item.identifier: return await self._async_generate_root() identifier = item.identifier.split("|", 7) diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index 8ce568d4bd0..692b43bca9e 100644 --- a/homeassistant/components/reolink/number.py +++ b/homeassistant/components/reolink/number.py @@ -29,6 +29,8 @@ from .entity import ( ) from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 
+ @dataclass(frozen=True, kw_only=True) class ReolinkNumberEntityDescription( diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index 1306c881059..8625f7fb600 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -8,6 +8,7 @@ import logging from typing import Any from reolink_aio.api import ( + BinningModeEnum, Chime, ChimeToneEnum, DayNightEnum, @@ -21,7 +22,7 @@ from reolink_aio.api import ( from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.const import EntityCategory +from homeassistant.const import EntityCategory, UnitOfDataRate, UnitOfFrequency from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -35,6 +36,7 @@ from .entity import ( from .util import ReolinkConfigEntry, ReolinkData _LOGGER = logging.getLogger(__name__) +PARALLEL_UPDATES = 0 @dataclass(frozen=True, kw_only=True) @@ -174,6 +176,67 @@ SELECT_ENTITIES = ( value=lambda api, ch: HDREnum(api.HDR_state(ch)).name, method=lambda api, ch, name: api.set_HDR(ch, HDREnum[name].value), ), + ReolinkSelectEntityDescription( + key="binning_mode", + cmd_key="GetIsp", + translation_key="binning_mode", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + get_options=[method.name for method in BinningModeEnum], + supported=lambda api, ch: api.supported(ch, "binning_mode"), + value=lambda api, ch: BinningModeEnum(api.binning_mode(ch)).name, + method=lambda api, ch, name: api.set_binning_mode( + ch, BinningModeEnum[name].value + ), + ), + ReolinkSelectEntityDescription( + key="main_frame_rate", + cmd_key="GetEnc", + translation_key="main_frame_rate", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + unit_of_measurement=UnitOfFrequency.HERTZ, + get_options=lambda api, ch: [str(v) for v in api.frame_rate_list(ch, "main")], + supported=lambda api, ch: api.supported(ch, "frame_rate"), + value=lambda api, ch: str(api.frame_rate(ch, "main")), + method=lambda api, ch, value: api.set_frame_rate(ch, int(value), "main"), + ), + ReolinkSelectEntityDescription( + key="sub_frame_rate", + cmd_key="GetEnc", + translation_key="sub_frame_rate", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + unit_of_measurement=UnitOfFrequency.HERTZ, + get_options=lambda api, ch: [str(v) for v in api.frame_rate_list(ch, "sub")], + supported=lambda api, ch: api.supported(ch, "frame_rate"), + value=lambda api, ch: str(api.frame_rate(ch, "sub")), + method=lambda api, ch, value: api.set_frame_rate(ch, int(value), "sub"), + ), + ReolinkSelectEntityDescription( + key="main_bit_rate", + cmd_key="GetEnc", + translation_key="main_bit_rate", + entity_category=EntityCategory.CONFIG, + entity_registry_enabled_default=False, + unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, + get_options=lambda api, ch: [str(v) for v in api.bit_rate_list(ch, "main")], + supported=lambda api, ch: api.supported(ch, "bit_rate"), + value=lambda api, ch: str(api.bit_rate(ch, "main")), + method=lambda api, ch, value: api.set_bit_rate(ch, int(value), "main"), + ), + ReolinkSelectEntityDescription( + key="sub_bit_rate", + cmd_key="GetEnc", + translation_key="sub_bit_rate", + entity_category=EntityCategory.CONFIG, + 
entity_registry_enabled_default=False, + unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND, + get_options=lambda api, ch: [str(v) for v in api.bit_rate_list(ch, "sub")], + supported=lambda api, ch: api.supported(ch, "bit_rate"), + value=lambda api, ch: str(api.bit_rate(ch, "sub")), + method=lambda api, ch, value: api.set_bit_rate(ch, int(value), "sub"), + ), ) CHIME_SELECT_ENTITIES = ( @@ -197,6 +260,16 @@ CHIME_SELECT_ENTITIES = ( value=lambda chime: ChimeToneEnum(chime.tone("people")).name, method=lambda chime, name: chime.set_tone("people", ChimeToneEnum[name].value), ), + ReolinkChimeSelectEntityDescription( + key="vehicle_tone", + cmd_key="GetDingDongCfg", + translation_key="vehicle_tone", + entity_category=EntityCategory.CONFIG, + get_options=[method.name for method in ChimeToneEnum], + supported=lambda chime: "vehicle" in chime.chime_event_types, + value=lambda chime: ChimeToneEnum(chime.tone("vehicle")).name, + method=lambda chime, name: chime.set_tone("vehicle", ChimeToneEnum[name].value), + ), ReolinkChimeSelectEntityDescription( key="visitor_tone", cmd_key="GetDingDongCfg", diff --git a/homeassistant/components/reolink/sensor.py b/homeassistant/components/reolink/sensor.py index c2fc815235e..36900da99ca 100644 --- a/homeassistant/components/reolink/sensor.py +++ b/homeassistant/components/reolink/sensor.py @@ -29,6 +29,8 @@ from .entity import ( ) from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class ReolinkSensorEntityDescription( @@ -58,10 +60,20 @@ SENSORS = ( state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, value=lambda api, ch: api.ptz_pan_position(ch), - supported=lambda api, ch: api.supported(ch, "ptz_position"), + supported=lambda api, ch: api.supported(ch, "ptz_pan_position"), + ), + ReolinkSensorEntityDescription( + key="ptz_tilt_position", + cmd_key="GetPtzCurPos", + translation_key="ptz_tilt_position", + state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, + value=lambda api, ch: api.ptz_tilt_position(ch), + supported=lambda api, ch: api.supported(ch, "ptz_tilt_position"), ), ReolinkSensorEntityDescription( key="battery_percent", + cmd_id=252, cmd_key="GetBatteryInfo", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.BATTERY, @@ -72,6 +84,7 @@ SENSORS = ( ), ReolinkSensorEntityDescription( key="battery_temperature", + cmd_id=252, cmd_key="GetBatteryInfo", translation_key="battery_temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, @@ -84,6 +97,7 @@ SENSORS = ( ), ReolinkSensorEntityDescription( key="battery_state", + cmd_id=252, cmd_key="GetBatteryInfo", translation_key="battery_state", device_class=SensorDeviceClass.ENUM, diff --git a/homeassistant/components/reolink/siren.py b/homeassistant/components/reolink/siren.py index 45f435c1f2c..cb12eb5d38c 100644 --- a/homeassistant/components/reolink/siren.py +++ b/homeassistant/components/reolink/siren.py @@ -21,6 +21,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 + @dataclass(frozen=True) class ReolinkSirenEntityDescription( diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index 67fd5329e14..3fe7fe14ec5 100644 --- a/homeassistant/components/reolink/strings.json +++ 
b/homeassistant/components/reolink/strings.json @@ -490,7 +490,7 @@ "name": "Floodlight mode", "state": { "off": "[%key:common::state::off%]", - "auto": "Auto", + "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]", "onatnight": "On at night", "schedule": "Schedule", "adaptive": "Adaptive", @@ -529,7 +529,7 @@ "name": "Doorbell LED", "state": { "stayoff": "Stay off", - "auto": "Auto", + "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]", "alwaysonatnight": "Auto & always on at night", "alwayson": "Always on" } @@ -539,7 +539,15 @@ "state": { "off": "[%key:common::state::off%]", "on": "[%key:common::state::on%]", - "auto": "Auto" + "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]" + } + }, + "binning_mode": { + "name": "Binning mode", + "state": { + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]", + "auto": "[%key:component::reolink::entity::select::day_night_mode::state::auto%]" } }, "hub_alarm_ringtone": { @@ -606,6 +614,22 @@ "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" } }, + "vehicle_tone": { + "name": "Vehicle ringtone", + "state": { + "off": "[%key:common::state::off%]", + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } + }, "visitor_tone": { "name": "Visitor ringtone", "state": { @@ -637,6 +661,18 @@ "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" } + }, + "main_frame_rate": { + "name": "Clear frame rate" + }, + "sub_frame_rate": { + "name": "Fluent frame rate" + }, + "main_bit_rate": { + "name": "Clear bit rate" + }, + "sub_bit_rate": { + "name": "Fluent bit rate" } }, "sensor": { @@ -649,6 +685,9 @@ "ptz_pan_position": { "name": "PTZ pan position" }, + "ptz_tilt_position": { + "name": "PTZ tilt position" + }, "battery_temperature": { "name": "Battery temperature" }, diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index 482cdab18a7..c274609599d 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -27,6 +27,8 @@ from .entity import ( ) from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 + @dataclass(frozen=True, kw_only=True) class ReolinkSwitchEntityDescription( diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index 5738411fa72..aa607e2b29e 100644 --- a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -3,11 +3,10 @@ from 
__future__ import annotations from dataclasses import dataclass -from datetime import datetime from typing import Any from reolink_aio.exceptions import ReolinkError -from reolink_aio.software_version import NewSoftwareVersion +from reolink_aio.software_version import NewSoftwareVersion, SoftwareVersion from homeassistant.components.update import ( UpdateDeviceClass, @@ -19,7 +18,12 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.update_coordinator import ( + CoordinatorEntity, + DataUpdateCoordinator, +) +from . import DEVICE_UPDATE_INTERVAL from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, @@ -28,7 +32,10 @@ from .entity import ( ) from .util import ReolinkConfigEntry, ReolinkData +PARALLEL_UPDATES = 0 +RESUME_AFTER_INSTALL = 15 POLL_AFTER_INSTALL = 120 +POLL_PROGRESS = 2 @dataclass(frozen=True, kw_only=True) @@ -86,25 +93,28 @@ async def async_setup_entry( async_add_entities(entities) -class ReolinkUpdateEntity( - ReolinkChannelCoordinatorEntity, - UpdateEntity, +class ReolinkUpdateBaseEntity( + CoordinatorEntity[DataUpdateCoordinator[None]], UpdateEntity ): - """Base update entity class for Reolink IP cameras.""" + """Base update entity class for Reolink.""" - entity_description: ReolinkUpdateEntityDescription _attr_release_url = "https://reolink.com/download-center/" def __init__( self, reolink_data: ReolinkData, - channel: int, - entity_description: ReolinkUpdateEntityDescription, + channel: int | None, + coordinator: DataUpdateCoordinator[None], ) -> None: """Initialize Reolink update entity.""" - self.entity_description = entity_description - super().__init__(reolink_data, channel, reolink_data.firmware_coordinator) + CoordinatorEntity.__init__(self, coordinator) + self._channel = channel + self._host = reolink_data.host self._cancel_update: CALLBACK_TYPE | None = None + self._cancel_resume: CALLBACK_TYPE | None = None + self._cancel_progress: CALLBACK_TYPE | None = None + self._installing: bool = False + self._reolink_data = reolink_data @property def installed_version(self) -> str | None: @@ -123,6 +133,16 @@ class ReolinkUpdateEntity( return new_firmware.version_string + @property + def in_progress(self) -> bool: + """Update installation progress.""" + return self._host.api.sw_upload_progress(self._channel) < 100 + + @property + def update_percentage(self) -> int: + """Update installation progress.""" + return self._host.api.sw_upload_progress(self._channel) + @property def supported_features(self) -> UpdateEntityFeature: """Flag supported features.""" @@ -130,8 +150,27 @@ class ReolinkUpdateEntity( new_firmware = self._host.api.firmware_update_available(self._channel) if isinstance(new_firmware, NewSoftwareVersion): supported_features |= UpdateEntityFeature.RELEASE_NOTES + supported_features |= UpdateEntityFeature.PROGRESS return supported_features + @property + def available(self) -> bool: + """Return True if entity is available.""" + if self._installing or self._cancel_update is not None: + return True + return super().available + + def version_is_newer(self, latest_version: str, installed_version: str) -> bool: + """Return True if latest_version is newer than installed_version.""" + try: + installed = SoftwareVersion(installed_version) + latest = SoftwareVersion(latest_version) + except ReolinkError: + # when the 
online update API returns a unexpected string + return True + + return latest > installed + async def async_release_notes(self) -> str | None: """Return the release notes.""" new_firmware = self._host.api.firmware_update_available(self._channel) @@ -148,6 +187,11 @@ class ReolinkUpdateEntity( self, version: str | None, backup: bool, **kwargs: Any ) -> None: """Install the latest firmware version.""" + self._installing = True + await self._pause_update_coordinator() + self._cancel_progress = async_call_later( + self.hass, POLL_PROGRESS, self._async_update_progress + ) try: await self._host.api.update_firmware(self._channel) except ReolinkError as err: @@ -159,10 +203,38 @@ class ReolinkUpdateEntity( self._cancel_update = async_call_later( self.hass, POLL_AFTER_INSTALL, self._async_update_future ) + self._cancel_resume = async_call_later( + self.hass, RESUME_AFTER_INSTALL, self._resume_update_coordinator + ) + self._installing = False - async def _async_update_future(self, now: datetime | None = None) -> None: + async def _pause_update_coordinator(self) -> None: + """Pause updating the states using the data update coordinator (during reboots).""" + self._reolink_data.device_coordinator.update_interval = None + self._reolink_data.device_coordinator.async_set_updated_data(None) + + async def _resume_update_coordinator(self, *args: Any) -> None: + """Resume updating the states using the data update coordinator (after reboots).""" + self._reolink_data.device_coordinator.update_interval = DEVICE_UPDATE_INTERVAL + try: + await self._reolink_data.device_coordinator.async_refresh() + finally: + self._cancel_resume = None + + async def _async_update_progress(self, *args: Any) -> None: """Request update.""" - await self.async_update() + self.async_write_ha_state() + if self._installing: + self._cancel_progress = async_call_later( + self.hass, POLL_PROGRESS, self._async_update_progress + ) + + async def _async_update_future(self, *args: Any) -> None: + """Request update.""" + try: + await self.async_update() + finally: + self._cancel_update = None async def async_added_to_hass(self) -> None: """Entity created.""" @@ -176,16 +248,44 @@ class ReolinkUpdateEntity( self._host.firmware_ch_list.remove(self._channel) if self._cancel_update is not None: self._cancel_update() + if self._cancel_progress is not None: + self._cancel_progress() + if self._cancel_resume is not None: + self._cancel_resume() + + +class ReolinkUpdateEntity( + ReolinkUpdateBaseEntity, + ReolinkChannelCoordinatorEntity, +): + """Base update entity class for Reolink IP cameras.""" + + entity_description: ReolinkUpdateEntityDescription + _channel: int + + def __init__( + self, + reolink_data: ReolinkData, + channel: int, + entity_description: ReolinkUpdateEntityDescription, + ) -> None: + """Initialize Reolink update entity.""" + self.entity_description = entity_description + ReolinkUpdateBaseEntity.__init__( + self, reolink_data, channel, reolink_data.firmware_coordinator + ) + ReolinkChannelCoordinatorEntity.__init__( + self, reolink_data, channel, reolink_data.firmware_coordinator + ) class ReolinkHostUpdateEntity( + ReolinkUpdateBaseEntity, ReolinkHostCoordinatorEntity, - UpdateEntity, ): """Update entity class for Reolink Host.""" entity_description: ReolinkHostUpdateEntityDescription - _attr_release_url = "https://reolink.com/download-center/" def __init__( self, @@ -194,76 +294,9 @@ class ReolinkHostUpdateEntity( ) -> None: """Initialize Reolink update entity.""" self.entity_description = entity_description - 
super().__init__(reolink_data, reolink_data.firmware_coordinator) - self._cancel_update: CALLBACK_TYPE | None = None - - @property - def installed_version(self) -> str | None: - """Version currently in use.""" - return self._host.api.sw_version - - @property - def latest_version(self) -> str | None: - """Latest version available for install.""" - new_firmware = self._host.api.firmware_update_available() - if not new_firmware: - return self.installed_version - - if isinstance(new_firmware, str): - return new_firmware - - return new_firmware.version_string - - @property - def supported_features(self) -> UpdateEntityFeature: - """Flag supported features.""" - supported_features = UpdateEntityFeature.INSTALL - new_firmware = self._host.api.firmware_update_available() - if isinstance(new_firmware, NewSoftwareVersion): - supported_features |= UpdateEntityFeature.RELEASE_NOTES - return supported_features - - async def async_release_notes(self) -> str | None: - """Return the release notes.""" - new_firmware = self._host.api.firmware_update_available() - assert isinstance(new_firmware, NewSoftwareVersion) - - return ( - "If the install button fails, download this" - f" [firmware zip file]({new_firmware.download_url})." - " Then, follow the installation guide (PDF in the zip file).\n\n" - f"## Release notes\n\n{new_firmware.release_notes}" + ReolinkUpdateBaseEntity.__init__( + self, reolink_data, None, reolink_data.firmware_coordinator + ) + ReolinkHostCoordinatorEntity.__init__( + self, reolink_data, reolink_data.firmware_coordinator ) - - async def async_install( - self, version: str | None, backup: bool, **kwargs: Any - ) -> None: - """Install the latest firmware version.""" - try: - await self._host.api.update_firmware() - except ReolinkError as err: - raise HomeAssistantError( - f"Error trying to update Reolink firmware: {err}" - ) from err - finally: - self.async_write_ha_state() - self._cancel_update = async_call_later( - self.hass, POLL_AFTER_INSTALL, self._async_update_future - ) - - async def _async_update_future(self, now: datetime | None = None) -> None: - """Request update.""" - await self.async_update() - - async def async_added_to_hass(self) -> None: - """Entity created.""" - await super().async_added_to_hass() - self._host.firmware_ch_list.append(None) - - async def async_will_remove_from_hass(self) -> None: - """Entity removed.""" - await super().async_will_remove_from_hass() - if None in self._host.firmware_ch_list: - self._host.firmware_ch_list.remove(None) - if self._cancel_update is not None: - self._cancel_update() diff --git a/homeassistant/components/repetier/manifest.json b/homeassistant/components/repetier/manifest.json index dfddb298284..7392ae0b23e 100644 --- a/homeassistant/components/repetier/manifest.json +++ b/homeassistant/components/repetier/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/repetier", "iot_class": "local_polling", "loggers": ["pyrepetierng"], + "quality_scale": "legacy", "requirements": ["pyrepetierng==0.1.0"] } diff --git a/homeassistant/components/rest/__init__.py b/homeassistant/components/rest/__init__.py index 59239ad6744..5695e51933e 100644 --- a/homeassistant/components/rest/__init__.py +++ b/homeassistant/components/rest/__init__.py @@ -180,6 +180,7 @@ def _rest_coordinator( return DataUpdateCoordinator( hass, _LOGGER, + config_entry=None, name="rest data", update_method=update_method, update_interval=update_interval, diff --git a/homeassistant/components/rflink/manifest.json 
b/homeassistant/components/rflink/manifest.json index 7917fa0bded..f5f372d2d33 100644 --- a/homeassistant/components/rflink/manifest.json +++ b/homeassistant/components/rflink/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/rflink", "iot_class": "assumed_state", "loggers": ["rflink"], + "quality_scale": "legacy", "requirements": ["rflink==0.0.66"] } diff --git a/homeassistant/components/rfxtrx/config_flow.py b/homeassistant/components/rfxtrx/config_flow.py index ceb9bea4661..866d9ecb1bb 100644 --- a/homeassistant/components/rfxtrx/config_flow.py +++ b/homeassistant/components/rfxtrx/config_flow.py @@ -87,9 +87,8 @@ class RfxtrxOptionsFlow(OptionsFlow): _device_registry: dr.DeviceRegistry _device_entries: list[dr.DeviceEntry] - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize rfxtrx options flow.""" - self._config_entry = config_entry self._global_options: dict[str, Any] = {} self._selected_device: dict[str, Any] = {} self._selected_device_entry_id: str | None = None @@ -120,9 +119,7 @@ class RfxtrxOptionsFlow(OptionsFlow): event_code = device_data["event_code"] assert event_code self._selected_device_event_code = event_code - self._selected_device = self._config_entry.data[CONF_DEVICES][ - event_code - ] + self._selected_device = self.config_entry.data[CONF_DEVICES][event_code] self._selected_device_object = get_rfx_object(event_code) return await self.async_step_set_device_options() if CONF_EVENT_CODE in user_input: @@ -148,7 +145,7 @@ class RfxtrxOptionsFlow(OptionsFlow): device_registry = dr.async_get(self.hass) device_entries = dr.async_entries_for_config_entry( - device_registry, self._config_entry.entry_id + device_registry, self.config_entry.entry_id ) self._device_registry = device_registry self._device_entries = device_entries @@ -162,11 +159,11 @@ class RfxtrxOptionsFlow(OptionsFlow): options = { vol.Optional( CONF_AUTOMATIC_ADD, - default=self._config_entry.data[CONF_AUTOMATIC_ADD], + default=self.config_entry.data[CONF_AUTOMATIC_ADD], ): bool, vol.Optional( CONF_PROTOCOLS, - default=self._config_entry.data.get(CONF_PROTOCOLS) or [], + default=self.config_entry.data.get(CONF_PROTOCOLS) or [], ): cv.multi_select(RECV_MODES), vol.Optional(CONF_EVENT_CODE): str, vol.Optional(CONF_DEVICE): vol.In(configure_devices), @@ -425,7 +422,7 @@ class RfxtrxOptionsFlow(OptionsFlow): def _can_add_device(self, new_rfx_obj: rfxtrxmod.RFXtrxEvent) -> bool: """Check if device does not already exist.""" new_device_id = get_device_id(new_rfx_obj.device) - for packet_id, entity_info in self._config_entry.data[CONF_DEVICES].items(): + for packet_id, entity_info in self.config_entry.data[CONF_DEVICES].items(): rfx_obj = get_rfx_object(packet_id) assert rfx_obj @@ -468,7 +465,7 @@ class RfxtrxOptionsFlow(OptionsFlow): assert entry device_id = get_device_tuple_from_identifiers(entry.identifiers) assert device_id - for packet_id, entity_info in self._config_entry.data[CONF_DEVICES].items(): + for packet_id, entity_info in self.config_entry.data[CONF_DEVICES].items(): if tuple(entity_info.get(CONF_DEVICE_ID)) == device_id: event_code = cast(str, packet_id) break @@ -481,8 +478,8 @@ class RfxtrxOptionsFlow(OptionsFlow): devices: dict[str, Any] | None = None, ) -> None: """Update data in ConfigEntry.""" - entry_data = self._config_entry.data.copy() - entry_data[CONF_DEVICES] = copy.deepcopy(self._config_entry.data[CONF_DEVICES]) + entry_data = self.config_entry.data.copy() + entry_data[CONF_DEVICES] = 
copy.deepcopy(self.config_entry.data[CONF_DEVICES]) if global_options: entry_data.update(global_options) if devices: @@ -494,9 +491,9 @@ class RfxtrxOptionsFlow(OptionsFlow): entry_data[CONF_DEVICES].pop(event_code, None) else: entry_data[CONF_DEVICES][event_code] = options - self.hass.config_entries.async_update_entry(self._config_entry, data=entry_data) + self.hass.config_entries.async_update_entry(self.config_entry, data=entry_data) self.hass.async_create_task( - self.hass.config_entries.async_reload(self._config_entry.entry_id) + self.hass.config_entries.async_reload(self.config_entry.entry_id) ) @@ -637,9 +634,11 @@ class RfxtrxConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> RfxtrxOptionsFlow: """Get the options flow for this handler.""" - return RfxtrxOptionsFlow(config_entry) + return RfxtrxOptionsFlow() def _test_transport(host: str | None, port: int | None, device: str | None) -> bool: diff --git a/homeassistant/components/ridwell/config_flow.py b/homeassistant/components/ridwell/config_flow.py index a54d4debe75..f03679c8315 100644 --- a/homeassistant/components/ridwell/config_flow.py +++ b/homeassistant/components/ridwell/config_flow.py @@ -93,6 +93,9 @@ class RidwellConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle re-auth completion.""" if not user_input: + if TYPE_CHECKING: + assert self._username + return self.async_show_form( step_id="reauth_confirm", data_schema=STEP_REAUTH_CONFIRM_DATA_SCHEMA, diff --git a/homeassistant/components/ring/__init__.py b/homeassistant/components/ring/__init__.py index b2340b34556..edc084fb57b 100644 --- a/homeassistant/components/ring/__init__.py +++ b/homeassistant/components/ring/__init__.py @@ -9,6 +9,7 @@ import uuid from ring_doorbell import Auth, Ring, RingDevices +from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import APPLICATION_NAME, CONF_DEVICE_ID, CONF_TOKEN from homeassistant.core import HomeAssistant, callback @@ -70,8 +71,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: RingConfigEntry) -> bool ) ring = Ring(auth) - await _migrate_old_unique_ids(hass, entry.entry_id) - devices_coordinator = RingDataCoordinator(hass, ring) listen_credentials = entry.data.get(CONF_LISTEN_CREDENTIALS) listen_coordinator = RingListenCoordinator( @@ -104,42 +103,46 @@ async def async_remove_config_entry_device( return True -async def _migrate_old_unique_ids(hass: HomeAssistant, entry_id: str) -> None: - entity_registry = er.async_get(hass) - - @callback - def _async_migrator(entity_entry: er.RegistryEntry) -> dict[str, str] | None: - # Old format for camera and light was int - unique_id = cast(str | int, entity_entry.unique_id) - if isinstance(unique_id, int): - new_unique_id = str(unique_id) - if existing_entity_id := entity_registry.async_get_entity_id( - entity_entry.domain, entity_entry.platform, new_unique_id - ): - _LOGGER.error( - "Cannot migrate to unique_id '%s', already exists for '%s', " - "You may have to delete unavailable ring entities", - new_unique_id, - existing_entity_id, - ) - return None - _LOGGER.debug("Fixing non string unique id %s", entity_entry.unique_id) - return {"new_unique_id": new_unique_id} - return None - - await er.async_migrate_entries(hass, entry_id, _async_migrator) - - async def async_migrate_entry(hass: HomeAssistant, entry: 
ConfigEntry) -> bool: """Migrate old config entry.""" entry_version = entry.version entry_minor_version = entry.minor_version + entry_id = entry.entry_id new_minor_version = 2 if entry_version == 1 and entry_minor_version == 1: _LOGGER.debug( "Migrating from version %s.%s", entry_version, entry_minor_version ) + # Migrate non-str unique ids + # This step used to run unconditionally from async_setup_entry + entity_registry = er.async_get(hass) + + @callback + def _async_str_unique_id_migrator( + entity_entry: er.RegistryEntry, + ) -> dict[str, str] | None: + # Old format for camera and light was int + unique_id = cast(str | int, entity_entry.unique_id) + if isinstance(unique_id, int): + new_unique_id = str(unique_id) + if existing_entity_id := entity_registry.async_get_entity_id( + entity_entry.domain, entity_entry.platform, new_unique_id + ): + _LOGGER.error( + "Cannot migrate to unique_id '%s', already exists for '%s', " + "You may have to delete unavailable ring entities", + new_unique_id, + existing_entity_id, + ) + return None + _LOGGER.debug("Fixing non string unique id %s", entity_entry.unique_id) + return {"new_unique_id": new_unique_id} + return None + + await er.async_migrate_entries(hass, entry_id, _async_str_unique_id_migrator) + + # Migrate the hardware id hardware_id = str(uuid.uuid4()) hass.config_entries.async_update_entry( entry, @@ -149,4 +152,34 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: _LOGGER.debug( "Migration to version %s.%s complete", entry_version, new_minor_version ) + + entry_minor_version = entry.minor_version + new_minor_version = 3 + if entry_version == 1 and entry_minor_version == 2: + _LOGGER.debug( + "Migrating from version %s.%s", entry_version, entry_minor_version + ) + + @callback + def _async_camera_unique_id_migrator( + entity_entry: er.RegistryEntry, + ) -> dict[str, str] | None: + # Migrate camera unique ids to append -last + if entity_entry.domain == CAMERA_DOMAIN and not isinstance( + cast(str | int, entity_entry.unique_id), int + ): + new_unique_id = f"{entity_entry.unique_id}-last_recording" + return {"new_unique_id": new_unique_id} + return None + + await er.async_migrate_entries(hass, entry_id, _async_camera_unique_id_migrator) + + hass.config_entries.async_update_entry( + entry, + minor_version=new_minor_version, + ) + _LOGGER.debug( + "Migration to version %s.%s complete", entry_version, new_minor_version + ) + return True diff --git a/homeassistant/components/ring/camera.py b/homeassistant/components/ring/camera.py index 9c66df9d89e..ccd91c163d6 100644 --- a/homeassistant/components/ring/camera.py +++ b/homeassistant/components/ring/camera.py @@ -2,24 +2,37 @@ from __future__ import annotations +from collections.abc import Callable +from dataclasses import dataclass from datetime import timedelta import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Generic from aiohttp import web from haffmpeg.camera import CameraMjpeg from ring_doorbell import RingDoorBell +from ring_doorbell.webrtcstream import RingWebRtcMessage from homeassistant.components import ffmpeg -from homeassistant.components.camera import Camera +from homeassistant.components.camera import ( + Camera, + CameraEntityDescription, + CameraEntityFeature, + RTCIceCandidateInit, + WebRTCAnswer, + WebRTCCandidate, + WebRTCError, + WebRTCSendMessage, +) from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client 
import async_aiohttp_proxy_stream from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util from . import RingConfigEntry from .coordinator import RingDataCoordinator -from .entity import RingEntity, exception_wrap +from .entity import RingDeviceT, RingEntity, exception_wrap FORCE_REFRESH_INTERVAL = timedelta(minutes=3) MOTION_DETECTION_CAPABILITY = "motion_detection" @@ -27,6 +40,34 @@ MOTION_DETECTION_CAPABILITY = "motion_detection" _LOGGER = logging.getLogger(__name__) +@dataclass(frozen=True, kw_only=True) +class RingCameraEntityDescription(CameraEntityDescription, Generic[RingDeviceT]): + """Base class for event entity description.""" + + exists_fn: Callable[[RingDoorBell], bool] + live_stream: bool + motion_detection: bool + + +CAMERA_DESCRIPTIONS: tuple[RingCameraEntityDescription, ...] = ( + RingCameraEntityDescription( + key="live_view", + translation_key="live_view", + exists_fn=lambda _: True, + live_stream=True, + motion_detection=False, + ), + RingCameraEntityDescription( + key="last_recording", + translation_key="last_recording", + entity_registry_enabled_default=False, + exists_fn=lambda camera: camera.has_subscription, + live_stream=False, + motion_detection=True, + ), +) + + async def async_setup_entry( hass: HomeAssistant, entry: RingConfigEntry, @@ -38,9 +79,10 @@ async def async_setup_entry( ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) cams = [ - RingCam(camera, devices_coordinator, ffmpeg_manager) + RingCam(camera, devices_coordinator, description, ffmpeg_manager=ffmpeg_manager) + for description in CAMERA_DESCRIPTIONS for camera in ring_data.devices.video_devices - if camera.has_subscription + if description.exists_fn(camera) ] async_add_entities(cams) @@ -49,26 +91,31 @@ async def async_setup_entry( class RingCam(RingEntity[RingDoorBell], Camera): """An implementation of a Ring Door Bell camera.""" - _attr_name = None - def __init__( self, device: RingDoorBell, coordinator: RingDataCoordinator, + description: RingCameraEntityDescription, + *, ffmpeg_manager: ffmpeg.FFmpegManager, ) -> None: """Initialize a Ring Door Bell camera.""" super().__init__(device, coordinator) + self.entity_description = description Camera.__init__(self) self._ffmpeg_manager = ffmpeg_manager self._last_event: dict[str, Any] | None = None self._last_video_id: int | None = None self._video_url: str | None = None - self._image: bytes | None = None + self._images: dict[tuple[int | None, int | None], bytes] = {} self._expires_at = dt_util.utcnow() - FORCE_REFRESH_INTERVAL - self._attr_unique_id = str(device.id) - if device.has_capability(MOTION_DETECTION_CAPABILITY): + self._attr_unique_id = f"{device.id}-{description.key}" + if description.motion_detection and device.has_capability( + MOTION_DETECTION_CAPABILITY + ): self._attr_motion_detection_enabled = device.motion_detection + if description.live_stream: + self._attr_supported_features |= CameraEntityFeature.STREAM @callback def _handle_coordinator_update(self) -> None: @@ -86,7 +133,7 @@ class RingCam(RingEntity[RingDoorBell], Camera): self._last_event = None self._last_video_id = None self._video_url = None - self._image = None + self._images = {} self._expires_at = dt_util.utcnow() self.async_write_ha_state() @@ -102,7 +149,8 @@ class RingCam(RingEntity[RingDoorBell], Camera): self, width: int | None = None, height: int | None = None ) -> bytes | None: """Return a still image response from the camera.""" - if self._image is None and self._video_url is not None: + key = (width, 
height) + if not (image := self._images.get(key)) and self._video_url is not None: image = await ffmpeg.async_get_image( self.hass, self._video_url, @@ -111,9 +159,9 @@ class RingCam(RingEntity[RingDoorBell], Camera): ) if image: - self._image = image + self._images[key] = image - return self._image + return image async def handle_async_mjpeg_stream( self, request: web.Request @@ -136,6 +184,47 @@ class RingCam(RingEntity[RingDoorBell], Camera): finally: await stream.close() + async def async_handle_async_webrtc_offer( + self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage + ) -> None: + """Return the source of the stream.""" + + def message_wrapper(ring_message: RingWebRtcMessage) -> None: + if ring_message.error_code: + msg = ring_message.error_message or "" + send_message(WebRTCError(ring_message.error_code, msg)) + elif ring_message.answer: + send_message(WebRTCAnswer(ring_message.answer)) + elif ring_message.candidate: + send_message( + WebRTCCandidate( + RTCIceCandidateInit( + ring_message.candidate, + sdp_m_line_index=ring_message.sdp_m_line_index or 0, + ) + ) + ) + + return await self._device.generate_async_webrtc_stream( + offer_sdp, session_id, message_wrapper, keep_alive_timeout=None + ) + + async def async_on_webrtc_candidate( + self, session_id: str, candidate: RTCIceCandidateInit + ) -> None: + """Handle a WebRTC candidate.""" + if candidate.sdp_m_line_index is None: + msg = "The sdp_m_line_index is required for ring webrtc streaming" + raise HomeAssistantError(msg) + await self._device.on_webrtc_candidate( + session_id, candidate.candidate, candidate.sdp_m_line_index + ) + + @callback + def close_webrtc_session(self, session_id: str) -> None: + """Close a WebRTC session.""" + self._device.sync_close_webrtc_stream(session_id) + async def async_update(self) -> None: """Update camera entity and refresh attributes.""" if ( @@ -157,7 +246,7 @@ class RingCam(RingEntity[RingDoorBell], Camera): return if self._last_video_id != self._last_event["id"]: - self._image = None + self._images = {} self._video_url = await self._async_get_video() diff --git a/homeassistant/components/ring/const.py b/homeassistant/components/ring/const.py index 9595241ebb1..68ac00d69f6 100644 --- a/homeassistant/components/ring/const.py +++ b/homeassistant/components/ring/const.py @@ -33,4 +33,4 @@ SCAN_INTERVAL = timedelta(minutes=1) CONF_2FA = "2fa" CONF_LISTEN_CREDENTIALS = "listen_token" -CONF_CONFIG_ENTRY_MINOR_VERSION: Final = 2 +CONF_CONFIG_ENTRY_MINOR_VERSION: Final = 3 diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index e431c680081..86758b26794 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -29,6 +29,5 @@ "documentation": "https://www.home-assistant.io/integrations/ring", "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], - "quality_scale": "silver", - "requirements": ["ring-doorbell==0.9.12"] + "requirements": ["ring-doorbell==0.9.13"] } diff --git a/homeassistant/components/ring/strings.json b/homeassistant/components/ring/strings.json index 0887e4112c6..8170ec8e161 100644 --- a/homeassistant/components/ring/strings.json +++ b/homeassistant/components/ring/strings.json @@ -124,6 +124,14 @@ "motion_detection": { "name": "Motion detection" } + }, + "camera": { + "live_view": { + "name": "Live view" + }, + "last_recording": { + "name": "Last recording" + } } }, "issues": { diff --git a/homeassistant/components/ripple/manifest.json 
b/homeassistant/components/ripple/manifest.json index 72df64ac850..17ff6b34f38 100644 --- a/homeassistant/components/ripple/manifest.json +++ b/homeassistant/components/ripple/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ripple", "iot_class": "cloud_polling", "loggers": ["pyripple"], + "quality_scale": "legacy", "requirements": ["python-ripple-api==0.0.3"] } diff --git a/homeassistant/components/risco/config_flow.py b/homeassistant/components/risco/config_flow.py index 8f88c7c30a3..f7365d35414 100644 --- a/homeassistant/components/risco/config_flow.py +++ b/homeassistant/components/risco/config_flow.py @@ -220,7 +220,6 @@ class RiscoOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize.""" - self.config_entry = config_entry self._data = {**DEFAULT_OPTIONS, **config_entry.options} def _options_schema(self) -> vol.Schema: diff --git a/homeassistant/components/risco/manifest.json b/homeassistant/components/risco/manifest.json index 372d8e0c629..c226c1c590d 100644 --- a/homeassistant/components/risco/manifest.json +++ b/homeassistant/components/risco/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/risco", "iot_class": "local_push", "loggers": ["pyrisco"], - "quality_scale": "platinum", "requirements": ["pyrisco==0.6.4"] } diff --git a/homeassistant/components/rituals_perfume_genie/manifest.json b/homeassistant/components/rituals_perfume_genie/manifest.json index 996dd1faecf..114491d9122 100644 --- a/homeassistant/components/rituals_perfume_genie/manifest.json +++ b/homeassistant/components/rituals_perfume_genie/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/rituals_perfume_genie", "iot_class": "cloud_polling", "loggers": ["pyrituals"], - "quality_scale": "silver", "requirements": ["pyrituals==0.0.6"] } diff --git a/homeassistant/components/rituals_perfume_genie/select.py b/homeassistant/components/rituals_perfume_genie/select.py index e93d6ae03ef..27aff70649b 100644 --- a/homeassistant/components/rituals_perfume_genie/select.py +++ b/homeassistant/components/rituals_perfume_genie/select.py @@ -9,7 +9,7 @@ from pyrituals import Diffuser from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.config_entries import ConfigEntry -from homeassistant.const import AREA_SQUARE_METERS, EntityCategory +from homeassistant.const import EntityCategory, UnitOfArea from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -30,7 +30,7 @@ ENTITY_DESCRIPTIONS = ( RitualsSelectEntityDescription( key="room_size_square_meter", translation_key="room_size_square_meter", - unit_of_measurement=AREA_SQUARE_METERS, + unit_of_measurement=UnitOfArea.SQUARE_METERS, entity_category=EntityCategory.CONFIG, options=["15", "30", "60", "100"], current_fn=lambda diffuser: str(diffuser.room_size_square_meter), diff --git a/homeassistant/components/rmvtransport/manifest.json b/homeassistant/components/rmvtransport/manifest.json index 81b650bcdc0..30be5417ff6 100644 --- a/homeassistant/components/rmvtransport/manifest.json +++ b/homeassistant/components/rmvtransport/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/rmvtransport", "iot_class": "cloud_polling", "loggers": ["RMVtransport"], + "quality_scale": "legacy", "requirements": ["PyRMVtransport==0.3.3"] } diff --git 
a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index 06fbf3e717e..200614b024e 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +from copy import deepcopy import logging from typing import Any @@ -24,7 +25,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_USERNAME from homeassistant.core import callback @@ -171,14 +171,18 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlow: + ) -> RoborockOptionsFlowHandler: """Create the options flow.""" return RoborockOptionsFlowHandler(config_entry) -class RoborockOptionsFlowHandler(OptionsFlowWithConfigEntry): +class RoborockOptionsFlowHandler(OptionsFlow): """Handle an option flow for Roborock.""" + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.options = deepcopy(dict(config_entry.options)) + async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/roborock/coordinator.py b/homeassistant/components/roborock/coordinator.py index 20bc50f9855..fe592074f71 100644 --- a/homeassistant/components/roborock/coordinator.py +++ b/homeassistant/components/roborock/coordinator.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio from datetime import timedelta import logging @@ -107,8 +106,12 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): async def _async_update_data(self) -> DeviceProp: """Update data via library.""" try: - await asyncio.gather(*(self._update_device_prop(), self.get_rooms())) + # Update device props and standard api information + await self._update_device_prop() + # Set the new map id from the updated device props self._set_current_map() + # Get the rooms for that map id. + await self.get_rooms() except RoborockException as ex: raise UpdateFailed(ex) from ex return self.roborock_device_info.props diff --git a/homeassistant/components/roborock/select.py b/homeassistant/components/roborock/select.py index 3dfe0e72a7b..73cb95d2d7c 100644 --- a/homeassistant/components/roborock/select.py +++ b/homeassistant/components/roborock/select.py @@ -135,6 +135,9 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): RoborockCommand.LOAD_MULTI_MAP, [map_id], ) + # Update the current map id manually so that nothing gets broken + # if another service hits the api. + self.coordinator.current_map = map_id # We need to wait after updating the map # so that other commands will be executed correctly. await asyncio.sleep(MAP_SLEEP) @@ -148,6 +151,9 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): @property def current_option(self) -> str | None: """Get the current status of the select entity from device_status.""" - if (current_map := self.coordinator.current_map) is not None: + if ( + (current_map := self.coordinator.current_map) is not None + and current_map in self.coordinator.maps + ): # 63 means it is searching for a map. 
return self.coordinator.maps[current_map].name return None diff --git a/homeassistant/components/roborock/sensor.py b/homeassistant/components/roborock/sensor.py index 33ce6be5a68..47849ed5cc5 100644 --- a/homeassistant/components/roborock/sensor.py +++ b/homeassistant/components/roborock/sensor.py @@ -25,12 +25,7 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.const import ( - AREA_SQUARE_METERS, - PERCENTAGE, - EntityCategory, - UnitOfTime, -) +from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfArea, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -131,14 +126,14 @@ SENSOR_DESCRIPTIONS = [ translation_key="cleaning_area", value_fn=lambda data: data.status.square_meter_clean_area, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, ), RoborockSensorDescription( key="total_cleaning_area", translation_key="total_cleaning_area", value_fn=lambda data: data.clean_summary.square_meter_clean_area, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, ), RoborockSensorDescription( key="vacuum_error", diff --git a/homeassistant/components/rocketchat/manifest.json b/homeassistant/components/rocketchat/manifest.json index 50d7579df02..f4f72f02a10 100644 --- a/homeassistant/components/rocketchat/manifest.json +++ b/homeassistant/components/rocketchat/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/rocketchat", "iot_class": "cloud_push", "loggers": ["rocketchat_API"], + "quality_scale": "legacy", "requirements": ["rocketchat-API==0.6.1"] } diff --git a/homeassistant/components/roku/config_flow.py b/homeassistant/components/roku/config_flow.py index 3ece9aff3f2..18e3b3ed68a 100644 --- a/homeassistant/components/roku/config_flow.py +++ b/homeassistant/components/roku/config_flow.py @@ -14,7 +14,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant, callback @@ -165,12 +165,12 @@ class RokuConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> OptionsFlowWithConfigEntry: + ) -> RokuOptionsFlowHandler: """Create the options flow.""" - return RokuOptionsFlowHandler(config_entry) + return RokuOptionsFlowHandler() -class RokuOptionsFlowHandler(OptionsFlowWithConfigEntry): +class RokuOptionsFlowHandler(OptionsFlow): """Handle Roku options.""" async def async_step_init( @@ -186,7 +186,7 @@ class RokuOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_PLAY_MEDIA_APP_ID, - default=self.options.get( + default=self.config_entry.options.get( CONF_PLAY_MEDIA_APP_ID, DEFAULT_PLAY_MEDIA_APP_ID ), ): str, diff --git a/homeassistant/components/roku/manifest.json b/homeassistant/components/roku/manifest.json index fa9823de172..7fe2fb3b686 100644 --- a/homeassistant/components/roku/manifest.json +++ b/homeassistant/components/roku/manifest.json @@ -10,7 +10,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["rokuecp"], - "quality_scale": "silver", "requirements": ["rokuecp==0.19.3"], "ssdp": [ { diff --git 
a/homeassistant/components/romy/sensor.py b/homeassistant/components/romy/sensor.py index bdd486c4f8f..341125b86ba 100644 --- a/homeassistant/components/romy/sensor.py +++ b/homeassistant/components/romy/sensor.py @@ -8,10 +8,10 @@ from homeassistant.components.sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - AREA_SQUARE_METERS, PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfArea, UnitOfLength, UnitOfTime, ) @@ -61,7 +61,7 @@ SENSORS: list[SensorEntityDescription] = [ key="total_area_cleaned", translation_key="total_area_cleaned", state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, entity_category=EntityCategory.DIAGNOSTIC, ), SensorEntityDescription( diff --git a/homeassistant/components/roomba/config_flow.py b/homeassistant/components/roomba/config_flow.py index d0c29faca69..d040074246a 100644 --- a/homeassistant/components/roomba/config_flow.py +++ b/homeassistant/components/roomba/config_flow.py @@ -16,7 +16,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_DELAY, CONF_HOST, CONF_NAME, CONF_PASSWORD from homeassistant.core import HomeAssistant, callback @@ -79,7 +79,7 @@ class RoombaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 name: str | None = None - blid: str | None = None + blid: str host: str | None = None def __init__(self) -> None: @@ -92,7 +92,7 @@ class RoombaConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> RoombaOptionsFlowHandler: """Get the options flow for this handler.""" - return RoombaOptionsFlowHandler(config_entry) + return RoombaOptionsFlowHandler() async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -300,7 +300,7 @@ class RoombaConfigFlow(ConfigFlow, domain=DOMAIN): ) -class RoombaOptionsFlowHandler(OptionsFlowWithConfigEntry): +class RoombaOptionsFlowHandler(OptionsFlow): """Handle options.""" async def async_step_init( @@ -310,17 +310,18 @@ class RoombaOptionsFlowHandler(OptionsFlowWithConfigEntry): if user_input is not None: return self.async_create_entry(title="", data=user_input) + options = self.config_entry.options return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Optional( CONF_CONTINUOUS, - default=self.options.get(CONF_CONTINUOUS, DEFAULT_CONTINUOUS), + default=options.get(CONF_CONTINUOUS, DEFAULT_CONTINUOUS), ): bool, vol.Optional( CONF_DELAY, - default=self.options.get(CONF_DELAY, DEFAULT_DELAY), + default=options.get(CONF_DELAY, DEFAULT_DELAY), ): int, } ), diff --git a/homeassistant/components/roomba/sensor.py b/homeassistant/components/roomba/sensor.py index 87e97fdb760..d358dcb428c 100644 --- a/homeassistant/components/roomba/sensor.py +++ b/homeassistant/components/roomba/sensor.py @@ -12,12 +12,7 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - AREA_SQUARE_METERS, - PERCENTAGE, - EntityCategory, - UnitOfTime, -) +from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfArea, UnitOfTime from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -108,7 +103,7 @@ SENSORS: list[RoombaSensorEntityDescription] = [ RoombaSensorEntityDescription( 
key="total_cleaned_area", translation_key="total_cleaned_area", - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda self: ( None if (sqft := self.run_stats.get("sqft")) is None else sqft * 9.29 diff --git a/homeassistant/components/route53/manifest.json b/homeassistant/components/route53/manifest.json index 6db240bdcab..978c916e3ee 100644 --- a/homeassistant/components/route53/manifest.json +++ b/homeassistant/components/route53/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/route53", "iot_class": "cloud_push", "loggers": ["boto3", "botocore", "s3transfer"], + "quality_scale": "legacy", "requirements": ["boto3==1.34.131"] } diff --git a/homeassistant/components/rpi_camera/manifest.json b/homeassistant/components/rpi_camera/manifest.json index 9f7346ea353..aab16b1c462 100644 --- a/homeassistant/components/rpi_camera/manifest.json +++ b/homeassistant/components/rpi_camera/manifest.json @@ -3,5 +3,6 @@ "name": "Raspberry Pi Camera", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/rpi_camera", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/rtorrent/manifest.json b/homeassistant/components/rtorrent/manifest.json index 96b079c4363..bcd39a03aa3 100644 --- a/homeassistant/components/rtorrent/manifest.json +++ b/homeassistant/components/rtorrent/manifest.json @@ -3,5 +3,6 @@ "name": "rTorrent", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/rtorrent", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/rtsp_to_webrtc/__init__.py b/homeassistant/components/rtsp_to_webrtc/__init__.py index 59b8077e398..0fc257c463f 100644 --- a/homeassistant/components/rtsp_to_webrtc/__init__.py +++ b/homeassistant/components/rtsp_to_webrtc/__init__.py @@ -30,6 +30,7 @@ from homeassistant.components import camera from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_get_clientsession _LOGGER = logging.getLogger(__name__) @@ -40,10 +41,24 @@ DATA_UNSUB = "unsub" TIMEOUT = 10 CONF_STUN_SERVER = "stun_server" +_DEPRECATED = "deprecated" + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up RTSPtoWebRTC from a config entry.""" hass.data.setdefault(DOMAIN, {}) + ir.async_create_issue( + hass, + DOMAIN, + _DEPRECATED, + breaks_in_ha_version="2025.6.0", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + translation_key=_DEPRECATED, + translation_placeholders={ + "go2rtc": "[go2rtc](https://www.home-assistant.io/integrations/go2rtc/)", + }, + ) client: WebRTCClientInterface try: @@ -98,6 +113,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if DOMAIN in hass.data: del hass.data[DOMAIN] + ir.async_delete_issue(hass, DOMAIN, _DEPRECATED) return True diff --git a/homeassistant/components/rtsp_to_webrtc/config_flow.py b/homeassistant/components/rtsp_to_webrtc/config_flow.py index 8c2eac3a4b1..22502659757 100644 --- a/homeassistant/components/rtsp_to_webrtc/config_flow.py +++ 
b/homeassistant/components/rtsp_to_webrtc/config_flow.py @@ -119,16 +119,12 @@ class RTSPToWebRTCConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create an options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """RTSPtoWeb Options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/rtsp_to_webrtc/strings.json b/homeassistant/components/rtsp_to_webrtc/strings.json index e52ab554473..c8dcbb7f462 100644 --- a/homeassistant/components/rtsp_to_webrtc/strings.json +++ b/homeassistant/components/rtsp_to_webrtc/strings.json @@ -24,6 +24,12 @@ "server_unreachable": "[%key:component::rtsp_to_webrtc::config::error::server_unreachable%]" } }, + "issues": { + "deprecated": { + "title": "The RTSPtoWebRTC integration is deprecated", + "description": "The RTSPtoWebRTC integration is deprecated and will be removed. Please use the {go2rtc} integration instead, which is enabled by default and provides a better experience. You only need to remove the RTSPtoWebRTC config entry." + } + }, "options": { "step": { "init": { diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index ba53f6794e3..784629ea0bc 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -11,7 +11,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .const import CONNECT_TIMEOUT, RUSSOUND_RIO_EXCEPTIONS +from .const import CONNECT_TIMEOUT, DOMAIN, RUSSOUND_RIO_EXCEPTIONS PLATFORMS = [Platform.MEDIA_PLAYER] @@ -43,7 +43,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> async with asyncio.timeout(CONNECT_TIMEOUT): await client.connect() except RUSSOUND_RIO_EXCEPTIONS as err: - raise ConfigEntryNotReady(f"Error while connecting to {host}:{port}") from err + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="entry_cannot_connect", + translation_placeholders={ + "host": host, + "port": port, + }, + ) from err entry.runtime_data = client await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/russound_rio/const.py b/homeassistant/components/russound_rio/const.py index 1b38dc8ce5c..af52e89d399 100644 --- a/homeassistant/components/russound_rio/const.py +++ b/homeassistant/components/russound_rio/const.py @@ -17,7 +17,7 @@ RUSSOUND_RIO_EXCEPTIONS = ( ) -CONNECT_TIMEOUT = 5 +CONNECT_TIMEOUT = 15 MP_FEATURES_BY_FLAG = { FeatureFlag.COMMANDS_ZONE_MUTE_OFF_ON: MediaPlayerEntityFeature.VOLUME_MUTE diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py index 23b196ecb2f..9790ff43e68 100644 --- a/homeassistant/components/russound_rio/entity.py +++ b/homeassistant/components/russound_rio/entity.py @@ -26,7 +26,12 @@ def command[_EntityT: RussoundBaseEntity, **_P]( await func(self, *args, **kwargs) except RUSSOUND_RIO_EXCEPTIONS as exc: raise HomeAssistantError( - f"Error executing {func.__name__} on entity {self.entity_id}," + translation_domain=DOMAIN, + translation_key="command_error", + translation_placeholders={ + 
"function_name": func.__name__, + "entity_id": self.entity_id, + }, ) from exc return decorator @@ -91,6 +96,4 @@ class RussoundBaseEntity(Entity): async def async_will_remove_from_hass(self) -> None: """Remove callbacks.""" - await self._client.unregister_state_update_callbacks( - self._state_update_callback - ) + self._client.unregister_state_update_callbacks(self._state_update_callback) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 96fc0fb53db..2cd153c232c 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], - "quality_scale": "silver", - "requirements": ["aiorussound==4.0.5"] + "requirements": ["aiorussound==4.1.0"] } diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 316e4d2be7c..45818d3e25b 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from aiorussound import Controller -from aiorussound.models import Source +from aiorussound.models import PlayStatus, Source from aiorussound.rio import ZoneControlSurface from homeassistant.components.media_player import ( @@ -132,11 +132,18 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): def state(self) -> MediaPlayerState | None: """Return the state of the device.""" status = self._zone.status - if status == "ON": - return MediaPlayerState.ON - if status == "OFF": + play_status = self._source.play_status + if not status: return MediaPlayerState.OFF - return None + if play_status == PlayStatus.PLAYING: + return MediaPlayerState.PLAYING + if play_status == PlayStatus.PAUSED: + return MediaPlayerState.PAUSED + if play_status == PlayStatus.TRANSITIONING: + return MediaPlayerState.BUFFERING + if play_status == PlayStatus.STOPPED: + return MediaPlayerState.IDLE + return MediaPlayerState.ON @property def source(self): @@ -175,7 +182,7 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): Value is returned based on a range (0..50). Therefore float divide by 50 to get to the required range. 
""" - return float(self._zone.volume or "0") / 50.0 + return self._zone.volume / 50.0 @command async def async_turn_off(self) -> None: diff --git a/homeassistant/components/russound_rio/strings.json b/homeassistant/components/russound_rio/strings.json index c105dcafae2..b8c29c08301 100644 --- a/homeassistant/components/russound_rio/strings.json +++ b/homeassistant/components/russound_rio/strings.json @@ -33,5 +33,13 @@ "title": "[%key:component::russound_rio::issues::deprecated_yaml_import_issue_cannot_connect::title%]", "description": "[%key:component::russound_rio::issues::deprecated_yaml_import_issue_cannot_connect::description%]" } + }, + "exceptions": { + "entry_cannot_connect": { + "message": "Error while connecting to {host}:{port}" + }, + "command_error": { + "message": "Error executing {function_name} on entity {entity_id}" + } } } diff --git a/homeassistant/components/russound_rnet/manifest.json b/homeassistant/components/russound_rnet/manifest.json index 90bf5d5a7f3..27fbfbca57f 100644 --- a/homeassistant/components/russound_rnet/manifest.json +++ b/homeassistant/components/russound_rnet/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rnet", "iot_class": "local_polling", "loggers": ["russound"], + "quality_scale": "legacy", "requirements": ["russound==0.2.0"] } diff --git a/homeassistant/components/sabnzbd/__init__.py b/homeassistant/components/sabnzbd/__init__.py index a827e9a36a4..cf2eb5d0a7d 100644 --- a/homeassistant/components/sabnzbd/__init__.py +++ b/homeassistant/components/sabnzbd/__init__.py @@ -8,40 +8,26 @@ from typing import Any import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry, ConfigEntryState -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_NAME, - CONF_PORT, - CONF_SENSORS, - CONF_SSL, - Platform, -) +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError -from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers import config_validation as cv +import homeassistant.helpers.issue_registry as ir from .const import ( ATTR_API_KEY, ATTR_SPEED, - DEFAULT_HOST, - DEFAULT_NAME, - DEFAULT_PORT, DEFAULT_SPEED_LIMIT, - DEFAULT_SSL, DOMAIN, SERVICE_PAUSE, SERVICE_RESUME, SERVICE_SET_SPEED, ) from .coordinator import SabnzbdUpdateCoordinator -from .sab import get_client -from .sensor import OLD_SENSOR_KEYS +from .helpers import get_client -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.NUMBER, Platform.SENSOR] _LOGGER = logging.getLogger(__name__) SERVICES = ( @@ -62,121 +48,33 @@ SERVICE_SPEED_SCHEMA = SERVICE_BASE_SCHEMA.extend( } ) -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - vol.All( - cv.deprecated(CONF_HOST), - cv.deprecated(CONF_PORT), - cv.deprecated(CONF_SENSORS), - cv.deprecated(CONF_SSL), - { - vol.Required(CONF_API_KEY): str, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): str, - vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - vol.Optional(CONF_SENSORS): vol.All( - cv.ensure_list, [vol.In(OLD_SENSOR_KEYS)] - ), - 
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, - }, - ) - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the SABnzbd component.""" - hass.data.setdefault(DOMAIN, {}) - - if hass.config_entries.async_entries(DOMAIN): - return True - - if DOMAIN in config: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config[DOMAIN], - ) - ) - - return True +type SabnzbdConfigEntry = ConfigEntry[SabnzbdUpdateCoordinator] @callback -def async_get_entry_id_for_service_call(hass: HomeAssistant, call: ServiceCall) -> str: +def async_get_entry_for_service_call( + hass: HomeAssistant, call: ServiceCall +) -> SabnzbdConfigEntry: """Get the entry ID related to a service call (by device ID).""" call_data_api_key = call.data[ATTR_API_KEY] for entry in hass.config_entries.async_entries(DOMAIN): if entry.data[ATTR_API_KEY] == call_data_api_key: - return entry.entry_id + return entry raise ValueError(f"No api for API key: {call_data_api_key}") -def update_device_identifiers(hass: HomeAssistant, entry: ConfigEntry): - """Update device identifiers to new identifiers.""" - device_registry = dr.async_get(hass) - device_entry = device_registry.async_get_device(identifiers={(DOMAIN, DOMAIN)}) - if device_entry and entry.entry_id in device_entry.config_entries: - new_identifiers = {(DOMAIN, entry.entry_id)} - _LOGGER.debug( - "Updating device id <%s> with new identifiers <%s>", - device_entry.id, - new_identifiers, - ) - device_registry.async_update_device( - device_entry.id, new_identifiers=new_identifiers - ) - - -async def migrate_unique_id(hass: HomeAssistant, entry: ConfigEntry): - """Migrate entities to new unique ids (with entry_id).""" - - @callback - def async_migrate_callback(entity_entry: RegistryEntry) -> dict | None: - """Define a callback to migrate appropriate SabnzbdSensor entities to new unique IDs. 
- - Old: description.key - New: {entry_id}_description.key - """ - entry_id = entity_entry.config_entry_id - if entry_id is None: - return None - if entity_entry.unique_id.startswith(entry_id): - return None - - new_unique_id = f"{entry_id}_{entity_entry.unique_id}" - - _LOGGER.debug( - "Migrating entity %s from old unique ID '%s' to new unique ID '%s'", - entity_entry.entity_id, - entity_entry.unique_id, - new_unique_id, - ) - - return {"new_unique_id": new_unique_id} - - await async_migrate_entries(hass, entry.entry_id, async_migrate_callback) - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> bool: """Set up the SabNzbd Component.""" sab_api = await get_client(hass, entry.data) if not sab_api: raise ConfigEntryNotReady - await migrate_unique_id(hass, entry) - update_device_identifiers(hass, entry) - - coordinator = SabnzbdUpdateCoordinator(hass, sab_api) + coordinator = SabnzbdUpdateCoordinator(hass, entry, sab_api) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator @callback def extract_api( @@ -188,8 +86,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def wrapper(call: ServiceCall) -> None: """Wrap the service function.""" - entry_id = async_get_entry_id_for_service_call(hass, call) - coordinator: SabnzbdUpdateCoordinator = hass.data[DOMAIN][entry_id] + config_entry = async_get_entry_for_service_call(hass, call) + coordinator = config_entry.runtime_data try: await func(call, coordinator) @@ -204,18 +102,45 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_pause_queue( call: ServiceCall, coordinator: SabnzbdUpdateCoordinator ) -> None: + ir.async_create_issue( + hass, + DOMAIN, + "pause_action_deprecated", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + breaks_in_ha_version="2025.6", + translation_key="pause_action_deprecated", + ) await coordinator.sab_api.pause_queue() @extract_api async def async_resume_queue( call: ServiceCall, coordinator: SabnzbdUpdateCoordinator ) -> None: + ir.async_create_issue( + hass, + DOMAIN, + "resume_action_deprecated", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + breaks_in_ha_version="2025.6", + translation_key="resume_action_deprecated", + ) await coordinator.sab_api.resume_queue() @extract_api async def async_set_queue_speed( call: ServiceCall, coordinator: SabnzbdUpdateCoordinator ) -> None: + ir.async_create_issue( + hass, + DOMAIN, + "set_speed_action_deprecated", + is_fixable=False, + severity=ir.IssueSeverity.WARNING, + breaks_in_ha_version="2025.6", + translation_key="set_speed_action_deprecated", + ) speed = call.data.get(ATTR_SPEED) await coordinator.sab_api.set_speed_limit(speed) @@ -234,11 +159,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SabnzbdConfigEntry) -> bool: """Unload a Sabnzbd config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) loaded_entries = [ entry diff --git a/homeassistant/components/sabnzbd/binary_sensor.py b/homeassistant/components/sabnzbd/binary_sensor.py new file mode 100644 index 00000000000..8b1b1c37c89 --- 
/dev/null +++ b/homeassistant/components/sabnzbd/binary_sensor.py @@ -0,0 +1,61 @@ +"""Binary sensor platform for SABnzbd.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SabnzbdConfigEntry +from .entity import SabnzbdEntity + + +@dataclass(frozen=True, kw_only=True) +class SabnzbdBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Sabnzbd binary sensor entity.""" + + is_on_fn: Callable[[dict[str, Any]], bool] + + +BINARY_SENSORS: tuple[SabnzbdBinarySensorEntityDescription, ...] = ( + SabnzbdBinarySensorEntityDescription( + key="warnings", + translation_key="warnings", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on_fn=lambda data: data["have_warnings"] != "0", + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: SabnzbdConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up a Sabnzbd sensor entry.""" + coordinator = config_entry.runtime_data + + async_add_entities( + [SabnzbdBinarySensor(coordinator, sensor) for sensor in BINARY_SENSORS] + ) + + +class SabnzbdBinarySensor(SabnzbdEntity, BinarySensorEntity): + """Representation of an SABnzbd binary sensor.""" + + entity_description: SabnzbdBinarySensorEntityDescription + + @property + def is_on(self) -> bool: + """Return latest sensor data.""" + return self.entity_description.is_on_fn(self.coordinator.data) diff --git a/homeassistant/components/sabnzbd/button.py b/homeassistant/components/sabnzbd/button.py new file mode 100644 index 00000000000..79038e84775 --- /dev/null +++ b/homeassistant/components/sabnzbd/button.py @@ -0,0 +1,69 @@ +"""Button platform for the SABnzbd component.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from pysabnzbd import SabnzbdApiException + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SabnzbdConfigEntry +from .const import DOMAIN +from .coordinator import SabnzbdUpdateCoordinator +from .entity import SabnzbdEntity + + +@dataclass(kw_only=True, frozen=True) +class SabnzbdButtonEntityDescription(ButtonEntityDescription): + """Describes SABnzbd button entity.""" + + press_fn: Callable[[SabnzbdUpdateCoordinator], Any] + + +BUTTON_DESCRIPTIONS: tuple[SabnzbdButtonEntityDescription, ...] 
= ( + SabnzbdButtonEntityDescription( + key="pause", + translation_key="pause", + press_fn=lambda coordinator: coordinator.sab_api.pause_queue(), + ), + SabnzbdButtonEntityDescription( + key="resume", + translation_key="resume", + press_fn=lambda coordinator: coordinator.sab_api.resume_queue(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SabnzbdConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up buttons from a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + SabnzbdButton(coordinator, description) for description in BUTTON_DESCRIPTIONS + ) + + +class SabnzbdButton(SabnzbdEntity, ButtonEntity): + """Representation of a SABnzbd button.""" + + entity_description: SabnzbdButtonEntityDescription + + async def async_press(self) -> None: + """Handle the button press.""" + try: + await self.entity_description.press_fn(self.coordinator) + except SabnzbdApiException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/sabnzbd/config_flow.py b/homeassistant/components/sabnzbd/config_flow.py index 2637659e91a..ce9b0a13b18 100644 --- a/homeassistant/components/sabnzbd/config_flow.py +++ b/homeassistant/components/sabnzbd/config_flow.py @@ -6,27 +6,38 @@ import logging from typing import Any import voluptuous as vol +import yarl -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_NAME, - CONF_PORT, - CONF_SSL, - CONF_URL, +from homeassistant.config_entries import ( + SOURCE_RECONFIGURE, + ConfigFlow, + ConfigFlowResult, ) +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) +from homeassistant.util import slugify -from .const import DEFAULT_NAME, DOMAIN -from .sab import get_client +from .const import DOMAIN +from .helpers import get_client _LOGGER = logging.getLogger(__name__) USER_SCHEMA = vol.Schema( { - vol.Required(CONF_API_KEY): str, - vol.Optional(CONF_NAME, default=DEFAULT_NAME): str, - vol.Required(CONF_URL): str, + vol.Required(CONF_URL): TextSelector( + TextSelectorConfig( + type=TextSelectorType.URL, + ) + ), + vol.Required(CONF_API_KEY): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + ) + ), } ) @@ -36,39 +47,47 @@ class SABnzbdConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def _async_validate_input(self, user_input): - """Validate the user input allows us to connect.""" - errors = {} - sab_api = await get_client(self.hass, user_input) - if not sab_api: - errors["base"] = "cannot_connect" - - return errors + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration flow.""" + return await self.async_step_user(user_input) async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - errors = {} - if user_input is not None: - errors = await self._async_validate_input(user_input) - if not errors: + if user_input is not None: + sab_api = await get_client(self.hass, user_input) + if not sab_api: + errors["base"] = "cannot_connect" + else: + self._async_abort_entries_match( + { + CONF_URL: user_input[CONF_URL], + CONF_API_KEY: user_input[CONF_API_KEY], + } + ) + + 
if self.source == SOURCE_RECONFIGURE: + return self.async_update_reload_and_abort( + self._get_reconfigure_entry(), data_updates=user_input + ) + + parsed_url = yarl.URL(user_input[CONF_URL]) return self.async_create_entry( - title=user_input[CONF_API_KEY][:12], data=user_input + title=slugify(parsed_url.host), data=user_input ) return self.async_show_form( step_id="user", - data_schema=USER_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + USER_SCHEMA, + self._get_reconfigure_entry().data + if self.source == SOURCE_RECONFIGURE + else user_input, + ), errors=errors, ) - - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import sabnzbd config from configuration.yaml.""" - protocol = "https://" if import_data[CONF_SSL] else "http://" - import_data[CONF_URL] = ( - f"{protocol}{import_data[CONF_HOST]}:{import_data[CONF_PORT]}" - ) - return await self.async_step_user(import_data) diff --git a/homeassistant/components/sabnzbd/const.py b/homeassistant/components/sabnzbd/const.py index 55346509133..991490f5716 100644 --- a/homeassistant/components/sabnzbd/const.py +++ b/homeassistant/components/sabnzbd/const.py @@ -7,7 +7,6 @@ ATTR_SPEED = "speed" ATTR_API_KEY = "api_key" DEFAULT_HOST = "localhost" -DEFAULT_NAME = "SABnzbd" DEFAULT_PORT = 8080 DEFAULT_SPEED_LIMIT = "100" DEFAULT_SSL = False diff --git a/homeassistant/components/sabnzbd/coordinator.py b/homeassistant/components/sabnzbd/coordinator.py index 5db59bb584b..14f7c18e38c 100644 --- a/homeassistant/components/sabnzbd/coordinator.py +++ b/homeassistant/components/sabnzbd/coordinator.py @@ -6,6 +6,7 @@ from typing import Any from pysabnzbd import SabnzbdApi, SabnzbdApiException +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -15,9 +16,12 @@ _LOGGER = logging.getLogger(__name__) class SabnzbdUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """The SABnzbd update coordinator.""" + config_entry: ConfigEntry + def __init__( self, hass: HomeAssistant, + config_entry: ConfigEntry, sab_api: SabnzbdApi, ) -> None: """Initialize the SABnzbd update coordinator.""" @@ -26,6 +30,7 @@ class SabnzbdUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): super().__init__( hass, _LOGGER, + config_entry=config_entry, name="SABnzbd", update_interval=timedelta(seconds=30), ) diff --git a/homeassistant/components/sabnzbd/entity.py b/homeassistant/components/sabnzbd/entity.py new file mode 100644 index 00000000000..60a2eb8d251 --- /dev/null +++ b/homeassistant/components/sabnzbd/entity.py @@ -0,0 +1,33 @@ +"""Base entity for Sabnzbd.""" + +from homeassistant.const import CONF_URL +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import SabnzbdUpdateCoordinator + + +class SabnzbdEntity(CoordinatorEntity[SabnzbdUpdateCoordinator]): + """Defines a base Sabnzbd entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: SabnzbdUpdateCoordinator, + description: EntityDescription, + ) -> None: + """Initialize the base entity.""" + super().__init__(coordinator) + + entry_id = coordinator.config_entry.entry_id + self._attr_unique_id = f"{entry_id}_{description.key}" + self.entity_description = description + self._attr_device_info = 
DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, entry_id)}, + sw_version=coordinator.data["version"], + configuration_url=coordinator.config_entry.data[CONF_URL], + ) diff --git a/homeassistant/components/sabnzbd/sab.py b/homeassistant/components/sabnzbd/helpers.py similarity index 100% rename from homeassistant/components/sabnzbd/sab.py rename to homeassistant/components/sabnzbd/helpers.py diff --git a/homeassistant/components/sabnzbd/icons.json b/homeassistant/components/sabnzbd/icons.json index ca4f4d584ae..b0a72040b4b 100644 --- a/homeassistant/components/sabnzbd/icons.json +++ b/homeassistant/components/sabnzbd/icons.json @@ -1,4 +1,19 @@ { + "entity": { + "button": { + "pause": { + "default": "mdi:pause" + }, + "resume": { + "default": "mdi:play" + } + }, + "number": { + "speedlimit": { + "default": "mdi:speedometer" + } + } + }, "services": { "pause": { "service": "mdi:pause" diff --git a/homeassistant/components/sabnzbd/number.py b/homeassistant/components/sabnzbd/number.py new file mode 100644 index 00000000000..d8536cb6b37 --- /dev/null +++ b/homeassistant/components/sabnzbd/number.py @@ -0,0 +1,82 @@ +"""Number entities for the SABnzbd integration.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +from pysabnzbd import SabnzbdApiException + +from homeassistant.components.number import ( + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.const import PERCENTAGE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SabnzbdConfigEntry +from .const import DOMAIN +from .coordinator import SabnzbdUpdateCoordinator +from .entity import SabnzbdEntity + + +@dataclass(frozen=True, kw_only=True) +class SabnzbdNumberEntityDescription(NumberEntityDescription): + """Class describing a SABnzbd number entities.""" + + set_fn: Callable[[SabnzbdUpdateCoordinator, float], Awaitable] + + +NUMBER_DESCRIPTIONS: tuple[SabnzbdNumberEntityDescription, ...] 
= ( + SabnzbdNumberEntityDescription( + key="speedlimit", + translation_key="speedlimit", + mode=NumberMode.BOX, + native_max_value=100, + native_min_value=0, + native_step=1, + native_unit_of_measurement=PERCENTAGE, + set_fn=lambda coordinator, speed: ( + coordinator.sab_api.set_speed_limit(int(speed)) + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: SabnzbdConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the SABnzbd number entity.""" + coordinator = config_entry.runtime_data + + async_add_entities( + SabnzbdNumber(coordinator, description) for description in NUMBER_DESCRIPTIONS + ) + + +class SabnzbdNumber(SabnzbdEntity, NumberEntity): + """Representation of a SABnzbd number.""" + + entity_description: SabnzbdNumberEntityDescription + + @property + def native_value(self) -> float: + """Return latest value for number.""" + return self.coordinator.data[self.entity_description.key] + + async def async_set_native_value(self, value: float) -> None: + """Set the new number value.""" + try: + await self.entity_description.set_fn(self.coordinator, value) + except SabnzbdApiException as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="service_call_exception", + ) from e + else: + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/sabnzbd/sensor.py b/homeassistant/components/sabnzbd/sensor.py index d956d06f1ac..115b9de3793 100644 --- a/homeassistant/components/sabnzbd/sensor.py +++ b/homeassistant/components/sabnzbd/sensor.py @@ -10,16 +10,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfDataRate, UnitOfInformation from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import DOMAIN, SabnzbdUpdateCoordinator -from .const import DEFAULT_NAME +from . import SabnzbdConfigEntry +from .entity import SabnzbdEntity @dataclass(frozen=True, kw_only=True) @@ -114,59 +111,22 @@ SENSOR_TYPES: tuple[SabnzbdSensorEntityDescription, ...] 
= ( ), ) -OLD_SENSOR_KEYS = [ - "current_status", - "speed", - "queue_size", - "queue_remaining", - "disk_size", - "disk_free", - "queue_count", - "day_size", - "week_size", - "month_size", - "total_size", -] - async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SabnzbdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Sabnzbd sensor entry.""" + coordinator = config_entry.runtime_data - entry_id = config_entry.entry_id - coordinator: SabnzbdUpdateCoordinator = hass.data[DOMAIN][entry_id] - - async_add_entities( - [SabnzbdSensor(coordinator, sensor, entry_id) for sensor in SENSOR_TYPES] - ) + async_add_entities([SabnzbdSensor(coordinator, sensor) for sensor in SENSOR_TYPES]) -class SabnzbdSensor(CoordinatorEntity[SabnzbdUpdateCoordinator], SensorEntity): +class SabnzbdSensor(SabnzbdEntity, SensorEntity): """Representation of an SABnzbd sensor.""" entity_description: SabnzbdSensorEntityDescription - _attr_should_poll = False - _attr_has_entity_name = True - - def __init__( - self, - coordinator: SabnzbdUpdateCoordinator, - description: SabnzbdSensorEntityDescription, - entry_id, - ) -> None: - """Initialize the sensor.""" - super().__init__(coordinator) - - self._attr_unique_id = f"{entry_id}_{description.key}" - self.entity_description = description - self._attr_device_info = DeviceInfo( - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, entry_id)}, - name=DEFAULT_NAME, - ) @property def native_value(self) -> StateType: diff --git a/homeassistant/components/sabnzbd/strings.json b/homeassistant/components/sabnzbd/strings.json index 5b7312e3b0d..0ac8b93c57f 100644 --- a/homeassistant/components/sabnzbd/strings.json +++ b/homeassistant/components/sabnzbd/strings.json @@ -4,20 +4,42 @@ "user": { "data": { "api_key": "[%key:common::config_flow::data::api_key%]", - "name": "[%key:common::config_flow::data::name%]", "url": "[%key:common::config_flow::data::url%]" }, "data_description": { - "url": "The full URL, including port, of the SABnzbd server. Example: `http://localhost:8080` or `http://a02368d7-sabnzbd:8080`" + "url": "The full URL, including port, of the SABnzbd server. Example: `http://localhost:8080` or `http://a02368d7-sabnzbd:8080`, if you are using the add-on.", + "api_key": "The API key of the SABnzbd server. This can be found in the SABnzbd web interface under Config cog (top right) > General > Security." } } }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "entity": { + "binary_sensor": { + "warnings": { + "name": "Warnings" + } + }, + "button": { + "pause": { + "name": "[%key:common::action::pause%]" + }, + "resume": { + "name": "[%key:component::sabnzbd::services::resume::name%]" + } + }, + "number": { + "speedlimit": { + "name": "Speedlimit" + } + }, "sensor": { "status": { "name": "Status" @@ -89,5 +111,24 @@ } } } + }, + "issues": { + "pause_action_deprecated": { + "title": "SABnzbd pause action deprecated", + "description": "The 'Pause' action is deprecated and will be removed in a future version. Please use the 'Pause' button instead. To remove this issue, please adjust automations accordingly and restart Home Assistant." 
+ }, + "resume_action_deprecated": { + "title": "SABnzbd resume action deprecated", + "description": "The 'Resume' action is deprecated and will be removed in a future version. Please use the 'Resume' button instead. To remove this issue, please adjust automations accordingly and restart Home Assistant." + }, + "set_speed_action_deprecated": { + "title": "SABnzbd set_speed action deprecated", + "description": "The 'Set speed' action is deprecated and will be removed in a future version. Please use the 'Speedlimit' number entity instead. To remove this issue, please adjust automations accordingly and restart Home Assistant." + } + }, + "exceptions": { + "service_call_exception": { + "message": "Unable to send command to SABnzbd due to a connection error, try again later" + } } } diff --git a/homeassistant/components/saj/manifest.json b/homeassistant/components/saj/manifest.json index e882c9f0d02..2a4243f7489 100644 --- a/homeassistant/components/saj/manifest.json +++ b/homeassistant/components/saj/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/saj", "iot_class": "local_polling", "loggers": ["pysaj"], + "quality_scale": "legacy", "requirements": ["pysaj==0.0.16"] } diff --git a/homeassistant/components/samsungtv/manifest.json b/homeassistant/components/samsungtv/manifest.json index bc4ba900028..041e9b8fe9b 100644 --- a/homeassistant/components/samsungtv/manifest.json +++ b/homeassistant/components/samsungtv/manifest.json @@ -37,7 +37,7 @@ "requirements": [ "getmac==0.9.4", "samsungctl[websocket]==0.7.1", - "samsungtvws[async,encrypted]==2.6.0", + "samsungtvws[async,encrypted]==2.7.1", "wakeonlan==2.1.0", "async-upnp-client==0.41.0" ], diff --git a/homeassistant/components/satel_integra/manifest.json b/homeassistant/components/satel_integra/manifest.json index 828261aa466..a90ea1db5a5 100644 --- a/homeassistant/components/satel_integra/manifest.json +++ b/homeassistant/components/satel_integra/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/satel_integra", "iot_class": "local_push", "loggers": ["satel_integra"], + "quality_scale": "legacy", "requirements": ["satel-integra==0.3.7"] } diff --git a/homeassistant/components/schlage/manifest.json b/homeassistant/components/schlage/manifest.json index 5619cf7b312..61cc2a3c63d 100644 --- a/homeassistant/components/schlage/manifest.json +++ b/homeassistant/components/schlage/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/schlage", "iot_class": "cloud_polling", - "requirements": ["pyschlage==2024.8.0"] + "requirements": ["pyschlage==2024.11.0"] } diff --git a/homeassistant/components/schluter/manifest.json b/homeassistant/components/schluter/manifest.json index e96058cc146..0302ce09440 100644 --- a/homeassistant/components/schluter/manifest.json +++ b/homeassistant/components/schluter/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/schluter", "iot_class": "cloud_polling", "loggers": ["schluter"], + "quality_scale": "legacy", "requirements": ["py-schluter==0.1.7"] } diff --git a/homeassistant/components/screenlogic/config_flow.py b/homeassistant/components/screenlogic/config_flow.py index 4a46756cf2f..19db89dc03d 100644 --- a/homeassistant/components/screenlogic/config_flow.py +++ b/homeassistant/components/screenlogic/config_flow.py @@ -81,7 +81,7 @@ class ScreenlogicConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> 
ScreenLogicOptionsFlowHandler: """Get the options flow for ScreenLogic.""" - return ScreenLogicOptionsFlowHandler(config_entry) + return ScreenLogicOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -192,10 +192,6 @@ class ScreenlogicConfigFlow(ConfigFlow, domain=DOMAIN): class ScreenLogicOptionsFlowHandler(OptionsFlow): """Handles the options for the ScreenLogic integration.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init the screen logic options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: diff --git a/homeassistant/components/scsgate/manifest.json b/homeassistant/components/scsgate/manifest.json index 3f20762cf73..a3b08f86719 100644 --- a/homeassistant/components/scsgate/manifest.json +++ b/homeassistant/components/scsgate/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/scsgate", "iot_class": "local_polling", "loggers": ["scsgate"], + "quality_scale": "legacy", "requirements": ["scsgate==0.1.0"] } diff --git a/homeassistant/components/sendgrid/manifest.json b/homeassistant/components/sendgrid/manifest.json index c38952e1a04..ec89ae0a363 100644 --- a/homeassistant/components/sendgrid/manifest.json +++ b/homeassistant/components/sendgrid/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sendgrid", "iot_class": "cloud_push", "loggers": ["sendgrid"], + "quality_scale": "legacy", "requirements": ["sendgrid==6.8.2"] } diff --git a/homeassistant/components/sensibo/manifest.json b/homeassistant/components/sensibo/manifest.json index 610695aaf7b..e6398c5076e 100644 --- a/homeassistant/components/sensibo/manifest.json +++ b/homeassistant/components/sensibo/manifest.json @@ -14,6 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["pysensibo"], - "quality_scale": "platinum", "requirements": ["pysensibo==1.1.0"] } diff --git a/homeassistant/components/sensor/__init__.py b/homeassistant/components/sensor/__init__.py index 31626b0b761..a0220c23d9d 100644 --- a/homeassistant/components/sensor/__init__.py +++ b/homeassistant/components/sensor/__init__.py @@ -531,7 +531,20 @@ class SensorEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ): return self.hass.config.units.temperature_unit - # Fourth priority: Native unit + # Fourth priority: Unit translation + if (translation_key := self._unit_of_measurement_translation_key) and ( + unit_of_measurement + := self.platform.default_language_platform_translations.get(translation_key) + ): + if native_unit_of_measurement is not None: + raise ValueError( + f"Sensor {type(self)} from integration '{self.platform.platform_name}' " + f"has a translation key for unit_of_measurement '{unit_of_measurement}', " + f"but also has a native_unit_of_measurement '{native_unit_of_measurement}'" + ) + return unit_of_measurement + + # Lowest priority: Native unit return native_unit_of_measurement @final diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index da0b48a23a0..87012c3631a 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -17,6 +17,8 @@ from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, + UnitOfArea, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -46,7 +48,9 @@ from 
homeassistant.helpers.deprecation import ( dir_with_deprecated_constants, ) from homeassistant.util.unit_conversion import ( + AreaConverter, BaseUnitConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -115,6 +119,12 @@ class SensorDeviceClass(StrEnum): Unit of measurement: `None` """ + AREA = "area" + """Area + + Unit of measurement: `UnitOfArea` units + """ + ATMOSPHERIC_PRESSURE = "atmospheric_pressure" """Atmospheric pressure. @@ -127,6 +137,12 @@ class SensorDeviceClass(StrEnum): Unit of measurement: `%` """ + BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" + """Blood glucose concentration. + + Unit of measurement: `mg/dL`, `mmol/L` + """ + CO = "carbon_monoxide" """Carbon Monoxide gas concentration. @@ -182,7 +198,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring energy consumption, for example electric energy consumption. - Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `Wh`, `kWh`, `MWh`, `cal`, `kcal`, `Mcal`, `Gcal` + Unit of measurement: `J`, `kJ`, `MJ`, `GJ`, `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `cal`, `kcal`, `Mcal`, `Gcal` """ ENERGY_STORAGE = "energy_storage" @@ -191,7 +207,7 @@ class SensorDeviceClass(StrEnum): Use this device class for sensors measuring stored energy, for example the amount of electric energy currently stored in a battery or the capacity of a battery. - Unit of measurement: `Wh`, `kWh`, `MWh`, `MJ`, `GJ` + Unit of measurement: `Wh`, `kWh`, `MWh`, `GWh`, `TWh`, `MJ`, `GJ` """ FREQUENCY = "frequency" @@ -299,7 +315,7 @@ class SensorDeviceClass(StrEnum): POWER = "power" """Power. - Unit of measurement: `W`, `kW` + Unit of measurement: `W`, `kW`, `MW`, `GW`, `TW` """ PRECIPITATION = "precipitation" @@ -383,7 +399,7 @@ class SensorDeviceClass(StrEnum): VOLTAGE = "voltage" """Voltage. 
-    Unit of measurement: `V`, `mV`
+    Unit of measurement: `V`, `mV`, `µV`
     """

     VOLUME = "volume"
@@ -411,7 +427,7 @@ class SensorDeviceClass(StrEnum):
     """Generic flow rate

     Unit of measurement: UnitOfVolumeFlowRate
-    - SI / metric: `m³/h`, `L/min`
+    - SI / metric: `m³/h`, `L/min`, `mL/s`
     - USCS / imperial: `ft³/min`, `gal/min`
     """

@@ -492,7 +508,9 @@ _DEPRECATED_STATE_CLASS_TOTAL_INCREASING: Final = DeprecatedConstantEnum(
 STATE_CLASSES: Final[list[str]] = [cls.value for cls in SensorStateClass]

 UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] = {
+    SensorDeviceClass.AREA: AreaConverter,
     SensorDeviceClass.ATMOSPHERIC_PRESSURE: PressureConverter,
+    SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: BloodGlucoseConcentrationConverter,
     SensorDeviceClass.CONDUCTIVITY: ConductivityConverter,
     SensorDeviceClass.CURRENT: ElectricCurrentConverter,
     SensorDeviceClass.DATA_RATE: DataRateConverter,
@@ -522,8 +540,10 @@ UNIT_CONVERTERS: dict[SensorDeviceClass | str | None, type[BaseUnitConverter]] =
 DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
     SensorDeviceClass.APPARENT_POWER: set(UnitOfApparentPower),
     SensorDeviceClass.AQI: {None},
+    SensorDeviceClass.AREA: set(UnitOfArea),
     SensorDeviceClass.ATMOSPHERIC_PRESSURE: set(UnitOfPressure),
     SensorDeviceClass.BATTERY: {PERCENTAGE},
+    SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: set(UnitOfBloodGlucoseConcentration),
     SensorDeviceClass.CO: {CONCENTRATION_PARTS_PER_MILLION},
     SensorDeviceClass.CO2: {CONCENTRATION_PARTS_PER_MILLION},
     SensorDeviceClass.CONDUCTIVITY: set(UnitOfConductivity),
@@ -597,8 +617,10 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
 DEVICE_CLASS_STATE_CLASSES: dict[SensorDeviceClass, set[SensorStateClass]] = {
     SensorDeviceClass.APPARENT_POWER: {SensorStateClass.MEASUREMENT},
     SensorDeviceClass.AQI: {SensorStateClass.MEASUREMENT},
+    SensorDeviceClass.AREA: set(SensorStateClass),
     SensorDeviceClass.ATMOSPHERIC_PRESSURE: {SensorStateClass.MEASUREMENT},
     SensorDeviceClass.BATTERY: {SensorStateClass.MEASUREMENT},
+    SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: {SensorStateClass.MEASUREMENT},
     SensorDeviceClass.CO: {SensorStateClass.MEASUREMENT},
     SensorDeviceClass.CO2: {SensorStateClass.MEASUREMENT},
     SensorDeviceClass.CONDUCTIVITY: {SensorStateClass.MEASUREMENT},
diff --git a/homeassistant/components/sensor/device_condition.py b/homeassistant/components/sensor/device_condition.py
index f2b51899312..fc25dce18fc 100644
--- a/homeassistant/components/sensor/device_condition.py
+++ b/homeassistant/components/sensor/device_condition.py
@@ -35,8 +35,10 @@ DEVICE_CLASS_NONE = "none"

 CONF_IS_APPARENT_POWER = "is_apparent_power"
 CONF_IS_AQI = "is_aqi"
+CONF_IS_AREA = "is_area"
 CONF_IS_ATMOSPHERIC_PRESSURE = "is_atmospheric_pressure"
 CONF_IS_BATTERY_LEVEL = "is_battery_level"
+CONF_IS_BLOOD_GLUCOSE_CONCENTRATION = "is_blood_glucose_concentration"
 CONF_IS_CO = "is_carbon_monoxide"
 CONF_IS_CO2 = "is_carbon_dioxide"
 CONF_IS_CONDUCTIVITY = "is_conductivity"
@@ -85,8 +87,12 @@ CONF_IS_WIND_SPEED = "is_wind_speed"
 ENTITY_CONDITIONS = {
     SensorDeviceClass.APPARENT_POWER: [{CONF_TYPE: CONF_IS_APPARENT_POWER}],
     SensorDeviceClass.AQI: [{CONF_TYPE: CONF_IS_AQI}],
+    SensorDeviceClass.AREA: [{CONF_TYPE: CONF_IS_AREA}],
     SensorDeviceClass.ATMOSPHERIC_PRESSURE: [{CONF_TYPE: CONF_IS_ATMOSPHERIC_PRESSURE}],
     SensorDeviceClass.BATTERY: [{CONF_TYPE: CONF_IS_BATTERY_LEVEL}],
+    SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: [
+        {CONF_TYPE: CONF_IS_BLOOD_GLUCOSE_CONCENTRATION}
+    ],
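The sensor hunks above, together with the device-condition and device-trigger hunks that continue below, register two new sensor device classes, area and blood glucose concentration, with their unit converters, allowed units and state classes. A minimal sketch of a sensor entity that could use the new AREA class; the entity name and returned value are hypothetical and not taken from this changeset:

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntity,
    SensorStateClass,
)
from homeassistant.const import UnitOfArea


class MeasuredAreaSensor(SensorEntity):
    """Hypothetical sensor reporting a measured area in square metres."""

    _attr_device_class = SensorDeviceClass.AREA
    _attr_native_unit_of_measurement = UnitOfArea.SQUARE_METERS
    _attr_state_class = SensorStateClass.MEASUREMENT

    @property
    def native_value(self) -> float | None:
        """Return the most recent area reading (placeholder value)."""
        return 42.0

Because SensorDeviceClass.AREA is mapped to AreaConverter in UNIT_CONVERTERS, a user can pick any other UnitOfArea member as the displayed unit and the value is converted automatically.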
SensorDeviceClass.CO: [{CONF_TYPE: CONF_IS_CO}], SensorDeviceClass.CO2: [{CONF_TYPE: CONF_IS_CO2}], SensorDeviceClass.CONDUCTIVITY: [{CONF_TYPE: CONF_IS_CONDUCTIVITY}], @@ -149,8 +155,10 @@ CONDITION_SCHEMA = vol.All( [ CONF_IS_APPARENT_POWER, CONF_IS_AQI, + CONF_IS_AREA, CONF_IS_ATMOSPHERIC_PRESSURE, CONF_IS_BATTERY_LEVEL, + CONF_IS_BLOOD_GLUCOSE_CONCENTRATION, CONF_IS_CO, CONF_IS_CO2, CONF_IS_CONDUCTIVITY, diff --git a/homeassistant/components/sensor/device_trigger.py b/homeassistant/components/sensor/device_trigger.py index b07b3fac11e..d75b3aa6e41 100644 --- a/homeassistant/components/sensor/device_trigger.py +++ b/homeassistant/components/sensor/device_trigger.py @@ -34,8 +34,10 @@ DEVICE_CLASS_NONE = "none" CONF_APPARENT_POWER = "apparent_power" CONF_AQI = "aqi" +CONF_AREA = "area" CONF_ATMOSPHERIC_PRESSURE = "atmospheric_pressure" CONF_BATTERY_LEVEL = "battery_level" +CONF_BLOOD_GLUCOSE_CONCENTRATION = "blood_glucose_concentration" CONF_CO = "carbon_monoxide" CONF_CO2 = "carbon_dioxide" CONF_CONDUCTIVITY = "conductivity" @@ -84,8 +86,12 @@ CONF_WIND_SPEED = "wind_speed" ENTITY_TRIGGERS = { SensorDeviceClass.APPARENT_POWER: [{CONF_TYPE: CONF_APPARENT_POWER}], SensorDeviceClass.AQI: [{CONF_TYPE: CONF_AQI}], + SensorDeviceClass.AREA: [{CONF_TYPE: CONF_AREA}], SensorDeviceClass.ATMOSPHERIC_PRESSURE: [{CONF_TYPE: CONF_ATMOSPHERIC_PRESSURE}], SensorDeviceClass.BATTERY: [{CONF_TYPE: CONF_BATTERY_LEVEL}], + SensorDeviceClass.BLOOD_GLUCOSE_CONCENTRATION: [ + {CONF_TYPE: CONF_BLOOD_GLUCOSE_CONCENTRATION} + ], SensorDeviceClass.CO: [{CONF_TYPE: CONF_CO}], SensorDeviceClass.CO2: [{CONF_TYPE: CONF_CO2}], SensorDeviceClass.CONDUCTIVITY: [{CONF_TYPE: CONF_CONDUCTIVITY}], @@ -149,8 +155,10 @@ TRIGGER_SCHEMA = vol.All( [ CONF_APPARENT_POWER, CONF_AQI, + CONF_AREA, CONF_ATMOSPHERIC_PRESSURE, CONF_BATTERY_LEVEL, + CONF_BLOOD_GLUCOSE_CONCENTRATION, CONF_CO, CONF_CO2, CONF_CONDUCTIVITY, diff --git a/homeassistant/components/sensor/icons.json b/homeassistant/components/sensor/icons.json index 6132fcbc1e9..5f770765ee3 100644 --- a/homeassistant/components/sensor/icons.json +++ b/homeassistant/components/sensor/icons.json @@ -9,9 +9,15 @@ "aqi": { "default": "mdi:air-filter" }, + "area": { + "default": "mdi:texture-box" + }, "atmospheric_pressure": { "default": "mdi:thermometer-lines" }, + "blood_glucose_concentration": { + "default": "mdi:spoon-sugar" + }, "carbon_dioxide": { "default": "mdi:molecule-co2" }, diff --git a/homeassistant/components/sensor/strings.json b/homeassistant/components/sensor/strings.json index 71bead342c4..0bc370398b5 100644 --- a/homeassistant/components/sensor/strings.json +++ b/homeassistant/components/sensor/strings.json @@ -4,8 +4,10 @@ "condition_type": { "is_apparent_power": "Current {entity_name} apparent power", "is_aqi": "Current {entity_name} air quality index", + "is_area": "Current {entity_name} area", "is_atmospheric_pressure": "Current {entity_name} atmospheric pressure", "is_battery_level": "Current {entity_name} battery level", + "is_blood_glucose_concentration": "Current {entity_name} blood glucose concentration", "is_carbon_monoxide": "Current {entity_name} carbon monoxide concentration level", "is_carbon_dioxide": "Current {entity_name} carbon dioxide concentration level", "is_conductivity": "Current {entity_name} conductivity", @@ -54,8 +56,10 @@ "trigger_type": { "apparent_power": "{entity_name} apparent power changes", "aqi": "{entity_name} air quality index changes", + "area": "{entity_name} area changes", "atmospheric_pressure": "{entity_name} atmospheric 
pressure changes", "battery_level": "{entity_name} battery level changes", + "blood_glucose_concentration": "{entity_name} blood glucose concentration changes", "carbon_monoxide": "{entity_name} carbon monoxide concentration changes", "carbon_dioxide": "{entity_name} carbon dioxide concentration changes", "conductivity": "{entity_name} conductivity changes", @@ -143,12 +147,18 @@ "aqi": { "name": "Air quality index" }, + "area": { + "name": "Area" + }, "atmospheric_pressure": { "name": "Atmospheric pressure" }, "battery": { "name": "Battery" }, + "blood_glucose_concentration": { + "name": "Blood glucose concentration" + }, "carbon_monoxide": { "name": "Carbon monoxide" }, diff --git a/homeassistant/components/sentry/config_flow.py b/homeassistant/components/sentry/config_flow.py index 59cd1f3f0e9..2fead7c27cd 100644 --- a/homeassistant/components/sentry/config_flow.py +++ b/homeassistant/components/sentry/config_flow.py @@ -49,7 +49,7 @@ class SentryConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SentryOptionsFlow: """Get the options flow for this handler.""" - return SentryOptionsFlow(config_entry) + return SentryOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -78,10 +78,6 @@ class SentryConfigFlow(ConfigFlow, domain=DOMAIN): class SentryOptionsFlow(OptionsFlow): """Handle Sentry options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Sentry options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/serial_pm/manifest.json b/homeassistant/components/serial_pm/manifest.json index 9b61cb3d20b..25b3e61f93d 100644 --- a/homeassistant/components/serial_pm/manifest.json +++ b/homeassistant/components/serial_pm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/serial_pm", "iot_class": "local_polling", "loggers": ["pmsensor"], + "quality_scale": "legacy", "requirements": ["pmsensor==0.4"] } diff --git a/homeassistant/components/sesame/manifest.json b/homeassistant/components/sesame/manifest.json index d2204629cde..7ed370db082 100644 --- a/homeassistant/components/sesame/manifest.json +++ b/homeassistant/components/sesame/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sesame", "iot_class": "cloud_polling", "loggers": ["pysesame2"], + "quality_scale": "legacy", "requirements": ["pysesame2==1.0.1"] } diff --git a/homeassistant/components/seven_segments/manifest.json b/homeassistant/components/seven_segments/manifest.json index 2f39644d6d3..bf98140a4d6 100644 --- a/homeassistant/components/seven_segments/manifest.json +++ b/homeassistant/components/seven_segments/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@fabaff"], "documentation": "https://www.home-assistant.io/integrations/seven_segments", "iot_class": "local_polling", - "requirements": ["Pillow==10.4.0"] + "quality_scale": "legacy", + "requirements": ["Pillow==11.0.0"] } diff --git a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index 717e0923fd6..55686464637 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -12,6 +12,7 @@ from aioshelly.exceptions import ( CustomPortNotSupported, DeviceConnectionError, InvalidAuthError, + MacAddressMismatchError, ) from aioshelly.rpc_device import RpcDevice import voluptuous as vol @@ -176,6 
+177,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): ) except DeviceConnectionError: errors["base"] = "cannot_connect" + except MacAddressMismatchError: + errors["base"] = "mac_address_mismatch" except CustomPortNotSupported: errors["base"] = "custom_port_not_supported" except Exception: # noqa: BLE001 @@ -215,6 +218,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "invalid_auth" except DeviceConnectionError: errors["base"] = "cannot_connect" + except MacAddressMismatchError: + errors["base"] = "mac_address_mismatch" except Exception: # noqa: BLE001 LOGGER.exception("Unexpected exception") errors["base"] = "unknown" @@ -378,6 +383,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): await validate_input(self.hass, host, port, info, user_input) except (DeviceConnectionError, InvalidAuthError): return self.async_abort(reason="reauth_unsuccessful") + except MacAddressMismatchError: + return self.async_abort(reason="mac_address_mismatch") return self.async_update_reload_and_abort( reauth_entry, data_updates=user_input @@ -444,7 +451,7 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() @classmethod @callback @@ -460,10 +467,6 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle the option flow for shelly.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index a66fbb20f48..f20b283cacf 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -11,7 +11,12 @@ from typing import Any, cast from aioshelly.ble import async_ensure_ble_enabled, async_stop_scanner from aioshelly.block_device import BlockDevice, BlockUpdateType from aioshelly.const import MODEL_NAMES, MODEL_VALVE -from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError +from aioshelly.exceptions import ( + DeviceConnectionError, + InvalidAuthError, + MacAddressMismatchError, + RpcCallError, +) from aioshelly.rpc_device import RpcDevice, RpcUpdateType from propcache import cached_property @@ -173,7 +178,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( try: await self.device.initialize() update_device_fw_info(self.hass, self.device, self.entry) - except DeviceConnectionError as err: + except (DeviceConnectionError, MacAddressMismatchError) as err: LOGGER.debug( "Error connecting to Shelly device %s, error: %r", self.name, err ) @@ -450,7 +455,7 @@ class ShellyRestCoordinator(ShellyCoordinatorBase[BlockDevice]): if self.device.status["uptime"] > 2 * REST_SENSORS_UPDATE_INTERVAL: return await self.device.update_shelly() - except DeviceConnectionError as err: + except (DeviceConnectionError, MacAddressMismatchError) as err: raise UpdateFailed(f"Error fetching data: {err!r}") from err except InvalidAuthError: await self.async_shutdown_device_and_start_reauth() diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index 38437fb2137..3489a2d06d9 100644 --- a/homeassistant/components/shelly/manifest.json +++ 
b/homeassistant/components/shelly/manifest.json @@ -8,8 +8,7 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["aioshelly"], - "quality_scale": "platinum", - "requirements": ["aioshelly==12.0.1"], + "requirements": ["aioshelly==12.1.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index 342a7418b2a..eb869b54e4c 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -45,7 +45,8 @@ "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]", "firmware_not_fully_provisioned": "Device not fully provisioned. Please contact Shelly support", - "custom_port_not_supported": "Gen1 device does not support custom port." + "custom_port_not_supported": "Gen1 device does not support custom port.", + "mac_address_mismatch": "The MAC address of the device does not match the one in the configuration, please reboot the device and try again." }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", @@ -53,7 +54,8 @@ "reauth_unsuccessful": "Re-authentication was unsuccessful, please remove the integration and set it up again.", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "another_device": "Re-configuration was unsuccessful, the IP address/hostname of another Shelly device was used.", - "ipv6_not_supported": "IPv6 is not supported." + "ipv6_not_supported": "IPv6 is not supported.", + "mac_address_mismatch": "[%key:component::shelly::config::error::mac_address_mismatch%]" } }, "device_automation": { diff --git a/homeassistant/components/shelly/update.py b/homeassistant/components/shelly/update.py index fb586ae8b85..f22547acf50 100644 --- a/homeassistant/components/shelly/update.py +++ b/homeassistant/components/shelly/update.py @@ -238,7 +238,8 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): ) -> None: """Initialize update entity.""" super().__init__(coordinator, key, attribute, description) - self._ota_in_progress: bool | int = False + self._ota_in_progress = False + self._ota_progress_percentage: int | None = None self._attr_release_url = get_release_url( coordinator.device.gen, coordinator.model, description.beta ) @@ -256,11 +257,12 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): if self.in_progress is not False: event_type = event["event"] if event_type == OTA_BEGIN: - self._ota_in_progress = 0 + self._ota_progress_percentage = 0 elif event_type == OTA_PROGRESS: - self._ota_in_progress = event["progress_percent"] + self._ota_progress_percentage = event["progress_percent"] elif event_type in (OTA_ERROR, OTA_SUCCESS): self._ota_in_progress = False + self._ota_progress_percentage = None self.async_write_ha_state() @property @@ -278,10 +280,15 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): return self.installed_version @property - def in_progress(self) -> bool | int: + def in_progress(self) -> bool: """Update installation in progress.""" return self._ota_in_progress + @property + def update_percentage(self) -> int | None: + """Update installation progress.""" + return self._ota_progress_percentage + async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: @@ -310,6 +317,7 @@ class RpcUpdateEntity(ShellyRpcAttributeEntity, UpdateEntity): await 
self.coordinator.async_shutdown_device_and_start_reauth() else: self._ota_in_progress = True + self._ota_progress_percentage = None LOGGER.debug("OTA update call for %s successful", self.coordinator.name) diff --git a/homeassistant/components/shodan/manifest.json b/homeassistant/components/shodan/manifest.json index 9155311a2ad..afd75e3fed5 100644 --- a/homeassistant/components/shodan/manifest.json +++ b/homeassistant/components/shodan/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/shodan", "iot_class": "cloud_polling", "loggers": ["shodan"], + "quality_scale": "legacy", "requirements": ["shodan==1.28.0"] } diff --git a/homeassistant/components/shopping_list/__init__.py b/homeassistant/components/shopping_list/__init__.py index 20d3078228c..531bbf37980 100644 --- a/homeassistant/components/shopping_list/__init__.py +++ b/homeassistant/components/shopping_list/__init__.py @@ -320,15 +320,15 @@ class ShoppingData: # Remove the item from mapping after it's appended in the result array. del all_items_mapping[item_id] # Append the rest of the items - for key in all_items_mapping: + for value in all_items_mapping.values(): # All the unchecked items must be passed in the item_ids array, # so all items left in the mapping should be checked items. - if all_items_mapping[key]["complete"] is False: + if value["complete"] is False: raise vol.Invalid( "The item ids array doesn't contain all the unchecked shopping list" " items." ) - new_items.append(all_items_mapping[key]) + new_items.append(value) self.items = new_items self.hass.async_add_executor_job(self.save) self._async_notify() diff --git a/homeassistant/components/sia/config_flow.py b/homeassistant/components/sia/config_flow.py index cb451133d41..a23978145e7 100644 --- a/homeassistant/components/sia/config_flow.py +++ b/homeassistant/components/sia/config_flow.py @@ -181,7 +181,6 @@ class SIAOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize SIA options flow.""" - self.config_entry = config_entry self.options = deepcopy(dict(config_entry.options)) self.hub: SIAHub | None = None self.accounts_todo: list = [] diff --git a/homeassistant/components/sigfox/manifest.json b/homeassistant/components/sigfox/manifest.json index 3b581e4a081..f3f44bf8979 100644 --- a/homeassistant/components/sigfox/manifest.json +++ b/homeassistant/components/sigfox/manifest.json @@ -3,5 +3,6 @@ "name": "Sigfox", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/sigfox", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/sighthound/manifest.json b/homeassistant/components/sighthound/manifest.json index 875c98acb6d..1efd572425b 100644 --- a/homeassistant/components/sighthound/manifest.json +++ b/homeassistant/components/sighthound/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sighthound", "iot_class": "cloud_polling", "loggers": ["simplehound"], - "requirements": ["Pillow==10.4.0", "simplehound==0.3"] + "quality_scale": "legacy", + "requirements": ["Pillow==11.0.0", "simplehound==0.3"] } diff --git a/homeassistant/components/signal_messenger/manifest.json b/homeassistant/components/signal_messenger/manifest.json index 217109bfa2c..5ff63052691 100644 --- a/homeassistant/components/signal_messenger/manifest.json +++ b/homeassistant/components/signal_messenger/manifest.json @@ -5,5 +5,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/signal_messenger", "iot_class": "cloud_push", "loggers": ["pysignalclirestapi"], + "quality_scale": "legacy", "requirements": ["pysignalclirestapi==0.3.24"] } diff --git a/homeassistant/components/simplisafe/config_flow.py b/homeassistant/components/simplisafe/config_flow.py index 6fdbd351a29..68974fe118f 100644 --- a/homeassistant/components/simplisafe/config_flow.py +++ b/homeassistant/components/simplisafe/config_flow.py @@ -67,7 +67,7 @@ class SimpliSafeFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SimpliSafeOptionsFlowHandler: """Define the config flow to handle options.""" - return SimpliSafeOptionsFlowHandler(config_entry) + return SimpliSafeOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -153,10 +153,6 @@ class SimpliSafeFlowHandler(ConfigFlow, domain=DOMAIN): class SimpliSafeOptionsFlowHandler(OptionsFlow): """Handle a SimpliSafe options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/sinch/manifest.json b/homeassistant/components/sinch/manifest.json index 21a80f63b1f..4af90b759ee 100644 --- a/homeassistant/components/sinch/manifest.json +++ b/homeassistant/components/sinch/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sinch", "iot_class": "cloud_push", "loggers": ["clx"], + "quality_scale": "legacy", "requirements": ["clx-sdk-xms==1.0.0"] } diff --git a/homeassistant/components/sisyphus/manifest.json b/homeassistant/components/sisyphus/manifest.json index 4e344c0b25e..f62d19b77c1 100644 --- a/homeassistant/components/sisyphus/manifest.json +++ b/homeassistant/components/sisyphus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sisyphus", "iot_class": "local_push", "loggers": ["sisyphus_control"], + "quality_scale": "legacy", "requirements": ["sisyphus-control==3.1.4"] } diff --git a/homeassistant/components/sky_hub/manifest.json b/homeassistant/components/sky_hub/manifest.json index 541cc6e0b03..1030da4d0ff 100644 --- a/homeassistant/components/sky_hub/manifest.json +++ b/homeassistant/components/sky_hub/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sky_hub", "iot_class": "local_polling", "loggers": ["pyskyqhub"], + "quality_scale": "legacy", "requirements": ["pyskyqhub==0.1.4"] } diff --git a/homeassistant/components/sky_remote/__init__.py b/homeassistant/components/sky_remote/__init__.py new file mode 100644 index 00000000000..4daad78c558 --- /dev/null +++ b/homeassistant/components/sky_remote/__init__.py @@ -0,0 +1,39 @@ +"""The Sky Remote Control integration.""" + +import logging + +from skyboxremote import RemoteControl, SkyBoxConnectionError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PORT, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +PLATFORMS = [Platform.REMOTE] + +_LOGGER = logging.getLogger(__name__) + + +type SkyRemoteConfigEntry = ConfigEntry[RemoteControl] + + +async def async_setup_entry(hass: HomeAssistant, entry: SkyRemoteConfigEntry) -> bool: + """Set up Sky remote.""" + host = entry.data[CONF_HOST] + port = entry.data[CONF_PORT] + + _LOGGER.debug("Setting up Host: %s, Port: %s", host, 
port) + remote = RemoteControl(host, port) + try: + await remote.check_connectable() + except SkyBoxConnectionError as e: + raise ConfigEntryNotReady from e + + entry.runtime_data = remote + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/sky_remote/config_flow.py b/homeassistant/components/sky_remote/config_flow.py new file mode 100644 index 00000000000..a55dfb2a52b --- /dev/null +++ b/homeassistant/components/sky_remote/config_flow.py @@ -0,0 +1,64 @@ +"""Config flow for sky_remote.""" + +import logging +from typing import Any + +from skyboxremote import RemoteControl, SkyBoxConnectionError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PORT +import homeassistant.helpers.config_validation as cv + +from .const import DEFAULT_PORT, DOMAIN, LEGACY_PORT + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): cv.string, + } +) + + +async def async_find_box_port(host: str) -> int: + """Find port box uses for communication.""" + logging.debug("Attempting to find port to connect to %s on", host) + remote = RemoteControl(host, DEFAULT_PORT) + try: + await remote.check_connectable() + except SkyBoxConnectionError: + # Try legacy port if the default one failed + remote = RemoteControl(host, LEGACY_PORT) + await remote.check_connectable() + return LEGACY_PORT + return DEFAULT_PORT + + +class SkyRemoteConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Sky Remote.""" + + VERSION = 1 + MINOR_VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the user step.""" + + errors: dict[str, str] = {} + if user_input is not None: + logging.debug("user_input: %s", user_input) + self._async_abort_entries_match(user_input) + try: + port = await async_find_box_port(user_input[CONF_HOST]) + except SkyBoxConnectionError: + logging.exception("while finding port of skybox") + errors["base"] = "cannot_connect" + else: + return self.async_create_entry( + title=user_input[CONF_HOST], + data={**user_input, CONF_PORT: port}, + ) + + return self.async_show_form( + step_id="user", data_schema=DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/sky_remote/const.py b/homeassistant/components/sky_remote/const.py new file mode 100644 index 00000000000..e67744a741b --- /dev/null +++ b/homeassistant/components/sky_remote/const.py @@ -0,0 +1,6 @@ +"""Constants.""" + +DOMAIN = "sky_remote" + +DEFAULT_PORT = 49160 +LEGACY_PORT = 5900 diff --git a/homeassistant/components/sky_remote/manifest.json b/homeassistant/components/sky_remote/manifest.json new file mode 100644 index 00000000000..b00ff309b10 --- /dev/null +++ b/homeassistant/components/sky_remote/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "sky_remote", + "name": "Sky Remote Control", + "codeowners": ["@dunnmj", "@saty9"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/sky_remote", + "integration_type": "device", + "iot_class": "assumed_state", + "requirements": ["skyboxremote==0.0.6"] +} diff --git a/homeassistant/components/sky_remote/remote.py b/homeassistant/components/sky_remote/remote.py new file mode 100644 index 00000000000..05a464f73a6 --- /dev/null +++ 
b/homeassistant/components/sky_remote/remote.py @@ -0,0 +1,70 @@ +"""Home Assistant integration to control a sky box using the remote platform.""" + +from collections.abc import Iterable +import logging +from typing import Any + +from skyboxremote import VALID_KEYS, RemoteControl + +from homeassistant.components.remote import RemoteEntity +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SkyRemoteConfigEntry +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config: SkyRemoteConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Sky remote platform.""" + async_add_entities( + [SkyRemote(config.runtime_data, config.entry_id)], + True, + ) + + +class SkyRemote(RemoteEntity): + """Representation of a Sky Remote.""" + + _attr_has_entity_name = True + _attr_name = None + + def __init__(self, remote: RemoteControl, unique_id: str) -> None: + """Initialize the Sky Remote.""" + self._remote = remote + self._attr_unique_id = unique_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + manufacturer="SKY", + model="Sky Box", + name=remote.host, + ) + + def turn_on(self, activity: str | None = None, **kwargs: Any) -> None: + """Send the power on command.""" + self.send_command(["sky"]) + + def turn_off(self, activity: str | None = None, **kwargs: Any) -> None: + """Send the power command.""" + self.send_command(["power"]) + + def send_command(self, command: Iterable[str], **kwargs: Any) -> None: + """Send a list of commands to the device.""" + for cmd in command: + if cmd not in VALID_KEYS: + raise ServiceValidationError( + f"{cmd} is not in Valid Keys: {VALID_KEYS}" + ) + try: + self._remote.send_keys(command) + except ValueError as err: + _LOGGER.error("Invalid command: %s. 
Error: %s", command, err) + return + _LOGGER.debug("Successfully sent command %s", command) diff --git a/homeassistant/components/sky_remote/strings.json b/homeassistant/components/sky_remote/strings.json new file mode 100644 index 00000000000..af794490c43 --- /dev/null +++ b/homeassistant/components/sky_remote/strings.json @@ -0,0 +1,21 @@ +{ + "config": { + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "step": { + "user": { + "title": "Add Sky Remote", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "Hostname or IP address of your Sky device" + } + } + } + } +} diff --git a/homeassistant/components/skybeacon/manifest.json b/homeassistant/components/skybeacon/manifest.json index deda02f64f7..379f10e8873 100644 --- a/homeassistant/components/skybeacon/manifest.json +++ b/homeassistant/components/skybeacon/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/skybeacon", "iot_class": "local_polling", "loggers": ["pygatt"], + "quality_scale": "legacy", "requirements": ["pygatt[GATTTOOL]==4.0.5"] } diff --git a/homeassistant/components/sleepiq/number.py b/homeassistant/components/sleepiq/number.py index 905ceab18bd..e4fa60a4a43 100644 --- a/homeassistant/components/sleepiq/number.py +++ b/homeassistant/components/sleepiq/number.py @@ -58,14 +58,14 @@ def _get_actuator_name(bed: SleepIQBed, actuator: SleepIQActuator) -> str: f" {bed.name} {actuator.side_full} {actuator.actuator_full} {ENTITY_TYPES[ACTUATOR]}" ) - return f"SleepNumber {bed.name} {actuator.actuator_full} {ENTITY_TYPES[ACTUATOR]}" + return f"SleepNumber {bed.name} {actuator.actuator_full} {ENTITY_TYPES[ACTUATOR]}" # type: ignore[unreachable] def _get_actuator_unique_id(bed: SleepIQBed, actuator: SleepIQActuator) -> str: if actuator.side: return f"{bed.id}_{actuator.side.value}_{actuator.actuator}" - return f"{bed.id}_{actuator.actuator}" + return f"{bed.id}_{actuator.actuator}" # type: ignore[unreachable] def _get_sleeper_name(bed: SleepIQBed, sleeper: SleepIQSleeper) -> str: diff --git a/homeassistant/components/slide/manifest.json b/homeassistant/components/slide/manifest.json index 111bc9bd7a9..2b56185efa1 100644 --- a/homeassistant/components/slide/manifest.json +++ b/homeassistant/components/slide/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/slide", "iot_class": "cloud_polling", "loggers": ["goslideapi"], + "quality_scale": "legacy", "requirements": ["goslide-api==0.7.0"] } diff --git a/homeassistant/components/smartthings/sensor.py b/homeassistant/components/smartthings/sensor.py index b73d3b43764..8bd0421d2bc 100644 --- a/homeassistant/components/smartthings/sensor.py +++ b/homeassistant/components/smartthings/sensor.py @@ -15,11 +15,11 @@ from homeassistant.components.sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - AREA_SQUARE_METERS, CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, EntityCategory, + UnitOfArea, UnitOfElectricPotential, UnitOfEnergy, UnitOfMass, @@ -95,7 +95,7 @@ CAPABILITY_TO_SENSORS: dict[str, list[Map]] = { Map( Attribute.bmi_measurement, "Body Mass Index", - f"{UnitOfMass.KILOGRAMS}/{AREA_SQUARE_METERS}", + f"{UnitOfMass.KILOGRAMS}/{UnitOfArea.SQUARE_METERS}", None, SensorStateClass.MEASUREMENT, None, diff --git 
a/homeassistant/components/smarttub/manifest.json b/homeassistant/components/smarttub/manifest.json index 432f6338d9f..d5102f14437 100644 --- a/homeassistant/components/smarttub/manifest.json +++ b/homeassistant/components/smarttub/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/smarttub", "iot_class": "cloud_polling", "loggers": ["smarttub"], - "quality_scale": "platinum", "requirements": ["python-smarttub==0.0.38"] } diff --git a/homeassistant/components/smarty/__init__.py b/homeassistant/components/smarty/__init__.py index cc7215349a6..0d043804c3d 100644 --- a/homeassistant/components/smarty/__init__.py +++ b/homeassistant/components/smarty/__init__.py @@ -30,7 +30,13 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) -PLATFORMS = [Platform.BINARY_SENSOR, Platform.FAN, Platform.SENSOR] +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.FAN, + Platform.SENSOR, + Platform.SWITCH, +] async def async_setup(hass: HomeAssistant, hass_config: ConfigType) -> bool: diff --git a/homeassistant/components/smarty/button.py b/homeassistant/components/smarty/button.py new file mode 100644 index 00000000000..b8e31cf6fc8 --- /dev/null +++ b/homeassistant/components/smarty/button.py @@ -0,0 +1,74 @@ +"""Platform to control a Salda Smarty XP/XV ventilation unit.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any + +from pysmarty2 import Smarty + +from homeassistant.components.button import ButtonEntity, ButtonEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmartyButtonDescription(ButtonEntityDescription): + """Class describing Smarty button.""" + + press_fn: Callable[[Smarty], bool | None] + + +ENTITIES: tuple[SmartyButtonDescription, ...] 
= ( + SmartyButtonDescription( + key="reset_filters_timer", + translation_key="reset_filters_timer", + press_fn=lambda smarty: smarty.reset_filters_timer(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Smarty Button Platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + SmartyButton(coordinator, description) for description in ENTITIES + ) + + +class SmartyButton(SmartyEntity, ButtonEntity): + """Representation of a Smarty Button.""" + + entity_description: SmartyButtonDescription + + def __init__( + self, + coordinator: SmartyCoordinator, + entity_description: SmartyButtonDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}_{entity_description.key}" + ) + + async def async_press(self, **kwargs: Any) -> None: + """Press the button.""" + await self.hass.async_add_executor_job( + self.entity_description.press_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/smarty/strings.json b/homeassistant/components/smarty/strings.json index 37a6c5cbca1..341a300a26e 100644 --- a/homeassistant/components/smarty/strings.json +++ b/homeassistant/components/smarty/strings.json @@ -46,6 +46,11 @@ "name": "Boost state" } }, + "button": { + "reset_filters_timer": { + "name": "Reset filters timer" + } + }, "sensor": { "supply_air_temperature": { "name": "Supply air temperature" @@ -65,6 +70,11 @@ "filter_days_left": { "name": "Filter days left" } + }, + "switch": { + "boost": { + "name": "Boost" + } } } } diff --git a/homeassistant/components/smarty/switch.py b/homeassistant/components/smarty/switch.py new file mode 100644 index 00000000000..bf5fe80db44 --- /dev/null +++ b/homeassistant/components/smarty/switch.py @@ -0,0 +1,90 @@ +"""Platform to control a Salda Smarty XP/XV ventilation unit.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any + +from pysmarty2 import Smarty + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SmartyConfigEntry, SmartyCoordinator +from .entity import SmartyEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmartySwitchDescription(SwitchEntityDescription): + """Class describing Smarty switch.""" + + is_on_fn: Callable[[Smarty], bool] + turn_on_fn: Callable[[Smarty], bool | None] + turn_off_fn: Callable[[Smarty], bool | None] + + +ENTITIES: tuple[SmartySwitchDescription, ...] 
= ( + SmartySwitchDescription( + key="boost", + translation_key="boost", + is_on_fn=lambda smarty: smarty.boost, + turn_on_fn=lambda smarty: smarty.enable_boost(), + turn_off_fn=lambda smarty: smarty.disable_boost(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmartyConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Smarty Switch Platform.""" + + coordinator = entry.runtime_data + + async_add_entities( + SmartySwitch(coordinator, description) for description in ENTITIES + ) + + +class SmartySwitch(SmartyEntity, SwitchEntity): + """Representation of a Smarty Switch.""" + + entity_description: SmartySwitchDescription + + def __init__( + self, + coordinator: SmartyCoordinator, + entity_description: SmartySwitchDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = ( + f"{coordinator.config_entry.entry_id}_{entity_description.key}" + ) + + @property + def is_on(self) -> bool: + """Return the state of the switch.""" + return self.entity_description.is_on_fn(self.coordinator.client) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.hass.async_add_executor_job( + self.entity_description.turn_on_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.hass.async_add_executor_job( + self.entity_description.turn_off_fn, self.coordinator.client + ) + await self.coordinator.async_refresh() diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index 32efc729dc2..92b543e0441 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -34,10 +34,11 @@ STEP_AUTH_DATA_SCHEMA = vol.Schema( class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for SMLIGHT Zigbee.""" + host: str + def __init__(self) -> None: """Initialize the config flow.""" self.client: Api2 - self.host: str | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -46,9 +47,8 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): errors: dict[str, str] = {} if user_input is not None: - host = user_input[CONF_HOST] - self.client = Api2(host, session=async_get_clientsession(self.hass)) - self.host = host + self.host = user_input[CONF_HOST] + self.client = Api2(self.host, session=async_get_clientsession(self.hass)) try: if not await self._async_check_auth_required(user_input): @@ -138,9 +138,8 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle reauth when API Authentication failed.""" - host = entry_data[CONF_HOST] - self.client = Api2(host, session=async_get_clientsession(self.hass)) - self.host = host + self.host = entry_data[CONF_HOST] + self.client = Api2(self.host, session=async_get_clientsession(self.hass)) return await self.async_step_reauth_confirm() diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index c1eca45871b..cb791ac111b 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_push", - "requirements": ["pysmlight==0.1.3"], + "requirements": 
["pysmlight==0.1.4"], "zeroconf": [ { "type": "_slzb-06._tcp.local." diff --git a/homeassistant/components/smtp/manifest.json b/homeassistant/components/smtp/manifest.json index 0e0bba707ac..66954eebccc 100644 --- a/homeassistant/components/smtp/manifest.json +++ b/homeassistant/components/smtp/manifest.json @@ -3,5 +3,6 @@ "name": "SMTP", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/smtp", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/snips/manifest.json b/homeassistant/components/snips/manifest.json index 16620eb4bfb..ec768b2b3d4 100644 --- a/homeassistant/components/snips/manifest.json +++ b/homeassistant/components/snips/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["mqtt"], "documentation": "https://www.home-assistant.io/integrations/snips", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/snmp/manifest.json b/homeassistant/components/snmp/manifest.json index 0b8863c8e58..a2a4405a1b5 100644 --- a/homeassistant/components/snmp/manifest.json +++ b/homeassistant/components/snmp/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/snmp", "iot_class": "local_polling", "loggers": ["pyasn1", "pysmi", "pysnmp"], + "quality_scale": "legacy", "requirements": ["pysnmp==6.2.6"] } diff --git a/homeassistant/components/solaredge_local/manifest.json b/homeassistant/components/solaredge_local/manifest.json index d65aa06ea0a..61c08b3b152 100644 --- a/homeassistant/components/solaredge_local/manifest.json +++ b/homeassistant/components/solaredge_local/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/solaredge_local", "iot_class": "local_polling", "loggers": ["solaredge_local"], + "quality_scale": "legacy", "requirements": ["solaredge-local==0.2.3"] } diff --git a/homeassistant/components/solarlog/coordinator.py b/homeassistant/components/solarlog/coordinator.py index 5fdf89c9e74..6e8867c0f52 100644 --- a/homeassistant/components/solarlog/coordinator.py +++ b/homeassistant/components/solarlog/coordinator.py @@ -19,6 +19,7 @@ from solarlog_cli.solarlog_models import SolarlogData from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.device_registry as dr from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util import slugify @@ -58,6 +59,7 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]): self.host, tz=hass.config.time_zone, password=password, + session=async_get_clientsession(hass), ) async def _async_setup(self) -> None: @@ -81,15 +83,27 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]): await self.solarlog.update_device_list() data.inverter_data = await self.solarlog.update_inverter_data() except SolarLogConnectionError as ex: - raise ConfigEntryNotReady(ex) from ex + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from ex except SolarLogAuthenticationError as ex: if await self.renew_authentication(): # login was successful, update availability of extended data, retry data update await self.solarlog.test_extended_data_available() - raise ConfigEntryNotReady from ex 
- raise ConfigEntryAuthFailed from ex + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from ex + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from ex except SolarLogUpdateError as ex: - raise UpdateFailed(ex) from ex + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="update_failed", + ) from ex _LOGGER.debug("Data successfully updated") @@ -148,9 +162,15 @@ class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]): try: logged_in = await self.solarlog.login() except SolarLogAuthenticationError as ex: - raise ConfigEntryAuthFailed from ex + raise ConfigEntryAuthFailed( + translation_domain=DOMAIN, + translation_key="auth_failed", + ) from ex except (SolarLogConnectionError, SolarLogUpdateError) as ex: - raise ConfigEntryNotReady from ex + raise ConfigEntryNotReady( + translation_domain=DOMAIN, + translation_key="config_entry_not_ready", + ) from ex _LOGGER.debug("Credentials successfully updated? %s", logged_in) diff --git a/homeassistant/components/solarlog/manifest.json b/homeassistant/components/solarlog/manifest.json index 9f80b749d08..486b30edfd3 100644 --- a/homeassistant/components/solarlog/manifest.json +++ b/homeassistant/components/solarlog/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/solarlog", "iot_class": "local_polling", "loggers": ["solarlog_cli"], - "requirements": ["solarlog_cli==0.3.2"] + "quality_scale": "platinum", + "requirements": ["solarlog_cli==0.4.0"] } diff --git a/homeassistant/components/solarlog/quality_scale.yaml b/homeassistant/components/solarlog/quality_scale.yaml new file mode 100644 index 00000000000..543889ee18c --- /dev/null +++ b/homeassistant/components/solarlog/quality_scale.yaml @@ -0,0 +1,81 @@ +rules: + # Bronze + config-flow: done + test-before-configure: done + unique-config-entry: done + config-flow-test-coverage: done + runtime-data: done + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: No explicit event subscriptions. + dependency-transparency: done + action-setup: + status: exempt + comment: No custom action. + common-modules: done + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + docs-actions: + status: exempt + comment: No custom action. + brands: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: No custom action. + reauthentication-flow: done + parallel-updates: + status: exempt + comment: Coordinator and sensor only platform. + test-coverage: done + integration-owner: done + docs-installation-parameters: done + docs-configuration-parameters: + status: exempt + comment: No options flow. + + # Gold + entity-translations: done + entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: + status: exempt + comment: Solar-Log device cannot be discovered. + stale-devices: done + diagnostics: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + dynamic-devices: done + discovery-update-info: + status: exempt + comment: Solar-Log device cannot be discovered. + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. 
+ docs-use-cases: done + docs-supported-devices: done + docs-supported-functions: done + docs-data-update: done + docs-known-limitations: done + docs-troubleshooting: + status: exempt + comment: | + This integration doesn't have known issues that could be resolved by the user. + docs-examples: done + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/solarlog/strings.json b/homeassistant/components/solarlog/strings.json index 723af6cb277..fb724c02adb 100644 --- a/homeassistant/components/solarlog/strings.json +++ b/homeassistant/components/solarlog/strings.json @@ -121,5 +121,16 @@ "name": "Usage" } } + }, + "exceptions": { + "update_error": { + "message": "Error while updating data from the API." + }, + "config_entry_not_ready": { + "message": "Error while loading the config entry." + }, + "auth_failed": { + "message": "Error while logging in to the API." + } } } diff --git a/homeassistant/components/solax/manifest.json b/homeassistant/components/solax/manifest.json index 2ca246a4e77..631ace3792f 100644 --- a/homeassistant/components/solax/manifest.json +++ b/homeassistant/components/solax/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/solax", "iot_class": "local_polling", "loggers": ["solax"], - "requirements": ["solax==3.1.1"] + "requirements": ["solax==3.2.1"] } diff --git a/homeassistant/components/somfy_mylink/config_flow.py b/homeassistant/components/somfy_mylink/config_flow.py index 705db43362e..c2d85160175 100644 --- a/homeassistant/components/somfy_mylink/config_flow.py +++ b/homeassistant/components/somfy_mylink/config_flow.py @@ -130,7 +130,6 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry self.options = deepcopy(dict(config_entry.options)) self._target_id: str | None = None diff --git a/homeassistant/components/sonarr/config_flow.py b/homeassistant/components/sonarr/config_flow.py index 1c1d02638d8..e1cedba10e7 100644 --- a/homeassistant/components/sonarr/config_flow.py +++ b/homeassistant/components/sonarr/config_flow.py @@ -63,7 +63,7 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> SonarrOptionsFlowHandler: """Get the options flow for this handler.""" - return SonarrOptionsFlowHandler(config_entry) + return SonarrOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -93,6 +93,13 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): errors = {} if user_input is not None: + # aiopyarr defaults to the service port if one isn't given + # this is counter to standard practice where http = 80 + # and https = 443. 
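The sonarr comment above explains the reasoning, and the code that applies it follows below. For reference, a small sketch of what the yarl-based normalization produces; the hostname is hypothetical:

import yarl

# yarl.URL.port falls back to the scheme's default when no explicit port is
# given, so rebuilding the URL makes the port explicit before it reaches aiopyarr.
url = yarl.URL("http://sonarr.local")
assert url.port == 80  # default port derived from the "http" scheme
assert f"{url.scheme}://{url.host}:{url.port}{url.path}" == "http://sonarr.local:80/"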
+ if CONF_URL in user_input: + url = yarl.URL(user_input[CONF_URL]) + user_input[CONF_URL] = f"{url.scheme}://{url.host}:{url.port}{url.path}" + if self.source == SOURCE_REAUTH: user_input = {**self._get_reauth_entry().data, **user_input} @@ -148,10 +155,6 @@ class SonarrConfigFlow(ConfigFlow, domain=DOMAIN): class SonarrOptionsFlowHandler(OptionsFlow): """Handle Sonarr client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/sonarr/manifest.json b/homeassistant/components/sonarr/manifest.json index bfc2b6f787f..c81dc9c3972 100644 --- a/homeassistant/components/sonarr/manifest.json +++ b/homeassistant/components/sonarr/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sonarr", "iot_class": "local_polling", "loggers": ["aiopyarr"], - "quality_scale": "silver", "requirements": ["aiopyarr==23.4.0"] } diff --git a/homeassistant/components/songpal/config_flow.py b/homeassistant/components/songpal/config_flow.py index 762de39aa30..41cc0763642 100644 --- a/homeassistant/components/songpal/config_flow.py +++ b/homeassistant/components/songpal/config_flow.py @@ -24,6 +24,8 @@ class SongpalConfig: def __init__(self, name: str, host: str | None, endpoint: str) -> None: """Initialize Configuration.""" self.name = name + if TYPE_CHECKING: + assert host is not None self.host = host self.endpoint = endpoint diff --git a/homeassistant/components/songpal/manifest.json b/homeassistant/components/songpal/manifest.json index c4dec6b938d..a04bea0c48d 100644 --- a/homeassistant/components/songpal/manifest.json +++ b/homeassistant/components/songpal/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/songpal", "iot_class": "local_push", "loggers": ["songpal"], - "quality_scale": "gold", "requirements": ["python-songpal==0.16.2"], "ssdp": [ { diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 7711a1e88ea..8d0917c5dba 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -782,9 +782,9 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): queue: list[DidlMusicTrack] = self.coordinator.soco.get_queue(max_items=0) return [ { - ATTR_MEDIA_TITLE: track.title, - ATTR_MEDIA_ALBUM_NAME: track.album, - ATTR_MEDIA_ARTIST: track.creator, + ATTR_MEDIA_TITLE: getattr(track, "title", None), + ATTR_MEDIA_ALBUM_NAME: getattr(track, "album", None), + ATTR_MEDIA_ARTIST: getattr(track, "creator", None), ATTR_MEDIA_CONTENT_ID: track.get_uri(), } for track in queue diff --git a/homeassistant/components/sony_projector/manifest.json b/homeassistant/components/sony_projector/manifest.json index 5cf5df4c96f..f674f6fa56b 100644 --- a/homeassistant/components/sony_projector/manifest.json +++ b/homeassistant/components/sony_projector/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/sony_projector", "iot_class": "local_polling", "loggers": ["pysdcp"], + "quality_scale": "legacy", "requirements": ["pySDCP==1"] } diff --git a/homeassistant/components/soundtouch/config_flow.py b/homeassistant/components/soundtouch/config_flow.py index 7e3fb2ca8c3..af45b8f6bdc 100644 --- a/homeassistant/components/soundtouch/config_flow.py +++ 
b/homeassistant/components/soundtouch/config_flow.py @@ -1,6 +1,5 @@ """Config flow for Bose SoundTouch integration.""" -import logging from typing import Any from libsoundtouch import soundtouch_device @@ -14,8 +13,6 @@ from homeassistant.helpers import config_validation as cv from .const import DOMAIN -_LOGGER = logging.getLogger(__name__) - class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Bose SoundTouch.""" @@ -25,7 +22,7 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize a new SoundTouch config flow.""" self.host: str | None = None - self.name = None + self.name: str | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -79,7 +76,7 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="zeroconf_confirm", last_step=True, - description_placeholders={"name": self.name}, + description_placeholders={"name": self.name or "?"}, ) async def _async_get_device_id(self, raise_on_progress: bool = True) -> None: @@ -94,10 +91,10 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): self.name = device.config.name - async def _async_create_soundtouch_entry(self): + async def _async_create_soundtouch_entry(self) -> ConfigFlowResult: """Finish config flow and create a SoundTouch config entry.""" return self.async_create_entry( - title=self.name, + title=self.name or "SoundTouch", data={ CONF_HOST: self.host, }, diff --git a/homeassistant/components/spaceapi/manifest.json b/homeassistant/components/spaceapi/manifest.json index 84add9bb4ed..798930bbef5 100644 --- a/homeassistant/components/spaceapi/manifest.json +++ b/homeassistant/components/spaceapi/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@fabaff"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/spaceapi", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/spc/manifest.json b/homeassistant/components/spc/manifest.json index a707e1a7804..b3c37ce2e2b 100644 --- a/homeassistant/components/spc/manifest.json +++ b/homeassistant/components/spc/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/spc", "iot_class": "local_push", "loggers": ["pyspcwebgw"], + "quality_scale": "legacy", "requirements": ["pyspcwebgw==0.7.0"] } diff --git a/homeassistant/components/speedtestdotnet/config_flow.py b/homeassistant/components/speedtestdotnet/config_flow.py index dc64448bbef..3bfd4eb6e4a 100644 --- a/homeassistant/components/speedtestdotnet/config_flow.py +++ b/homeassistant/components/speedtestdotnet/config_flow.py @@ -30,7 +30,7 @@ class SpeedTestFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: SpeedTestConfigEntry, ) -> SpeedTestOptionsFlowHandler: """Get the options flow for this handler.""" - return SpeedTestOptionsFlowHandler(config_entry) + return SpeedTestOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -48,9 +48,8 @@ class SpeedTestFlowHandler(ConfigFlow, domain=DOMAIN): class SpeedTestOptionsFlowHandler(OptionsFlow): """Handle SpeedTest options.""" - def __init__(self, config_entry: SpeedTestConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._servers: dict = {} async def async_step_init( diff --git a/homeassistant/components/splunk/manifest.json b/homeassistant/components/splunk/manifest.json index 
947af317b35..4b287c8950c 100644 --- a/homeassistant/components/splunk/manifest.json +++ b/homeassistant/components/splunk/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/splunk", "iot_class": "local_push", "loggers": ["hass_splunk"], + "quality_scale": "legacy", "requirements": ["hass-splunk==0.1.1"] } diff --git a/homeassistant/components/spotify/__init__.py b/homeassistant/components/spotify/__init__.py index cfcc9011b37..37580ac432d 100644 --- a/homeassistant/components/spotify/__init__.py +++ b/homeassistant/components/spotify/__init__.py @@ -29,7 +29,7 @@ from .util import ( spotify_uri_from_media_browser_url, ) -PLATFORMS = [Platform.MEDIA_PLAYER, Platform.SENSOR] +PLATFORMS = [Platform.MEDIA_PLAYER] __all__ = [ "async_browse_media", diff --git a/homeassistant/components/spotify/browse_media.py b/homeassistant/components/spotify/browse_media.py index 403ec608a7c..81cdfdfb3cf 100644 --- a/homeassistant/components/spotify/browse_media.py +++ b/homeassistant/components/spotify/browse_media.py @@ -14,6 +14,7 @@ from spotifyaio import ( SpotifyClient, Track, ) +from spotifyaio.models import ItemType, SimplifiedEpisode import yarl from homeassistant.components.media_player import ( @@ -90,6 +91,16 @@ def _get_track_item_payload( } +def _get_episode_item_payload(episode: SimplifiedEpisode) -> ItemPayload: + return { + "id": episode.episode_id, + "name": episode.name, + "type": MediaType.EPISODE, + "uri": episode.uri, + "thumbnail": fetch_image_url(episode.images), + } + + class BrowsableMedia(StrEnum): """Enum of browsable media.""" @@ -101,8 +112,6 @@ class BrowsableMedia(StrEnum): CURRENT_USER_RECENTLY_PLAYED = "current_user_recently_played" CURRENT_USER_TOP_ARTISTS = "current_user_top_artists" CURRENT_USER_TOP_TRACKS = "current_user_top_tracks" - CATEGORIES = "categories" - FEATURED_PLAYLISTS = "featured_playlists" NEW_RELEASES = "new_releases" @@ -115,8 +124,6 @@ LIBRARY_MAP = { BrowsableMedia.CURRENT_USER_RECENTLY_PLAYED.value: "Recently played", BrowsableMedia.CURRENT_USER_TOP_ARTISTS.value: "Top Artists", BrowsableMedia.CURRENT_USER_TOP_TRACKS.value: "Top Tracks", - BrowsableMedia.CATEGORIES.value: "Categories", - BrowsableMedia.FEATURED_PLAYLISTS.value: "Featured Playlists", BrowsableMedia.NEW_RELEASES.value: "New Releases", } @@ -153,18 +160,6 @@ CONTENT_TYPE_MEDIA_CLASS: dict[str, Any] = { "parent": MediaClass.DIRECTORY, "children": MediaClass.TRACK, }, - BrowsableMedia.FEATURED_PLAYLISTS.value: { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.PLAYLIST, - }, - BrowsableMedia.CATEGORIES.value: { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.GENRE, - }, - "category_playlists": { - "parent": MediaClass.DIRECTORY, - "children": MediaClass.PLAYLIST, - }, BrowsableMedia.NEW_RELEASES.value: { "parent": MediaClass.DIRECTORY, "children": MediaClass.ALBUM, @@ -354,32 +349,6 @@ async def build_item_response( # noqa: C901 elif media_content_type == BrowsableMedia.CURRENT_USER_TOP_TRACKS: if top_tracks := await spotify.get_top_tracks(): items = [_get_track_item_payload(track) for track in top_tracks] - elif media_content_type == BrowsableMedia.FEATURED_PLAYLISTS: - if featured_playlists := await spotify.get_featured_playlists(): - items = [ - _get_playlist_item_payload(playlist) for playlist in featured_playlists - ] - elif media_content_type == BrowsableMedia.CATEGORIES: - if categories := await spotify.get_categories(): - items = [ - { - "id": category.category_id, - "name": category.name, - "type": "category_playlists", - 
"uri": category.category_id, - "thumbnail": category.icons[0].url if category.icons else None, - } - for category in categories - ] - elif media_content_type == "category_playlists": - if ( - playlists := await spotify.get_category_playlists( - category_id=media_content_id - ) - ) and (category := await spotify.get_category(media_content_id)): - title = category.name - image = category.icons[0].url if category.icons else None - items = [_get_playlist_item_payload(playlist) for playlist in playlists] elif media_content_type == BrowsableMedia.NEW_RELEASES: if new_releases := await spotify.get_new_releases(): items = [_get_album_item_payload(album) for album in new_releases] @@ -387,10 +356,15 @@ async def build_item_response( # noqa: C901 if playlist := await spotify.get_playlist(media_content_id): title = playlist.name image = playlist.images[0].url if playlist.images else None - items = [ - _get_track_item_payload(playlist_track.track) - for playlist_track in playlist.tracks.items - ] + for playlist_item in playlist.tracks.items: + if playlist_item.track.type is ItemType.TRACK: + if TYPE_CHECKING: + assert isinstance(playlist_item.track, Track) + items.append(_get_track_item_payload(playlist_item.track)) + elif playlist_item.track.type is ItemType.EPISODE: + if TYPE_CHECKING: + assert isinstance(playlist_item.track, SimplifiedEpisode) + items.append(_get_episode_item_payload(playlist_item.track)) elif media_content_type == MediaType.ALBUM: if album := await spotify.get_album(media_content_id): title = album.name @@ -412,16 +386,7 @@ async def build_item_response( # noqa: C901 ): title = show.name image = show.images[0].url if show.images else None - items = [ - { - "id": episode.episode_id, - "name": episode.name, - "type": MediaType.EPISODE, - "uri": episode.uri, - "thumbnail": fetch_image_url(episode.images), - } - for episode in show_episodes - ] + items = [_get_episode_item_payload(episode) for episode in show_episodes] try: media_class = CONTENT_TYPE_MEDIA_CLASS[media_content_type] @@ -429,36 +394,6 @@ async def build_item_response( # noqa: C901 _LOGGER.debug("Unknown media type received: %s", media_content_type) return None - if media_content_type == BrowsableMedia.CATEGORIES: - media_item = BrowseMedia( - can_expand=True, - can_play=False, - children_media_class=media_class["children"], - media_class=media_class["parent"], - media_content_id=media_content_id, - media_content_type=f"{MEDIA_PLAYER_PREFIX}{media_content_type}", - title=LIBRARY_MAP.get(media_content_id, "Unknown"), - ) - - media_item.children = [] - for item in items: - if (item_id := item["id"]) is None: - _LOGGER.debug("Missing ID for media item: %s", item) - continue - media_item.children.append( - BrowseMedia( - can_expand=True, - can_play=False, - children_media_class=MediaClass.TRACK, - media_class=MediaClass.PLAYLIST, - media_content_id=item_id, - media_content_type=f"{MEDIA_PLAYER_PREFIX}category_playlists", - thumbnail=item["thumbnail"], - title=item["name"], - ) - ) - return media_item - if title is None: title = LIBRARY_MAP.get(media_content_id, "Unknown") diff --git a/homeassistant/components/spotify/coordinator.py b/homeassistant/components/spotify/coordinator.py index 9e62d5f137e..099b1cb3ca8 100644 --- a/homeassistant/components/spotify/coordinator.py +++ b/homeassistant/components/spotify/coordinator.py @@ -7,14 +7,13 @@ from typing import TYPE_CHECKING from spotifyaio import ( ContextType, - ItemType, PlaybackState, Playlist, SpotifyClient, SpotifyConnectionError, + SpotifyNotFoundError, UserProfile, ) 
-from spotifyaio.models import AudioFeatures from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -39,7 +38,6 @@ class SpotifyCoordinatorData: current_playback: PlaybackState | None position_updated_at: datetime | None playlist: Playlist | None - audio_features: AudioFeatures | None dj_playlist: bool = False @@ -65,7 +63,7 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): ) self.client = client self._playlist: Playlist | None = None - self._currently_loaded_track: str | None = None + self._checked_playlist_id: str | None = None async def _async_setup(self) -> None: """Set up the coordinator.""" @@ -84,39 +82,36 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): current_playback=None, position_updated_at=None, playlist=None, - audio_features=None, ) # Record the last updated time, because Spotify's timestamp property is unreliable # and doesn't actually return the fetch time as is mentioned in the API description position_updated_at = dt_util.utcnow() - audio_features: AudioFeatures | None = None - if (item := current.item) is not None and item.type == ItemType.TRACK: - if item.uri != self._currently_loaded_track: - try: - audio_features = await self.client.get_audio_features(item.uri) - except SpotifyConnectionError: - _LOGGER.debug( - "Unable to load audio features for track '%s'. " - "Continuing without audio features", - item.uri, - ) - audio_features = None - else: - self._currently_loaded_track = item.uri - else: - audio_features = self.data.audio_features dj_playlist = False if (context := current.context) is not None: - if self._playlist is None or self._playlist.uri != context.uri: + dj_playlist = context.uri == SPOTIFY_DJ_PLAYLIST_URI + if not ( + context.uri + in ( + self._checked_playlist_id, + SPOTIFY_DJ_PLAYLIST_URI, + ) + or (self._playlist is None and context.uri == self._checked_playlist_id) + ): + self._checked_playlist_id = context.uri self._playlist = None - if context.uri == SPOTIFY_DJ_PLAYLIST_URI: - dj_playlist = True - elif context.context_type == ContextType.PLAYLIST: + if context.context_type == ContextType.PLAYLIST: # Make sure any playlist lookups don't break the current # playback state update try: self._playlist = await self.client.get_playlist(context.uri) + except SpotifyNotFoundError: + _LOGGER.debug( + "Spotify playlist '%s' not found. " + "Most likely a Spotify-created playlist", + context.uri, + ) + self._playlist = None except SpotifyConnectionError: _LOGGER.debug( "Unable to load spotify playlist '%s'. 
" @@ -124,10 +119,10 @@ class SpotifyCoordinator(DataUpdateCoordinator[SpotifyCoordinatorData]): context.uri, ) self._playlist = None + self._checked_playlist_id = None return SpotifyCoordinatorData( current_playback=current, position_updated_at=position_updated_at, playlist=self._playlist, - audio_features=audio_features, dj_playlist=dj_playlist, ) diff --git a/homeassistant/components/spotify/icons.json b/homeassistant/components/spotify/icons.json index e1b08127e43..00c63141eae 100644 --- a/homeassistant/components/spotify/icons.json +++ b/homeassistant/components/spotify/icons.json @@ -4,41 +4,6 @@ "spotify": { "default": "mdi:spotify" } - }, - "sensor": { - "song_tempo": { - "default": "mdi:metronome" - }, - "danceability": { - "default": "mdi:dance-ballroom" - }, - "energy": { - "default": "mdi:lightning-bolt" - }, - "mode": { - "default": "mdi:music" - }, - "speechiness": { - "default": "mdi:speaker-message" - }, - "acousticness": { - "default": "mdi:guitar-acoustic" - }, - "instrumentalness": { - "default": "mdi:guitar-electric" - }, - "valence": { - "default": "mdi:emoticon-happy" - }, - "liveness": { - "default": "mdi:music-note" - }, - "time_signature": { - "default": "mdi:music-clef-treble" - }, - "key": { - "default": "mdi:music-clef-treble" - } } } } diff --git a/homeassistant/components/spotify/manifest.json b/homeassistant/components/spotify/manifest.json index 8f8f7e0d588..27b8da7cecf 100644 --- a/homeassistant/components/spotify/manifest.json +++ b/homeassistant/components/spotify/manifest.json @@ -7,8 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/spotify", "integration_type": "service", "iot_class": "cloud_polling", - "loggers": ["spotipy"], - "quality_scale": "silver", - "requirements": ["spotifyaio==0.8.8"], + "loggers": ["spotifyaio"], + "requirements": ["spotifyaio==0.8.11"], "zeroconf": ["_spotify-connect._tcp.local."] } diff --git a/homeassistant/components/spotify/media_player.py b/homeassistant/components/spotify/media_player.py index 7687936fe4c..20a634efb42 100644 --- a/homeassistant/components/spotify/media_player.py +++ b/homeassistant/components/spotify/media_player.py @@ -361,6 +361,8 @@ class SpotifyMediaPlayer(SpotifyEntity, MediaPlayerEntity): """Select playback device.""" for device in self.devices.data: if device.name == source: + if TYPE_CHECKING: + assert device.device_id is not None await self.coordinator.client.transfer_playback(device.device_id) return diff --git a/homeassistant/components/spotify/sensor.py b/homeassistant/components/spotify/sensor.py deleted file mode 100644 index 3486a911b0d..00000000000 --- a/homeassistant/components/spotify/sensor.py +++ /dev/null @@ -1,179 +0,0 @@ -"""Sensor platform for Spotify.""" - -from collections.abc import Callable -from dataclasses import dataclass - -from spotifyaio.models import AudioFeatures, Key - -from homeassistant.components.sensor import ( - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, -) -from homeassistant.const import PERCENTAGE -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entity_platform import AddEntitiesCallback - -from .coordinator import SpotifyConfigEntry, SpotifyCoordinator -from .entity import SpotifyEntity - - -@dataclass(frozen=True, kw_only=True) -class SpotifyAudioFeaturesSensorEntityDescription(SensorEntityDescription): - """Describes Spotify sensor entity.""" - - value_fn: Callable[[AudioFeatures], float | str | None] - - -KEYS: dict[Key, str] = { - Key.C: "C", - Key.C_SHARP_D_FLAT: "C♯/D♭", - Key.D: "D", - 
Key.D_SHARP_E_FLAT: "D♯/E♭", - Key.E: "E", - Key.F: "F", - Key.F_SHARP_G_FLAT: "F♯/G♭", - Key.G: "G", - Key.G_SHARP_A_FLAT: "G♯/A♭", - Key.A: "A", - Key.A_SHARP_B_FLAT: "A♯/B♭", - Key.B: "B", -} - -KEY_OPTIONS = list(KEYS.values()) - - -def _get_key(audio_features: AudioFeatures) -> str | None: - if audio_features.key is None: - return None - return KEYS[audio_features.key] - - -AUDIO_FEATURE_SENSORS: tuple[SpotifyAudioFeaturesSensorEntityDescription, ...] = ( - SpotifyAudioFeaturesSensorEntityDescription( - key="bpm", - translation_key="song_tempo", - native_unit_of_measurement="bpm", - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.tempo, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="danceability", - translation_key="danceability", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.danceability * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="energy", - translation_key="energy", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.energy * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="mode", - translation_key="mode", - device_class=SensorDeviceClass.ENUM, - options=["major", "minor"], - value_fn=lambda audio_features: audio_features.mode.name.lower(), - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="speechiness", - translation_key="speechiness", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.speechiness * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="acousticness", - translation_key="acousticness", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.acousticness * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="instrumentalness", - translation_key="instrumentalness", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.instrumentalness * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="liveness", - translation_key="liveness", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.liveness * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="valence", - translation_key="valence", - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=0, - value_fn=lambda audio_features: audio_features.valence * 100, - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="time_signature", - translation_key="time_signature", - device_class=SensorDeviceClass.ENUM, - options=["3/4", "4/4", "5/4", "6/4", "7/4"], - value_fn=lambda audio_features: f"{audio_features.time_signature}/4", - entity_registry_enabled_default=False, - ), - SpotifyAudioFeaturesSensorEntityDescription( - key="key", - translation_key="key", - device_class=SensorDeviceClass.ENUM, - options=KEY_OPTIONS, - value_fn=_get_key, - entity_registry_enabled_default=False, - ), -) - - -async def async_setup_entry( - hass: HomeAssistant, - 
entry: SpotifyConfigEntry, - async_add_entities: AddEntitiesCallback, -) -> None: - """Set up Spotify sensor based on a config entry.""" - coordinator = entry.runtime_data.coordinator - - async_add_entities( - SpotifyAudioFeatureSensor(coordinator, description) - for description in AUDIO_FEATURE_SENSORS - ) - - -class SpotifyAudioFeatureSensor(SpotifyEntity, SensorEntity): - """Representation of a Spotify sensor.""" - - entity_description: SpotifyAudioFeaturesSensorEntityDescription - - def __init__( - self, - coordinator: SpotifyCoordinator, - entity_description: SpotifyAudioFeaturesSensorEntityDescription, - ) -> None: - """Initialize.""" - super().__init__(coordinator) - self._attr_unique_id = ( - f"{coordinator.current_user.user_id}_{entity_description.key}" - ) - self.entity_description = entity_description - - @property - def native_value(self) -> float | str | None: - """Return the state of the sensor.""" - if (audio_features := self.coordinator.data.audio_features) is None: - return None - return self.entity_description.value_fn(audio_features) diff --git a/homeassistant/components/spotify/strings.json b/homeassistant/components/spotify/strings.json index faf20d740d9..90e573a1706 100644 --- a/homeassistant/components/spotify/strings.json +++ b/homeassistant/components/spotify/strings.json @@ -30,46 +30,5 @@ "info": { "api_endpoint_reachable": "Spotify API endpoint reachable" } - }, - "entity": { - "sensor": { - "song_tempo": { - "name": "Song tempo" - }, - "danceability": { - "name": "Song danceability" - }, - "energy": { - "name": "Song energy" - }, - "mode": { - "name": "Song mode", - "state": { - "minor": "Minor", - "major": "Major" - } - }, - "speechiness": { - "name": "Song speechiness" - }, - "acousticness": { - "name": "Song acousticness" - }, - "instrumentalness": { - "name": "Song instrumentalness" - }, - "valence": { - "name": "Song valence" - }, - "liveness": { - "name": "Song liveness" - }, - "time_signature": { - "name": "Song time signature" - }, - "key": { - "name": "Song key" - } - } } } diff --git a/homeassistant/components/sql/config_flow.py b/homeassistant/components/sql/config_flow.py index 5537c7ff3b0..4fe04f2401c 100644 --- a/homeassistant/components/sql/config_flow.py +++ b/homeassistant/components/sql/config_flow.py @@ -23,7 +23,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import ( CONF_DEVICE_CLASS, @@ -144,7 +144,7 @@ class SQLConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SQLOptionsFlowHandler: """Get the options flow for this handler.""" - return SQLOptionsFlowHandler(config_entry) + return SQLOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -209,7 +209,7 @@ class SQLConfigFlow(ConfigFlow, domain=DOMAIN): ) -class SQLOptionsFlowHandler(OptionsFlowWithConfigEntry): +class SQLOptionsFlowHandler(OptionsFlow): """Handle SQL options.""" async def async_step_init( @@ -223,7 +223,7 @@ class SQLOptionsFlowHandler(OptionsFlowWithConfigEntry): db_url = user_input.get(CONF_DB_URL) query = user_input[CONF_QUERY] column = user_input[CONF_COLUMN_NAME] - name = self.options.get(CONF_NAME, self.config_entry.title) + name = self.config_entry.options.get(CONF_NAME, self.config_entry.title) try: query = validate_sql_select(query) @@ -275,7 +275,7 @@ class SQLOptionsFlowHandler(OptionsFlowWithConfigEntry): return self.async_show_form( step_id="init", 
data_schema=self.add_suggested_values_to_schema( - OPTIONS_SCHEMA, user_input or self.options + OPTIONS_SCHEMA, user_input or self.config_entry.options ), errors=errors, description_placeholders=description_placeholders, diff --git a/homeassistant/components/sql/manifest.json b/homeassistant/components/sql/manifest.json index dcb5f47829c..01c95d6c5e4 100644 --- a/homeassistant/components/sql/manifest.json +++ b/homeassistant/components/sql/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/sql", "iot_class": "local_polling", - "requirements": ["SQLAlchemy==2.0.31", "sqlparse==0.5.0"] + "requirements": ["SQLAlchemy==2.0.36", "sqlparse==0.5.0"] } diff --git a/homeassistant/components/squeezebox/sensor.py b/homeassistant/components/squeezebox/sensor.py index ff9f86ccf1f..0ca33179f9f 100644 --- a/homeassistant/components/squeezebox/sensor.py +++ b/homeassistant/components/squeezebox/sensor.py @@ -33,12 +33,10 @@ SENSORS: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_ALBUMS, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="albums", ), SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_ARTISTS, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="artists", ), SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_DURATION, @@ -49,12 +47,10 @@ SENSORS: tuple[SensorEntityDescription, ...] = ( SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_GENRES, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="genres", ), SensorEntityDescription( key=STATUS_SENSOR_INFO_TOTAL_SONGS, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="songs", ), SensorEntityDescription( key=STATUS_SENSOR_LASTSCAN, @@ -63,13 +59,11 @@ SENSORS: tuple[SensorEntityDescription, ...] 
= ( SensorEntityDescription( key=STATUS_SENSOR_PLAYER_COUNT, state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="players", ), SensorEntityDescription( key=STATUS_SENSOR_OTHER_PLAYER_COUNT, state_class=SensorStateClass.TOTAL, entity_registry_visible_default=False, - native_unit_of_measurement="players", ), ) diff --git a/homeassistant/components/squeezebox/strings.json b/homeassistant/components/squeezebox/strings.json index b1b71cd8c1d..406c7243a1a 100644 --- a/homeassistant/components/squeezebox/strings.json +++ b/homeassistant/components/squeezebox/strings.json @@ -76,25 +76,31 @@ "name": "Last scan" }, "info_total_albums": { - "name": "Total albums" + "name": "Total albums", + "unit_of_measurement": "albums" }, "info_total_artists": { - "name": "Total artists" + "name": "Total artists", + "unit_of_measurement": "artists" }, "info_total_duration": { "name": "Total duration" }, "info_total_genres": { - "name": "Total genres" + "name": "Total genres", + "unit_of_measurement": "genres" }, "info_total_songs": { - "name": "Total songs" + "name": "Total songs", + "unit_of_measurement": "songs" }, "player_count": { - "name": "Player count" + "name": "Player count", + "unit_of_measurement": "players" }, "other_player_count": { - "name": "Player count off service" + "name": "Player count off service", + "unit_of_measurement": "[%key:component::squeezebox::entity::sensor::player_count::unit_of_measurement%]" } } } diff --git a/homeassistant/components/starline/binary_sensor.py b/homeassistant/components/starline/binary_sensor.py index 0383fc8ade6..69f0ae06d02 100644 --- a/homeassistant/components/starline/binary_sensor.py +++ b/homeassistant/components/starline/binary_sensor.py @@ -41,6 +41,11 @@ BINARY_SENSOR_TYPES: tuple[BinarySensorEntityDescription, ...] = ( translation_key="doors", device_class=BinarySensorDeviceClass.LOCK, ), + BinarySensorEntityDescription( + key="run", + translation_key="is_running", + device_class=BinarySensorDeviceClass.RUNNING, + ), BinarySensorEntityDescription( key="hfree", translation_key="handsfree", diff --git a/homeassistant/components/starline/button.py b/homeassistant/components/starline/button.py index ea1a27adc15..6fb307cda74 100644 --- a/homeassistant/components/starline/button.py +++ b/homeassistant/components/starline/button.py @@ -16,6 +16,20 @@ BUTTON_TYPES: tuple[ButtonEntityDescription, ...] 
= ( key="poke", translation_key="horn", ), + ButtonEntityDescription( + key="panic", + translation_key="panic", + entity_registry_enabled_default=False, + ), + *[ + ButtonEntityDescription( + key=f"flex_{i}", + translation_key="flex", + translation_placeholders={"num": str(i)}, + entity_registry_enabled_default=False, + ) + for i in range(1, 10) + ], ) diff --git a/homeassistant/components/starline/config_flow.py b/homeassistant/components/starline/config_flow.py index 5235bd5230b..a899b562f36 100644 --- a/homeassistant/components/starline/config_flow.py +++ b/homeassistant/components/starline/config_flow.py @@ -34,6 +34,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): _app_code: str _app_token: str _captcha_image: str + _phone_number: str def __init__(self) -> None: """Initialize flow.""" @@ -49,7 +50,6 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): self._slnet_token_expires = None self._captcha_sid: str | None = None self._captcha_code: str | None = None - self._phone_number = None self._auth = StarlineAuth() diff --git a/homeassistant/components/starline/icons.json b/homeassistant/components/starline/icons.json index 8a4f85a89bf..d7d20ae03bd 100644 --- a/homeassistant/components/starline/icons.json +++ b/homeassistant/components/starline/icons.json @@ -12,11 +12,20 @@ }, "moving_ban": { "default": "mdi:car-off" + }, + "is_running": { + "default": "mdi:speedometer" } }, "button": { "horn": { "default": "mdi:bullhorn-outline" + }, + "flex": { + "default": "mdi:star-circle-outline" + }, + "panic": { + "default": "mdi:alarm-note" } }, "device_tracker": { @@ -60,9 +69,6 @@ "on": "mdi:access-point-network" } }, - "horn": { - "default": "mdi:bullhorn-outline" - }, "service_mode": { "default": "mdi:car-wrench", "state": { diff --git a/homeassistant/components/starline/strings.json b/homeassistant/components/starline/strings.json index 14a8ed5a035..0a30ea5b5be 100644 --- a/homeassistant/components/starline/strings.json +++ b/homeassistant/components/starline/strings.json @@ -63,6 +63,9 @@ }, "moving_ban": { "name": "Moving ban" + }, + "is_running": { + "name": "Running" } }, "device_tracker": { @@ -121,6 +124,12 @@ "button": { "horn": { "name": "Horn" + }, + "flex": { + "name": "Flex logic {num}" + }, + "panic": { + "name": "Panic mode" } } }, diff --git a/homeassistant/components/starline/switch.py b/homeassistant/components/starline/switch.py index 1b48a72c732..05193d98c8a 100644 --- a/homeassistant/components/starline/switch.py +++ b/homeassistant/components/starline/switch.py @@ -78,8 +78,6 @@ class StarlineSwitch(StarlineEntity, SwitchEntity): @property def is_on(self): """Return True if entity is on.""" - if self._key == "poke": - return False return self._device.car_state.get(self._key) def turn_on(self, **kwargs: Any) -> None: @@ -88,6 +86,4 @@ class StarlineSwitch(StarlineEntity, SwitchEntity): def turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - if self._key == "poke": - return self._account.api.set_car_state(self._device.device_id, self._key, False) diff --git a/homeassistant/components/starlingbank/manifest.json b/homeassistant/components/starlingbank/manifest.json index ef9be6d6da8..f7ab72c4379 100644 --- a/homeassistant/components/starlingbank/manifest.json +++ b/homeassistant/components/starlingbank/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/starlingbank", "iot_class": "cloud_polling", "loggers": ["starlingbank"], + "quality_scale": "legacy", "requirements": ["starlingbank==3.2"] } diff --git 
a/homeassistant/components/starlink/manifest.json b/homeassistant/components/starlink/manifest.json index b8733dd2435..070cbf1b44c 100644 --- a/homeassistant/components/starlink/manifest.json +++ b/homeassistant/components/starlink/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/starlink", "iot_class": "local_polling", - "quality_scale": "silver", - "requirements": ["starlink-grpc-core==1.1.3"] + "requirements": ["starlink-grpc-core==1.2.0"] } diff --git a/homeassistant/components/startca/manifest.json b/homeassistant/components/startca/manifest.json index 8c74a655ce3..958477c193b 100644 --- a/homeassistant/components/startca/manifest.json +++ b/homeassistant/components/startca/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/startca", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["xmltodict==0.13.0"] } diff --git a/homeassistant/components/statistics/sensor.py b/homeassistant/components/statistics/sensor.py index 50d07d4e466..b6f1844f774 100644 --- a/homeassistant/components/statistics/sensor.py +++ b/homeassistant/components/statistics/sensor.py @@ -364,7 +364,7 @@ class StatisticsSensor(SensorEntity): self.states: deque[float | bool] = deque(maxlen=self._samples_max_buffer_size) self.ages: deque[datetime] = deque(maxlen=self._samples_max_buffer_size) - self.attributes: dict[str, StateType] = {} + self._attr_extra_state_attributes = {} self._state_characteristic_fn: Callable[[], float | int | datetime | None] = ( self._callable_characteristic_fn(self._state_characteristic) @@ -462,10 +462,10 @@ class StatisticsSensor(SensorEntity): # Here we make a copy the current value, which is okay. self._attr_available = new_state.state != STATE_UNAVAILABLE if new_state.state == STATE_UNAVAILABLE: - self.attributes[STAT_SOURCE_VALUE_VALID] = None + self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = None return if new_state.state in (STATE_UNKNOWN, None, ""): - self.attributes[STAT_SOURCE_VALUE_VALID] = False + self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = False return try: @@ -475,9 +475,9 @@ class StatisticsSensor(SensorEntity): else: self.states.append(float(new_state.state)) self.ages.append(new_state.last_reported) - self.attributes[STAT_SOURCE_VALUE_VALID] = True + self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = True except ValueError: - self.attributes[STAT_SOURCE_VALUE_VALID] = False + self._attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = False _LOGGER.error( "%s: parsing error. Expected number or binary state, but received '%s'", self.entity_id, @@ -584,13 +584,6 @@ class StatisticsSensor(SensorEntity): return None return SensorStateClass.MEASUREMENT - @property - def extra_state_attributes(self) -> dict[str, StateType] | None: - """Return the state attributes of the sensor.""" - return { - key: value for key, value in self.attributes.items() if value is not None - } - def _purge_old_states(self, max_age: timedelta) -> None: """Remove states which are older than a given age.""" now = dt_util.utcnow() @@ -657,7 +650,7 @@ class StatisticsSensor(SensorEntity): if self._samples_max_age is not None: self._purge_old_states(self._samples_max_age) - self._update_attributes() + self._update_extra_state_attributes() self._update_value() # If max_age is set, ensure to update again after the defined interval. 
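The statistics hunks above and below swap the sensor's hand-rolled attributes dict and extra_state_attributes property for the _attr_extra_state_attributes shortcut that the entity base class already exposes. A minimal sketch of the pattern; the class name and attribute key are illustrative:

from homeassistant.components.sensor import SensorEntity

class DemoStatisticsSensor(SensorEntity):
    """Toy sensor showing the _attr_* shortcut for state attributes."""

    def __init__(self) -> None:
        # The Entity base class serves this dict as the entity's state
        # attributes, so no extra_state_attributes override is needed.
        self._attr_extra_state_attributes: dict[str, float | None] = {}

    def _update_extra_state_attributes(self, used: int, capacity: int) -> None:
        self._attr_extra_state_attributes["buffer_usage_ratio"] = round(
            used / capacity, 2
        )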
@@ -738,22 +731,22 @@ class StatisticsSensor(SensorEntity): self.async_write_ha_state() _LOGGER.debug("%s: initializing from database completed", self.entity_id) - def _update_attributes(self) -> None: + def _update_extra_state_attributes(self) -> None: """Calculate and update the various attributes.""" if self._samples_max_buffer_size is not None: - self.attributes[STAT_BUFFER_USAGE_RATIO] = round( + self._attr_extra_state_attributes[STAT_BUFFER_USAGE_RATIO] = round( len(self.states) / self._samples_max_buffer_size, 2 ) if self._samples_max_age is not None: if len(self.states) >= 1: - self.attributes[STAT_AGE_COVERAGE_RATIO] = round( + self._attr_extra_state_attributes[STAT_AGE_COVERAGE_RATIO] = round( (self.ages[-1] - self.ages[0]).total_seconds() / self._samples_max_age.total_seconds(), 2, ) else: - self.attributes[STAT_AGE_COVERAGE_RATIO] = None + self._attr_extra_state_attributes[STAT_AGE_COVERAGE_RATIO] = 0 def _update_value(self) -> None: """Front to call the right statistical characteristics functions. diff --git a/homeassistant/components/statistics/strings.json b/homeassistant/components/statistics/strings.json index 3e6fec9d986..91aead261ff 100644 --- a/homeassistant/components/statistics/strings.json +++ b/homeassistant/components/statistics/strings.json @@ -10,7 +10,7 @@ }, "step": { "user": { - "description": "Add a statistics sensor", + "description": "Create a statistics sensor", "data": { "name": "[%key:common::config_flow::data::name%]", "entity_id": "Entity" diff --git a/homeassistant/components/statsd/manifest.json b/homeassistant/components/statsd/manifest.json index 73296a23dd9..4f0ea93eb98 100644 --- a/homeassistant/components/statsd/manifest.json +++ b/homeassistant/components/statsd/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/statsd", "iot_class": "local_push", "loggers": ["statsd"], + "quality_scale": "legacy", "requirements": ["statsd==3.2.1"] } diff --git a/homeassistant/components/steam_online/config_flow.py b/homeassistant/components/steam_online/config_flow.py index 704eef616f6..69009fca8c4 100644 --- a/homeassistant/components/steam_online/config_flow.py +++ b/homeassistant/components/steam_online/config_flow.py @@ -40,7 +40,7 @@ class SteamFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow( config_entry: SteamConfigEntry, - ) -> OptionsFlow: + ) -> SteamOptionsFlowHandler: """Get the options flow for this handler.""" return SteamOptionsFlowHandler(config_entry) @@ -123,7 +123,6 @@ class SteamOptionsFlowHandler(OptionsFlow): def __init__(self, entry: SteamConfigEntry) -> None: """Initialize options flow.""" - self.entry = entry self.options = dict(entry.options) async def async_step_init( @@ -131,7 +130,7 @@ class SteamOptionsFlowHandler(OptionsFlow): ) -> ConfigFlowResult: """Manage Steam options.""" if user_input is not None: - await self.hass.config_entries.async_unload(self.entry.entry_id) + await self.hass.config_entries.async_unload(self.config_entry.entry_id) for _id in self.options[CONF_ACCOUNTS]: if _id not in user_input[CONF_ACCOUNTS] and ( entity_id := er.async_get(self.hass).async_get_entity_id( @@ -146,7 +145,7 @@ class SteamOptionsFlowHandler(OptionsFlow): if _id in user_input[CONF_ACCOUNTS] } } - await self.hass.config_entries.async_reload(self.entry.entry_id) + await self.hass.config_entries.async_reload(self.config_entry.entry_id) return self.async_create_entry(title="", data=channel_data) error = None try: @@ -176,7 +175,9 @@ class SteamOptionsFlowHandler(OptionsFlow): 
"""Get accounts.""" interface = steam.api.interface("ISteamUser") try: - friends = interface.GetFriendList(steamid=self.entry.data[CONF_ACCOUNT]) + friends = interface.GetFriendList( + steamid=self.config_entry.data[CONF_ACCOUNT] + ) _users_str = [user["steamid"] for user in friends["friendslist"]["friends"]] except steam.api.HTTPError: return [] diff --git a/homeassistant/components/steam_online/coordinator.py b/homeassistant/components/steam_online/coordinator.py index 6e7bdf4b91c..81a3bb0d898 100644 --- a/homeassistant/components/steam_online/coordinator.py +++ b/homeassistant/components/steam_online/coordinator.py @@ -60,9 +60,9 @@ class SteamDataUpdateCoordinator( for player in response["response"]["players"]["player"] if player["steamid"] in _ids } - for k in players: - data = self.player_interface.GetSteamLevel(steamid=players[k]["steamid"]) - players[k]["level"] = data["response"].get("player_level") + for value in players.values(): + data = self.player_interface.GetSteamLevel(steamid=value["steamid"]) + value["level"] = data["response"].get("player_level") return players async def _async_update_data(self) -> dict[str, dict[str, str | int]]: diff --git a/homeassistant/components/stiebel_eltron/manifest.json b/homeassistant/components/stiebel_eltron/manifest.json index 6592851d641..9580cd4d4ca 100644 --- a/homeassistant/components/stiebel_eltron/manifest.json +++ b/homeassistant/components/stiebel_eltron/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/stiebel_eltron", "iot_class": "local_polling", "loggers": ["pymodbus", "pystiebeleltron"], + "quality_scale": "legacy", "requirements": ["pystiebeleltron==0.0.1.dev2"] } diff --git a/homeassistant/components/stookwijzer/__init__.py b/homeassistant/components/stookwijzer/__init__.py index a714e3bd368..d8b9561bde9 100644 --- a/homeassistant/components/stookwijzer/__init__.py +++ b/homeassistant/components/stookwijzer/__init__.py @@ -2,29 +2,89 @@ from __future__ import annotations +from typing import Any + from stookwijzer import Stookwijzer -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE, Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN +from .const import DOMAIN, LOGGER +from .coordinator import StookwijzerConfigEntry, StookwijzerCoordinator PLATFORMS = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: StookwijzerConfigEntry) -> bool: """Set up Stookwijzer from a config entry.""" - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = Stookwijzer( - entry.data[CONF_LOCATION][CONF_LATITUDE], - entry.data[CONF_LOCATION][CONF_LONGITUDE], - ) + await er.async_migrate_entries(hass, entry.entry_id, async_migrate_entity_entry) + + coordinator = StookwijzerCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: StookwijzerConfigEntry +) -> bool: """Unload Stookwijzer config entry.""" - if unload_ok := 
await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - del hass.data[DOMAIN][entry.entry_id] - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_migrate_entry( + hass: HomeAssistant, entry: StookwijzerConfigEntry +) -> bool: + """Migrate old entry.""" + LOGGER.debug("Migrating from version %s", entry.version) + + if entry.version == 1: + latitude, longitude = await Stookwijzer.async_transform_coordinates( + async_get_clientsession(hass), + entry.data[CONF_LOCATION][CONF_LATITUDE], + entry.data[CONF_LOCATION][CONF_LONGITUDE], + ) + + if not latitude or not longitude: + ir.async_create_issue( + hass, + DOMAIN, + "location_migration_failed", + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="location_migration_failed", + translation_placeholders={ + "entry_title": entry.title, + }, + ) + return False + + hass.config_entries.async_update_entry( + entry, + version=2, + data={ + CONF_LATITUDE: latitude, + CONF_LONGITUDE: longitude, + }, + ) + + LOGGER.debug("Migration to version %s successful", entry.version) + + return True + + +@callback +def async_migrate_entity_entry(entity_entry: er.RegistryEntry) -> dict[str, Any] | None: + """Migrate Stookwijzer entity entries. + + - Migrates unique ID for the old Stookwijzer sensors to the new unique ID. + """ + if entity_entry.unique_id == entity_entry.config_entry_id: + return {"new_unique_id": f"{entity_entry.config_entry_id}_advice"} + + # No migration needed + return None diff --git a/homeassistant/components/stookwijzer/config_flow.py b/homeassistant/components/stookwijzer/config_flow.py index be53ce56390..32b4836763f 100644 --- a/homeassistant/components/stookwijzer/config_flow.py +++ b/homeassistant/components/stookwijzer/config_flow.py @@ -4,10 +4,12 @@ from __future__ import annotations from typing import Any +from stookwijzer import Stookwijzer import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.selector import LocationSelector from .const import DOMAIN @@ -16,21 +18,29 @@ from .const import DOMAIN class StookwijzerFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for Stookwijzer.""" - VERSION = 1 + VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - + errors = {} if user_input is not None: - return self.async_create_entry( - title="Stookwijzer", - data=user_input, + latitude, longitude = await Stookwijzer.async_transform_coordinates( + async_get_clientsession(self.hass), + user_input[CONF_LOCATION][CONF_LATITUDE], + user_input[CONF_LOCATION][CONF_LONGITUDE], ) + if latitude and longitude: + return self.async_create_entry( + title="Stookwijzer", + data={CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude}, + ) + errors["base"] = "unknown" return self.async_show_form( step_id="user", + errors=errors, data_schema=vol.Schema( { vol.Required( diff --git a/homeassistant/components/stookwijzer/const.py b/homeassistant/components/stookwijzer/const.py index e8cb3d818e6..1b0be86d375 100644 --- a/homeassistant/components/stookwijzer/const.py +++ b/homeassistant/components/stookwijzer/const.py @@ -1,16 +1,7 @@ """Constants for the Stookwijzer integration.""" -from enum import StrEnum import logging from typing import Final DOMAIN: 
Final = "stookwijzer" LOGGER = logging.getLogger(__package__) - - -class StookwijzerState(StrEnum): - """Stookwijzer states for sensor entity.""" - - BLUE = "blauw" - ORANGE = "oranje" - RED = "rood" diff --git a/homeassistant/components/stookwijzer/coordinator.py b/homeassistant/components/stookwijzer/coordinator.py new file mode 100644 index 00000000000..23092bed66e --- /dev/null +++ b/homeassistant/components/stookwijzer/coordinator.py @@ -0,0 +1,44 @@ +"""Class representing a Stookwijzer update coordinator.""" + +from datetime import timedelta + +from stookwijzer import Stookwijzer + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER + +SCAN_INTERVAL = timedelta(minutes=60) + +type StookwijzerConfigEntry = ConfigEntry[StookwijzerCoordinator] + + +class StookwijzerCoordinator(DataUpdateCoordinator[None]): + """Stookwijzer update coordinator.""" + + def __init__(self, hass: HomeAssistant, entry: StookwijzerConfigEntry) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=SCAN_INTERVAL, + ) + self.client = Stookwijzer( + async_get_clientsession(hass), + entry.data[CONF_LATITUDE], + entry.data[CONF_LONGITUDE], + ) + + async def _async_update_data(self) -> None: + """Fetch data from API endpoint.""" + await self.client.async_update() + if self.client.advice is None: + raise UpdateFailed( + translation_domain=DOMAIN, + translation_key="no_data_received", + ) diff --git a/homeassistant/components/stookwijzer/diagnostics.py b/homeassistant/components/stookwijzer/diagnostics.py index c7bf4fad14d..2849e0e976a 100644 --- a/homeassistant/components/stookwijzer/diagnostics.py +++ b/homeassistant/components/stookwijzer/diagnostics.py @@ -4,29 +4,18 @@ from __future__ import annotations from typing import Any -from stookwijzer import Stookwijzer - -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN +from .coordinator import StookwijzerConfigEntry async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: StookwijzerConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - client: Stookwijzer = hass.data[DOMAIN][entry.entry_id] - - last_updated = None - if client.last_updated: - last_updated = client.last_updated.isoformat() - + client = entry.runtime_data.client return { - "state": client.state, - "last_updated": last_updated, - "lqi": client.lqi, - "windspeed": client.windspeed, - "weather": client.weather, - "concentrations": client.concentrations, + "advice": client.advice, + "air_quality_index": client.lki, + "windspeed_ms": client.windspeed_ms, } diff --git a/homeassistant/components/stookwijzer/manifest.json b/homeassistant/components/stookwijzer/manifest.json index dbf902b1e1e..3fe16fb3d33 100644 --- a/homeassistant/components/stookwijzer/manifest.json +++ b/homeassistant/components/stookwijzer/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/stookwijzer", "integration_type": "service", "iot_class": "cloud_polling", - "requirements": ["stookwijzer==1.3.0"] + "requirements": ["stookwijzer==1.5.1"] } diff --git 
a/homeassistant/components/stookwijzer/quality_scale.yaml b/homeassistant/components/stookwijzer/quality_scale.yaml new file mode 100644 index 00000000000..67fadc00b64 --- /dev/null +++ b/homeassistant/components/stookwijzer/quality_scale.yaml @@ -0,0 +1,89 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + The integration doesn't provide any additional service actions. + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + The integration doesn't provide any additional service actions. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + The integration doesn't subscribe to any events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: todo + test-before-setup: done + unique-config-entry: todo + + # Silver + action-exceptions: + status: exempt + comment: | + This integration is read-only and doesn't provide any actions. + config-entry-unloading: done + docs-configuration-parameters: todo + docs-installation-parameters: todo + entity-unavailable: done + integration-owner: done + log-when-unavailable: done + parallel-updates: + status: exempt + comment: | + This integration is read-only and doesn't provide any actions. Querying + the service for data is handled centrally using a data update coordinator. + reauthentication-flow: + status: exempt + comment: | + This integration doesn't require re-authentication. + test-coverage: done + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + The integration cannot be discovered, as it is an external service. + discovery: + status: exempt + comment: | + The integration cannot be discovered, as it is an external service. + docs-data-update: todo + docs-examples: todo + docs-known-limitations: todo + docs-supported-devices: todo + docs-supported-functions: todo + docs-troubleshooting: todo + docs-use-cases: todo + dynamic-devices: + status: exempt + comment: | + This integration provides a single device entry for the service. + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: todo + reconfiguration-flow: todo + repair-issues: done + stale-devices: + status: exempt + comment: | + This integration provides a single device entry for the service. 
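For the Stookwijzer changes above: the config flow bumps VERSION to 2, and Home Assistant invokes async_migrate_entry whenever a stored entry's version is lower than that, marking the entry as failed to migrate if the hook returns False (which is why the repair issue is raised). A stripped-down sketch of the hook's shape, with placeholder data keys:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Bring a stored config entry up to the flow's current VERSION."""
    if entry.version == 1:
        new_data = {**entry.data}
        # Convert old fields here; return False instead if the conversion
        # fails, which puts the entry into the migration-error state.
        new_data["converted_key"] = "converted value"
        hass.config_entries.async_update_entry(entry, data=new_data, version=2)
    return True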
+ # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/stookwijzer/sensor.py b/homeassistant/components/stookwijzer/sensor.py index b8f9a660598..2660ff2ddb2 100644 --- a/homeassistant/components/stookwijzer/sensor.py +++ b/homeassistant/components/stookwijzer/sensor.py @@ -2,65 +2,95 @@ from __future__ import annotations -from datetime import timedelta +from collections.abc import Callable +from dataclasses import dataclass from stookwijzer import Stookwijzer -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity -from homeassistant.config_entries import ConfigEntry +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfSpeed from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, StookwijzerState +from .const import DOMAIN +from .coordinator import StookwijzerConfigEntry, StookwijzerCoordinator -SCAN_INTERVAL = timedelta(minutes=60) + +@dataclass(kw_only=True, frozen=True) +class StookwijzerSensorDescription(SensorEntityDescription): + """Class describing Stookwijzer sensor entities.""" + + value_fn: Callable[[Stookwijzer], int | float | str | None] + + +STOOKWIJZER_SENSORS = [ + StookwijzerSensorDescription( + key="windspeed", + native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, + suggested_unit_of_measurement=UnitOfSpeed.BEAUFORT, + device_class=SensorDeviceClass.WIND_SPEED, + suggested_display_precision=0, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda client: client.windspeed_ms, + ), + StookwijzerSensorDescription( + key="air_quality_index", + device_class=SensorDeviceClass.AQI, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda client: client.lki, + ), + StookwijzerSensorDescription( + key="advice", + translation_key="advice", + device_class=SensorDeviceClass.ENUM, + value_fn=lambda client: client.advice, + options=["code_yellow", "code_orange", "code_red"], + ), +] async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: StookwijzerConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Stookwijzer sensor from a config entry.""" - client = hass.data[DOMAIN][entry.entry_id] - async_add_entities([StookwijzerSensor(client, entry)], update_before_add=True) + async_add_entities( + StookwijzerSensor(description, entry) for description in STOOKWIJZER_SENSORS + ) -class StookwijzerSensor(SensorEntity): +class StookwijzerSensor(CoordinatorEntity[StookwijzerCoordinator], SensorEntity): """Defines a Stookwijzer binary sensor.""" - _attr_attribution = "Data provided by stookwijzer.nu" - _attr_device_class = SensorDeviceClass.ENUM + entity_description: StookwijzerSensorDescription + _attr_attribution = "Data provided by atlasleefomgeving.nl" _attr_has_entity_name = True - _attr_name = None - _attr_translation_key = "stookwijzer" - def __init__(self, client: Stookwijzer, entry: ConfigEntry) -> None: + def __init__( + self, + description: StookwijzerSensorDescription, + entry: StookwijzerConfigEntry, + ) -> None: """Initialize a Stookwijzer device.""" - self._client = client - self._attr_options = [cls.value for cls in StookwijzerState] - self._attr_unique_id = entry.entry_id + 
super().__init__(entry.runtime_data) + self.entity_description = description + self._attr_unique_id = f"{entry.entry_id}_{description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, f"{entry.entry_id}")}, - name="Stookwijzer", - manufacturer="stookwijzer.nu", + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="Atlas Leefomgeving", entry_type=DeviceEntryType.SERVICE, - configuration_url="https://www.stookwijzer.nu", + configuration_url="https://www.atlasleefomgeving.nl/stookwijzer", ) - def update(self) -> None: - """Update the data from the Stookwijzer handler.""" - self._client.update() - @property - def available(self) -> bool: - """Return if entity is available.""" - return self._client.state is not None - - @property - def native_value(self) -> str | None: + def native_value(self) -> int | float | str | None: """Return the state of the device.""" - if self._client.state is None: - return None - return StookwijzerState(self._client.state).value + return self.entity_description.value_fn(self.coordinator.client) diff --git a/homeassistant/components/stookwijzer/strings.json b/homeassistant/components/stookwijzer/strings.json index 549673165ec..189af89b282 100644 --- a/homeassistant/components/stookwijzer/strings.json +++ b/homeassistant/components/stookwijzer/strings.json @@ -5,19 +5,37 @@ "description": "Select the location you want to receive the Stookwijzer information for.", "data": { "location": "[%key:common::config_flow::data::location%]" + }, + "data_description": { + "location": "Use the map to set the location for Stookwijzer." } } + }, + "error": { + "unknown": "[%key:common::config_flow::error::unknown%]" } }, "entity": { "sensor": { - "stookwijzer": { + "advice": { + "name": "Advice code", "state": { - "blauw": "Blue", - "oranje": "Orange", - "rood": "Red" + "code_yellow": "Yellow", + "code_orange": "Orange", + "code_red": "Red" } } } + }, + "issues": { + "location_migration_failed": { + "description": "The Stookwijzer integration was unable to automatically migrate your location to a new format the updated integration uses.\n\nMake sure you are connected to the internet and restart Home Assistant to try again.\n\nIf this doesn't resolve the error, remove and re-add the integration.", + "title": "Migration of your location failed" + } + }, + "exceptions": { + "no_data_received": { + "message": "No data received from Stookwijzer."
+ } } } diff --git a/homeassistant/components/stream/const.py b/homeassistant/components/stream/const.py index a2fa065e019..66455ffad1a 100644 --- a/homeassistant/components/stream/const.py +++ b/homeassistant/components/stream/const.py @@ -1,5 +1,9 @@ """Constants for Stream component.""" +from __future__ import annotations + +from typing import Final + DOMAIN = "stream" ATTR_ENDPOINTS = "endpoints" @@ -11,8 +15,8 @@ RECORDER_PROVIDER = "recorder" OUTPUT_FORMATS = [HLS_PROVIDER] -SEGMENT_CONTAINER_FORMAT = "mp4" # format for segments -RECORDER_CONTAINER_FORMAT = "mp4" # format for recorder output +SEGMENT_CONTAINER_FORMAT: Final = "mp4" # format for segments +RECORDER_CONTAINER_FORMAT: Final = "mp4" # format for recorder output AUDIO_CODECS = {"aac", "mp3"} FORMAT_CONTENT_TYPE = {HLS_PROVIDER: "application/vnd.apple.mpegurl"} diff --git a/homeassistant/components/stream/core.py b/homeassistant/components/stream/core.py index 68c08a4f072..4184b23b9a0 100644 --- a/homeassistant/components/stream/core.py +++ b/homeassistant/components/stream/core.py @@ -9,7 +9,7 @@ from dataclasses import dataclass, field import datetime from enum import IntEnum import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, cast from aiohttp import web import numpy as np @@ -27,7 +27,7 @@ from .const import ( ) if TYPE_CHECKING: - from av import CodecContext, Packet + from av import Packet, VideoCodecContext from homeassistant.components.camera import DynamicStreamSettings @@ -438,17 +438,17 @@ class KeyFrameConverter: """Initialize.""" # Keep import here so that we can import stream integration - # without installingreqs + # without installing reqs # pylint: disable-next=import-outside-toplevel from homeassistant.components.camera.img_util import TurboJPEGSingleton - self._packet: Packet = None + self._packet: Packet | None = None self._event: asyncio.Event = asyncio.Event() self._hass = hass self._image: bytes | None = None self._turbojpeg = TurboJPEGSingleton.instance() self._lock = asyncio.Lock() - self._codec_context: CodecContext | None = None + self._codec_context: VideoCodecContext | None = None self._stream_settings = stream_settings self._dynamic_stream_settings = dynamic_stream_settings @@ -460,7 +460,7 @@ class KeyFrameConverter: self._packet = packet self._hass.loop.call_soon_threadsafe(self._event.set) - def create_codec_context(self, codec_context: CodecContext) -> None: + def create_codec_context(self, codec_context: VideoCodecContext) -> None: """Create a codec context to be used for decoding the keyframes. This is run by the worker thread and will only be called once per worker. 
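# A minimal illustrative sketch (hypothetical helper, not taken from this change):
# with PyAV 13, CodecContext.create() is annotated as returning the base
# CodecContext, so the stream code narrows the result to VideoCodecContext with a
# cast before configuring it for keyframe-only decoding, roughly like this:
from __future__ import annotations

from typing import cast

import av
from av import CodecContext


def make_keyframe_decoder(source: av.VideoCodecContext) -> av.VideoCodecContext:
    """Build a decode-only codec context mirroring the source video stream."""
    ctx = cast("av.VideoCodecContext", CodecContext.create(source.name, "r"))
    ctx.extradata = source.extradata  # carry over codec parameters (e.g. SPS/PPS)
    ctx.skip_frame = "NONKEY"  # only keyframes are needed for still images
    return ctx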
@@ -474,7 +474,9 @@ class KeyFrameConverter: # pylint: disable-next=import-outside-toplevel from av import CodecContext - self._codec_context = CodecContext.create(codec_context.name, "r") + self._codec_context = cast( + "VideoCodecContext", CodecContext.create(codec_context.name, "r") + ) self._codec_context.extradata = codec_context.extradata self._codec_context.skip_frame = "NONKEY" self._codec_context.thread_type = "NONE" @@ -506,9 +508,8 @@ class KeyFrameConverter: frames = self._codec_context.decode(None) break except EOFError: - _LOGGER.debug("Codec context needs flushing, attempting to reopen") - self._codec_context.close() - self._codec_context.open() + _LOGGER.debug("Codec context needs flushing") + self._codec_context.flush_buffers() else: _LOGGER.debug("Unable to decode keyframe") return diff --git a/homeassistant/components/stream/manifest.json b/homeassistant/components/stream/manifest.json index 00387d97b83..fdf81d99e65 100644 --- a/homeassistant/components/stream/manifest.json +++ b/homeassistant/components/stream/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.5", "ha-av==10.1.1", "numpy==1.26.4"] + "requirements": ["PyTurboJPEG==1.7.5", "av==13.1.0", "numpy==2.1.3"] } diff --git a/homeassistant/components/stream/recorder.py b/homeassistant/components/stream/recorder.py index 6dfc09891b7..a24440e6d19 100644 --- a/homeassistant/components/stream/recorder.py +++ b/homeassistant/components/stream/recorder.py @@ -9,6 +9,7 @@ import os from typing import TYPE_CHECKING import av +import av.container from homeassistant.core import HomeAssistant, callback @@ -105,24 +106,23 @@ class RecorderOutput(StreamOutput): # Create output on first segment if not output: + container_options: dict[str, str] = { + "video_track_timescale": str(int(1 / source_v.time_base)), # type: ignore[operator] + "movflags": "frag_keyframe+empty_moov", + "min_frag_duration": str(self.stream_settings.min_segment_duration), + } output = av.open( self.video_path + ".tmp", "w", format=RECORDER_CONTAINER_FORMAT, - container_options={ - "video_track_timescale": str(int(1 / source_v.time_base)), - "movflags": "frag_keyframe+empty_moov", - "min_frag_duration": str( - self.stream_settings.min_segment_duration - ), - }, + container_options=container_options, ) # Add output streams if necessary if not output_v: output_v = output.add_stream(template=source_v) context = output_v.codec_context - context.flags |= "GLOBAL_HEADER" + context.global_header = True if source_a and not output_a: output_a = output.add_stream(template=source_a) @@ -132,21 +132,23 @@ class RecorderOutput(StreamOutput): last_stream_id = segment.stream_id pts_adjuster["video"] = int( (running_duration - source.start_time) - / (av.time_base * source_v.time_base) + / (av.time_base * source_v.time_base) # type: ignore[operator] ) if source_a: pts_adjuster["audio"] = int( (running_duration - source.start_time) - / (av.time_base * source_a.time_base) + / (av.time_base * source_a.time_base) # type: ignore[operator] ) # Remux video for packet in source.demux(): - if packet.dts is None: + if packet.pts is None: continue - packet.pts += pts_adjuster[packet.stream.type] - packet.dts += pts_adjuster[packet.stream.type] - packet.stream = output_v if packet.stream.type == "video" else output_a + packet.pts += pts_adjuster[packet.stream.type] # type: ignore[operator] + packet.dts += pts_adjuster[packet.stream.type] # type: ignore[operator] + stream = output_v if 
packet.stream.type == "video" else output_a + assert stream + packet.stream = stream output.mux(packet) running_duration += source.duration - source.start_time @@ -169,7 +171,9 @@ class RecorderOutput(StreamOutput): os.remove(video_path + ".tmp") def finish_writing( - segments: deque[Segment], output: av.OutputContainer, video_path: str + segments: deque[Segment], + output: av.container.OutputContainer | None, + video_path: str, ) -> None: """Finish writing output.""" # Should only have 0 or 1 segments, but loop through just in case diff --git a/homeassistant/components/stream/worker.py b/homeassistant/components/stream/worker.py index 0d72a9b0818..8c9bb1b8e9e 100644 --- a/homeassistant/components/stream/worker.py +++ b/homeassistant/components/stream/worker.py @@ -13,6 +13,9 @@ from threading import Event from typing import Any, Self, cast import av +import av.audio +import av.container +import av.stream from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util @@ -47,10 +50,10 @@ class StreamWorkerError(Exception): """An exception thrown while processing a stream.""" -def redact_av_error_string(err: av.AVError) -> str: +def redact_av_error_string(err: av.FFmpegError) -> str: """Return an error string with credentials redacted from the url.""" - parts = [str(err.type), err.strerror] - if err.filename is not None: + parts = [str(err.type), err.strerror] # type: ignore[attr-defined] + if err.filename: parts.append(redact_credentials(err.filename)) return ", ".join(parts) @@ -123,30 +126,31 @@ class StreamState: class StreamMuxer: """StreamMuxer re-packages video/audio packets for output.""" + _segment_start_dts: int + _memory_file: BytesIO + _av_output: av.container.OutputContainer + _output_video_stream: av.VideoStream + _output_audio_stream: av.audio.AudioStream | None + _segment: Segment | None + # the following 2 member variables are used for Part formation + _memory_file_pos: int + _part_start_dts: float + def __init__( self, hass: HomeAssistant, - video_stream: av.video.VideoStream, - audio_stream: av.audio.stream.AudioStream | None, - audio_bsf: av.BitStreamFilter | None, + video_stream: av.VideoStream, + audio_stream: av.audio.AudioStream | None, + audio_bsf: str | None, stream_state: StreamState, stream_settings: StreamSettings, ) -> None: """Initialize StreamMuxer.""" self._hass = hass - self._segment_start_dts: int = cast(int, None) - self._memory_file: BytesIO = cast(BytesIO, None) - self._av_output: av.container.OutputContainer = None - self._input_video_stream: av.video.VideoStream = video_stream - self._input_audio_stream: av.audio.stream.AudioStream | None = audio_stream + self._input_video_stream = video_stream + self._input_audio_stream = audio_stream self._audio_bsf = audio_bsf - self._audio_bsf_context: av.BitStreamFilterContext = None - self._output_video_stream: av.video.VideoStream = None - self._output_audio_stream: av.audio.stream.AudioStream | None = None - self._segment: Segment | None = None - # the following 3 member variables are used for Part formation - self._memory_file_pos: int = cast(int, None) - self._part_start_dts: int = cast(int, None) + self._audio_bsf_context: av.BitStreamFilterContext | None = None self._part_has_keyframe = False self._stream_settings = stream_settings self._stream_state = stream_state @@ -156,83 +160,83 @@ class StreamMuxer: self, memory_file: BytesIO, sequence: int, - input_vstream: av.video.VideoStream, - input_astream: av.audio.stream.AudioStream | None, + input_vstream: av.VideoStream, + 
input_astream: av.audio.AudioStream | None, ) -> tuple[ av.container.OutputContainer, - av.video.VideoStream, - av.audio.stream.AudioStream | None, + av.VideoStream, + av.audio.AudioStream | None, ]: """Make a new av OutputContainer and add output streams.""" + container_options: dict[str, str] = { + # Removed skip_sidx - see: + # https://github.com/home-assistant/core/pull/39970 + # "cmaf" flag replaces several of the movflags used, + # but too recent to use for now + "movflags": "frag_custom+empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", + # Sometimes the first segment begins with negative timestamps, + # and this setting just + # adjusts the timestamps in the output from that segment to start + # from 0. Helps from having to make some adjustments + # in test_durations + "avoid_negative_ts": "make_non_negative", + "fragment_index": str(sequence + 1), + "video_track_timescale": str(int(1 / input_vstream.time_base)), # type: ignore[operator] + # Only do extra fragmenting if we are using ll_hls + # Let ffmpeg do the work using frag_duration + # Fragment durations may exceed the 15% allowed variance but it seems ok + **( + { + "movflags": "empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", + # Create a fragment every TARGET_PART_DURATION. The data from + # each fragment is stored in a "Part" that can be combined with + # the data from all the other "Part"s, plus an init section, + # to reconstitute the data in a "Segment". + # + # The LL-HLS spec allows for a fragment's duration to be within + # the range [0.85x,1.0x] of the part target duration. We use the + # frag_duration option to tell ffmpeg to try to cut the + # fragments when they reach frag_duration. However, + # the resulting fragments can have variability in their + # durations and can end up being too short or too long. With a + # video track with no audio, the discrete nature of frames means + # that the frame at the end of a fragment will sometimes extend + # slightly beyond the desired frag_duration. + # + # If there are two tracks, as in the case of a video feed with + # audio, there is an added wrinkle as the fragment cut seems to + # be done on the first track that crosses the desired threshold, + # and cutting on the audio track may also result in a shorter + # video fragment than desired. + # + # Given this, our approach is to give ffmpeg a frag_duration + # somewhere in the middle of the range, hoping that the parts + # stay pretty well bounded, and we adjust the part durations + # a bit in the hls metadata so that everything "looks" ok. + "frag_duration": str( + int(self._stream_settings.part_target_duration * 9e5) + ), + } + if self._stream_settings.ll_hls + else {} + ), + } container = av.open( memory_file, mode="w", format=SEGMENT_CONTAINER_FORMAT, - container_options={ - # Removed skip_sidx - see: - # https://github.com/home-assistant/core/pull/39970 - # "cmaf" flag replaces several of the movflags used, - # but too recent to use for now - "movflags": "frag_custom+empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", - # Sometimes the first segment begins with negative timestamps, - # and this setting just - # adjusts the timestamps in the output from that segment to start - # from 0. 
Helps from having to make some adjustments - # in test_durations - "avoid_negative_ts": "make_non_negative", - "fragment_index": str(sequence + 1), - "video_track_timescale": str(int(1 / input_vstream.time_base)), - # Only do extra fragmenting if we are using ll_hls - # Let ffmpeg do the work using frag_duration - # Fragment durations may exceed the 15% allowed variance but it seems ok - **( - { - "movflags": "empty_moov+default_base_moof+frag_discont+negative_cts_offsets+skip_trailer+delay_moov", - # Create a fragment every TARGET_PART_DURATION. The data from - # each fragment is stored in a "Part" that can be combined with - # the data from all the other "Part"s, plus an init section, - # to reconstitute the data in a "Segment". - # - # The LL-HLS spec allows for a fragment's duration to be within - # the range [0.85x,1.0x] of the part target duration. We use the - # frag_duration option to tell ffmpeg to try to cut the - # fragments when they reach frag_duration. However, - # the resulting fragments can have variability in their - # durations and can end up being too short or too long. With a - # video track with no audio, the discrete nature of frames means - # that the frame at the end of a fragment will sometimes extend - # slightly beyond the desired frag_duration. - # - # If there are two tracks, as in the case of a video feed with - # audio, there is an added wrinkle as the fragment cut seems to - # be done on the first track that crosses the desired threshold, - # and cutting on the audio track may also result in a shorter - # video fragment than desired. - # - # Given this, our approach is to give ffmpeg a frag_duration - # somewhere in the middle of the range, hoping that the parts - # stay pretty well bounded, and we adjust the part durations - # a bit in the hls metadata so that everything "looks" ok. 
- "frag_duration": str( - int(self._stream_settings.part_target_duration * 9e5) - ), - } - if self._stream_settings.ll_hls - else {} - ), - }, + container_options=container_options, ) output_vstream = container.add_stream(template=input_vstream) # Check if audio is requested output_astream = None if input_astream: if self._audio_bsf: - self._audio_bsf_context = self._audio_bsf.create() - self._audio_bsf_context.set_input_stream(input_astream) - output_astream = container.add_stream( - template=self._audio_bsf_context or input_astream - ) - return container, output_vstream, output_astream + self._audio_bsf_context = av.BitStreamFilterContext( + self._audio_bsf, input_astream + ) + output_astream = container.add_stream(template=input_astream) + return container, output_vstream, output_astream # type: ignore[return-value] def reset(self, video_dts: int) -> None: """Initialize a new stream segment.""" @@ -251,7 +255,7 @@ class StreamMuxer: input_astream=self._input_audio_stream, ) if self._output_video_stream.name == "hevc": - self._output_video_stream.codec_tag = "hvc1" + self._output_video_stream.codec_context.codec_tag = "hvc1" def mux_packet(self, packet: av.Packet) -> None: """Mux a packet to the appropriate output stream.""" @@ -273,11 +277,11 @@ class StreamMuxer: self._part_has_keyframe |= packet.is_keyframe elif packet.stream == self._input_audio_stream: + assert self._output_audio_stream if self._audio_bsf_context: - self._audio_bsf_context.send(packet) - while packet := self._audio_bsf_context.recv(): - packet.stream = self._output_audio_stream - self._av_output.mux(packet) + for audio_packet in self._audio_bsf_context.filter(packet): + audio_packet.stream = self._output_audio_stream + self._av_output.mux(audio_packet) return packet.stream = self._output_audio_stream self._av_output.mux(packet) @@ -395,7 +399,7 @@ class StreamMuxer: self._memory_file.close() -class PeekIterator(Iterator): +class PeekIterator(Iterator[av.Packet]): """An Iterator that may allow multiple passes. This may be consumed like a normal Iterator, however also supports a @@ -459,7 +463,7 @@ class TimestampValidator: """Validate the packet timestamp based on ordering within the stream.""" # Discard packets missing DTS. Terminate if too many are missing. if packet.dts is None: - if self._missing_dts >= MAX_MISSING_DTS: + if self._missing_dts >= MAX_MISSING_DTS: # type: ignore[unreachable] raise StreamWorkerError( f"No dts in {MAX_MISSING_DTS+1} consecutive packets" ) @@ -486,7 +490,7 @@ def is_keyframe(packet: av.Packet) -> Any: def get_audio_bitstream_filter( packets: Iterator[av.Packet], audio_stream: Any -) -> av.BitStreamFilterContext | None: +) -> str | None: """Return the aac_adtstoasc bitstream filter if ADTS AAC is detected.""" if not audio_stream: return None @@ -503,7 +507,7 @@ def get_audio_bitstream_filter( _LOGGER.debug( "ADTS AAC detected. 
Adding aac_adtstoaac bitstream filter" ) - return av.BitStreamFilter("aac_adtstoasc") + return "aac_adtstoasc" break return None @@ -524,7 +528,7 @@ def stream_worker( del pyav_options["stimeout"] try: container = av.open(source, options=pyav_options, timeout=SOURCE_TIMEOUT) - except av.AVError as err: + except av.FFmpegError as err: raise StreamWorkerError( f"Error opening stream ({redact_av_error_string(err)})" ) from err @@ -541,7 +545,7 @@ def stream_worker( audio_stream = None # Some audio streams do not have a profile and throw errors when remuxing if audio_stream and audio_stream.profile is None: - audio_stream = None + audio_stream = None # type: ignore[unreachable] # Disable ll-hls for hls inputs if container.format.name == "hls": for field in fields(StreamSettings): @@ -556,8 +560,8 @@ def stream_worker( stream_state.diagnostics.set_value("audio_codec", audio_stream.name) dts_validator = TimestampValidator( - int(1 / video_stream.time_base), - 1 / audio_stream.time_base if audio_stream else 1, + int(1 / video_stream.time_base), # type: ignore[operator] + int(1 / audio_stream.time_base) if audio_stream else 1, # type: ignore[operator] ) container_packets = PeekIterator( filter(dts_validator.is_valid, container.demux((video_stream, audio_stream))) @@ -598,7 +602,7 @@ def stream_worker( except StopIteration as ex: container.close() raise StreamEndedError("Stream ended; no additional packets") from ex - except av.AVError as ex: + except av.FFmpegError as ex: container.close() raise StreamWorkerError( f"Error demuxing stream while finding first packet ({redact_av_error_string(ex)})" @@ -625,7 +629,7 @@ def stream_worker( raise except StopIteration as ex: raise StreamEndedError("Stream ended; no additional packets") from ex - except av.AVError as ex: + except av.FFmpegError as ex: raise StreamWorkerError( f"Error demuxing stream ({redact_av_error_string(ex)})" ) from ex diff --git a/homeassistant/components/subaru/config_flow.py b/homeassistant/components/subaru/config_flow.py index 3d96a89a14f..0ef4ed29941 100644 --- a/homeassistant/components/subaru/config_flow.py +++ b/homeassistant/components/subaru/config_flow.py @@ -106,7 +106,7 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def validate_login_creds(self, data): """Validate the user input allows us to connect. 
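# Context sketch, not part of this change: on recent Home Assistant cores the
# OptionsFlow base class exposes self.config_entry itself, which is why the
# handler no longer needs an __init__ that stores the entry (see the next hunk).
# A minimal hypothetical handler then looks roughly like this:
from __future__ import annotations

from typing import Any

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow


class ExampleOptionsFlowHandler(OptionsFlow):
    """Options flow that reads the entry through the base class."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the only options step."""
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)
        current = dict(self.config_entry.options)  # provided by the base class
        _ = current  # a real handler would prefill its schema from these options
        return self.async_show_form(step_id="init")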
@@ -218,10 +218,6 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Subaru.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/suez_water/__init__.py b/homeassistant/components/suez_water/__init__.py index f5b2880e011..06f503b85c2 100644 --- a/homeassistant/components/suez_water/__init__.py +++ b/homeassistant/components/suez_water/__init__.py @@ -2,15 +2,12 @@ from __future__ import annotations -from pysuez import SuezClient -from pysuez.client import PySuezError - from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady -from .const import CONF_COUNTER_ID, DOMAIN +from .const import DOMAIN +from .coordinator import SuezWaterCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR] @@ -18,23 +15,10 @@ PLATFORMS: list[Platform] = [Platform.SENSOR] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Suez Water from a config entry.""" - def get_client() -> SuezClient: - try: - client = SuezClient( - entry.data[CONF_USERNAME], - entry.data[CONF_PASSWORD], - entry.data[CONF_COUNTER_ID], - provider=None, - ) - if not client.check_credentials(): - raise ConfigEntryError - except PySuezError as ex: - raise ConfigEntryNotReady from ex - return client + coordinator = SuezWaterCoordinator(hass, entry) + await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[ - entry.entry_id - ] = await hass.async_add_executor_job(get_client) + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/suez_water/config_flow.py b/homeassistant/components/suez_water/config_flow.py index 28b211dc808..ac09cf4a1d3 100644 --- a/homeassistant/components/suez_water/config_flow.py +++ b/homeassistant/components/suez_water/config_flow.py @@ -5,8 +5,7 @@ from __future__ import annotations import logging from typing import Any -from pysuez import SuezClient -from pysuez.client import PySuezError +from pysuez import PySuezError, SuezClient import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -21,28 +20,34 @@ STEP_USER_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str, - vol.Required(CONF_COUNTER_ID): str, + vol.Optional(CONF_COUNTER_ID): str, } ) -def validate_input(data: dict[str, Any]) -> None: +async def validate_input(data: dict[str, Any]) -> None: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. 
""" try: + counter_id = data.get(CONF_COUNTER_ID) client = SuezClient( data[CONF_USERNAME], data[CONF_PASSWORD], - data[CONF_COUNTER_ID], - provider=None, + counter_id, ) - if not client.check_credentials(): + if not await client.check_credentials(): raise InvalidAuth except PySuezError as ex: raise CannotConnect from ex + if counter_id is None: + try: + data[CONF_COUNTER_ID] = await client.find_counter() + except PySuezError as ex: + raise CounterNotFound from ex + class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Suez Water.""" @@ -58,11 +63,13 @@ class SuezWaterConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(user_input[CONF_USERNAME]) self._abort_if_unique_id_configured() try: - await self.hass.async_add_executor_job(validate_input, user_input) + await validate_input(user_input) except CannotConnect: errors["base"] = "cannot_connect" except InvalidAuth: errors["base"] = "invalid_auth" + except CounterNotFound: + errors["base"] = "counter_not_found" except Exception: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" @@ -82,3 +89,7 @@ class CannotConnect(HomeAssistantError): class InvalidAuth(HomeAssistantError): """Error to indicate there is invalid auth.""" + + +class CounterNotFound(HomeAssistantError): + """Error to indicate we cannot automatically found the counter id.""" diff --git a/homeassistant/components/suez_water/const.py b/homeassistant/components/suez_water/const.py index 7afc0d3ce3e..cecd779c22c 100644 --- a/homeassistant/components/suez_water/const.py +++ b/homeassistant/components/suez_water/const.py @@ -1,5 +1,9 @@ """Constants for the Suez Water integration.""" +from datetime import timedelta + DOMAIN = "suez_water" CONF_COUNTER_ID = "counter_id" + +DATA_REFRESH_INTERVAL = timedelta(hours=12) diff --git a/homeassistant/components/suez_water/coordinator.py b/homeassistant/components/suez_water/coordinator.py new file mode 100644 index 00000000000..224929c606e --- /dev/null +++ b/homeassistant/components/suez_water/coordinator.py @@ -0,0 +1,88 @@ +"""Suez water update coordinator.""" + +from collections.abc import Mapping +from dataclasses import dataclass +from datetime import date +from typing import Any + +from pysuez import PySuezError, SuezClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import _LOGGER, HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import CONF_COUNTER_ID, DATA_REFRESH_INTERVAL, DOMAIN + + +@dataclass +class SuezWaterAggregatedAttributes: + """Class containing aggregated sensor extra attributes.""" + + this_month_consumption: dict[date, float] + previous_month_consumption: dict[date, float] + last_year_overall: dict[str, float] + this_year_overall: dict[str, float] + history: dict[date, float] + highest_monthly_consumption: float + + +@dataclass +class SuezWaterData: + """Class used to hold all fetch data from suez api.""" + + aggregated_value: float + aggregated_attr: Mapping[str, Any] + price: float + + +class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]): + """Suez water coordinator.""" + + _suez_client: SuezClient + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: + """Initialize suez water coordinator.""" + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + 
update_interval=DATA_REFRESH_INTERVAL, + always_update=True, + config_entry=config_entry, + ) + + async def _async_setup(self) -> None: + self._suez_client = SuezClient( + username=self.config_entry.data[CONF_USERNAME], + password=self.config_entry.data[CONF_PASSWORD], + counter_id=self.config_entry.data[CONF_COUNTER_ID], + ) + if not await self._suez_client.check_credentials(): + raise ConfigEntryError("Invalid credentials for suez water") + + async def _async_update_data(self) -> SuezWaterData: + """Fetch data from API endpoint.""" + try: + aggregated = await self._suez_client.fetch_aggregated_data() + data = SuezWaterData( + aggregated_value=aggregated.value, + aggregated_attr={ + "this_month_consumption": aggregated.current_month, + "previous_month_consumption": aggregated.previous_month, + "highest_monthly_consumption": aggregated.highest_monthly_consumption, + "last_year_overall": aggregated.previous_year, + "this_year_overall": aggregated.current_year, + "history": aggregated.history, + }, + price=(await self._suez_client.get_price()).price, + ) + except PySuezError as err: + _LOGGER.exception(err) + raise UpdateFailed( + f"Suez coordinator error communicating with API: {err}" + ) from err + _LOGGER.debug("Successfully fetched suez data") + return data diff --git a/homeassistant/components/suez_water/manifest.json b/homeassistant/components/suez_water/manifest.json index fa7f8f6461d..240be0f37bd 100644 --- a/homeassistant/components/suez_water/manifest.json +++ b/homeassistant/components/suez_water/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/suez_water", "iot_class": "cloud_polling", "loggers": ["pysuez", "regex"], - "requirements": ["pysuezV2==0.2.2"] + "requirements": ["pysuezV2==1.3.2"] } diff --git a/homeassistant/components/suez_water/sensor.py b/homeassistant/components/suez_water/sensor.py index 5b00cbf2dc4..2ba699a9af1 100644 --- a/homeassistant/components/suez_water/sensor.py +++ b/homeassistant/components/suez_water/sensor.py @@ -2,24 +2,53 @@ from __future__ import annotations -from datetime import timedelta -import logging +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from typing import Any -from pysuez import SuezClient -from pysuez.client import PySuezError +from pysuez.const import ATTRIBUTION -from homeassistant.components.sensor import SensorDeviceClass, SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import UnitOfVolume +from homeassistant.const import CURRENCY_EURO, UnitOfVolume from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import CONF_COUNTER_ID, DOMAIN +from .coordinator import SuezWaterCoordinator, SuezWaterData -_LOGGER = logging.getLogger(__name__) -SCAN_INTERVAL = timedelta(hours=12) +@dataclass(frozen=True, kw_only=True) +class SuezWaterSensorEntityDescription(SensorEntityDescription): + """Describes Suez water sensor entity.""" + + value_fn: Callable[[SuezWaterData], float | str | None] + attr_fn: Callable[[SuezWaterData], Mapping[str, Any] | None] = lambda _: None + + +SENSORS: tuple[SuezWaterSensorEntityDescription, ...] 
= ( + SuezWaterSensorEntityDescription( + key="water_usage_yesterday", + translation_key="water_usage_yesterday", + native_unit_of_measurement=UnitOfVolume.LITERS, + device_class=SensorDeviceClass.WATER, + value_fn=lambda suez_data: suez_data.aggregated_value, + attr_fn=lambda suez_data: suez_data.aggregated_attr, + ), + SuezWaterSensorEntityDescription( + key="water_price", + translation_key="water_price", + native_unit_of_measurement=CURRENCY_EURO, + device_class=SensorDeviceClass.MONETARY, + value_fn=lambda suez_data: suez_data.price, + ), +) async def async_setup_entry( @@ -28,68 +57,43 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Suez Water sensor from a config entry.""" - client = hass.data[DOMAIN][entry.entry_id] - async_add_entities([SuezSensor(client, entry.data[CONF_COUNTER_ID])], True) + coordinator = hass.data[DOMAIN][entry.entry_id] + counter_id = entry.data[CONF_COUNTER_ID] + + async_add_entities( + SuezWaterSensor(coordinator, counter_id, description) for description in SENSORS + ) -class SuezSensor(SensorEntity): - """Representation of a Sensor.""" +class SuezWaterSensor(CoordinatorEntity[SuezWaterCoordinator], SensorEntity): + """Representation of a Suez water sensor.""" _attr_has_entity_name = True - _attr_translation_key = "water_usage_yesterday" - _attr_native_unit_of_measurement = UnitOfVolume.LITERS - _attr_device_class = SensorDeviceClass.WATER + _attr_attribution = ATTRIBUTION + entity_description: SuezWaterSensorEntityDescription - def __init__(self, client: SuezClient, counter_id: int) -> None: - """Initialize the data object.""" - self.client = client - self._attr_extra_state_attributes = {} - self._attr_unique_id = f"{counter_id}_water_usage_yesterday" + def __init__( + self, + coordinator: SuezWaterCoordinator, + counter_id: int, + entity_description: SuezWaterSensorEntityDescription, + ) -> None: + """Initialize the suez water sensor entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{counter_id}_{entity_description.key}" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, str(counter_id))}, entry_type=DeviceEntryType.SERVICE, manufacturer="Suez", ) + self.entity_description = entity_description - def _fetch_data(self) -> None: - """Fetch latest data from Suez.""" - try: - self.client.update() - # _state holds the volume of consumed water during previous day - self._attr_native_value = self.client.state - self._attr_available = True - self._attr_attribution = self.client.attributes["attribution"] + @property + def native_value(self) -> float | str | None: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) - self._attr_extra_state_attributes["this_month_consumption"] = {} - for item in self.client.attributes["thisMonthConsumption"]: - self._attr_extra_state_attributes["this_month_consumption"][item] = ( - self.client.attributes["thisMonthConsumption"][item] - ) - self._attr_extra_state_attributes["previous_month_consumption"] = {} - for item in self.client.attributes["previousMonthConsumption"]: - self._attr_extra_state_attributes["previous_month_consumption"][ - item - ] = self.client.attributes["previousMonthConsumption"][item] - self._attr_extra_state_attributes["highest_monthly_consumption"] = ( - self.client.attributes["highestMonthlyConsumption"] - ) - self._attr_extra_state_attributes["last_year_overall"] = ( - self.client.attributes["lastYearOverAll"] - ) - self._attr_extra_state_attributes["this_year_overall"] = ( - 
self.client.attributes["thisYearOverAll"] - ) - self._attr_extra_state_attributes["history"] = {} - for item in self.client.attributes["history"]: - self._attr_extra_state_attributes["history"][item] = ( - self.client.attributes["history"][item] - ) - - except PySuezError: - self._attr_available = False - _LOGGER.warning("Unable to fetch data") - - def update(self) -> None: - """Return the latest collected data from Suez.""" - self._fetch_data() - _LOGGER.debug("Suez data state is: %s", self.native_value) + @property + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return extra state of the sensor.""" + return self.entity_description.attr_fn(self.coordinator.data) diff --git a/homeassistant/components/suez_water/strings.json b/homeassistant/components/suez_water/strings.json index f9abd70fc19..6be2affab97 100644 --- a/homeassistant/components/suez_water/strings.json +++ b/homeassistant/components/suez_water/strings.json @@ -12,7 +12,8 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "unknown": "[%key:common::config_flow::error::unknown%]" + "unknown": "[%key:common::config_flow::error::unknown%]", + "counter_not_found": "Could not find counter id automatically" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" @@ -22,6 +23,9 @@ "sensor": { "water_usage_yesterday": { "name": "Water usage yesterday" + }, + "water_price": { + "name": "Water price" } } } diff --git a/homeassistant/components/supervisord/manifest.json b/homeassistant/components/supervisord/manifest.json index 7586a435ed7..3cdbdd230aa 100644 --- a/homeassistant/components/supervisord/manifest.json +++ b/homeassistant/components/supervisord/manifest.json @@ -3,5 +3,6 @@ "name": "Supervisord", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/supervisord", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/supla/manifest.json b/homeassistant/components/supla/manifest.json index 6927c92c6e1..803a321c0d6 100644 --- a/homeassistant/components/supla/manifest.json +++ b/homeassistant/components/supla/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/supla", "iot_class": "cloud_polling", "loggers": ["asyncpysupla"], + "quality_scale": "legacy", "requirements": ["asyncpysupla==0.0.5"] } diff --git a/homeassistant/components/swiss_hydrological_data/manifest.json b/homeassistant/components/swiss_hydrological_data/manifest.json index 14e2882804e..11b49a42e3f 100644 --- a/homeassistant/components/swiss_hydrological_data/manifest.json +++ b/homeassistant/components/swiss_hydrological_data/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/swiss_hydrological_data", "iot_class": "cloud_polling", "loggers": ["swisshydrodata"], + "quality_scale": "legacy", "requirements": ["swisshydrodata==0.1.0"] } diff --git a/homeassistant/components/swiss_public_transport/__init__.py b/homeassistant/components/swiss_public_transport/__init__.py index bceac6007a2..628f6e95c2a 100644 --- a/homeassistant/components/swiss_public_transport/__init__.py +++ b/homeassistant/components/swiss_public_transport/__init__.py @@ -19,12 +19,22 @@ from homeassistant.helpers import ( from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType -from 
.const import CONF_DESTINATION, CONF_START, CONF_VIA, DOMAIN, PLACEHOLDERS +from .const import ( + CONF_DESTINATION, + CONF_START, + CONF_TIME_FIXED, + CONF_TIME_OFFSET, + CONF_TIME_STATION, + CONF_VIA, + DEFAULT_TIME_STATION, + DOMAIN, + PLACEHOLDERS, +) from .coordinator import ( SwissPublicTransportConfigEntry, SwissPublicTransportDataUpdateCoordinator, ) -from .helper import unique_id_from_config +from .helper import offset_opendata, unique_id_from_config from .services import setup_services _LOGGER = logging.getLogger(__name__) @@ -50,8 +60,19 @@ async def async_setup_entry( start = config[CONF_START] destination = config[CONF_DESTINATION] + time_offset: dict[str, int] | None = config.get(CONF_TIME_OFFSET) + session = async_get_clientsession(hass) - opendata = OpendataTransport(start, destination, session, via=config.get(CONF_VIA)) + opendata = OpendataTransport( + start, + destination, + session, + via=config.get(CONF_VIA), + time=config.get(CONF_TIME_FIXED), + isArrivalTime=config.get(CONF_TIME_STATION, DEFAULT_TIME_STATION) == "arrival", + ) + if time_offset: + offset_opendata(opendata, time_offset) try: await opendata.async_get_data() @@ -75,7 +96,7 @@ async def async_setup_entry( }, ) from e - coordinator = SwissPublicTransportDataUpdateCoordinator(hass, opendata) + coordinator = SwissPublicTransportDataUpdateCoordinator(hass, opendata, time_offset) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator @@ -96,7 +117,7 @@ async def async_migrate_entry( """Migrate config entry.""" _LOGGER.debug("Migrating from version %s", config_entry.version) - if config_entry.version > 2: + if config_entry.version > 3: # This means the user has downgraded from a future version return False @@ -131,9 +152,9 @@ async def async_migrate_entry( config_entry, unique_id=new_unique_id, minor_version=2 ) - if config_entry.version < 2: - # Via stations now available, which are not backwards compatible if used, changes unique id - hass.config_entries.async_update_entry(config_entry, version=2, minor_version=1) + if config_entry.version < 3: + # Via stations and time/offset settings now available, which are not backwards compatible if used, changes unique id + hass.config_entries.async_update_entry(config_entry, version=3, minor_version=1) _LOGGER.debug( "Migration to version %s.%s successful", diff --git a/homeassistant/components/swiss_public_transport/config_flow.py b/homeassistant/components/swiss_public_transport/config_flow.py index 74c6223f1d9..58d674f0c26 100644 --- a/homeassistant/components/swiss_public_transport/config_flow.py +++ b/homeassistant/components/swiss_public_transport/config_flow.py @@ -14,15 +14,35 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.selector import ( + DurationSelector, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, TextSelector, TextSelectorConfig, TextSelectorType, + TimeSelector, ) -from .const import CONF_DESTINATION, CONF_START, CONF_VIA, DOMAIN, MAX_VIA, PLACEHOLDERS -from .helper import unique_id_from_config +from .const import ( + CONF_DESTINATION, + CONF_START, + CONF_TIME_FIXED, + CONF_TIME_MODE, + CONF_TIME_OFFSET, + CONF_TIME_STATION, + CONF_VIA, + DEFAULT_TIME_MODE, + DEFAULT_TIME_STATION, + DOMAIN, + IS_ARRIVAL_OPTIONS, + MAX_VIA, + PLACEHOLDERS, + TIME_MODE_OPTIONS, +) +from .helper import offset_opendata, unique_id_from_config 
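# For orientation, a condensed sketch of the offset_opendata() helper imported
# above (it is added to helper.py later in this diff): it shifts the query
# date/time of the OpendataTransport connector by a timedelta built from the
# stored {"hours": h, "minutes": m, "seconds": s} offset mapping.
from __future__ import annotations

from datetime import timedelta

from opendata_transport import OpendataTransport

import homeassistant.util.dt as dt_util


def sketch_offset_opendata(opendata: OpendataTransport, offset: dict[str, int]) -> None:
    """Apply the configured offset to the connector in place."""
    duration = timedelta(**offset)
    if duration:
        target = dt_util.as_local(dt_util.now() + duration)
        opendata.date = target.date()
        opendata.time = target.time()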
-DATA_SCHEMA = vol.Schema( +USER_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_START): cv.string, vol.Optional(CONF_VIA): TextSelector( @@ -32,8 +52,25 @@ DATA_SCHEMA = vol.Schema( ), ), vol.Required(CONF_DESTINATION): cv.string, + vol.Optional(CONF_TIME_MODE, default=DEFAULT_TIME_MODE): SelectSelector( + SelectSelectorConfig( + options=TIME_MODE_OPTIONS, + mode=SelectSelectorMode.DROPDOWN, + translation_key="time_mode", + ), + ), + vol.Optional(CONF_TIME_STATION, default=DEFAULT_TIME_STATION): SelectSelector( + SelectSelectorConfig( + options=IS_ARRIVAL_OPTIONS, + mode=SelectSelectorMode.DROPDOWN, + translation_key="time_station", + ), + ), } ) +ADVANCED_TIME_DATA_SCHEMA = {vol.Optional(CONF_TIME_FIXED): TimeSelector()} +ADVANCED_TIME_OFFSET_DATA_SCHEMA = {vol.Optional(CONF_TIME_OFFSET): DurationSelector()} + _LOGGER = logging.getLogger(__name__) @@ -41,39 +78,33 @@ _LOGGER = logging.getLogger(__name__) class SwissPublicTransportConfigFlow(ConfigFlow, domain=DOMAIN): """Swiss public transport config flow.""" - VERSION = 2 + VERSION = 3 MINOR_VERSION = 1 + user_input: dict[str, Any] + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Async user step to set up the connection.""" errors: dict[str, str] = {} if user_input is not None: - unique_id = unique_id_from_config(user_input) - await self.async_set_unique_id(unique_id) - self._abort_if_unique_id_configured() - if CONF_VIA in user_input and len(user_input[CONF_VIA]) > MAX_VIA: errors["base"] = "too_many_via_stations" else: - session = async_get_clientsession(self.hass) - opendata = OpendataTransport( - user_input[CONF_START], - user_input[CONF_DESTINATION], - session, - via=user_input.get(CONF_VIA), - ) - try: - await opendata.async_get_data() - except OpendataTransportConnectionError: - errors["base"] = "cannot_connect" - except OpendataTransportError: - errors["base"] = "bad_config" - except Exception: # pylint: disable=broad-except - _LOGGER.exception("Unknown error") - errors["base"] = "unknown" + err = await self.fetch_connections(user_input) + if err: + errors["base"] = err else: + self.user_input = user_input + if user_input[CONF_TIME_MODE] == "fixed": + return await self.async_step_time_fixed() + if user_input[CONF_TIME_MODE] == "offset": + return await self.async_step_time_offset() + + unique_id = unique_id_from_config(user_input) + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() return self.async_create_entry( title=unique_id, data=user_input, @@ -81,7 +112,85 @@ class SwissPublicTransportConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", - data_schema=DATA_SCHEMA, + data_schema=self.add_suggested_values_to_schema( + data_schema=USER_DATA_SCHEMA, + suggested_values=user_input, + ), errors=errors, description_placeholders=PLACEHOLDERS, ) + + async def async_step_time_fixed( + self, time_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Async time step to set up the connection.""" + return await self._async_step_time_mode( + CONF_TIME_FIXED, vol.Schema(ADVANCED_TIME_DATA_SCHEMA), time_input + ) + + async def async_step_time_offset( + self, time_offset_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Async time offset step to set up the connection.""" + return await self._async_step_time_mode( + CONF_TIME_OFFSET, + vol.Schema(ADVANCED_TIME_OFFSET_DATA_SCHEMA), + time_offset_input, + ) + + async def _async_step_time_mode( + self, + step_id: str, + time_mode_schema: vol.Schema, + 
time_mode_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Async time mode step to set up the connection.""" + errors: dict[str, str] = {} + if time_mode_input is not None: + unique_id = unique_id_from_config({**self.user_input, **time_mode_input}) + await self.async_set_unique_id(unique_id) + self._abort_if_unique_id_configured() + + err = await self.fetch_connections( + {**self.user_input, **time_mode_input}, + time_mode_input.get(CONF_TIME_OFFSET), + ) + if err: + errors["base"] = err + else: + return self.async_create_entry( + title=unique_id, + data={**self.user_input, **time_mode_input}, + ) + + return self.async_show_form( + step_id=step_id, + data_schema=time_mode_schema, + errors=errors, + description_placeholders=PLACEHOLDERS, + ) + + async def fetch_connections( + self, input: dict[str, Any], time_offset: dict[str, int] | None = None + ) -> str | None: + """Fetch the connections and return an error string on failure.""" + try: + session = async_get_clientsession(self.hass) + opendata = OpendataTransport( + input[CONF_START], + input[CONF_DESTINATION], + session, + via=input.get(CONF_VIA), + time=input.get(CONF_TIME_FIXED), + ) + if time_offset: + offset_opendata(opendata, time_offset) + await opendata.async_get_data() + except OpendataTransportConnectionError: + return "cannot_connect" + except OpendataTransportError: + return "bad_config" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unknown error") + return "unknown" + return None diff --git a/homeassistant/components/swiss_public_transport/const.py b/homeassistant/components/swiss_public_transport/const.py index c02f36f2f25..10bfc0d0355 100644 --- a/homeassistant/components/swiss_public_transport/const.py +++ b/homeassistant/components/swiss_public_transport/const.py @@ -7,13 +7,21 @@ DOMAIN = "swiss_public_transport" CONF_DESTINATION: Final = "to" CONF_START: Final = "from" CONF_VIA: Final = "via" +CONF_TIME_STATION: Final = "time_station" +CONF_TIME_MODE: Final = "time_mode" +CONF_TIME_FIXED: Final = "time_fixed" +CONF_TIME_OFFSET: Final = "time_offset" DEFAULT_NAME = "Next Destination" DEFAULT_UPDATE_TIME = 90 +DEFAULT_TIME_STATION = "departure" +DEFAULT_TIME_MODE = "now" MAX_VIA = 5 CONNECTIONS_COUNT = 3 CONNECTIONS_MAX = 15 +IS_ARRIVAL_OPTIONS = ["departure", "arrival"] +TIME_MODE_OPTIONS = ["now", "fixed", "offset"] PLACEHOLDERS = { diff --git a/homeassistant/components/swiss_public_transport/coordinator.py b/homeassistant/components/swiss_public_transport/coordinator.py index e6413e6f772..59602e7b982 100644 --- a/homeassistant/components/swiss_public_transport/coordinator.py +++ b/homeassistant/components/swiss_public_transport/coordinator.py @@ -19,6 +19,7 @@ import homeassistant.util.dt as dt_util from homeassistant.util.json import JsonValueType from .const import CONNECTIONS_COUNT, DEFAULT_UPDATE_TIME, DOMAIN +from .helper import offset_opendata _LOGGER = logging.getLogger(__name__) @@ -57,7 +58,12 @@ class SwissPublicTransportDataUpdateCoordinator( config_entry: SwissPublicTransportConfigEntry - def __init__(self, hass: HomeAssistant, opendata: OpendataTransport) -> None: + def __init__( + self, + hass: HomeAssistant, + opendata: OpendataTransport, + time_offset: dict[str, int] | None, + ) -> None: """Initialize the SwissPublicTransport data coordinator.""" super().__init__( hass, @@ -66,6 +72,7 @@ class SwissPublicTransportDataUpdateCoordinator( update_interval=timedelta(seconds=DEFAULT_UPDATE_TIME), ) self._opendata = opendata + self._time_offset = time_offset def 
remaining_time(self, departure) -> timedelta | None: """Calculate the remaining time for the departure.""" @@ -81,6 +88,9 @@ class SwissPublicTransportDataUpdateCoordinator( async def fetch_connections(self, limit: int) -> list[DataConnection]: """Fetch connections using the opendata api.""" self._opendata.limit = limit + if self._time_offset: + offset_opendata(self._opendata, self._time_offset) + try: await self._opendata.async_get_data() except OpendataTransportConnectionError as e: diff --git a/homeassistant/components/swiss_public_transport/helper.py b/homeassistant/components/swiss_public_transport/helper.py index af03f7ad193..704479b77d6 100644 --- a/homeassistant/components/swiss_public_transport/helper.py +++ b/homeassistant/components/swiss_public_transport/helper.py @@ -1,15 +1,59 @@ """Helper functions for swiss_public_transport.""" +from datetime import timedelta from types import MappingProxyType from typing import Any -from .const import CONF_DESTINATION, CONF_START, CONF_VIA +from opendata_transport import OpendataTransport + +import homeassistant.util.dt as dt_util + +from .const import ( + CONF_DESTINATION, + CONF_START, + CONF_TIME_FIXED, + CONF_TIME_OFFSET, + CONF_TIME_STATION, + CONF_VIA, + DEFAULT_TIME_STATION, +) + + +def offset_opendata(opendata: OpendataTransport, offset: dict[str, int]) -> None: + """In place offset the opendata connector.""" + + duration = timedelta(**offset) + if duration: + now_offset = dt_util.as_local(dt_util.now() + duration) + opendata.date = now_offset.date() + opendata.time = now_offset.time() + + +def dict_duration_to_str_duration( + d: dict[str, int], +) -> str: + """Build a string from a dict duration.""" + return f"{d['hours']:02d}:{d['minutes']:02d}:{d['seconds']:02d}" def unique_id_from_config(config: MappingProxyType[str, Any] | dict[str, Any]) -> str: """Build a unique id from a config entry.""" - return f"{config[CONF_START]} {config[CONF_DESTINATION]}" + ( - " via " + ", ".join(config[CONF_VIA]) - if CONF_VIA in config and len(config[CONF_VIA]) > 0 - else "" + return ( + f"{config[CONF_START]} {config[CONF_DESTINATION]}" + + ( + " via " + ", ".join(config[CONF_VIA]) + if CONF_VIA in config and len(config[CONF_VIA]) > 0 + else "" + ) + + ( + " arrival" + if config.get(CONF_TIME_STATION, DEFAULT_TIME_STATION) == "arrival" + else "" + ) + + (" at " + config[CONF_TIME_FIXED] if CONF_TIME_FIXED in config else "") + + ( + " in " + dict_duration_to_str_duration(config[CONF_TIME_OFFSET]) + if CONF_TIME_OFFSET in config + else "" + ) ) diff --git a/homeassistant/components/swiss_public_transport/strings.json b/homeassistant/components/swiss_public_transport/strings.json index b3bfd9aea8f..91645b2fee4 100644 --- a/homeassistant/components/swiss_public_transport/strings.json +++ b/homeassistant/components/swiss_public_transport/strings.json @@ -17,10 +17,30 @@ "data": { "from": "Start station", "to": "End station", - "via": "List of up to 5 via stations" + "via": "List of up to 5 via stations", + "time_station": "Select the relevant station", + "time_mode": "Select a time mode" + }, + "data_description": { + "time_station": "Usually the departure time of a connection when it leaves the start station is tracked. Alternatively, track the time when the connection arrives at its end station.", + "time_mode": "Time mode lets you change the departure timing and fix it to a specific time (e.g. 7:12:00 AM every morning) or add a moving offset (e.g. +00:05:00 taking into account the time to walk to the station)." 
}, "description": "Provide start and end station for your connection,\nand optionally up to 5 via stations.\n\nCheck the [stationboard]({stationboard_url}) for valid stations.", "title": "Swiss Public Transport" + }, + "time_fixed": { + "data": { + "time_fixed": "Time of day" + }, + "description": "Please select the relevant time for the connection (e.g. 7:12:00 AM every morning).", + "title": "Swiss Public Transport" + }, + "time_offset": { + "data": { + "time_offset": "Time offset" + }, + "description": "Please select the relevant offset to add to the earliest possible connection (e.g. add +00:05:00 offset, taking into account the time to walk to the station)", + "title": "Swiss Public Transport" } } }, @@ -84,5 +104,20 @@ "config_entry_not_found": { "message": "Swiss public transport integration instance \"{target}\" not found." } + }, + "selector": { + "time_station": { + "options": { + "departure": "Show departure time from start station", + "arrival": "Show arrival time at end station" + } + }, + "time_mode": { + "options": { + "now": "Now", + "fixed": "At a fixed time of day", + "offset": "At an offset from now" + } + } } } diff --git a/homeassistant/components/swisscom/manifest.json b/homeassistant/components/swisscom/manifest.json index cb0e674570e..cf1ea01ea9c 100644 --- a/homeassistant/components/swisscom/manifest.json +++ b/homeassistant/components/swisscom/manifest.json @@ -3,5 +3,6 @@ "name": "Swisscom Internet-Box", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/swisscom", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/switch_as_x/config_flow.py b/homeassistant/components/switch_as_x/config_flow.py index 37df3affbad..aa9f1d411ce 100644 --- a/homeassistant/components/switch_as_x/config_flow.py +++ b/homeassistant/components/switch_as_x/config_flow.py @@ -18,12 +18,12 @@ from homeassistant.helpers.schema_config_entry_flow import ( from .const import CONF_INVERT, CONF_TARGET_DOMAIN, DOMAIN TARGET_DOMAIN_OPTIONS = [ - selector.SelectOptionDict(value=Platform.COVER, label="Cover"), - selector.SelectOptionDict(value=Platform.FAN, label="Fan"), - selector.SelectOptionDict(value=Platform.LIGHT, label="Light"), - selector.SelectOptionDict(value=Platform.LOCK, label="Lock"), - selector.SelectOptionDict(value=Platform.SIREN, label="Siren"), - selector.SelectOptionDict(value=Platform.VALVE, label="Valve"), + Platform.COVER, + Platform.FAN, + Platform.LIGHT, + Platform.LOCK, + Platform.SIREN, + Platform.VALVE, ] CONFIG_FLOW = { @@ -35,7 +35,9 @@ CONFIG_FLOW = { ), vol.Optional(CONF_INVERT, default=False): selector.BooleanSelector(), vol.Required(CONF_TARGET_DOMAIN): selector.SelectSelector( - selector.SelectSelectorConfig(options=TARGET_DOMAIN_OPTIONS), + selector.SelectSelectorConfig( + options=TARGET_DOMAIN_OPTIONS, translation_key="target_domain" + ), ), } ) diff --git a/homeassistant/components/switch_as_x/strings.json b/homeassistant/components/switch_as_x/strings.json index 81567ef9e40..9c3db05231b 100644 --- a/homeassistant/components/switch_as_x/strings.json +++ b/homeassistant/components/switch_as_x/strings.json @@ -26,5 +26,17 @@ } } } + }, + "selector": { + "target_domain": { + "options": { + "cover": "[%key:component::cover::title%]", + "fan": "[%key:component::fan::title%]", + "light": "[%key:component::light::title%]", + "lock": "[%key:component::lock::title%]", + "siren": "[%key:component::siren::title%]", + "valve": "[%key:component::valve::title%]" + } + } 
} } diff --git a/homeassistant/components/switchbot/config_flow.py b/homeassistant/components/switchbot/config_flow.py index 0468db5618a..a0e45169770 100644 --- a/homeassistant/components/switchbot/config_flow.py +++ b/homeassistant/components/switchbot/config_flow.py @@ -80,7 +80,7 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SwitchbotOptionsFlowHandler: """Get the options flow for this handler.""" - return SwitchbotOptionsFlowHandler(config_entry) + return SwitchbotOptionsFlowHandler() def __init__(self) -> None: """Initialize the config flow.""" @@ -346,10 +346,6 @@ class SwitchbotConfigFlow(ConfigFlow, domain=DOMAIN): class SwitchbotOptionsFlowHandler(OptionsFlow): """Handle Switchbot options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index 19b264bd46f..b8cf4e8e1ab 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -74,8 +74,3 @@ CONF_RETRY_COUNT = "retry_count" CONF_KEY_ID = "key_id" CONF_ENCRYPTION_KEY = "encryption_key" CONF_LOCK_NIGHTLATCH = "lock_force_nightlatch" - -# Deprecated config Entry Options to be removed in 2023.4 -CONF_TIME_BETWEEN_UPDATE_COMMAND = "update_time" -CONF_RETRY_TIMEOUT = "retry_timeout" -CONF_SCAN_TIMEOUT = "scan_timeout" diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 0e369f8ad2d..5a328650aca 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.51.0"] + "requirements": ["PySwitchbot==0.54.0"] } diff --git a/homeassistant/components/switchbot_cloud/__init__.py b/homeassistant/components/switchbot_cloud/__init__.py index a2738ed446f..625b4698301 100644 --- a/homeassistant/components/switchbot_cloud/__init__.py +++ b/homeassistant/components/switchbot_cloud/__init__.py @@ -85,6 +85,9 @@ def make_device_data( "Meter", "MeterPlus", "WoIOSensor", + "Hub 2", + "MeterPro", + "MeterPro(CO2)", ]: devices_data.sensors.append( prepare_device(hass, api, device, coordinators_by_id) diff --git a/homeassistant/components/switchbot_cloud/sensor.py b/homeassistant/components/switchbot_cloud/sensor.py index ac612aea119..90135ad96b3 100644 --- a/homeassistant/components/switchbot_cloud/sensor.py +++ b/homeassistant/components/switchbot_cloud/sensor.py @@ -9,7 +9,11 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import PERCENTAGE, UnitOfTemperature +from homeassistant.const import ( + CONCENTRATION_PARTS_PER_MILLION, + PERCENTAGE, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -21,6 +25,7 @@ from .entity import SwitchBotCloudEntity SENSOR_TYPE_TEMPERATURE = "temperature" SENSOR_TYPE_HUMIDITY = "humidity" SENSOR_TYPE_BATTERY = "battery" +SENSOR_TYPE_CO2 = "CO2" METER_PLUS_SENSOR_DESCRIPTIONS = ( SensorEntityDescription( @@ -43,6 +48,16 @@ METER_PLUS_SENSOR_DESCRIPTIONS = ( ), ) 
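# Editorial sketch (hypothetical helper, not part of this change): the
# device-type dispatch added to async_setup_entry further below can be read as
# a simple lookup that falls back to the Meter Plus descriptions.
def _descriptions_for(device_type: str) -> tuple[SensorEntityDescription, ...]:
    """Pick the sensor description set for a SwitchBot Cloud meter device."""
    if device_type == "MeterPro(CO2)":
        return METER_PRO_CO2_SENSOR_DESCRIPTIONS  # defined just below
    return METER_PLUS_SENSOR_DESCRIPTIONS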
+METER_PRO_CO2_SENSOR_DESCRIPTIONS = ( + *METER_PLUS_SENSOR_DESCRIPTIONS, + SensorEntityDescription( + key=SENSOR_TYPE_CO2, + native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CO2, + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -55,7 +70,11 @@ async def async_setup_entry( async_add_entities( SwitchBotCloudSensor(data.api, device, coordinator, description) for device, coordinator in data.devices.sensors - for description in METER_PLUS_SENSOR_DESCRIPTIONS + for description in ( + METER_PRO_CO2_SENSOR_DESCRIPTIONS + if device.device_type == "MeterPro(CO2)" + else METER_PLUS_SENSOR_DESCRIPTIONS + ) ) diff --git a/homeassistant/components/switcher_kis/button.py b/homeassistant/components/switcher_kis/button.py index 5564fac830d..d2686e2e550 100644 --- a/homeassistant/components/switcher_kis/button.py +++ b/homeassistant/components/switcher_kis/button.py @@ -10,7 +10,6 @@ from aioswitcher.api import ( DeviceState, SwitcherApi, SwitcherBaseResponse, - SwitcherType2Api, ThermostatSwing, ) from aioswitcher.api.remotes import SwitcherBreezeRemote @@ -128,7 +127,7 @@ class SwitcherThermostatButtonEntity(SwitcherEntity, ButtonEntity): error = None try: - async with SwitcherType2Api( + async with SwitcherApi( self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, diff --git a/homeassistant/components/switcher_kis/climate.py b/homeassistant/components/switcher_kis/climate.py index eeff603bc8a..f2d4fb60252 100644 --- a/homeassistant/components/switcher_kis/climate.py +++ b/homeassistant/components/switcher_kis/climate.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any, cast -from aioswitcher.api import SwitcherBaseResponse, SwitcherType2Api +from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.api.remotes import SwitcherBreezeRemote from aioswitcher.device import ( DeviceCategory, @@ -160,7 +160,7 @@ class SwitcherClimateEntity(SwitcherEntity, ClimateEntity): error = None try: - async with SwitcherType2Api( + async with SwitcherApi( self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, diff --git a/homeassistant/components/switcher_kis/cover.py b/homeassistant/components/switcher_kis/cover.py index c56fa7442fb..7d3ec0e4af0 100644 --- a/homeassistant/components/switcher_kis/cover.py +++ b/homeassistant/components/switcher_kis/cover.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from typing import Any, cast -from aioswitcher.api import SwitcherBaseResponse, SwitcherType2Api +from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.device import DeviceCategory, ShutterDirection, SwitcherShutter from homeassistant.components.cover import ( @@ -41,16 +41,20 @@ async def async_setup_entry( def async_add_cover(coordinator: SwitcherDataUpdateCoordinator) -> None: """Add cover from Switcher device.""" entities: list[CoverEntity] = [] + if coordinator.data.device_type.category in ( DeviceCategory.SHUTTER, DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT, + DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT, ): - entities.append(SwitcherSingleCoverEntity(coordinator, 0)) - if ( - coordinator.data.device_type.category - == DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT - ): - entities.extend(SwitcherDualCoverEntity(coordinator, i) for i in range(2)) + number_of_covers = len(cast(SwitcherShutter, coordinator.data).position) + if number_of_covers == 1: 
+ entities.append(SwitcherSingleCoverEntity(coordinator, 0)) + else: + entities.extend( + SwitcherMultiCoverEntity(coordinator, i) + for i in range(number_of_covers) + ) async_add_entities(entities) config_entry.async_on_unload( @@ -95,7 +99,7 @@ class SwitcherBaseCoverEntity(SwitcherEntity, CoverEntity): error = None try: - async with SwitcherType2Api( + async with SwitcherApi( self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, @@ -152,8 +156,8 @@ class SwitcherSingleCoverEntity(SwitcherBaseCoverEntity): self._update_data() -class SwitcherDualCoverEntity(SwitcherBaseCoverEntity): - """Representation of a Switcher dual cover entity.""" +class SwitcherMultiCoverEntity(SwitcherBaseCoverEntity): + """Representation of a Switcher multiple cover entity.""" _attr_translation_key = "cover" diff --git a/homeassistant/components/switcher_kis/icons.json b/homeassistant/components/switcher_kis/icons.json index 6ca8e0e8351..bd770d3e656 100644 --- a/homeassistant/components/switcher_kis/icons.json +++ b/homeassistant/components/switcher_kis/icons.json @@ -20,6 +20,9 @@ }, "auto_shutdown": { "default": "mdi:progress-clock" + }, + "temperature": { + "default": "mdi:thermometer" } } }, diff --git a/homeassistant/components/switcher_kis/light.py b/homeassistant/components/switcher_kis/light.py index 4b6df6db6ed..b2ee624dbc5 100644 --- a/homeassistant/components/switcher_kis/light.py +++ b/homeassistant/components/switcher_kis/light.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from typing import Any, cast -from aioswitcher.api import SwitcherBaseResponse, SwitcherType2Api +from aioswitcher.api import SwitcherApi, SwitcherBaseResponse from aioswitcher.device import DeviceCategory, DeviceState, SwitcherLight from homeassistant.components.light import ColorMode, LightEntity @@ -35,16 +35,20 @@ async def async_setup_entry( def async_add_light(coordinator: SwitcherDataUpdateCoordinator) -> None: """Add light from Switcher device.""" entities: list[LightEntity] = [] - if ( - coordinator.data.device_type.category - == DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT + + if coordinator.data.device_type.category in ( + DeviceCategory.SINGLE_SHUTTER_DUAL_LIGHT, + DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT, + DeviceCategory.LIGHT, ): - entities.extend(SwitcherDualLightEntity(coordinator, i) for i in range(2)) - if ( - coordinator.data.device_type.category - == DeviceCategory.DUAL_SHUTTER_SINGLE_LIGHT - ): - entities.append(SwitcherSingleLightEntity(coordinator, 0)) + number_of_lights = len(cast(SwitcherLight, coordinator.data).light) + if number_of_lights == 1: + entities.append(SwitcherSingleLightEntity(coordinator, 0)) + else: + entities.extend( + SwitcherMultiLightEntity(coordinator, i) + for i in range(number_of_lights) + ) async_add_entities(entities) config_entry.async_on_unload( @@ -82,7 +86,7 @@ class SwitcherBaseLightEntity(SwitcherEntity, LightEntity): error = None try: - async with SwitcherType2Api( + async with SwitcherApi( self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, @@ -133,8 +137,8 @@ class SwitcherSingleLightEntity(SwitcherBaseLightEntity): self._attr_unique_id = f"{coordinator.device_id}-{coordinator.mac_address}" -class SwitcherDualLightEntity(SwitcherBaseLightEntity): - """Representation of a Switcher dual light entity.""" +class SwitcherMultiLightEntity(SwitcherBaseLightEntity): + """Representation of a Switcher multiple light entity.""" _attr_translation_key = "light" diff 
--git a/homeassistant/components/switcher_kis/manifest.json b/homeassistant/components/switcher_kis/manifest.json index 4a50d992d6d..987dac65077 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/switcher_kis", "iot_class": "local_push", "loggers": ["aioswitcher"], - "quality_scale": "platinum", - "requirements": ["aioswitcher==4.4.0"], + "requirements": ["aioswitcher==5.0.0"], "single_config_entry": true } diff --git a/homeassistant/components/switcher_kis/sensor.py b/homeassistant/components/switcher_kis/sensor.py index 9ff3d6dfaae..0ed60e5a721 100644 --- a/homeassistant/components/switcher_kis/sensor.py +++ b/homeassistant/components/switcher_kis/sensor.py @@ -46,9 +46,16 @@ TIME_SENSORS: list[SensorEntityDescription] = [ entity_registry_enabled_default=False, ), ] +TEMPERATURE_SENSORS: list[SensorEntityDescription] = [ + SensorEntityDescription( + key="temperature", + translation_key="temperature", + ), +] POWER_PLUG_SENSORS = POWER_SENSORS WATER_HEATER_SENSORS = [*POWER_SENSORS, *TIME_SENSORS] +THERMOSTAT_SENSORS = TEMPERATURE_SENSORS async def async_setup_entry( @@ -71,6 +78,11 @@ async def async_setup_entry( SwitcherSensorEntity(coordinator, description) for description in WATER_HEATER_SENSORS ) + elif coordinator.data.device_type.category == DeviceCategory.THERMOSTAT: + async_add_entities( + SwitcherSensorEntity(coordinator, description) + for description in THERMOSTAT_SENSORS + ) config_entry.async_on_unload( async_dispatcher_connect(hass, SIGNAL_DEVICE_ADD, async_add_sensors) diff --git a/homeassistant/components/switcher_kis/strings.json b/homeassistant/components/switcher_kis/strings.json index 798a43c981c..844cbb4ca98 100644 --- a/homeassistant/components/switcher_kis/strings.json +++ b/homeassistant/components/switcher_kis/strings.json @@ -59,6 +59,9 @@ }, "auto_shutdown": { "name": "Auto shutdown" + }, + "temperature": { + "name": "Current temperature" } } }, diff --git a/homeassistant/components/switcher_kis/switch.py b/homeassistant/components/switcher_kis/switch.py index 6a679680263..7d14620c1aa 100644 --- a/homeassistant/components/switcher_kis/switch.py +++ b/homeassistant/components/switcher_kis/switch.py @@ -6,7 +6,7 @@ from datetime import timedelta import logging from typing import Any -from aioswitcher.api import Command, SwitcherBaseResponse, SwitcherType1Api +from aioswitcher.api import Command, SwitcherApi, SwitcherBaseResponse from aioswitcher.device import DeviceCategory, DeviceState import voluptuous as vol @@ -105,7 +105,7 @@ class SwitcherBaseSwitchEntity(SwitcherEntity, SwitchEntity): error = None try: - async with SwitcherType1Api( + async with SwitcherApi( self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, diff --git a/homeassistant/components/switchmate/manifest.json b/homeassistant/components/switchmate/manifest.json index 5467dc512c3..f21819e1bc0 100644 --- a/homeassistant/components/switchmate/manifest.json +++ b/homeassistant/components/switchmate/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/switchmate", "iot_class": "local_polling", "loggers": ["switchmate"], + "quality_scale": "legacy", "requirements": ["PySwitchmate==0.5.1"] } diff --git a/homeassistant/components/syncthing/manifest.json b/homeassistant/components/syncthing/manifest.json index f7fd2b7ece6..612665913d0 100644 --- 
a/homeassistant/components/syncthing/manifest.json +++ b/homeassistant/components/syncthing/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/syncthing", "iot_class": "local_polling", "loggers": ["aiosyncthing"], - "quality_scale": "silver", "requirements": ["aiosyncthing==0.5.1"] } diff --git a/homeassistant/components/synology_chat/manifest.json b/homeassistant/components/synology_chat/manifest.json index 3ac663ff91e..c9bd3396097 100644 --- a/homeassistant/components/synology_chat/manifest.json +++ b/homeassistant/components/synology_chat/manifest.json @@ -3,5 +3,6 @@ "name": "Synology Chat", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/synology_chat", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/synology_dsm/config_flow.py b/homeassistant/components/synology_dsm/config_flow.py index 70ab13c5c09..918a24035f8 100644 --- a/homeassistant/components/synology_dsm/config_flow.py +++ b/homeassistant/components/synology_dsm/config_flow.py @@ -118,7 +118,7 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> SynologyDSMOptionsFlowHandler: """Get the options flow for this handler.""" - return SynologyDSMOptionsFlowHandler(config_entry) + return SynologyDSMOptionsFlowHandler() def __init__(self) -> None: """Initialize the synology_dsm config flow.""" @@ -376,10 +376,6 @@ class SynologyDSMFlowHandler(ConfigFlow, domain=DOMAIN): class SynologyDSMOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/synology_srm/manifest.json b/homeassistant/components/synology_srm/manifest.json index 9980f37969e..0d712b6742b 100644 --- a/homeassistant/components/synology_srm/manifest.json +++ b/homeassistant/components/synology_srm/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/synology_srm", "iot_class": "local_polling", "loggers": ["synology_srm"], + "quality_scale": "legacy", "requirements": ["synology-srm==0.2.0"] } diff --git a/homeassistant/components/syslog/manifest.json b/homeassistant/components/syslog/manifest.json index 380628ffa66..bf327baec10 100644 --- a/homeassistant/components/syslog/manifest.json +++ b/homeassistant/components/syslog/manifest.json @@ -3,5 +3,6 @@ "name": "Syslog", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/syslog", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/system_bridge/config_flow.py b/homeassistant/components/system_bridge/config_flow.py index dc1736ea337..98396e52545 100644 --- a/homeassistant/components/system_bridge/config_flow.py +++ b/homeassistant/components/system_bridge/config_flow.py @@ -17,7 +17,7 @@ from systembridgemodels.modules import GetData, Module import voluptuous as vol from homeassistant.components import zeroconf -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -120,11 
+120,11 @@ class SystemBridgeConfigFlow( VERSION = 1 MINOR_VERSION = 2 + _name: str + def __init__(self) -> None: """Initialize flow.""" - self._name: str | None = None self._input: dict[str, Any] = {} - self._reauth = False async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -157,15 +157,13 @@ class SystemBridgeConfigFlow( user_input = {**self._input, **user_input} errors, info = await _async_get_info(self.hass, user_input) if not errors and info is not None: - # Check if already configured - existing_entry = await self.async_set_unique_id(info["uuid"]) + await self.async_set_unique_id(info["uuid"]) - if self._reauth and existing_entry: - self.hass.config_entries.async_update_entry( - existing_entry, data=user_input + if self.source == SOURCE_REAUTH: + self._abort_if_unique_id_mismatch() + return self.async_update_reload_and_abort( + self._get_reauth_entry(), data=user_input ) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") self._abort_if_unique_id_configured( updates={CONF_HOST: info["hostname"]} @@ -212,7 +210,6 @@ class SystemBridgeConfigFlow( CONF_HOST: entry_data[CONF_HOST], CONF_PORT: entry_data[CONF_PORT], } - self._reauth = True return await self.async_step_authenticate() diff --git a/homeassistant/components/system_bridge/manifest.json b/homeassistant/components/system_bridge/manifest.json index e886bcad150..2799cf31fdd 100644 --- a/homeassistant/components/system_bridge/manifest.json +++ b/homeassistant/components/system_bridge/manifest.json @@ -9,7 +9,6 @@ "integration_type": "device", "iot_class": "local_push", "loggers": ["systembridgeconnector"], - "quality_scale": "silver", "requirements": ["systembridgeconnector==4.1.5", "systembridgemodels==4.2.4"], "zeroconf": ["_system-bridge._tcp.local."] } diff --git a/homeassistant/components/system_bridge/strings.json b/homeassistant/components/system_bridge/strings.json index b5ceba9bd84..ef7495ef74f 100644 --- a/homeassistant/components/system_bridge/strings.json +++ b/homeassistant/components/system_bridge/strings.json @@ -3,6 +3,7 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "unique_id_mismatch": "The identifier does not match the previous identifier", "unsupported_version": "Your version of System Bridge is not supported. 
Please upgrade to the latest version.", "unknown": "[%key:common::config_flow::error::unknown%]" }, diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index 236f25bb1ed..4c6ae0653d3 100644 --- a/homeassistant/components/systemmonitor/manifest.json +++ b/homeassistant/components/systemmonitor/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.0.0"] + "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.1.0"] } diff --git a/homeassistant/components/tado/config_flow.py b/homeassistant/components/tado/config_flow.py index 2ab2a86f200..c7bb7684901 100644 --- a/homeassistant/components/tado/config_flow.py +++ b/homeassistant/components/tado/config_flow.py @@ -160,16 +160,12 @@ class TadoConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() class OptionsFlowHandler(OptionsFlow): """Handle an option flow for Tado.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/tado/manifest.json b/homeassistant/components/tado/manifest.json index b0c00c888b7..652d51f0261 100644 --- a/homeassistant/components/tado/manifest.json +++ b/homeassistant/components/tado/manifest.json @@ -14,5 +14,5 @@ }, "iot_class": "cloud_polling", "loggers": ["PyTado"], - "requirements": ["python-tado==0.17.6"] + "requirements": ["python-tado==0.17.7"] } diff --git a/homeassistant/components/tailscale/manifest.json b/homeassistant/components/tailscale/manifest.json index 24f485fcdbd..7d571fe0675 100644 --- a/homeassistant/components/tailscale/manifest.json +++ b/homeassistant/components/tailscale/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tailscale", "integration_type": "hub", "iot_class": "cloud_polling", - "quality_scale": "platinum", "requirements": ["tailscale==0.6.1"] } diff --git a/homeassistant/components/tailwind/manifest.json b/homeassistant/components/tailwind/manifest.json index 97d08737a87..705f591785f 100644 --- a/homeassistant/components/tailwind/manifest.json +++ b/homeassistant/components/tailwind/manifest.json @@ -11,7 +11,6 @@ "documentation": "https://www.home-assistant.io/integrations/tailwind", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "platinum", "requirements": ["gotailwind==0.2.4"], "zeroconf": [ { diff --git a/homeassistant/components/tank_utility/manifest.json b/homeassistant/components/tank_utility/manifest.json index d73c62fa5ec..76240252696 100644 --- a/homeassistant/components/tank_utility/manifest.json +++ b/homeassistant/components/tank_utility/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tank_utility", "iot_class": "cloud_polling", "loggers": ["tank_utility"], + "quality_scale": "legacy", "requirements": ["tank-utility==1.5.0"] } diff --git a/homeassistant/components/tankerkoenig/config_flow.py b/homeassistant/components/tankerkoenig/config_flow.py index b13bfa1fa36..509f293665d 100644 --- a/homeassistant/components/tankerkoenig/config_flow.py +++ 
b/homeassistant/components/tankerkoenig/config_flow.py @@ -74,7 +74,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -236,9 +236,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle an options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Initialize options flow.""" - self.config_entry = config_entry self._stations: dict[str, str] = {} async def async_step_init( diff --git a/homeassistant/components/tankerkoenig/manifest.json b/homeassistant/components/tankerkoenig/manifest.json index eeb8646bea7..72248d006e0 100644 --- a/homeassistant/components/tankerkoenig/manifest.json +++ b/homeassistant/components/tankerkoenig/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tankerkoenig", "iot_class": "cloud_polling", "loggers": ["aiotankerkoenig"], - "quality_scale": "platinum", "requirements": ["aiotankerkoenig==0.4.2"] } diff --git a/homeassistant/components/tapsaff/manifest.json b/homeassistant/components/tapsaff/manifest.json index 861329827d7..c4853ca1c8d 100644 --- a/homeassistant/components/tapsaff/manifest.json +++ b/homeassistant/components/tapsaff/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tapsaff", "iot_class": "local_polling", "loggers": ["tapsaff"], + "quality_scale": "legacy", "requirements": ["tapsaff==0.2.1"] } diff --git a/homeassistant/components/tcp/manifest.json b/homeassistant/components/tcp/manifest.json index e15200f49f8..7eacff6c50a 100644 --- a/homeassistant/components/tcp/manifest.json +++ b/homeassistant/components/tcp/manifest.json @@ -3,5 +3,6 @@ "name": "TCP", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/tcp", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/technove/manifest.json b/homeassistant/components/technove/manifest.json index ae0e491235f..722aa4004e1 100644 --- a/homeassistant/components/technove/manifest.json +++ b/homeassistant/components/technove/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/technove", "integration_type": "device", "iot_class": "local_polling", - "quality_scale": "platinum", "requirements": ["python-technove==1.3.1"], "zeroconf": ["_technove-stations._tcp.local."] } diff --git a/homeassistant/components/ted5000/manifest.json b/homeassistant/components/ted5000/manifest.json index b2aa68f884b..3e28d963957 100644 --- a/homeassistant/components/ted5000/manifest.json +++ b/homeassistant/components/ted5000/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ted5000", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["xmltodict==0.13.0"] } diff --git a/homeassistant/components/tedee/__init__.py b/homeassistant/components/tedee/__init__.py index cd593f68e3a..528a5052678 100644 --- a/homeassistant/components/tedee/__init__.py +++ b/homeassistant/components/tedee/__init__.py @@ -7,7 +7,7 @@ from typing import Any from aiohttp.hdrs import METH_POST from aiohttp.web import Request, Response -from pytedee_async.exception import TedeeDataUpdateException, TedeeWebhookException +from 
aiotedee.exception import TedeeDataUpdateException, TedeeWebhookException from homeassistant.components.http import HomeAssistantView from homeassistant.components.webhook import ( diff --git a/homeassistant/components/tedee/binary_sensor.py b/homeassistant/components/tedee/binary_sensor.py index 5eab7bfa254..b586db7c2a7 100644 --- a/homeassistant/components/tedee/binary_sensor.py +++ b/homeassistant/components/tedee/binary_sensor.py @@ -3,8 +3,8 @@ from collections.abc import Callable from dataclasses import dataclass -from pytedee_async import TedeeLock -from pytedee_async.lock import TedeeLockState +from aiotedee import TedeeLock +from aiotedee.lock import TedeeLockState from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, diff --git a/homeassistant/components/tedee/config_flow.py b/homeassistant/components/tedee/config_flow.py index 65d4ec12e80..422d818d1b5 100644 --- a/homeassistant/components/tedee/config_flow.py +++ b/homeassistant/components/tedee/config_flow.py @@ -4,7 +4,7 @@ from collections.abc import Mapping import logging from typing import Any -from pytedee_async import ( +from aiotedee import ( TedeeAuthException, TedeeClient, TedeeClientException, diff --git a/homeassistant/components/tedee/coordinator.py b/homeassistant/components/tedee/coordinator.py index de3090a3f78..4012b6d07c5 100644 --- a/homeassistant/components/tedee/coordinator.py +++ b/homeassistant/components/tedee/coordinator.py @@ -8,7 +8,7 @@ import logging import time from typing import Any -from pytedee_async import ( +from aiotedee import ( TedeeClient, TedeeClientException, TedeeDataUpdateException, @@ -16,7 +16,7 @@ from pytedee_async import ( TedeeLock, TedeeWebhookException, ) -from pytedee_async.bridge import TedeeBridge +from aiotedee.bridge import TedeeBridge from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -99,14 +99,19 @@ class TedeeApiCoordinator(DataUpdateCoordinator[dict[int, TedeeLock]]): await update_fn() except TedeeLocalAuthException as ex: raise ConfigEntryAuthFailed( - "Authentication failed. Local access token is invalid" + translation_domain=DOMAIN, + translation_key="authentication_failed", ) from ex except TedeeDataUpdateException as ex: _LOGGER.debug("Error while updating data: %s", str(ex)) - raise UpdateFailed(f"Error while updating data: {ex!s}") from ex + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="update_failed" + ) from ex except (TedeeClientException, TimeoutError) as ex: - raise UpdateFailed(f"Querying API failed. 
Error: {ex!s}") from ex + raise UpdateFailed( + translation_domain=DOMAIN, translation_key="api_error" + ) from ex def webhook_received(self, message: dict[str, Any]) -> None: """Handle webhook message.""" diff --git a/homeassistant/components/tedee/entity.py b/homeassistant/components/tedee/entity.py index c72e293a292..96cc6f2b3f5 100644 --- a/homeassistant/components/tedee/entity.py +++ b/homeassistant/components/tedee/entity.py @@ -1,6 +1,6 @@ """Bases for Tedee entities.""" -from pytedee_async.lock import TedeeLock +from aiotedee.lock import TedeeLock from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo diff --git a/homeassistant/components/tedee/lock.py b/homeassistant/components/tedee/lock.py index 34d313f3e48..38df85a9cdb 100644 --- a/homeassistant/components/tedee/lock.py +++ b/homeassistant/components/tedee/lock.py @@ -2,7 +2,7 @@ from typing import Any -from pytedee_async import TedeeClientException, TedeeLock, TedeeLockState +from aiotedee import TedeeClientException, TedeeLock, TedeeLockState from homeassistant.components.lock import LockEntity, LockEntityFeature from homeassistant.core import HomeAssistant @@ -13,6 +13,8 @@ from .const import DOMAIN from .coordinator import TedeeApiCoordinator, TedeeConfigEntry from .entity import TedeeEntity +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/tedee/manifest.json b/homeassistant/components/tedee/manifest.json index 4f071267a25..bca51f08f93 100644 --- a/homeassistant/components/tedee/manifest.json +++ b/homeassistant/components/tedee/manifest.json @@ -6,7 +6,7 @@ "dependencies": ["http", "webhook"], "documentation": "https://www.home-assistant.io/integrations/tedee", "iot_class": "local_push", - "loggers": ["pytedee_async"], + "loggers": ["aiotedee"], "quality_scale": "platinum", - "requirements": ["pytedee-async==0.2.20"] + "requirements": ["aiotedee==0.2.20"] } diff --git a/homeassistant/components/tedee/quality_scale.yaml b/homeassistant/components/tedee/quality_scale.yaml new file mode 100644 index 00000000000..974c8f82ec9 --- /dev/null +++ b/homeassistant/components/tedee/quality_scale.yaml @@ -0,0 +1,86 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + No custom actions + appropriate-polling: done + brands: done + common-modules: done + config-flow-test-coverage: done + config-flow: done + dependency-transparency: done + docs-actions: + status: exempt + comment: | + No custom actions + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: done + entity-event-setup: + status: exempt + comment: | + No explicit event subscriptions + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + action-exceptions: + status: exempt + comment: | + No custom actions + config-entry-unloading: done + docs-configuration-parameters: + status: exempt + comment: | + Options flow not documented, doesn't have one + docs-installation-parameters: done + entity-unavailable: done + integration-owner: done + log-when-unavailable: + status: done + comment: | + Handled by coordinator + parallel-updates: done + reauthentication-flow: done + test-coverage: done + + # Gold + devices: done + diagnostics: done + discovery-update-info: + status: exempt + comment: | + No discovery + discovery: + status: exempt + comment: | + No discovery supported atm + docs-data-update: done 
+ docs-examples: done + docs-known-limitations: done + docs-supported-devices: done + docs-supported-functions: done + docs-troubleshooting: done + docs-use-cases: done + dynamic-devices: done + entity-category: done + entity-device-class: done + entity-disabled-by-default: done + entity-translations: done + exception-translations: done + icon-translations: done + reconfiguration-flow: done + repair-issues: + status: exempt + comment: | + Currently no repairs/issues + stale-devices: done + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/tedee/sensor.py b/homeassistant/components/tedee/sensor.py index 33894a5eb52..90f76317fff 100644 --- a/homeassistant/components/tedee/sensor.py +++ b/homeassistant/components/tedee/sensor.py @@ -3,7 +3,7 @@ from collections.abc import Callable from dataclasses import dataclass -from pytedee_async import TedeeLock +from aiotedee import TedeeLock from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/homeassistant/components/tedee/strings.json b/homeassistant/components/tedee/strings.json index b6966fa2933..78cacd706d3 100644 --- a/homeassistant/components/tedee/strings.json +++ b/homeassistant/components/tedee/strings.json @@ -66,12 +66,21 @@ } }, "exceptions": { + "api_error": { + "message": "Error while communicating with the API" + }, + "authentication_failed": { + "message": "Authentication failed. Local access token is invalid" + }, "lock_failed": { "message": "Failed to lock the door. Lock {lock_id}" }, "unlock_failed": { "message": "Failed to unlock the door. Lock {lock_id}" }, + "update_failed": { + "message": "Error while updating data" + }, "open_failed": { "message": "Failed to unlatch the door. Lock {lock_id}" } diff --git a/homeassistant/components/telegram/manifest.json b/homeassistant/components/telegram/manifest.json index ce4457b3129..9022f357970 100644 --- a/homeassistant/components/telegram/manifest.json +++ b/homeassistant/components/telegram/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["telegram_bot"], "documentation": "https://www.home-assistant.io/integrations/telegram", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/telegram_bot/manifest.json b/homeassistant/components/telegram_bot/manifest.json index b432c88762f..3474d39b1d6 100644 --- a/homeassistant/components/telegram_bot/manifest.json +++ b/homeassistant/components/telegram_bot/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/telegram_bot", "iot_class": "cloud_push", "loggers": ["telegram"], + "quality_scale": "legacy", "requirements": ["python-telegram-bot[socks]==21.5"] } diff --git a/homeassistant/components/tellduslive/manifest.json b/homeassistant/components/tellduslive/manifest.json index dc1389c15c5..4ebf1a334bd 100644 --- a/homeassistant/components/tellduslive/manifest.json +++ b/homeassistant/components/tellduslive/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/tellduslive", "iot_class": "cloud_polling", - "quality_scale": "silver", "requirements": ["tellduslive==0.10.12"] } diff --git a/homeassistant/components/tellstick/manifest.json b/homeassistant/components/tellstick/manifest.json index c64a51b09e4..40956b06ac6 100644 --- a/homeassistant/components/tellstick/manifest.json +++ b/homeassistant/components/tellstick/manifest.json @@ -5,5 +5,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/tellstick", "iot_class": "assumed_state", "loggers": ["tellcore"], + "quality_scale": "legacy", "requirements": ["tellcore-net==0.4", "tellcore-py==1.1.2"] } diff --git a/homeassistant/components/telnet/manifest.json b/homeassistant/components/telnet/manifest.json index 48a79afc528..68353104839 100644 --- a/homeassistant/components/telnet/manifest.json +++ b/homeassistant/components/telnet/manifest.json @@ -3,5 +3,6 @@ "name": "Telnet", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/telnet", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/temper/manifest.json b/homeassistant/components/temper/manifest.json index dbad8827877..ad1fcd40525 100644 --- a/homeassistant/components/temper/manifest.json +++ b/homeassistant/components/temper/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/temper", "iot_class": "local_polling", "loggers": ["pyusb", "temperusb"], + "quality_scale": "legacy", "requirements": ["temperusb==1.6.1"] } diff --git a/homeassistant/components/template/config_flow.py b/homeassistant/components/template/config_flow.py index c1c023c0ea4..8ecef8539d3 100644 --- a/homeassistant/components/template/config_flow.py +++ b/homeassistant/components/template/config_flow.py @@ -157,7 +157,7 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema: type=selector.TextSelectorType.TEXT, multiline=False ) ), - vol.Optional(CONF_SET_VALUE): selector.ActionSelector(), + vol.Required(CONF_SET_VALUE): selector.ActionSelector(), } if domain == Platform.SELECT: diff --git a/homeassistant/components/template/coordinator.py b/homeassistant/components/template/coordinator.py index b9bbd3625af..4d8fe78f2b5 100644 --- a/homeassistant/components/template/coordinator.py +++ b/homeassistant/components/template/coordinator.py @@ -24,7 +24,9 @@ class TriggerUpdateCoordinator(DataUpdateCoordinator): def __init__(self, hass: HomeAssistant, config: dict[str, Any]) -> None: """Instantiate trigger data.""" - super().__init__(hass, _LOGGER, name="Trigger Update Coordinator") + super().__init__( + hass, _LOGGER, config_entry=None, name="Trigger Update Coordinator" + ) self.config = config self._cond_func: Callable[[Mapping[str, Any] | None], bool] | None = None self._unsub_start: Callable[[], None] | None = None diff --git a/homeassistant/components/template/lock.py b/homeassistant/components/template/lock.py index 6ea8aff4c1a..d7bb30dbba0 100644 --- a/homeassistant/components/template/lock.py +++ b/homeassistant/components/template/lock.py @@ -2,13 +2,14 @@ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol from homeassistant.components.lock import ( PLATFORM_SCHEMA as LOCK_PLATFORM_SCHEMA, LockEntity, + LockEntityFeature, LockState, ) from homeassistant.const import ( @@ -36,6 +37,7 @@ from .template_entity import ( CONF_CODE_FORMAT_TEMPLATE = "code_format_template" CONF_LOCK = "lock" CONF_UNLOCK = "unlock" +CONF_OPEN = "open" DEFAULT_NAME = "Template Lock" DEFAULT_OPTIMISTIC = False @@ -45,6 +47,7 @@ PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend( vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_LOCK): cv.SCRIPT_SCHEMA, vol.Required(CONF_UNLOCK): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_OPEN): cv.SCRIPT_SCHEMA, vol.Required(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_CODE_FORMAT_TEMPLATE): cv.template, 
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean, @@ -53,7 +56,9 @@ PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend( ).extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA_LEGACY.schema) -async def _async_create_entities(hass, config): +async def _async_create_entities( + hass: HomeAssistant, config: dict[str, Any] +) -> list[TemplateLock]: """Create the Template lock.""" config = rewrite_common_legacy_to_modern_conf(hass, config) return [TemplateLock(hass, config, config.get(CONF_UNIQUE_ID))] @@ -76,22 +81,26 @@ class TemplateLock(TemplateEntity, LockEntity): def __init__( self, - hass, - config, - unique_id, - ): + hass: HomeAssistant, + config: dict[str, Any], + unique_id: str | None, + ) -> None: """Initialize the lock.""" super().__init__( hass, config=config, fallback_name=DEFAULT_NAME, unique_id=unique_id ) - self._state = None + self._state: str | bool | LockState | None = None name = self._attr_name + assert name self._state_template = config.get(CONF_VALUE_TEMPLATE) self._command_lock = Script(hass, config[CONF_LOCK], name, DOMAIN) self._command_unlock = Script(hass, config[CONF_UNLOCK], name, DOMAIN) + if CONF_OPEN in config: + self._command_open = Script(hass, config[CONF_OPEN], name, DOMAIN) + self._attr_supported_features |= LockEntityFeature.OPEN self._code_format_template = config.get(CONF_CODE_FORMAT_TEMPLATE) - self._code_format = None - self._code_format_template_error = None + self._code_format: str | None = None + self._code_format_template_error: TemplateError | None = None self._optimistic = config.get(CONF_OPTIMISTIC) self._attr_assumed_state = bool(self._optimistic) @@ -115,6 +124,11 @@ class TemplateLock(TemplateEntity, LockEntity): """Return true if lock is locking.""" return self._state == LockState.LOCKING + @property + def is_open(self) -> bool: + """Return true if lock is open.""" + return self._state == LockState.OPEN + @callback def _update_state(self, result): """Update the state from the template.""" @@ -141,6 +155,8 @@ class TemplateLock(TemplateEntity, LockEntity): @callback def _async_setup_templates(self) -> None: """Set up templates.""" + if TYPE_CHECKING: + assert self._state_template is not None self.add_template_attribute( "_state", self._state_template, None, self._update_state ) @@ -168,6 +184,8 @@ class TemplateLock(TemplateEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the device.""" + # Check if we need to raise for incorrect code format + # template before processing the action. self._raise_template_error_if_available() if self._optimistic: @@ -182,6 +200,8 @@ class TemplateLock(TemplateEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the device.""" + # Check if we need to raise for incorrect code format + # template before processing the action. self._raise_template_error_if_available() if self._optimistic: @@ -194,7 +214,24 @@ class TemplateLock(TemplateEntity, LockEntity): self._command_unlock, run_variables=tpl_vars, context=self._context ) + async def async_open(self, **kwargs: Any) -> None: + """Open the device.""" + # Check if we need to raise for incorrect code format + # template before processing the action. 
+ self._raise_template_error_if_available() + + if self._optimistic: + self._state = LockState.OPEN + self.async_write_ha_state() + + tpl_vars = {ATTR_CODE: kwargs.get(ATTR_CODE) if kwargs else None} + + await self.async_run_script( + self._command_open, run_variables=tpl_vars, context=self._context + ) + def _raise_template_error_if_available(self): + """Raise an error if the rendered code format is not valid.""" if self._code_format_template_error is not None: raise ServiceValidationError( translation_domain=DOMAIN, diff --git a/homeassistant/components/template/manifest.json b/homeassistant/components/template/manifest.json index 57188aebaa3..f1225f74f06 100644 --- a/homeassistant/components/template/manifest.json +++ b/homeassistant/components/template/manifest.json @@ -2,7 +2,7 @@ "domain": "template", "name": "Template", "after_dependencies": ["group"], - "codeowners": ["@PhracturedBlue", "@tetienne", "@home-assistant/core"], + "codeowners": ["@PhracturedBlue", "@home-assistant/core"], "config_flow": true, "dependencies": ["blueprint"], "documentation": "https://www.home-assistant.io/integrations/template", diff --git a/homeassistant/components/template/trigger_entity.py b/homeassistant/components/template/trigger_entity.py index df84ce057c3..5130f332d5b 100644 --- a/homeassistant/components/template/trigger_entity.py +++ b/homeassistant/components/template/trigger_entity.py @@ -3,6 +3,7 @@ from __future__ import annotations from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.template import TemplateStateFromEntityId from homeassistant.helpers.trigger_template_entity import TriggerBaseEntity from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -41,11 +42,11 @@ class TriggerEntity( # pylint: disable=hass-enforce-class-module def _process_data(self) -> None: """Process new data.""" - this = None - if state := self.hass.states.get(self.entity_id): - this = state.as_dict() run_variables = self.coordinator.data["run_variables"] - variables = {"this": this, **(run_variables or {})} + variables = { + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + **(run_variables or {}), + } self._render_templates(variables) diff --git a/homeassistant/components/tensorflow/manifest.json b/homeassistant/components/tensorflow/manifest.json index 4f2b6f19285..1ddfa188c0a 100644 --- a/homeassistant/components/tensorflow/manifest.json +++ b/homeassistant/components/tensorflow/manifest.json @@ -5,11 +5,12 @@ "documentation": "https://www.home-assistant.io/integrations/tensorflow", "iot_class": "local_polling", "loggers": ["tensorflow"], + "quality_scale": "legacy", "requirements": [ "tensorflow==2.5.0", "tf-models-official==2.5.0", "pycocotools==2.0.6", - "numpy==1.26.4", - "Pillow==10.4.0" + "numpy==2.1.3", + "Pillow==11.0.0" ] } diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 4cd8c5c7142..e7030b568b3 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -5,7 +5,12 @@ from typing import Final from aiohttp.client_exceptions import ClientResponseError import jwt -from tesla_fleet_api import EnergySpecific, TeslaFleetApi, VehicleSpecific +from tesla_fleet_api import ( + EnergySpecific, + TeslaFleetApi, + VehicleSigned, + VehicleSpecific, +) from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( InvalidRegion, @@ -126,7 +131,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: 
TeslaFleetConfigEntry) - # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] - api = VehicleSpecific(tesla.vehicle, vin) + signing = product["command_signing"] == "required" + if signing: + if not tesla.private_key: + await tesla.get_private_key(hass.config.path("tesla_fleet.key")) + api = VehicleSigned(tesla.vehicle, vin) + else: + api = VehicleSpecific(tesla.vehicle, vin) coordinator = TeslaFleetVehicleDataCoordinator(hass, api, product) await coordinator.async_config_entry_first_refresh() @@ -145,7 +156,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - coordinator=coordinator, vin=vin, device=device, - signing=product["command_signing"] == "required", + signing=signing, ) ) elif "energy_site_id" in product and hasattr(tesla, "energy"): diff --git a/homeassistant/components/tesla_fleet/button.py b/homeassistant/components/tesla_fleet/button.py index 87cd95576d2..aea0f91a97c 100644 --- a/homeassistant/components/tesla_fleet/button.py +++ b/homeassistant/components/tesla_fleet/button.py @@ -70,8 +70,6 @@ async def async_setup_entry( for vehicle in entry.runtime_data.vehicles for description in DESCRIPTIONS if Scope.VEHICLE_CMDS in entry.runtime_data.scopes - and (not vehicle.signing or description.key == "wake") - # Wake doesn't need signing ) diff --git a/homeassistant/components/tesla_fleet/climate.py b/homeassistant/components/tesla_fleet/climate.py index 6199ee112b5..9a1533a688f 100644 --- a/homeassistant/components/tesla_fleet/climate.py +++ b/homeassistant/components/tesla_fleet/climate.py @@ -84,7 +84,7 @@ class TeslaFleetClimateEntity(TeslaFleetVehicleEntity, ClimateEntity): ) -> None: """Initialize the climate.""" - self.read_only = Scope.VEHICLE_CMDS not in scopes or data.signing + self.read_only = Scope.VEHICLE_CMDS not in scopes if self.read_only: self._attr_supported_features = ClimateEntityFeature(0) @@ -231,7 +231,7 @@ class TeslaFleetCabinOverheatProtectionEntity(TeslaFleetVehicleEntity, ClimateEn """Initialize the cabin overheat climate entity.""" # Scopes - self.read_only = Scope.VEHICLE_CMDS not in scopes or data.signing + self.read_only = Scope.VEHICLE_CMDS not in scopes # Supported Features if self.read_only: diff --git a/homeassistant/components/tesla_fleet/cover.py b/homeassistant/components/tesla_fleet/cover.py index d7e1f68ac89..f270734424f 100644 --- a/homeassistant/components/tesla_fleet/cover.py +++ b/homeassistant/components/tesla_fleet/cover.py @@ -57,7 +57,7 @@ class TeslaFleetWindowEntity(TeslaFleetVehicleEntity, CoverEntity): self._attr_supported_features = ( CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE ) - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: @@ -111,7 +111,7 @@ class TeslaFleetChargePortEntity(TeslaFleetVehicleEntity, CoverEntity): self._attr_supported_features = ( CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE ) - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: @@ -144,7 +144,7 @@ class TeslaFleetFrontTrunkEntity(TeslaFleetVehicleEntity, CoverEntity): self.scoped = Scope.VEHICLE_CMDS in scopes self._attr_supported_features = CoverEntityFeature.OPEN - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: @@ 
-172,7 +172,7 @@ class TeslaFleetRearTrunkEntity(TeslaFleetVehicleEntity, CoverEntity): self._attr_supported_features = ( CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE ) - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: @@ -210,7 +210,7 @@ class TeslaFleetSunroofEntity(TeslaFleetVehicleEntity, CoverEntity): super().__init__(vehicle, "vehicle_state_sun_roof_state") self.scoped = Scope.VEHICLE_CMDS in scopes - if not self.scoped or self.vehicle.signing: + if not self.scoped: self._attr_supported_features = CoverEntityFeature(0) def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/tesla_fleet/entity.py b/homeassistant/components/tesla_fleet/entity.py index 60230cd881d..0ee41b5e322 100644 --- a/homeassistant/components/tesla_fleet/entity.py +++ b/homeassistant/components/tesla_fleet/entity.py @@ -123,14 +123,6 @@ class TeslaFleetVehicleEntity(TeslaFleetEntity): """Wake up the vehicle if its asleep.""" await wake_up_vehicle(self.vehicle) - def raise_for_read_only(self, scope: Scope) -> None: - """Raise an error if no command signing or a scope is not available.""" - if self.vehicle.signing: - raise ServiceValidationError( - translation_domain=DOMAIN, translation_key="command_signing" - ) - super().raise_for_read_only(scope) - class TeslaFleetEnergyLiveEntity(TeslaFleetEntity): """Parent class for TeslaFleet Energy Site Live entities.""" diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index 8d6e5f11068..f27929032d7 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,6 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "quality_scale": "gold", "requirements": ["tesla-fleet-api==0.8.4"] } diff --git a/homeassistant/components/tesla_fleet/media_player.py b/homeassistant/components/tesla_fleet/media_player.py index 0a1d18c3407..455c990077d 100644 --- a/homeassistant/components/tesla_fleet/media_player.py +++ b/homeassistant/components/tesla_fleet/media_player.py @@ -64,7 +64,7 @@ class TeslaFleetMediaEntity(TeslaFleetVehicleEntity, MediaPlayerEntity): """Initialize the media player entity.""" super().__init__(data, "media") self.scoped = scoped - if not scoped and data.signing: + if not scoped: self._attr_supported_features = MediaPlayerEntityFeature(0) def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/tesla_fleet/oauth.py b/homeassistant/components/tesla_fleet/oauth.py index 00976abf56f..8b43460436b 100644 --- a/homeassistant/components/tesla_fleet/oauth.py +++ b/homeassistant/components/tesla_fleet/oauth.py @@ -49,6 +49,7 @@ class TeslaSystemImplementation(config_entry_oauth2_flow.LocalOAuth2Implementati def extra_authorize_data(self) -> dict[str, Any]: """Extra data that needs to be appended to the authorize url.""" return { + "prompt": "login", "scope": " ".join(SCOPES), "code_challenge": self.code_challenge, # PKCE } @@ -83,4 +84,4 @@ class TeslaUserImplementation(AuthImplementation): @property def extra_authorize_data(self) -> dict[str, Any]: """Extra data that needs to be appended to the authorize url.""" - return {"scope": " ".join(SCOPES)} + return {"prompt": "login", "scope": " ".join(SCOPES)} diff --git a/homeassistant/components/tesla_fleet/strings.json 
b/homeassistant/components/tesla_fleet/strings.json index 942824c5043..fe5cd06c1ef 100644 --- a/homeassistant/components/tesla_fleet/strings.json +++ b/homeassistant/components/tesla_fleet/strings.json @@ -504,9 +504,6 @@ "command_no_reason": { "message": "Command was unsuccessful but did not return a reason why." }, - "command_signing": { - "message": "Vehicle requires command signing. Please see documentation for more details." - }, "invalid_cop_temp": { "message": "Cabin overheat protection does not support that temperature." }, diff --git a/homeassistant/components/teslemetry/__init__.py b/homeassistant/components/teslemetry/__init__.py index b884f9bbc5c..aa1d2b42660 100644 --- a/homeassistant/components/teslemetry/__init__.py +++ b/homeassistant/components/teslemetry/__init__.py @@ -135,11 +135,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - elif "energy_site_id" in product and Scope.ENERGY_DEVICE_DATA in scopes: site_id = product["energy_site_id"] - if not ( - product["components"]["battery"] - or product["components"]["solar"] - or "wall_connectors" in product["components"] - ): + powerwall = ( + product["components"]["battery"] or product["components"]["solar"] + ) + wall_connector = "wall_connectors" in product["components"] + if not powerwall and not wall_connector: LOGGER.debug( "Skipping Energy Site %s as it has no components", site_id, @@ -162,7 +162,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - info_coordinator=TeslemetryEnergySiteInfoCoordinator( hass, api, product ), - history_coordinator=TeslemetryEnergyHistoryCoordinator(hass, api), + history_coordinator=( + TeslemetryEnergyHistoryCoordinator(hass, api) + if powerwall + else None + ), id=site_id, device=device, ) @@ -185,6 +189,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) - *( energysite.history_coordinator.async_config_entry_first_refresh() for energysite in energysites + if energysite.history_coordinator ), ) diff --git a/homeassistant/components/teslemetry/entity.py b/homeassistant/components/teslemetry/entity.py index ca40d4d00ce..d14f3a42734 100644 --- a/homeassistant/components/teslemetry/entity.py +++ b/homeassistant/components/teslemetry/entity.py @@ -175,6 +175,8 @@ class TeslemetryEnergyHistoryEntity(TeslemetryEntity): ) -> None: """Initialize common aspects of a Teslemetry Energy Site Info entity.""" + assert data.history_coordinator + self.api = data.api self._attr_unique_id = f"{data.id}-{key}" self._attr_device_info = data.device diff --git a/homeassistant/components/teslemetry/manifest.json b/homeassistant/components/teslemetry/manifest.json index 6b667094d62..fc82dea6445 100644 --- a/homeassistant/components/teslemetry/manifest.json +++ b/homeassistant/components/teslemetry/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/teslemetry", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], - "quality_scale": "platinum", "requirements": ["tesla-fleet-api==0.8.4", "teslemetry-stream==0.4.2"] } diff --git a/homeassistant/components/teslemetry/models.py b/homeassistant/components/teslemetry/models.py index 7f8bd37425a..d3969b30a7c 100644 --- a/homeassistant/components/teslemetry/models.py +++ b/homeassistant/components/teslemetry/models.py @@ -49,6 +49,6 @@ class TeslemetryEnergyData: api: EnergySpecific live_coordinator: TeslemetryEnergySiteLiveCoordinator info_coordinator: TeslemetryEnergySiteInfoCoordinator - history_coordinator: 
TeslemetryEnergyHistoryCoordinator + history_coordinator: TeslemetryEnergyHistoryCoordinator | None id: int device: DeviceInfo diff --git a/homeassistant/components/teslemetry/sensor.py b/homeassistant/components/teslemetry/sensor.py index ba7d930fcd0..95876cc2cf9 100644 --- a/homeassistant/components/teslemetry/sensor.py +++ b/homeassistant/components/teslemetry/sensor.py @@ -482,8 +482,7 @@ async def async_setup_entry( TeslemetryEnergyHistorySensorEntity(energysite, description) for energysite in entry.runtime_data.energysites for description in ENERGY_HISTORY_DESCRIPTIONS - if energysite.info_coordinator.data.get("components_battery") - or energysite.info_coordinator.data.get("components_solar") + if energysite.history_coordinator ), ) ) diff --git a/homeassistant/components/tessie/const.py b/homeassistant/components/tessie/const.py index 90862eff969..4731f5168a2 100644 --- a/homeassistant/components/tessie/const.py +++ b/homeassistant/components/tessie/const.py @@ -13,6 +13,16 @@ MODELS = { "models": "Model S", } +TRANSLATED_ERRORS = { + "unknown": "unknown", + "not supported": "not_supported", + "cable connected": "cable_connected", + "already active": "already_active", + "already inactive": "already_inactive", + "incorrect pin": "incorrect_pin", + "no cable": "no_cable", +} + class TessieState(StrEnum): """Tessie status.""" diff --git a/homeassistant/components/tessie/entity.py b/homeassistant/components/tessie/entity.py index 42a3c92b2be..a2b6d3c9761 100644 --- a/homeassistant/components/tessie/entity.py +++ b/homeassistant/components/tessie/entity.py @@ -10,7 +10,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN +from .const import DOMAIN, TRANSLATED_ERRORS from .coordinator import ( TessieEnergySiteInfoCoordinator, TessieEnergySiteLiveCoordinator, @@ -107,10 +107,11 @@ class TessieEntity(TessieBaseEntity): if response["result"] is False: name: str = getattr(self, "name", self.entity_id) reason: str = response.get("reason", "unknown") + translation_key = TRANSLATED_ERRORS.get(reason, "command_failed") raise HomeAssistantError( translation_domain=DOMAIN, - translation_key=reason.replace(" ", "_"), - translation_placeholders={"name": name}, + translation_key=translation_key, + translation_placeholders={"name": name, "message": reason}, ) def _async_update_attrs(self) -> None: diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index 92aa289ca47..cab9f4c706d 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", "loggers": ["tessie", "tesla-fleet-api"], - "quality_scale": "platinum", "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.8.4"] } diff --git a/homeassistant/components/tfiac/manifest.json b/homeassistant/components/tfiac/manifest.json index 243710241a2..94f82c99d21 100644 --- a/homeassistant/components/tfiac/manifest.json +++ b/homeassistant/components/tfiac/manifest.json @@ -5,5 +5,6 @@ "disabled": "This integration is disabled because we cannot build a valid wheel.", "documentation": "https://www.home-assistant.io/integrations/tfiac", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["pytfiac==0.4"] } diff --git 
a/homeassistant/components/thermoworks_smoke/manifest.json b/homeassistant/components/thermoworks_smoke/manifest.json index 7baec9cdb74..f67b041b1e5 100644 --- a/homeassistant/components/thermoworks_smoke/manifest.json +++ b/homeassistant/components/thermoworks_smoke/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/thermoworks_smoke", "iot_class": "cloud_polling", "loggers": ["thermoworks_smoke"], + "quality_scale": "legacy", "requirements": ["stringcase==1.2.0", "thermoworks-smoke==0.1.8"] } diff --git a/homeassistant/components/thethingsnetwork/__init__.py b/homeassistant/components/thethingsnetwork/__init__.py index 253ce7a052e..d3c6c8356cb 100644 --- a/homeassistant/components/thethingsnetwork/__init__.py +++ b/homeassistant/components/thethingsnetwork/__init__.py @@ -2,55 +2,15 @@ import logging -import voluptuous as vol - from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType -from .const import CONF_APP_ID, DOMAIN, PLATFORMS, TTN_API_HOST +from .const import DOMAIN, PLATFORMS, TTN_API_HOST from .coordinator import TTNCoordinator _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = vol.Schema( - { - # Configuration via yaml not longer supported - keeping to warn about migration - DOMAIN: vol.Schema( - { - vol.Required(CONF_APP_ID): cv.string, - vol.Required("access_key"): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Initialize of The Things Network component.""" - - if DOMAIN in config: - ir.async_create_issue( - hass, - DOMAIN, - "manual_migration", - breaks_in_ha_version="2024.12.0", - is_fixable=False, - severity=ir.IssueSeverity.ERROR, - translation_key="manual_migration", - translation_placeholders={ - "domain": DOMAIN, - "v2_v3_migration_url": "https://www.thethingsnetwork.org/forum/c/v2-to-v3-upgrade/102", - "v2_deprecation_url": "https://www.thethingsnetwork.org/forum/t/the-things-network-v2-is-permanently-shutting-down-completed/50710", - }, - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Establish connection with The Things Network.""" diff --git a/homeassistant/components/thethingsnetwork/strings.json b/homeassistant/components/thethingsnetwork/strings.json index 98572cb318c..f5a4fcef8fd 100644 --- a/homeassistant/components/thethingsnetwork/strings.json +++ b/homeassistant/components/thethingsnetwork/strings.json @@ -22,11 +22,5 @@ "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]" } - }, - "issues": { - "manual_migration": { - "description": "Configuring {domain} using YAML was removed as part of migrating to [The Things Network v3]({v2_v3_migration_url}). 
[The Things Network v2 has shutted down]({v2_deprecation_url}).\n\nPlease remove the {domain} entry from the configuration.yaml and add re-add the integration using the config_flow", - "title": "The {domain} YAML configuration is not supported" - } } } diff --git a/homeassistant/components/thingspeak/manifest.json b/homeassistant/components/thingspeak/manifest.json index ffdc11d9214..aac0ca06426 100644 --- a/homeassistant/components/thingspeak/manifest.json +++ b/homeassistant/components/thingspeak/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/thingspeak", "iot_class": "cloud_push", "loggers": ["thingspeak"], + "quality_scale": "legacy", "requirements": ["thingspeak==1.0.0"] } diff --git a/homeassistant/components/thinkingcleaner/manifest.json b/homeassistant/components/thinkingcleaner/manifest.json index f480340fcf8..048fcfffa05 100644 --- a/homeassistant/components/thinkingcleaner/manifest.json +++ b/homeassistant/components/thinkingcleaner/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/thinkingcleaner", "iot_class": "local_polling", "loggers": ["pythinkingcleaner"], + "quality_scale": "legacy", "requirements": ["pythinkingcleaner==0.0.3"] } diff --git a/homeassistant/components/thomson/manifest.json b/homeassistant/components/thomson/manifest.json index 08961cb2746..7f49b57d724 100644 --- a/homeassistant/components/thomson/manifest.json +++ b/homeassistant/components/thomson/manifest.json @@ -3,5 +3,6 @@ "name": "Thomson", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/thomson", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/threshold/binary_sensor.py b/homeassistant/components/threshold/binary_sensor.py index 5f1639ff2e1..3d52d2225be 100644 --- a/homeassistant/components/threshold/binary_sensor.py +++ b/homeassistant/components/threshold/binary_sensor.py @@ -61,15 +61,29 @@ _LOGGER = logging.getLogger(__name__) DEFAULT_NAME: Final = "Threshold" -PLATFORM_SCHEMA = BINARY_SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_ENTITY_ID): cv.entity_id, - vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, - vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce(float), - vol.Optional(CONF_LOWER): vol.Coerce(float), - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_UPPER): vol.Coerce(float), - } + +def no_missing_threshold(value: dict) -> dict: + """Validate that at least one of the lower or upper thresholds is provided.""" + if value.get(CONF_LOWER) is None and value.get(CONF_UPPER) is None: + raise vol.Invalid("Lower or Upper thresholds are not provided") + + return value + + +PLATFORM_SCHEMA = vol.All( + BINARY_SENSOR_PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_ENTITY_ID): cv.entity_id, + vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, + vol.Optional(CONF_HYSTERESIS, default=DEFAULT_HYSTERESIS): vol.Coerce( + float + ), + vol.Optional(CONF_LOWER): vol.Coerce(float), + vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, + vol.Optional(CONF_UPPER): vol.Coerce(float), + } + ), + no_missing_threshold, ) @@ -126,9 +140,6 @@ async def async_setup_platform( hysteresis: float = config[CONF_HYSTERESIS] device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS) - if lower is None and upper is None: - raise ValueError("Lower or Upper thresholds not provided") - async_add_entities( [ ThresholdSensor( @@ -151,6 +162,9 @@ class
ThresholdSensor(BinarySensorEntity): """Representation of a Threshold sensor.""" _attr_should_poll = False + _unrecorded_attributes = frozenset( + {ATTR_ENTITY_ID, ATTR_HYSTERESIS, ATTR_LOWER, ATTR_TYPE, ATTR_UPPER} + ) def __init__( self, diff --git a/homeassistant/components/threshold/strings.json b/homeassistant/components/threshold/strings.json index fc9ee8fb7bf..94a1932cbbc 100644 --- a/homeassistant/components/threshold/strings.json +++ b/homeassistant/components/threshold/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Threshold Sensor", + "title": "Create Threshold Sensor", "description": "Create a binary sensor that turns on and off depending on the value of a sensor\n\nOnly lower limit configured - Turn on when the input sensor's value is less than the lower limit.\nOnly upper limit configured - Turn on when the input sensor's value is greater than the upper limit.\nBoth lower and upper limit configured - Turn on when the input sensor's value is in the range [lower limit .. upper limit].", "data": { "entity_id": "Input sensor", diff --git a/homeassistant/components/tibber/__init__.py b/homeassistant/components/tibber/__init__.py index ce05b8070f6..9b5c7ee1168 100644 --- a/homeassistant/components/tibber/__init__.py +++ b/homeassistant/components/tibber/__init__.py @@ -6,15 +6,9 @@ import aiohttp import tibber from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONF_ACCESS_TOKEN, - CONF_NAME, - EVENT_HOMEASSISTANT_STOP, - Platform, -) +from homeassistant.const import CONF_ACCESS_TOKEN, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import discovery from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType @@ -73,19 +67,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - # Use discovery to load platform legacy notify platform - # The use of the legacy notify service was deprecated with HA Core 2024.6 - # Support will be removed with HA Core 2024.12 - hass.async_create_task( - discovery.async_load_platform( - hass, - Platform.NOTIFY, - DOMAIN, - {CONF_NAME: DOMAIN}, - hass.data[DATA_HASS_CONFIG], - ) - ) - return True diff --git a/homeassistant/components/tibber/manifest.json b/homeassistant/components/tibber/manifest.json index bc9304ab59d..3a3a772a934 100644 --- a/homeassistant/components/tibber/manifest.json +++ b/homeassistant/components/tibber/manifest.json @@ -7,6 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/tibber", "iot_class": "cloud_polling", "loggers": ["tibber"], - "quality_scale": "silver", "requirements": ["pyTibber==0.30.8"] } diff --git a/homeassistant/components/tibber/notify.py b/homeassistant/components/tibber/notify.py index 1c9f86ed502..fdeeeba68ef 100644 --- a/homeassistant/components/tibber/notify.py +++ b/homeassistant/components/tibber/notify.py @@ -2,38 +2,21 @@ from __future__ import annotations -from collections.abc import Callable -from typing import Any - from tibber import Tibber from homeassistant.components.notify import ( - ATTR_TITLE, ATTR_TITLE_DEFAULT, - BaseNotificationService, NotifyEntity, NotifyEntityFeature, - migrate_notify_issue, ) from homeassistant.config_entries import ConfigEntry from homeassistant.core 
import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import DOMAIN as TIBBER_DOMAIN -async def async_get_service( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> TibberNotificationService: - """Get the Tibber notification service.""" - tibber_connection: Tibber = hass.data[TIBBER_DOMAIN] - return TibberNotificationService(tibber_connection.send_notification) - - async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: @@ -41,31 +24,6 @@ async def async_setup_entry( async_add_entities([TibberNotificationEntity(entry.entry_id)]) -class TibberNotificationService(BaseNotificationService): - """Implement the notification service for Tibber.""" - - def __init__(self, notify: Callable) -> None: - """Initialize the service.""" - self._notify = notify - - async def async_send_message(self, message: str = "", **kwargs: Any) -> None: - """Send a message to Tibber devices.""" - migrate_notify_issue( - self.hass, - TIBBER_DOMAIN, - "Tibber", - "2024.12.0", - service_name=self._service_name, - ) - title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT) - try: - await self._notify(title=title, message=message) - except TimeoutError as exc: - raise HomeAssistantError( - translation_domain=TIBBER_DOMAIN, translation_key="send_message_timeout" - ) from exc - - class TibberNotificationEntity(NotifyEntity): """Implement the notification entity service for Tibber.""" diff --git a/homeassistant/components/tibber/services.py b/homeassistant/components/tibber/services.py index 72943a0215a..5033cda11d0 100644 --- a/homeassistant/components/tibber/services.py +++ b/homeassistant/components/tibber/services.py @@ -79,7 +79,6 @@ def __get_date(date_input: str | None, mode: str | None) -> datetime: return dt_util.as_local(value) raise ServiceValidationError( - "Invalid datetime provided.", translation_domain=DOMAIN, translation_key="invalid_date", translation_placeholders={ diff --git a/homeassistant/components/tibber/strings.json b/homeassistant/components/tibber/strings.json index 8d73d435c8c..05b98b97995 100644 --- a/homeassistant/components/tibber/strings.json +++ b/homeassistant/components/tibber/strings.json @@ -119,6 +119,9 @@ } }, "exceptions": { + "invalid_date": { + "message": "Invalid datetime provided {date}" + }, "send_message_timeout": { "message": "Timeout sending message with Tibber" } diff --git a/homeassistant/components/tikteck/manifest.json b/homeassistant/components/tikteck/manifest.json index 067dd6f92cf..57e5269d3b0 100644 --- a/homeassistant/components/tikteck/manifest.json +++ b/homeassistant/components/tikteck/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tikteck", "iot_class": "local_polling", "loggers": ["tikteck"], + "quality_scale": "legacy", "requirements": ["tikteck==0.4"] } diff --git a/homeassistant/components/tmb/manifest.json b/homeassistant/components/tmb/manifest.json index 16efc870504..0e0324a62f4 100644 --- a/homeassistant/components/tmb/manifest.json +++ b/homeassistant/components/tmb/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tmb", "iot_class": "local_polling", "loggers": ["tmb"], + "quality_scale": "legacy", "requirements": ["tmb==0.0.4"] } diff --git a/homeassistant/components/tod/binary_sensor.py 
b/homeassistant/components/tod/binary_sensor.py index 907df849ea1..3ac90b5578c 100644 --- a/homeassistant/components/tod/binary_sensor.py +++ b/homeassistant/components/tod/binary_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from datetime import datetime, time, timedelta import logging -from typing import TYPE_CHECKING, Any, Literal, TypeGuard +from typing import Any, Literal, TypeGuard import voluptuous as vol @@ -109,6 +109,9 @@ class TodSensor(BinarySensorEntity): """Time of the Day Sensor.""" _attr_should_poll = False + _time_before: datetime + _time_after: datetime + _next_update: datetime def __init__( self, @@ -122,9 +125,6 @@ class TodSensor(BinarySensorEntity): """Init the ToD Sensor...""" self._attr_unique_id = unique_id self._attr_name = name - self._time_before: datetime | None = None - self._time_after: datetime | None = None - self._next_update: datetime | None = None self._after_offset = after_offset self._before_offset = before_offset self._before = before @@ -134,9 +134,6 @@ class TodSensor(BinarySensorEntity): @property def is_on(self) -> bool: """Return True is sensor is on.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None if self._time_after < self._time_before: return self._time_after <= dt_util.utcnow() < self._time_before return False @@ -144,10 +141,6 @@ class TodSensor(BinarySensorEntity): @property def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the sensor.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None - assert self._next_update is not None if time_zone := dt_util.get_default_time_zone(): return { ATTR_AFTER: self._time_after.astimezone(time_zone).isoformat(), @@ -244,9 +237,6 @@ class TodSensor(BinarySensorEntity): def _turn_to_next_day(self) -> None: """Turn to to the next day.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None if _is_sun_event(self._after): self._time_after = get_astral_event_next( self.hass, self._after, self._time_after - self._after_offset @@ -282,17 +272,12 @@ class TodSensor(BinarySensorEntity): self.async_on_remove(_clean_up_listener) - if TYPE_CHECKING: - assert self._next_update is not None self._unsub_update = event.async_track_point_in_utc_time( self.hass, self._point_in_time_listener, self._next_update ) def _calculate_next_update(self) -> None: """Datetime when the next update to the state.""" - if TYPE_CHECKING: - assert self._time_after is not None - assert self._time_before is not None now = dt_util.utcnow() if now < self._time_after: self._next_update = self._time_after @@ -309,9 +294,6 @@ class TodSensor(BinarySensorEntity): self._calculate_next_update() self.async_write_ha_state() - if TYPE_CHECKING: - assert self._next_update is not None - self._unsub_update = event.async_track_point_in_utc_time( self.hass, self._point_in_time_listener, self._next_update ) diff --git a/homeassistant/components/tod/strings.json b/homeassistant/components/tod/strings.json index bd4a48df915..c32b996c29a 100644 --- a/homeassistant/components/tod/strings.json +++ b/homeassistant/components/tod/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Times of the Day Sensor", + "title": "Create Times of the Day Sensor", "description": "Create a binary sensor that turns on or off depending on the time.", "data": { "after_time": "On time", diff --git 
a/homeassistant/components/todo/strings.json b/homeassistant/components/todo/strings.json index 717aa310ecd..245e5c82fc8 100644 --- a/homeassistant/components/todo/strings.json +++ b/homeassistant/components/todo/strings.json @@ -44,11 +44,11 @@ "fields": { "item": { "name": "Item name", - "description": "The name for the to-do list item." + "description": "The current name of the to-do item." }, "rename": { "name": "Rename item", - "description": "The new name of the to-do item" + "description": "The new name for the to-do item" }, "status": { "name": "Set status", @@ -78,7 +78,7 @@ "fields": { "item": { "name": "Item name", - "description": "The name for the to-do list items." + "description": "The name for the to-do list item." } } } diff --git a/homeassistant/components/todoist/__init__.py b/homeassistant/components/todoist/__init__.py index 60c40b1c03c..2e30856d0df 100644 --- a/homeassistant/components/todoist/__init__.py +++ b/homeassistant/components/todoist/__init__.py @@ -25,7 +25,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: token = entry.data[CONF_TOKEN] api = TodoistAPIAsync(token) - coordinator = TodoistCoordinator(hass, _LOGGER, SCAN_INTERVAL, api, token) + coordinator = TodoistCoordinator(hass, _LOGGER, entry, SCAN_INTERVAL, api, token) await coordinator.async_config_entry_first_refresh() hass.data.setdefault(DOMAIN, {}) diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py index 31470633cc6..62f9fafc02a 100644 --- a/homeassistant/components/todoist/calendar.py +++ b/homeassistant/components/todoist/calendar.py @@ -142,7 +142,7 @@ async def async_setup_platform( project_id_lookup = {} api = TodoistAPIAsync(token) - coordinator = TodoistCoordinator(hass, _LOGGER, SCAN_INTERVAL, api, token) + coordinator = TodoistCoordinator(hass, _LOGGER, None, SCAN_INTERVAL, api, token) await coordinator.async_refresh() async def _shutdown_coordinator(_: Event) -> None: diff --git a/homeassistant/components/todoist/coordinator.py b/homeassistant/components/todoist/coordinator.py index b55680907ac..2f35741c5ab 100644 --- a/homeassistant/components/todoist/coordinator.py +++ b/homeassistant/components/todoist/coordinator.py @@ -6,6 +6,7 @@ import logging from todoist_api_python.api_async import TodoistAPIAsync from todoist_api_python.models import Label, Project, Section, Task +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -17,12 +18,19 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]): self, hass: HomeAssistant, logger: logging.Logger, + entry: ConfigEntry | None, update_interval: timedelta, api: TodoistAPIAsync, token: str, ) -> None: """Initialize the Todoist coordinator.""" - super().__init__(hass, logger, name="Todoist", update_interval=update_interval) + super().__init__( + hass, + logger, + config_entry=entry, + name="Todoist", + update_interval=update_interval, + ) self.api = api self._projects: list[Project] | None = None self._labels: list[Label] | None = None diff --git a/homeassistant/components/todoist/strings.json b/homeassistant/components/todoist/strings.json index 5b083ac58bf..721b491bbf5 100644 --- a/homeassistant/components/todoist/strings.json +++ b/homeassistant/components/todoist/strings.json @@ -78,7 +78,7 @@ "description": "When should user be reminded of this task, in natural language." 
}, "reminder_date_lang": { - "name": "Reminder data language", + "name": "Reminder date language", "description": "The language of reminder_date_string." }, "reminder_date": { diff --git a/homeassistant/components/tolo/config_flow.py b/homeassistant/components/tolo/config_flow.py index 5cf91bdc3a8..d5d7e33a5e0 100644 --- a/homeassistant/components/tolo/config_flow.py +++ b/homeassistant/components/tolo/config_flow.py @@ -23,7 +23,7 @@ class ToloSaunaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - _discovered_host: str | None = None + _discovered_host: str @staticmethod def _check_device_availability(host: str) -> bool: diff --git a/homeassistant/components/tomato/manifest.json b/homeassistant/components/tomato/manifest.json index 6db69d50d82..081d55bc46d 100644 --- a/homeassistant/components/tomato/manifest.json +++ b/homeassistant/components/tomato/manifest.json @@ -3,5 +3,6 @@ "name": "Tomato", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/tomato", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/tomorrowio/config_flow.py b/homeassistant/components/tomorrowio/config_flow.py index 90bb488a7c2..cce41b17498 100644 --- a/homeassistant/components/tomorrowio/config_flow.py +++ b/homeassistant/components/tomorrowio/config_flow.py @@ -91,10 +91,6 @@ def _get_unique_id(hass: HomeAssistant, input_dict: dict[str, Any]): class TomorrowioOptionsConfigFlow(OptionsFlow): """Handle Tomorrow.io options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Tomorrow.io options flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -105,7 +101,7 @@ class TomorrowioOptionsConfigFlow(OptionsFlow): options_schema = { vol.Required( CONF_TIMESTEP, - default=self._config_entry.options[CONF_TIMESTEP], + default=self.config_entry.options[CONF_TIMESTEP], ): vol.In([1, 5, 15, 30, 60]), } @@ -125,7 +121,7 @@ class TomorrowioConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> TomorrowioOptionsConfigFlow: """Get the options flow for this handler.""" - return TomorrowioOptionsConfigFlow(config_entry) + return TomorrowioOptionsConfigFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/torque/manifest.json b/homeassistant/components/torque/manifest.json index b966365bdd4..44047c67dd2 100644 --- a/homeassistant/components/torque/manifest.json +++ b/homeassistant/components/torque/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/torque", - "iot_class": "local_push" + "iot_class": "local_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/totalconnect/config_flow.py b/homeassistant/components/totalconnect/config_flow.py index c64dd5c6120..3f5d05fda13 100644 --- a/homeassistant/components/totalconnect/config_flow.py +++ b/homeassistant/components/totalconnect/config_flow.py @@ -193,16 +193,12 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> TotalConnectOptionsFlowHandler: """Get options flow.""" - return TotalConnectOptionsFlowHandler(config_entry) + return TotalConnectOptionsFlowHandler() class TotalConnectOptionsFlowHandler(OptionsFlow): """TotalConnect options flow handler.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options 
flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, bool] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/touchline/manifest.json b/homeassistant/components/touchline/manifest.json index 340edb8381a..c003cca97a4 100644 --- a/homeassistant/components/touchline/manifest.json +++ b/homeassistant/components/touchline/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/touchline", "iot_class": "local_polling", "loggers": ["pytouchline"], + "quality_scale": "legacy", "requirements": ["pytouchline==0.7"] } diff --git a/homeassistant/components/touchline_sl/climate.py b/homeassistant/components/touchline_sl/climate.py index 93328823749..8a0ffc4cd86 100644 --- a/homeassistant/components/touchline_sl/climate.py +++ b/homeassistant/components/touchline_sl/climate.py @@ -2,22 +2,19 @@ from typing import Any -from pytouchlinesl import Zone - from homeassistant.components.climate import ( ClimateEntity, ClimateEntityFeature, + HVACAction, HVACMode, ) from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import TouchlineSLConfigEntry -from .const import DOMAIN from .coordinator import TouchlineSLModuleCoordinator +from .entity import TouchlineSLZoneEntity async def async_setup_entry( @@ -37,10 +34,10 @@ async def async_setup_entry( CONSTANT_TEMPERATURE = "constant_temperature" -class TouchlineSLZone(CoordinatorEntity[TouchlineSLModuleCoordinator], ClimateEntity): +class TouchlineSLZone(TouchlineSLZoneEntity, ClimateEntity): """Roth Touchline SL Zone.""" - _attr_has_entity_name = True + _attr_hvac_action = HVACAction.IDLE _attr_hvac_mode = HVACMode.HEAT _attr_hvac_modes = [HVACMode.HEAT] _attr_name = None @@ -52,22 +49,12 @@ class TouchlineSLZone(CoordinatorEntity[TouchlineSLModuleCoordinator], ClimateEn def __init__(self, coordinator: TouchlineSLModuleCoordinator, zone_id: int) -> None: """Construct a Touchline SL climate zone.""" - super().__init__(coordinator) - self.zone_id: int = zone_id + super().__init__(coordinator, zone_id) self._attr_unique_id = ( f"module-{self.coordinator.data.module.id}-zone-{self.zone_id}" ) - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, str(zone_id))}, - name=self.zone.name, - manufacturer="Roth", - via_device=(DOMAIN, coordinator.data.module.id), - model="zone", - suggested_area=self.zone.name, - ) - # Call this in __init__ so data is populated right away, since it's # already available in the coordinator data. 
self.set_attr() @@ -78,16 +65,6 @@ class TouchlineSLZone(CoordinatorEntity[TouchlineSLModuleCoordinator], ClimateEn self.set_attr() super()._handle_coordinator_update() - @property - def zone(self) -> Zone: - """Return the device object from the coordinator data.""" - return self.coordinator.data.zones[self.zone_id] - - @property - def available(self) -> bool: - """Return if the device is available.""" - return super().available and self.zone_id in self.coordinator.data.zones - async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: @@ -124,3 +101,16 @@ class TouchlineSLZone(CoordinatorEntity[TouchlineSLModuleCoordinator], ClimateEn elif self.zone.mode == "globalSchedule": schedule = self.zone.schedule self._attr_preset_mode = schedule.name + + if self.zone.algorithm == "heating": + self._attr_hvac_action = ( + HVACAction.HEATING if self.zone.relay_on else HVACAction.IDLE + ) + self._attr_hvac_mode = HVACMode.HEAT + self._attr_hvac_modes = [HVACMode.HEAT] + elif self.zone.algorithm == "cooling": + self._attr_hvac_action = ( + HVACAction.COOLING if self.zone.relay_on else HVACAction.IDLE + ) + self._attr_hvac_mode = HVACMode.COOL + self._attr_hvac_modes = [HVACMode.COOL] diff --git a/homeassistant/components/touchline_sl/entity.py b/homeassistant/components/touchline_sl/entity.py new file mode 100644 index 00000000000..637ad8955eb --- /dev/null +++ b/homeassistant/components/touchline_sl/entity.py @@ -0,0 +1,38 @@ +"""Base class for Touchline SL zone entities.""" + +from pytouchlinesl import Zone + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import TouchlineSLModuleCoordinator + + +class TouchlineSLZoneEntity(CoordinatorEntity[TouchlineSLModuleCoordinator]): + """Defines a base Touchline SL zone entity.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: TouchlineSLModuleCoordinator, zone_id: int) -> None: + """Initialize touchline entity.""" + super().__init__(coordinator) + self.zone_id = zone_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, str(zone_id))}, + name=self.zone.name, + manufacturer="Roth", + via_device=(DOMAIN, coordinator.data.module.id), + model="zone", + suggested_area=self.zone.name, + ) + + @property + def zone(self) -> Zone: + """Return the device object from the coordinator data.""" + return self.coordinator.data.zones[self.zone_id] + + @property + def available(self) -> bool: + """Return if the device is available.""" + return super().available and self.zone_id in self.coordinator.data.zones diff --git a/homeassistant/components/touchline_sl/manifest.json b/homeassistant/components/touchline_sl/manifest.json index dd591cbf038..ab07ae770fd 100644 --- a/homeassistant/components/touchline_sl/manifest.json +++ b/homeassistant/components/touchline_sl/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/touchline_sl", "integration_type": "hub", "iot_class": "cloud_polling", - "requirements": ["pytouchlinesl==0.1.8"] + "requirements": ["pytouchlinesl==0.3.0"] } diff --git a/homeassistant/components/tplink/__init__.py b/homeassistant/components/tplink/__init__.py index ceeb1120ed8..a7ffce686be 100644 --- a/homeassistant/components/tplink/__init__.py +++ b/homeassistant/components/tplink/__init__.py @@ -31,6 +31,7 @@ from homeassistant.const import ( CONF_MAC, CONF_MODEL, 
CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant, callback @@ -141,12 +142,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo entry_credentials_hash = entry.data.get(CONF_CREDENTIALS_HASH) entry_use_http = entry.data.get(CONF_USES_HTTP, False) entry_aes_keys = entry.data.get(CONF_AES_KEYS) + port_override = entry.data.get(CONF_PORT) conn_params: Device.ConnectionParameters | None = None if conn_params_dict := entry.data.get(CONF_CONNECTION_PARAMETERS): try: conn_params = Device.ConnectionParameters.from_dict(conn_params_dict) - except KasaException: + except (KasaException, TypeError, ValueError, LookupError): _LOGGER.warning( "Invalid connection parameters dict for %s: %s", host, conn_params_dict ) @@ -157,6 +159,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TPLinkConfigEntry) -> bo timeout=CONNECT_TIMEOUT, http_client=client, aes_keys=entry_aes_keys, + port_override=port_override, ) if conn_params: config.connection_type = conn_params diff --git a/homeassistant/components/tplink/binary_sensor.py b/homeassistant/components/tplink/binary_sensor.py index 34375bccf4f..e14ecf01749 100644 --- a/homeassistant/components/tplink/binary_sensor.py +++ b/homeassistant/components/tplink/binary_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Final +from typing import Final, cast from kasa import Feature @@ -98,4 +98,4 @@ class TPLinkBinarySensorEntity(CoordinatedTPLinkFeatureEntity, BinarySensorEntit @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_is_on = self._feature.value + self._attr_is_on = cast(bool | None, self._feature.value) diff --git a/homeassistant/components/tplink/climate.py b/homeassistant/components/tplink/climate.py index f86992ea0cf..0bd25d9f80c 100644 --- a/homeassistant/components/tplink/climate.py +++ b/homeassistant/components/tplink/climate.py @@ -116,8 +116,8 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_current_temperature = self._temp_feature.value - self._attr_target_temperature = self._target_feature.value + self._attr_current_temperature = cast(float | None, self._temp_feature.value) + self._attr_target_temperature = cast(float | None, self._target_feature.value) self._attr_hvac_mode = ( HVACMode.HEAT if self._state_feature.value else HVACMode.OFF @@ -134,7 +134,9 @@ class TPLinkClimateEntity(CoordinatedTPLinkEntity, ClimateEntity): self._attr_hvac_action = HVACAction.OFF return - self._attr_hvac_action = STATE_TO_ACTION[self._mode_feature.value] + self._attr_hvac_action = STATE_TO_ACTION[ + cast(ThermostatState, self._mode_feature.value) + ] def _get_unique_id(self) -> str: """Return unique id.""" diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index a9f665e12fd..63f1b4e125b 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -32,6 +32,7 @@ from homeassistant.const import ( CONF_MAC, CONF_MODEL, CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import callback @@ -69,6 +70,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): MINOR_VERSION = CONF_CONFIG_ENTRY_MINOR_VERSION host: str | None = None + port: int | None = None def __init__(self) -> None: """Initialize the config flow.""" @@ -260,6 +262,26 
@@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): step_id="discovery_confirm", description_placeholders=placeholders ) + @staticmethod + def _async_get_host_port(host_str: str) -> tuple[str, int | None]: + """Parse the host string for host and port.""" + if "[" in host_str: + _, _, bracketed = host_str.partition("[") + host, _, port_str = bracketed.partition("]") + _, _, port_str = port_str.partition(":") + else: + host, _, port_str = host_str.partition(":") + + if not port_str: + return host, None + + try: + port = int(port_str) + except ValueError: + return host, None + + return host, port + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -270,14 +292,29 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: if not (host := user_input[CONF_HOST]): return await self.async_step_pick_device() - self._async_abort_entries_match({CONF_HOST: host}) + + host, port = self._async_get_host_port(host) + + match_dict = {CONF_HOST: host} + if port: + self.port = port + match_dict[CONF_PORT] = port + self._async_abort_entries_match(match_dict) + self.host = host credentials = await get_credentials(self.hass) try: device = await self._async_try_discover_and_update( - host, credentials, raise_on_progress=False, raise_on_timeout=False + host, + credentials, + raise_on_progress=False, + raise_on_timeout=False, + port=port, ) or await self._async_try_connect_all( - host, credentials=credentials, raise_on_progress=False + host, + credentials=credentials, + raise_on_progress=False, + port=port, ) except AuthenticationError: return await self.async_step_user_auth_confirm() @@ -318,7 +355,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): ) else: device = await self._async_try_connect_all( - self.host, credentials=credentials, raise_on_progress=False + self.host, + credentials=credentials, + raise_on_progress=False, + port=self.port, ) except AuthenticationError as ex: errors[CONF_PASSWORD] = "invalid_auth" @@ -420,6 +460,8 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): data[CONF_AES_KEYS] = device.config.aes_keys if device.credentials_hash: data[CONF_CREDENTIALS_HASH] = device.credentials_hash + if port := device.config.port_override: + data[CONF_PORT] = port return self.async_create_entry( title=f"{device.alias} {device.model}", data=data, @@ -430,6 +472,8 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): host: str, credentials: Credentials | None, raise_on_progress: bool, + *, + port: int | None = None, ) -> Device | None: """Try to connect to the device speculatively. @@ -441,12 +485,15 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): host, credentials=credentials, http_client=create_async_tplink_clientsession(self.hass), + port=port, ) else: # This will just try the legacy protocol that doesn't require auth # and doesn't use http try: - device = await Device.connect(config=DeviceConfig(host)) + device = await Device.connect( + config=DeviceConfig(host, port_override=port) + ) except Exception: # noqa: BLE001 return None if device: @@ -462,6 +509,8 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): credentials: Credentials | None, raise_on_progress: bool, raise_on_timeout: bool, + *, + port: int | None = None, ) -> Device | None: """Try to discover the device and call update. 
@@ -470,7 +519,9 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self._discovered_device = None try: self._discovered_device = await Discover.discover_single( - host, credentials=credentials + host, + credentials=credentials, + port=port, ) except TimeoutError as ex: if raise_on_timeout: @@ -526,6 +577,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): reauth_entry = self._get_reauth_entry() entry_data = reauth_entry.data host = entry_data[CONF_HOST] + port = entry_data.get(CONF_PORT) if user_input: username = user_input[CONF_USERNAME] @@ -537,8 +589,12 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): credentials=credentials, raise_on_progress=False, raise_on_timeout=False, + port=port, ) or await self._async_try_connect_all( - host, credentials=credentials, raise_on_progress=False + host, + credentials=credentials, + raise_on_progress=False, + port=port, ) except AuthenticationError as ex: errors[CONF_PASSWORD] = "invalid_auth" diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 96ea8f41bb7..0abd68543c5 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -68,6 +68,15 @@ "state": { "on": "mdi:sleep" } + }, + "child_lock": { + "default": "mdi:account-lock" + }, + "pir_enabled": { + "default": "mdi:motion-sensor-off", + "state": { + "on": "mdi:motion-sensor" + } } }, "sensor": { @@ -88,6 +97,9 @@ }, "alarm_source": { "default": "mdi:bell" + }, + "water_alert_timestamp": { + "default": "mdi:clock-alert-outline" } }, "number": { diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index cb8a55b3db2..3f19f50cdb6 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -300,6 +300,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink", "iot_class": "local_polling", "loggers": ["kasa"], - "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.7"] + "requirements": ["python-kasa[speedups]==0.8.0"] } diff --git a/homeassistant/components/tplink/number.py b/homeassistant/components/tplink/number.py index 5f80d5479d2..b51c00db7c0 100644 --- a/homeassistant/components/tplink/number.py +++ b/homeassistant/components/tplink/number.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass import logging -from typing import Final +from typing import Final, cast from kasa import Device, Feature @@ -108,4 +108,4 @@ class TPLinkNumberEntity(CoordinatedTPLinkFeatureEntity, NumberEntity): @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_native_value = self._feature.value + self._attr_native_value = cast(float | None, self._feature.value) diff --git a/homeassistant/components/tplink/select.py b/homeassistant/components/tplink/select.py index 41e3224215b..3755a1d0be2 100644 --- a/homeassistant/components/tplink/select.py +++ b/homeassistant/components/tplink/select.py @@ -93,4 +93,4 @@ class TPLinkSelectEntity(CoordinatedTPLinkFeatureEntity, SelectEntity): @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_current_option = self._feature.value + self._attr_current_option = cast(str | None, self._feature.value) diff --git a/homeassistant/components/tplink/sensor.py b/homeassistant/components/tplink/sensor.py index f3d3b1c7b31..8b7351f8d7d 100644 --- a/homeassistant/components/tplink/sensor.py +++ 
b/homeassistant/components/tplink/sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import cast +from typing import TYPE_CHECKING, cast from kasa import Feature @@ -97,6 +97,10 @@ SENSOR_DESCRIPTIONS: tuple[TPLinkSensorEntityDescription, ...] = ( key="device_time", device_class=SensorDeviceClass.TIMESTAMP, ), + TPLinkSensorEntityDescription( + key="water_alert_timestamp", + device_class=SensorDeviceClass.TIMESTAMP, + ), TPLinkSensorEntityDescription( key="humidity", device_class=SensorDeviceClass.HUMIDITY, @@ -157,6 +161,12 @@ class TPLinkSensorEntity(CoordinatedTPLinkFeatureEntity, SensorEntity): # We probably do not need this, when we are rounding already? self._attr_suggested_display_precision = self._feature.precision_hint + if TYPE_CHECKING: + # pylint: disable-next=import-outside-toplevel + from datetime import date, datetime + + assert isinstance(value, str | int | float | date | datetime | None) + self._attr_native_value = value # Map to homeassistant units and fallback to upstream one if none found if (unit := self._feature.unit) is not None: diff --git a/homeassistant/components/tplink/strings.json b/homeassistant/components/tplink/strings.json index e4eb484aec9..8e5118c2720 100644 --- a/homeassistant/components/tplink/strings.json +++ b/homeassistant/components/tplink/strings.json @@ -159,6 +159,9 @@ "device_time": { "name": "Device time" }, + "water_alert_timestamp": { + "name": "Last water leak alert" + }, "auto_off_at": { "name": "Auto off at" }, @@ -187,6 +190,12 @@ }, "fan_sleep_mode": { "name": "Fan sleep mode" + }, + "child_lock": { + "name": "Child lock" + }, + "pir_enabled": { + "name": "Motion sensor" } }, "number": { diff --git a/homeassistant/components/tplink/switch.py b/homeassistant/components/tplink/switch.py index 6d3e21d88c5..7e223752665 100644 --- a/homeassistant/components/tplink/switch.py +++ b/homeassistant/components/tplink/switch.py @@ -4,7 +4,7 @@ from __future__ import annotations from dataclasses import dataclass import logging -from typing import Any +from typing import Any, cast from kasa import Feature @@ -48,6 +48,12 @@ SWITCH_DESCRIPTIONS: tuple[TPLinkSwitchEntityDescription, ...] 
= ( TPLinkSwitchEntityDescription( key="fan_sleep_mode", ), + TPLinkSwitchEntityDescription( + key="child_lock", + ), + TPLinkSwitchEntityDescription( + key="pir_enabled", + ), ) SWITCH_DESCRIPTIONS_MAP = {desc.key: desc for desc in SWITCH_DESCRIPTIONS} @@ -93,4 +99,4 @@ class TPLinkSwitch(CoordinatedTPLinkFeatureEntity, SwitchEntity): @callback def _async_update_attrs(self) -> None: """Update the entity's attributes.""" - self._attr_is_on = self._feature.value + self._attr_is_on = cast(bool | None, self._feature.value) diff --git a/homeassistant/components/tplink_lte/manifest.json b/homeassistant/components/tplink_lte/manifest.json index 63640628e35..a880594e683 100644 --- a/homeassistant/components/tplink_lte/manifest.json +++ b/homeassistant/components/tplink_lte/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_lte", "iot_class": "local_polling", "loggers": ["tp_connected"], + "quality_scale": "legacy", "requirements": ["tp-connected==0.0.4"] } diff --git a/homeassistant/components/tplink_omada/__init__.py b/homeassistant/components/tplink_omada/__init__.py index 573df44122c..2d33a890510 100644 --- a/homeassistant/components/tplink_omada/__init__.py +++ b/homeassistant/components/tplink_omada/__init__.py @@ -11,9 +11,9 @@ from tplink_omada_client.exceptions import ( UnsupportedControllerVersion, ) -from homeassistant.config_entries import ConfigEntry +from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr @@ -60,6 +60,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: OmadaConfigEntry) -> boo entry.runtime_data = controller + async def handle_reconnect_client(call: ServiceCall) -> None: + """Handle the service action call.""" + mac: str | None = call.data.get("mac") + if not mac: + return + + await site_client.reconnect_client(mac) + + hass.services.async_register(DOMAIN, "reconnect_client", handle_reconnect_client) + _remove_old_devices(hass, entry, controller.devices_coordinator.data) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -69,7 +79,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: OmadaConfigEntry) -> boo async def async_unload_entry(hass: HomeAssistant, entry: OmadaConfigEntry) -> bool: """Unload a config entry.""" - return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + loaded_entries = [ + entry + for entry in hass.config_entries.async_entries(DOMAIN) + if entry.state == ConfigEntryState.LOADED + ] + if len(loaded_entries) == 1: + # This is the last loaded instance of Omada, deregister any services + hass.services.async_remove(DOMAIN, "reconnect_client") + + return unload_ok def _remove_old_devices( diff --git a/homeassistant/components/tplink_omada/icons.json b/homeassistant/components/tplink_omada/icons.json index c681b5e1f81..94f0a6b9764 100644 --- a/homeassistant/components/tplink_omada/icons.json +++ b/homeassistant/components/tplink_omada/icons.json @@ -27,5 +27,10 @@ "default": "mdi:memory" } } + }, + "services": { + "reconnect_client": { + "service": "mdi:sync" + } } } diff --git a/homeassistant/components/tplink_omada/manifest.json 
b/homeassistant/components/tplink_omada/manifest.json index 6bde656dc30..af20b54675b 100644 --- a/homeassistant/components/tplink_omada/manifest.json +++ b/homeassistant/components/tplink_omada/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_omada", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["tplink-omada-client==1.4.2"] + "requirements": ["tplink-omada-client==1.4.3"] } diff --git a/homeassistant/components/tplink_omada/services.yaml b/homeassistant/components/tplink_omada/services.yaml new file mode 100644 index 00000000000..19a64ea8625 --- /dev/null +++ b/homeassistant/components/tplink_omada/services.yaml @@ -0,0 +1,7 @@ +reconnect_client: + fields: + mac: + required: true + example: "01-23-45-67-89-AB" + selector: + text: diff --git a/homeassistant/components/tplink_omada/strings.json b/homeassistant/components/tplink_omada/strings.json index 7fcede3fb12..73cea692dbf 100644 --- a/homeassistant/components/tplink_omada/strings.json +++ b/homeassistant/components/tplink_omada/strings.json @@ -87,5 +87,17 @@ "name": "Memory usage" } } + }, + "services": { + "reconnect_client": { + "name": "Reconnect wireless client", + "description": "Tries to get wireless client to reconnect to Omada Network.", + "fields": { + "mac": { + "name": "MAC address", + "description": "MAC address of the device." + } + } + } } } diff --git a/homeassistant/components/trafikverket_camera/__init__.py b/homeassistant/components/trafikverket_camera/__init__.py index 938bfce2318..614072cc706 100644 --- a/homeassistant/components/trafikverket_camera/__init__.py +++ b/homeassistant/components/trafikverket_camera/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from pytrafikverket.trafikverket_camera import TrafikverketCamera +from pytrafikverket import TrafikverketCamera from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_LOCATION @@ -25,7 +25,7 @@ TVCameraConfigEntry = ConfigEntry[TVDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: TVCameraConfigEntry) -> bool: """Set up Trafikverket Camera from a config entry.""" - coordinator = TVDataUpdateCoordinator(hass) + coordinator = TVDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/trafikverket_camera/config_flow.py b/homeassistant/components/trafikverket_camera/config_flow.py index 18e210beb16..29f3db7beac 100644 --- a/homeassistant/components/trafikverket_camera/config_flow.py +++ b/homeassistant/components/trafikverket_camera/config_flow.py @@ -5,9 +5,13 @@ from __future__ import annotations from collections.abc import Mapping from typing import Any -from pytrafikverket.exceptions import InvalidAuthentication, NoCameraFound, UnknownError -from pytrafikverket.models import CameraInfoModel -from pytrafikverket.trafikverket_camera import TrafikverketCamera +from pytrafikverket import ( + CameraInfoModel, + InvalidAuthentication, + NoCameraFound, + TrafikverketCamera, + UnknownError, +) import voluptuous as vol from homeassistant.config_entries import ( diff --git a/homeassistant/components/trafikverket_camera/coordinator.py b/homeassistant/components/trafikverket_camera/coordinator.py index 7bc5c556c00..649eb102575 100644 --- a/homeassistant/components/trafikverket_camera/coordinator.py +++ b/homeassistant/components/trafikverket_camera/coordinator.py @@ -9,14 +9,14 @@ 
import logging from typing import TYPE_CHECKING import aiohttp -from pytrafikverket.exceptions import ( +from pytrafikverket import ( + CameraInfoModel, InvalidAuthentication, MultipleCamerasFound, NoCameraFound, + TrafikverketCamera, UnknownError, ) -from pytrafikverket.models import CameraInfoModel -from pytrafikverket.trafikverket_camera import TrafikverketCamera from homeassistant.const import CONF_API_KEY, CONF_ID from homeassistant.core import HomeAssistant @@ -44,21 +44,20 @@ class CameraData: class TVDataUpdateCoordinator(DataUpdateCoordinator[CameraData]): """A Trafikverket Data Update Coordinator.""" - config_entry: TVCameraConfigEntry - - def __init__(self, hass: HomeAssistant) -> None: + def __init__(self, hass: HomeAssistant, config_entry: TVCameraConfigEntry) -> None: """Initialize the Trafikverket coordinator.""" super().__init__( hass, _LOGGER, + config_entry=config_entry, name=DOMAIN, update_interval=TIME_BETWEEN_UPDATES, ) self.session = async_get_clientsession(hass) self._camera_api = TrafikverketCamera( - self.session, self.config_entry.data[CONF_API_KEY] + self.session, config_entry.data[CONF_API_KEY] ) - self._id = self.config_entry.data[CONF_ID] + self._id = config_entry.data[CONF_ID] async def _async_update_data(self) -> CameraData: """Fetch data from Trafikverket.""" diff --git a/homeassistant/components/trafikverket_camera/manifest.json b/homeassistant/components/trafikverket_camera/manifest.json index f424f47f7c5..08d945e0a0c 100644 --- a/homeassistant/components/trafikverket_camera/manifest.json +++ b/homeassistant/components/trafikverket_camera/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_camera", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==1.0.0"] + "requirements": ["pytrafikverket==1.1.1"] } diff --git a/homeassistant/components/trafikverket_ferry/manifest.json b/homeassistant/components/trafikverket_ferry/manifest.json index 0b7b056754c..4177587db7e 100644 --- a/homeassistant/components/trafikverket_ferry/manifest.json +++ b/homeassistant/components/trafikverket_ferry/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_ferry", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==1.0.0"] + "requirements": ["pytrafikverket==1.1.1"] } diff --git a/homeassistant/components/trafikverket_train/__init__.py b/homeassistant/components/trafikverket_train/__init__.py index 3e807df9301..23aee50d816 100644 --- a/homeassistant/components/trafikverket_train/__init__.py +++ b/homeassistant/components/trafikverket_train/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -11,6 +13,8 @@ from .coordinator import TVDataUpdateCoordinator TVTrainConfigEntry = ConfigEntry[TVDataUpdateCoordinator] +_LOGGER = logging.getLogger(__name__) + async def async_setup_entry(hass: HomeAssistant, entry: TVTrainConfigEntry) -> bool: """Set up Trafikverket Train from a config entry.""" @@ -42,3 +46,24 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) + + +async def async_migrate_entry(hass: HomeAssistant, entry: 
TVTrainConfigEntry) -> bool: + """Migrate config entry.""" + _LOGGER.debug("Migrating from version %s", entry.version) + + if entry.version > 1: + # This means the user has downgraded from a future version + return False + + if entry.version == 1 and entry.minor_version == 1: + # Remove unique id + hass.config_entries.async_update_entry(entry, unique_id=None, minor_version=2) + + _LOGGER.debug( + "Migration to version %s.%s successful", + entry.version, + entry.minor_version, + ) + + return True diff --git a/homeassistant/components/trafikverket_train/config_flow.py b/homeassistant/components/trafikverket_train/config_flow.py index a9eefd09b9b..363b9bb2542 100644 --- a/homeassistant/components/trafikverket_train/config_flow.py +++ b/homeassistant/components/trafikverket_train/config_flow.py @@ -21,7 +21,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_WEEKDAY, WEEKDAYS from homeassistant.core import HomeAssistant, callback @@ -37,7 +37,7 @@ from homeassistant.helpers.selector import ( import homeassistant.util.dt as dt_util from .const import CONF_FILTER_PRODUCT, CONF_FROM, CONF_TIME, CONF_TO, DOMAIN -from .util import create_unique_id, next_departuredate +from .util import next_departuredate _LOGGER = logging.getLogger(__name__) @@ -93,8 +93,8 @@ async def validate_input( try: web_session = async_get_clientsession(hass) train_api = TrafikverketTrain(web_session, api_key) - from_station = await train_api.async_get_train_station(train_from) - to_station = await train_api.async_get_train_station(train_to) + from_station = await train_api.async_search_train_station(train_from) + to_station = await train_api.async_search_train_station(train_to) if train_time: await train_api.async_get_train_stop( from_station, to_station, when, product_filter @@ -125,6 +125,7 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Trafikverket Train integration.""" VERSION = 1 + MINOR_VERSION = 2 @staticmethod @callback @@ -132,7 +133,7 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> TVTrainOptionsFlowHandler: """Get the options flow for this handler.""" - return TVTrainOptionsFlowHandler(config_entry) + return TVTrainOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -202,11 +203,16 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): filter_product, ) if not errors: - unique_id = create_unique_id( - train_from, train_to, train_time, train_days + self._async_abort_entries_match( + { + CONF_API_KEY: api_key, + CONF_FROM: train_from, + CONF_TO: train_to, + CONF_TIME: train_time, + CONF_WEEKDAY: train_days, + CONF_FILTER_PRODUCT: filter_product, + } ) - await self.async_set_unique_id(unique_id) - self._abort_if_unique_id_configured() return self.async_create_entry( title=name, data={ @@ -229,7 +235,7 @@ class TVTrainConfigFlow(ConfigFlow, domain=DOMAIN): ) -class TVTrainOptionsFlowHandler(OptionsFlowWithConfigEntry): +class TVTrainOptionsFlowHandler(OptionsFlow): """Handle Trafikverket Train options.""" async def async_step_init( @@ -247,7 +253,7 @@ class TVTrainOptionsFlowHandler(OptionsFlowWithConfigEntry): step_id="init", data_schema=self.add_suggested_values_to_schema( vol.Schema(OPTION_SCHEMA), - user_input or self.options, + user_input or self.config_entry.options, ), errors=errors, ) diff --git 
a/homeassistant/components/trafikverket_train/coordinator.py b/homeassistant/components/trafikverket_train/coordinator.py index 16a7a649b85..49d4e1ded74 100644 --- a/homeassistant/components/trafikverket_train/coordinator.py +++ b/homeassistant/components/trafikverket_train/coordinator.py @@ -94,10 +94,10 @@ class TVDataUpdateCoordinator(DataUpdateCoordinator[TrainData]): async def _async_setup(self) -> None: """Initiate stations.""" try: - self.to_station = await self._train_api.async_get_train_station( + self.to_station = await self._train_api.async_search_train_station( self.config_entry.data[CONF_TO] ) - self.from_station = await self._train_api.async_get_train_station( + self.from_station = await self._train_api.async_search_train_station( self.config_entry.data[CONF_FROM] ) except InvalidAuthentication as error: diff --git a/homeassistant/components/trafikverket_train/manifest.json b/homeassistant/components/trafikverket_train/manifest.json index 222b23dbe9a..40f3a39a2bb 100644 --- a/homeassistant/components/trafikverket_train/manifest.json +++ b/homeassistant/components/trafikverket_train/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_train", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==1.0.0"] + "requirements": ["pytrafikverket==1.1.1"] } diff --git a/homeassistant/components/trafikverket_train/util.py b/homeassistant/components/trafikverket_train/util.py index 9648436f1e5..9a8dd9ea237 100644 --- a/homeassistant/components/trafikverket_train/util.py +++ b/homeassistant/components/trafikverket_train/util.py @@ -2,22 +2,11 @@ from __future__ import annotations -from datetime import date, time, timedelta +from datetime import date, timedelta from homeassistant.const import WEEKDAYS -def create_unique_id( - from_station: str, to_station: str, depart_time: time | str | None, weekdays: list -) -> str: - """Create unique id.""" - timestr = str(depart_time) if depart_time else "" - return ( - f"{from_station.casefold().replace(' ', '')}-{to_station.casefold().replace(' ', '')}" - f"-{timestr.casefold().replace(' ', '')}-{weekdays!s}" - ) - - def next_weekday(fromdate: date, weekday: int) -> date: """Return the date of the next time a specific weekday happen.""" days_ahead = weekday - fromdate.weekday() diff --git a/homeassistant/components/trafikverket_weatherstation/manifest.json b/homeassistant/components/trafikverket_weatherstation/manifest.json index 85838726178..3996379540f 100644 --- a/homeassistant/components/trafikverket_weatherstation/manifest.json +++ b/homeassistant/components/trafikverket_weatherstation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/trafikverket_weatherstation", "iot_class": "cloud_polling", "loggers": ["pytrafikverket"], - "requirements": ["pytrafikverket==1.0.0"] + "requirements": ["pytrafikverket==1.1.1"] } diff --git a/homeassistant/components/transmission/config_flow.py b/homeassistant/components/transmission/config_flow.py index a6e77dd23f7..30e9f5a146b 100644 --- a/homeassistant/components/transmission/config_flow.py +++ b/homeassistant/components/transmission/config_flow.py @@ -63,7 +63,7 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> TransmissionOptionsFlowHandler: """Get the options flow for this handler.""" - return TransmissionOptionsFlowHandler(config_entry) + return TransmissionOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, 
Any] | None = None @@ -138,10 +138,6 @@ class TransmissionFlowHandler(ConfigFlow, domain=DOMAIN): class TransmissionOptionsFlowHandler(OptionsFlow): """Handle Transmission client options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize Transmission options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/transmission/sensor.py b/homeassistant/components/transmission/sensor.py index 737520adb5f..652f5d51fbb 100644 --- a/homeassistant/components/transmission/sensor.py +++ b/homeassistant/components/transmission/sensor.py @@ -83,7 +83,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="active_torrents", translation_key="active_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.active_torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="active_torrents" @@ -92,7 +91,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="paused_torrents", translation_key="paused_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.paused_torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="paused_torrents" @@ -101,7 +99,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="total_torrents", translation_key="total_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: coordinator.data.torrent_count, extra_state_attr_func=lambda coordinator: _torrents_info_attr( coordinator=coordinator, key="total_torrents" @@ -110,7 +107,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] = ( TransmissionSensorEntityDescription( key="completed_torrents", translation_key="completed_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: len( _filter_torrents(coordinator.torrents, MODES["completed_torrents"]) ), @@ -121,7 +117,6 @@ SENSOR_TYPES: tuple[TransmissionSensorEntityDescription, ...] 
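The Transmission options flow above, like the UniFi, UniFi Protect, UpCloud, UPnP, Vera and Verisure handlers later in this patch, is now constructed without arguments and no longer stores the config entry itself; it relies on the config_entry attribute provided by the OptionsFlow base class. A minimal sketch of the resulting shape, with a hypothetical option key:

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow


class ExampleOptionsFlowHandler(OptionsFlow):
    """Handle example options (no __init__ storing the entry is needed)."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Manage the options."""
        if user_input is not None:
            return self.async_create_entry(data=user_input)

        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        "limit",  # hypothetical option key
                        default=self.config_entry.options.get("limit", 10),
                    ): int,
                }
            ),
        )


# On the ConfigFlow side, async_get_options_flow() now simply returns
# ExampleOptionsFlowHandler() without passing the config entry in.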
= ( TransmissionSensorEntityDescription( key="started_torrents", translation_key="started_torrents", - native_unit_of_measurement="torrents", val_func=lambda coordinator: len( _filter_torrents(coordinator.torrents, MODES["started_torrents"]) ), diff --git a/homeassistant/components/transmission/strings.json b/homeassistant/components/transmission/strings.json index 20ae6ca723d..578bc262589 100644 --- a/homeassistant/components/transmission/strings.json +++ b/homeassistant/components/transmission/strings.json @@ -60,19 +60,24 @@ } }, "active_torrents": { - "name": "Active torrents" + "name": "Active torrents", + "unit_of_measurement": "torrents" }, "paused_torrents": { - "name": "Paused torrents" + "name": "Paused torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "total_torrents": { - "name": "Total torrents" + "name": "Total torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "completed_torrents": { - "name": "Completed torrents" + "name": "Completed torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" }, "started_torrents": { - "name": "Started torrents" + "name": "Started torrents", + "unit_of_measurement": "[%key:component::transmission::entity::sensor::active_torrents::unit_of_measurement%]" } }, "switch": { diff --git a/homeassistant/components/transport_nsw/manifest.json b/homeassistant/components/transport_nsw/manifest.json index 9d535b99aa1..83c138a4f91 100644 --- a/homeassistant/components/transport_nsw/manifest.json +++ b/homeassistant/components/transport_nsw/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/transport_nsw", "iot_class": "cloud_polling", "loggers": ["TransportNSW"], + "quality_scale": "legacy", "requirements": ["PyTransportNSW==0.1.1"] } diff --git a/homeassistant/components/travisci/manifest.json b/homeassistant/components/travisci/manifest.json index e61a987c86f..be30cf8e1f9 100644 --- a/homeassistant/components/travisci/manifest.json +++ b/homeassistant/components/travisci/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/travisci", "iot_class": "cloud_polling", "loggers": ["travispy"], + "quality_scale": "legacy", "requirements": ["TravisPy==0.3.5"] } diff --git a/homeassistant/components/trend/binary_sensor.py b/homeassistant/components/trend/binary_sensor.py index 681680f180f..9691ecf0744 100644 --- a/homeassistant/components/trend/binary_sensor.py +++ b/homeassistant/components/trend/binary_sensor.py @@ -227,10 +227,15 @@ class SensorTrend(BinarySensorEntity, RestoreEntity): state = new_state.attributes.get(self._attribute) else: state = new_state.state - if state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): + + if state in (STATE_UNKNOWN, STATE_UNAVAILABLE): + self._attr_available = False + else: + self._attr_available = True sample = (new_state.last_updated.timestamp(), float(state)) # type: ignore[arg-type] self.samples.append(sample) - self.async_schedule_update_ha_state(True) + + self.async_schedule_update_ha_state(True) except (ValueError, TypeError) as ex: _LOGGER.error(ex) diff --git a/homeassistant/components/trend/manifest.json b/homeassistant/components/trend/manifest.json index 56b4b811171..d7981105fd2 100644 --- a/homeassistant/components/trend/manifest.json +++ b/homeassistant/components/trend/manifest.json @@ -7,5 +7,5 @@ "integration_type": 
"helper", "iot_class": "calculated", "quality_scale": "internal", - "requirements": ["numpy==1.26.4"] + "requirements": ["numpy==2.1.3"] } diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index ad267b9106b..e7d1091719b 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -13,6 +13,7 @@ import logging import mimetypes import os import re +import secrets import subprocess import tempfile from typing import Any, Final, TypedDict, final @@ -540,6 +541,10 @@ class SpeechManager: self.file_cache: dict[str, str] = {} self.mem_cache: dict[str, TTSCache] = {} + # filename <-> token + self.filename_to_token: dict[str, str] = {} + self.token_to_filename: dict[str, str] = {} + def _init_cache(self) -> dict[str, str]: """Init cache folder and fetch files.""" try: @@ -656,7 +661,17 @@ class SpeechManager: engine_instance, cache_key, message, use_cache, language, options ) - return f"/api/tts_proxy/{filename}" + # Use a randomly generated token instead of exposing the filename + token = self.filename_to_token.get(filename) + if not token: + # Keep extension (.mp3, etc.) + token = secrets.token_urlsafe(16) + os.path.splitext(filename)[1] + + # Map token <-> filename + self.filename_to_token[filename] = token + self.token_to_filename[token] = filename + + return f"/api/tts_proxy/{token}" async def async_get_tts_audio( self, @@ -910,11 +925,15 @@ class SpeechManager: ), ) - async def async_read_tts(self, filename: str) -> tuple[str | None, bytes]: + async def async_read_tts(self, token: str) -> tuple[str | None, bytes]: """Read a voice file and return binary. This method is a coroutine. """ + filename = self.token_to_filename.get(token) + if not filename: + raise HomeAssistantError(f"{token} was not recognized!") + if not (record := _RE_VOICE_FILE.match(filename.lower())) and not ( record := _RE_LEGACY_VOICE_FILE.match(filename.lower()) ): @@ -1076,6 +1095,7 @@ class TextToSpeechView(HomeAssistantView): async def get(self, request: web.Request, filename: str) -> web.Response: """Start a get request.""" try: + # filename is actually token, but we keep its name for compatibility content, data = await self.tts.async_read_tts(filename) except HomeAssistantError as err: _LOGGER.error("Error on load tts: %s", err) diff --git a/homeassistant/components/tuya/__init__.py b/homeassistant/components/tuya/__init__.py index 47143f3595c..c8a639cd239 100644 --- a/homeassistant/components/tuya/__init__.py +++ b/homeassistant/components/tuya/__init__.py @@ -146,14 +146,21 @@ class DeviceListener(SharingDeviceListener): self.hass = hass self.manager = manager - def update_device(self, device: CustomerDevice) -> None: + def update_device( + self, device: CustomerDevice, updated_status_properties: list[str] | None + ) -> None: """Update device status.""" LOGGER.debug( - "Received update for device %s: %s", + "Received update for device %s: %s (updated properties: %s)", device.id, self.manager.device_map[device.id].status, + updated_status_properties, + ) + dispatcher_send( + self.hass, + f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device.id}", + updated_status_properties, ) - dispatcher_send(self.hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device.id}") def add_device(self, device: CustomerDevice) -> None: """Add device added listener.""" diff --git a/homeassistant/components/tuya/entity.py b/homeassistant/components/tuya/entity.py index 4d3710f7570..cc258560067 100644 --- a/homeassistant/components/tuya/entity.py +++ 
b/homeassistant/components/tuya/entity.py @@ -283,10 +283,15 @@ class TuyaEntity(Entity): async_dispatcher_connect( self.hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{self.device.id}", - self.async_write_ha_state, + self._handle_state_update, ) ) + async def _handle_state_update( + self, updated_status_properties: list[str] | None + ) -> None: + self.async_write_ha_state() + def _send_command(self, commands: list[dict[str, Any]]) -> None: """Send command to the device.""" LOGGER.debug("Sending commands for device %s: %s", self.device.id, commands) diff --git a/homeassistant/components/tuya/manifest.json b/homeassistant/components/tuya/manifest.json index 305a74160de..b53e6fa27d8 100644 --- a/homeassistant/components/tuya/manifest.json +++ b/homeassistant/components/tuya/manifest.json @@ -43,5 +43,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["tuya_iot"], - "requirements": ["tuya-device-sharing-sdk==0.1.9"] + "requirements": ["tuya-device-sharing-sdk==0.2.1"] } diff --git a/homeassistant/components/tuya/number.py b/homeassistant/components/tuya/number.py index d2e381d9982..8d5b5dbfa19 100644 --- a/homeassistant/components/tuya/number.py +++ b/homeassistant/components/tuya/number.py @@ -292,6 +292,17 @@ NUMBERS: dict[str, tuple[NumberEntityDescription, ...]] = { device_class=NumberDeviceClass.TEMPERATURE, ), ), + # CO2 Detector + # https://developer.tuya.com/en/docs/iot/categoryco2bj?id=Kaiuz3wes7yuy + "co2bj": ( + NumberEntityDescription( + key=DPCode.ALARM_TIME, + translation_key="alarm_duration", + native_unit_of_measurement=UnitOfTime.SECONDS, + device_class=NumberDeviceClass.DURATION, + entity_category=EntityCategory.CONFIG, + ), + ), } diff --git a/homeassistant/components/tuya/select.py b/homeassistant/components/tuya/select.py index abc5e4c496b..831d3cb3e0c 100644 --- a/homeassistant/components/tuya/select.py +++ b/homeassistant/components/tuya/select.py @@ -307,6 +307,15 @@ SELECTS: dict[str, tuple[SelectEntityDescription, ...]] = { entity_category=EntityCategory.CONFIG, ), ), + # CO2 Detector + # https://developer.tuya.com/en/docs/iot/categoryco2bj?id=Kaiuz3wes7yuy + "co2bj": ( + SelectEntityDescription( + key=DPCode.ALARM_VOLUME, + translation_key="volume", + entity_category=EntityCategory.CONFIG, + ), + ), } # Socket (duplicate of `kg`) diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index fd8efcac95d..f766c744998 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -203,6 +203,23 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { device_class=SensorDeviceClass.CO2, state_class=SensorStateClass.MEASUREMENT, ), + TuyaSensorEntityDescription( + key=DPCode.CH2O_VALUE, + translation_key="formaldehyde", + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.VOC_VALUE, + translation_key="voc", + device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, + state_class=SensorStateClass.MEASUREMENT, + ), + TuyaSensorEntityDescription( + key=DPCode.PM25_VALUE, + translation_key="pm25", + device_class=SensorDeviceClass.PM25, + state_class=SensorStateClass.MEASUREMENT, + ), *BATTERY_SENSORS, ), # Two-way temperature and humidity switch @@ -243,6 +260,31 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { entity_registry_enabled_default=False, ), ), + # Single Phase power meter + # Note: Undocumented + "aqcz": ( + TuyaSensorEntityDescription( + key=DPCode.CUR_CURRENT, + translation_key="current", + 
device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TuyaSensorEntityDescription( + key=DPCode.CUR_POWER, + translation_key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + TuyaSensorEntityDescription( + key=DPCode.CUR_VOLTAGE, + translation_key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + ), # CO Detector # https://developer.tuya.com/en/docs/iot/categorycobj?id=Kaiuz3u1j6q1v "cobj": ( diff --git a/homeassistant/components/tuya/siren.py b/homeassistant/components/tuya/siren.py index 334dced134d..6f7dfe4c96c 100644 --- a/homeassistant/components/tuya/siren.py +++ b/homeassistant/components/tuya/siren.py @@ -11,6 +11,7 @@ from homeassistant.components.siren import ( SirenEntityDescription, SirenEntityFeature, ) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -43,6 +44,14 @@ SIRENS: dict[str, tuple[SirenEntityDescription, ...]] = { key=DPCode.SIREN_SWITCH, ), ), + # CO2 Detector + # https://developer.tuya.com/en/docs/iot/categoryco2bj?id=Kaiuz3wes7yuy + "co2bj": ( + SirenEntityDescription( + key=DPCode.ALARM_SWITCH, + entity_category=EntityCategory.CONFIG, + ), + ), } diff --git a/homeassistant/components/tuya/strings.json b/homeassistant/components/tuya/strings.json index 0f005821cbb..8ec61cc8aa5 100644 --- a/homeassistant/components/tuya/strings.json +++ b/homeassistant/components/tuya/strings.json @@ -119,6 +119,9 @@ } }, "number": { + "alarm_duration": { + "name": "Alarm duration" + }, "temperature": { "name": "[%key:component::sensor::entity_component::temperature::name%]" }, diff --git a/homeassistant/components/tuya/switch.py b/homeassistant/components/tuya/switch.py index 77432c5b9a5..2b5e6fec4a6 100644 --- a/homeassistant/components/tuya/switch.py +++ b/homeassistant/components/tuya/switch.py @@ -528,6 +528,13 @@ SWITCHES: dict[str, tuple[SwitchEntityDescription, ...]] = { translation_key="switch", ), ), + # Hejhome whitelabel Fingerbot + "znjxs": ( + SwitchEntityDescription( + key=DPCode.SWITCH, + translation_key="switch", + ), + ), # IoT Switch? 
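Earlier in the Tuya changes, DeviceListener.update_device forwards the list of updated status properties through the dispatcher signal, and TuyaEntity receives it in a dedicated handler before writing state. A stripped-down sketch of that signal pattern (the signal constant is a stand-in for the one imported from .const, and the handler just logs instead of updating an entity):

import logging

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import dispatcher_send

_LOGGER = logging.getLogger(__name__)
TUYA_HA_SIGNAL_UPDATE_ENTITY = "tuya_entry_update"  # stand-in constant


def notify_device_update(
    hass: HomeAssistant, device_id: str, updated_status_properties: list[str] | None
) -> None:
    """Send the per-device signal together with the changed property names."""
    dispatcher_send(
        hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device_id}", updated_status_properties
    )


@callback
def handle_state_update(updated_status_properties: list[str] | None) -> None:
    """Receive the payload; an entity could filter on it before writing state."""
    _LOGGER.debug("Updated properties: %s", updated_status_properties)


# An entity subscribes with homeassistant.helpers.dispatcher.async_dispatcher_connect(
#     hass, f"{TUYA_HA_SIGNAL_UPDATE_ENTITY}_{device_id}", handle_state_update)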
# Note: Undocumented "tdq": ( diff --git a/homeassistant/components/twentemilieu/__init__.py b/homeassistant/components/twentemilieu/__init__.py index b6728b96536..0a2fb50c7c4 100644 --- a/homeassistant/components/twentemilieu/__init__.py +++ b/homeassistant/components/twentemilieu/__init__.py @@ -49,12 +49,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) await coordinator.async_config_entry_first_refresh() - # For backwards compat, set unique ID - if entry.unique_id is None: - hass.config_entries.async_update_entry( - entry, unique_id=str(entry.data[CONF_ID]) - ) - entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/twentemilieu/manifest.json b/homeassistant/components/twentemilieu/manifest.json index 8ba4f3b760e..a89091948c2 100644 --- a/homeassistant/components/twentemilieu/manifest.json +++ b/homeassistant/components/twentemilieu/manifest.json @@ -7,6 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["twentemilieu"], - "quality_scale": "platinum", "requirements": ["twentemilieu==2.1.0"] } diff --git a/homeassistant/components/twentemilieu/quality_scale.yaml b/homeassistant/components/twentemilieu/quality_scale.yaml new file mode 100644 index 00000000000..f8fd813b03d --- /dev/null +++ b/homeassistant/components/twentemilieu/quality_scale.yaml @@ -0,0 +1,118 @@ +rules: + # Bronze + action-setup: + status: exempt + comment: | + This integration does not provide additional actions. + appropriate-polling: done + brands: done + common-modules: + status: todo + comment: | + The coordinator isn't in the common module yet. + config-flow-test-coverage: done + config-flow: + status: todo + comment: | + data_description's are missing. + dependency-transparency: done + docs-actions: + status: exempt + comment: | + This integration does not provide additional actions. + docs-high-level-description: + status: todo + comment: | + The introduction can be improved and is missing links to the provider. + docs-installation-instructions: done + docs-removal-instructions: todo + entity-event-setup: + status: exempt + comment: | + Entities of this integration does not explicitly subscribe to events. + entity-unique-id: done + has-entity-name: done + runtime-data: done + test-before-configure: done + test-before-setup: done + unique-config-entry: done + + # Silver + config-entry-unloading: done + log-when-unavailable: done + entity-unavailable: done + action-exceptions: + status: exempt + comment: | + This integration does not provide additional actions. + reauthentication-flow: + status: exempt + comment: | + This integration does not require authentication. + parallel-updates: + status: exempt + comment: | + This integration only polls data using a coordinator. + Since the integration is read-only and poll-only (only provide sensor + data), there is no need to implement parallel updates. + test-coverage: done + integration-owner: done + docs-installation-parameters: todo + docs-configuration-parameters: + status: exempt + comment: | + This integration does not have an options flow. + + # Gold + entity-translations: + status: todo + comment: | + The calendar entity name isn't translated yet. 
+ entity-device-class: done + devices: done + entity-category: done + entity-disabled-by-default: done + discovery: + status: exempt + comment: | + This integration cannot be discovered, it is a connecting to a service + provider, which uses the users home address to get the data. + stale-devices: + status: exempt + comment: | + This integration has a fixed single device which represents the service. + diagnostics: done + exception-translations: + status: todo + comment: | + The coordinator raises, and currently, doesn't provide a translation for it. + icon-translations: done + reconfiguration-flow: todo + dynamic-devices: + status: exempt + comment: | + This integration has a fixed single device which represents the service. + discovery-update-info: + status: exempt + comment: | + This integration cannot be discovered, it is a connecting to a service + provider, which uses the users home address to get the data. + repair-issues: + status: exempt + comment: | + This integration doesn't have any cases where raising an issue is needed. + docs-use-cases: todo + docs-supported-devices: + status: exempt + comment: | + This is an service, which doesn't integrate with any devices. + docs-supported-functions: done + docs-data-update: todo + docs-known-limitations: todo + docs-troubleshooting: todo + docs-examples: todo + + # Platinum + async-dependency: done + inject-websession: done + strict-typing: done diff --git a/homeassistant/components/twilio_call/manifest.json b/homeassistant/components/twilio_call/manifest.json index 88f09efdeed..f4389e1c7d7 100644 --- a/homeassistant/components/twilio_call/manifest.json +++ b/homeassistant/components/twilio_call/manifest.json @@ -5,5 +5,6 @@ "dependencies": ["twilio"], "documentation": "https://www.home-assistant.io/integrations/twilio_call", "iot_class": "cloud_push", - "loggers": ["twilio"] + "loggers": ["twilio"], + "quality_scale": "legacy" } diff --git a/homeassistant/components/twilio_sms/manifest.json b/homeassistant/components/twilio_sms/manifest.json index 8736d58c0da..eed5a1113c6 100644 --- a/homeassistant/components/twilio_sms/manifest.json +++ b/homeassistant/components/twilio_sms/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["twilio"], "documentation": "https://www.home-assistant.io/integrations/twilio_sms", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/twitter/manifest.json b/homeassistant/components/twitter/manifest.json index 44e8712b029..af4dff4486d 100644 --- a/homeassistant/components/twitter/manifest.json +++ b/homeassistant/components/twitter/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/twitter", "iot_class": "cloud_push", "loggers": ["TwitterAPI"], + "quality_scale": "legacy", "requirements": ["TwitterAPI==2.7.12"] } diff --git a/homeassistant/components/ubus/manifest.json b/homeassistant/components/ubus/manifest.json index 902b7c9bb82..6053199b4ce 100644 --- a/homeassistant/components/ubus/manifest.json +++ b/homeassistant/components/ubus/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/ubus", "iot_class": "local_polling", "loggers": ["openwrt"], + "quality_scale": "legacy", "requirements": ["openwrt-ubus-rpc==0.0.2"] } diff --git a/homeassistant/components/uk_transport/manifest.json b/homeassistant/components/uk_transport/manifest.json index f3511e71bfa..d855a04ee29 100644 --- a/homeassistant/components/uk_transport/manifest.json +++ 
b/homeassistant/components/uk_transport/manifest.json @@ -3,5 +3,6 @@ "name": "UK Transport", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/uk_transport", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/unifi/config_flow.py b/homeassistant/components/unifi/config_flow.py index f36edc8a888..63c8533aa2e 100644 --- a/homeassistant/components/unifi/config_flow.py +++ b/homeassistant/components/unifi/config_flow.py @@ -21,7 +21,6 @@ import voluptuous as vol from homeassistant.components import ssdp from homeassistant.config_entries import ( SOURCE_REAUTH, - ConfigEntry, ConfigEntryState, ConfigFlow, ConfigFlowResult, @@ -79,7 +78,7 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): @staticmethod @callback def async_get_options_flow( - config_entry: ConfigEntry, + config_entry: UnifiConfigEntry, ) -> UnifiOptionsFlowHandler: """Get the options flow for this handler.""" return UnifiOptionsFlowHandler(config_entry) @@ -250,7 +249,6 @@ class UnifiOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: UnifiConfigEntry) -> None: """Initialize UniFi Network options flow.""" - self.config_entry = config_entry self.options = dict(config_entry.options) async def async_step_init( diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index 6f92dec5361..66d0a53284b 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -7,7 +7,6 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["aiounifi"], - "quality_scale": "platinum", "requirements": ["aiounifi==80"], "ssdp": [ { diff --git a/homeassistant/components/unifi_direct/device_tracker.py b/homeassistant/components/unifi_direct/device_tracker.py index 144cbd4dec7..d5e2e926114 100644 --- a/homeassistant/components/unifi_direct/device_tracker.py +++ b/homeassistant/components/unifi_direct/device_tracker.py @@ -67,11 +67,11 @@ class UnifiDeviceScanner(DeviceScanner): """Update the client info from AP.""" try: self.clients = self.ap.get_clients() - except UniFiAPConnectionException: - _LOGGER.error("Failed to connect to accesspoint") + except UniFiAPConnectionException as e: + _LOGGER.error("Failed to connect to accesspoint: %s", str(e)) return False - except UniFiAPDataException: - _LOGGER.error("Failed to get proper response from accesspoint") + except UniFiAPDataException as e: + _LOGGER.error("Failed to get proper response from accesspoint: %s", str(e)) return False return True diff --git a/homeassistant/components/unifi_direct/manifest.json b/homeassistant/components/unifi_direct/manifest.json index 8ca8ef27bb2..aa696985dbe 100644 --- a/homeassistant/components/unifi_direct/manifest.json +++ b/homeassistant/components/unifi_direct/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/unifi_direct", "iot_class": "local_polling", "loggers": ["unifi_ap"], - "requirements": ["unifi_ap==0.0.1"] + "quality_scale": "legacy", + "requirements": ["unifi_ap==0.0.2"] } diff --git a/homeassistant/components/unifiled/manifest.json b/homeassistant/components/unifiled/manifest.json index c75efb2053b..a2179c76fd9 100644 --- a/homeassistant/components/unifiled/manifest.json +++ b/homeassistant/components/unifiled/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/unifiled", "iot_class": "local_polling", "loggers": ["unifiled"], + 
"quality_scale": "legacy", "requirements": ["unifiled==0.11"] } diff --git a/homeassistant/components/unifiprotect/__init__.py b/homeassistant/components/unifiprotect/__init__.py index 394a7f43329..ed409a6eea0 100644 --- a/homeassistant/components/unifiprotect/__init__.py +++ b/homeassistant/components/unifiprotect/__init__.py @@ -45,7 +45,7 @@ from .utils import ( async_create_api_client, async_get_devices, ) -from .views import ThumbnailProxyView, VideoProxyView +from .views import ThumbnailProxyView, VideoEventProxyView, VideoProxyView _LOGGER = logging.getLogger(__name__) @@ -174,6 +174,7 @@ async def _async_setup_entry( await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) hass.http.register_view(ThumbnailProxyView(hass)) hass.http.register_view(VideoProxyView(hass)) + hass.http.register_view(VideoEventProxyView(hass)) async def _async_options_updated(hass: HomeAssistant, entry: UFPConfigEntry) -> None: diff --git a/homeassistant/components/unifiprotect/camera.py b/homeassistant/components/unifiprotect/camera.py index a40939be917..0b1c03b8dd6 100644 --- a/homeassistant/components/unifiprotect/camera.py +++ b/homeassistant/components/unifiprotect/camera.py @@ -90,7 +90,7 @@ def _get_camera_channels( is_default = False # no RTSP enabled use first channel with no stream - if is_default: + if is_default and not camera.is_third_party_camera: _create_rtsp_repair(hass, entry, data, camera) yield camera, camera.channels[0], True else: diff --git a/homeassistant/components/unifiprotect/config_flow.py b/homeassistant/components/unifiprotect/config_flow.py index 6a9dc1210c0..31950f8f7e4 100644 --- a/homeassistant/components/unifiprotect/config_flow.py +++ b/homeassistant/components/unifiprotect/config_flow.py @@ -225,7 +225,7 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Get the options flow for this handler.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() @callback def _async_create_entry(self, title: str, data: dict[str, Any]) -> ConfigFlowResult: @@ -376,10 +376,6 @@ class ProtectFlowHandler(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/unifiprotect/const.py b/homeassistant/components/unifiprotect/const.py index ad251ba6153..7d1e5b55d3f 100644 --- a/homeassistant/components/unifiprotect/const.py +++ b/homeassistant/components/unifiprotect/const.py @@ -1,5 +1,7 @@ """Constant definitions for UniFi Protect Integration.""" +from typing import Final + from uiprotect.data import ModelType, Version from homeassistant.const import Platform @@ -75,3 +77,8 @@ PLATFORMS = [ DISPATCH_ADD = "add_device" DISPATCH_ADOPT = "adopt_device" DISPATCH_CHANNELS = "new_camera_channels" + +EVENT_TYPE_FINGERPRINT_IDENTIFIED: Final = "identified" +EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED: Final = "not_identified" +EVENT_TYPE_NFC_SCANNED: Final = "scanned" +EVENT_TYPE_DOORBELL_RING: Final = "ring" diff --git a/homeassistant/components/unifiprotect/data.py b/homeassistant/components/unifiprotect/data.py index 4ad8892ca01..baecc7f8323 100644 --- a/homeassistant/components/unifiprotect/data.py +++ b/homeassistant/components/unifiprotect/data.py @@ -349,6 +349,7 @@ def async_ufp_instance_for_config_entry_ids( 
entry.runtime_data.api for entry_id in config_entry_ids if (entry := hass.config_entries.async_get_entry(entry_id)) + and entry.domain == DOMAIN and hasattr(entry, "runtime_data") ), None, diff --git a/homeassistant/components/unifiprotect/event.py b/homeassistant/components/unifiprotect/event.py index 8bbe568242b..f126920fb18 100644 --- a/homeassistant/components/unifiprotect/event.py +++ b/homeassistant/components/unifiprotect/event.py @@ -14,7 +14,13 @@ from homeassistant.components.event import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import ATTR_EVENT_ID +from .const import ( + ATTR_EVENT_ID, + EVENT_TYPE_DOORBELL_RING, + EVENT_TYPE_FINGERPRINT_IDENTIFIED, + EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED, + EVENT_TYPE_NFC_SCANNED, +) from .data import ProtectData, ProtectDeviceType, UFPConfigEntry from .entity import EventEntityMixin, ProtectDeviceEntity, ProtectEventMixin @@ -23,22 +29,10 @@ from .entity import EventEntityMixin, ProtectDeviceEntity, ProtectEventMixin class ProtectEventEntityDescription(ProtectEventMixin, EventEntityDescription): """Describes UniFi Protect event entity.""" - -EVENT_DESCRIPTIONS: tuple[ProtectEventEntityDescription, ...] = ( - ProtectEventEntityDescription( - key="doorbell", - translation_key="doorbell", - name="Doorbell", - device_class=EventDeviceClass.DOORBELL, - icon="mdi:doorbell-video", - ufp_required_field="feature_flags.is_doorbell", - ufp_event_obj="last_ring_event", - event_types=[EventType.RING], - ), -) + entity_class: type[ProtectDeviceEntity] -class ProtectDeviceEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEntity): +class ProtectDeviceRingEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEntity): """A UniFi Protect event entity.""" entity_description: ProtectEventEntityDescription @@ -57,26 +51,128 @@ class ProtectDeviceEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEntit if ( event and not self._event_already_ended(prev_event, prev_event_end) - and (event_types := description.event_types) - and (event_type := event.type) in event_types + and event.type is EventType.RING ): - self._trigger_event(event_type, {ATTR_EVENT_ID: event.id}) + self._trigger_event(EVENT_TYPE_DOORBELL_RING, {ATTR_EVENT_ID: event.id}) self.async_write_ha_state() +class ProtectDeviceNFCEventEntity(EventEntityMixin, ProtectDeviceEntity, EventEntity): + """A UniFi Protect NFC event entity.""" + + entity_description: ProtectEventEntityDescription + + @callback + def _async_update_device_from_protect(self, device: ProtectDeviceType) -> None: + description = self.entity_description + + prev_event = self._event + prev_event_end = self._event_end + super()._async_update_device_from_protect(device) + if event := description.get_event_obj(device): + self._event = event + self._event_end = event.end if event else None + + if ( + event + and not self._event_already_ended(prev_event, prev_event_end) + and event.type is EventType.NFC_CARD_SCANNED + ): + event_data = {ATTR_EVENT_ID: event.id} + if event.metadata and event.metadata.nfc and event.metadata.nfc.nfc_id: + event_data["nfc_id"] = event.metadata.nfc.nfc_id + + self._trigger_event(EVENT_TYPE_NFC_SCANNED, event_data) + self.async_write_ha_state() + + +class ProtectDeviceFingerprintEventEntity( + EventEntityMixin, ProtectDeviceEntity, EventEntity +): + """A UniFi Protect fingerprint event entity.""" + + entity_description: ProtectEventEntityDescription + + @callback + def _async_update_device_from_protect(self, 
device: ProtectDeviceType) -> None: + description = self.entity_description + + prev_event = self._event + prev_event_end = self._event_end + super()._async_update_device_from_protect(device) + if event := description.get_event_obj(device): + self._event = event + self._event_end = event.end if event else None + + if ( + event + and not self._event_already_ended(prev_event, prev_event_end) + and event.type is EventType.FINGERPRINT_IDENTIFIED + ): + event_data = {ATTR_EVENT_ID: event.id} + if ( + event.metadata + and event.metadata.fingerprint + and event.metadata.fingerprint.ulp_id + ): + event_data["ulp_id"] = event.metadata.fingerprint.ulp_id + event_identified = EVENT_TYPE_FINGERPRINT_IDENTIFIED + else: + event_data["ulp_id"] = "" + event_identified = EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED + + self._trigger_event(event_identified, event_data) + self.async_write_ha_state() + + +EVENT_DESCRIPTIONS: tuple[ProtectEventEntityDescription, ...] = ( + ProtectEventEntityDescription( + key="doorbell", + translation_key="doorbell", + device_class=EventDeviceClass.DOORBELL, + icon="mdi:doorbell-video", + ufp_required_field="feature_flags.is_doorbell", + ufp_event_obj="last_ring_event", + event_types=[EVENT_TYPE_DOORBELL_RING], + entity_class=ProtectDeviceRingEventEntity, + ), + ProtectEventEntityDescription( + key="nfc", + translation_key="nfc", + device_class=EventDeviceClass.DOORBELL, + icon="mdi:nfc", + ufp_required_field="feature_flags.support_nfc", + ufp_event_obj="last_nfc_card_scanned_event", + event_types=[EVENT_TYPE_NFC_SCANNED], + entity_class=ProtectDeviceNFCEventEntity, + ), + ProtectEventEntityDescription( + key="fingerprint", + translation_key="fingerprint", + device_class=EventDeviceClass.DOORBELL, + icon="mdi:fingerprint", + ufp_required_field="feature_flags.has_fingerprint_sensor", + ufp_event_obj="last_fingerprint_identified_event", + event_types=[ + EVENT_TYPE_FINGERPRINT_IDENTIFIED, + EVENT_TYPE_FINGERPRINT_NOT_IDENTIFIED, + ], + entity_class=ProtectDeviceFingerprintEventEntity, + ), +) + + @callback def _async_event_entities( data: ProtectData, ufp_device: ProtectAdoptableDeviceModel | None = None, ) -> list[ProtectDeviceEntity]: - entities: list[ProtectDeviceEntity] = [] - for device in data.get_cameras() if ufp_device is None else [ufp_device]: - entities.extend( - ProtectDeviceEventEntity(data, device, description) - for description in EVENT_DESCRIPTIONS - if description.has_required(device) - ) - return entities + return [ + description.entity_class(data, device, description) + for device in (data.get_cameras() if ufp_device is None else [ufp_device]) + for description in EVENT_DESCRIPTIONS + if description.has_required(device) + ] async def async_setup_entry( diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 85867b5c87c..e8a8c062800 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -1,7 +1,7 @@ { "domain": "unifiprotect", "name": "UniFi Protect", - "codeowners": [], + "codeowners": ["@RaHehl"], "config_flow": true, "dependencies": ["http", "repairs"], "dhcp": [ @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.4.0", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.6.5", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifiprotect/number.py 
b/homeassistant/components/unifiprotect/number.py index f6aacf81161..767128337ba 100644 --- a/homeassistant/components/unifiprotect/number.py +++ b/homeassistant/components/unifiprotect/number.py @@ -124,7 +124,7 @@ CAMERA_NUMBERS: tuple[ProtectNumberEntityDescription, ...] = ( name="Infrared custom lux trigger", icon="mdi:white-balance-sunny", entity_category=EntityCategory.CONFIG, - ufp_min=1, + ufp_min=0, ufp_max=30, ufp_step=1, ufp_required_field="feature_flags.has_led_ir", diff --git a/homeassistant/components/unifiprotect/sensor.py b/homeassistant/components/unifiprotect/sensor.py index a91a94aa629..09187e023a1 100644 --- a/homeassistant/components/unifiprotect/sensor.py +++ b/homeassistant/components/unifiprotect/sensor.py @@ -245,7 +245,7 @@ CAMERA_SENSORS: tuple[ProtectSensorEntityDescription, ...] = ( name="Recording mode", icon="mdi:video-outline", entity_category=EntityCategory.DIAGNOSTIC, - ufp_value="recording_settings.mode", + ufp_value="recording_settings.mode.value", ufp_perm=PermRequired.NO_WRITE, ), ProtectSensorEntityDescription( @@ -254,7 +254,7 @@ CAMERA_SENSORS: tuple[ProtectSensorEntityDescription, ...] = ( icon="mdi:circle-opacity", entity_category=EntityCategory.DIAGNOSTIC, ufp_required_field="feature_flags.has_led_ir", - ufp_value="isp_settings.ir_led_mode", + ufp_value="isp_settings.ir_led_mode.value", ufp_perm=PermRequired.NO_WRITE, ), ProtectSensorEntityDescription( diff --git a/homeassistant/components/unifiprotect/strings.json b/homeassistant/components/unifiprotect/strings.json index 9238c825390..8ecb4076409 100644 --- a/homeassistant/components/unifiprotect/strings.json +++ b/homeassistant/components/unifiprotect/strings.json @@ -137,6 +137,7 @@ }, "event": { "doorbell": { + "name": "Doorbell", "state_attributes": { "event_type": { "state": { @@ -144,6 +145,27 @@ } } } + }, + "nfc": { + "name": "NFC", + "state_attributes": { + "event_type": { + "state": { + "scanned": "Scanned" + } + } + } + }, + "fingerprint": { + "name": "Fingerprint", + "state_attributes": { + "event_type": { + "state": { + "identified": "Identified", + "not_identified": "Not identified" + } + } + } } } }, @@ -182,7 +204,7 @@ "fields": { "device_id": { "name": "Chime", - "description": "The chimes to link to the doorbells to." + "description": "The chimes to link to the doorbells." 
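The event-entity refactor above moves the concrete entity class into each ProtectEventEntityDescription, so setup instantiates whatever class a description names instead of hard-coding a single entity type. A generic, self-contained sketch of that dispatch pattern, with plain classes standing in for the Protect ones:

from dataclasses import dataclass


class BaseEventEntity:
    """Stand-in for the shared event-entity base class."""

    def __init__(self, device: str, description: "EventDescription") -> None:
        self.device = device
        self.entity_description = description


class RingEventEntity(BaseEventEntity):
    """Handles ring events."""


class NFCEventEntity(BaseEventEntity):
    """Handles NFC scan events."""


@dataclass(frozen=True, kw_only=True)
class EventDescription:
    """Description that also names the entity class to build."""

    key: str
    entity_class: type[BaseEventEntity]


DESCRIPTIONS: tuple[EventDescription, ...] = (
    EventDescription(key="doorbell", entity_class=RingEventEntity),
    EventDescription(key="nfc", entity_class=NFCEventEntity),
)


def build_entities(devices: list[str]) -> list[BaseEventEntity]:
    """Instantiate the class named by each description for every device."""
    return [
        description.entity_class(device, description)
        for device in devices
        for description in DESCRIPTIONS
    ]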
}, "doorbells": { "name": "Doorbells", diff --git a/homeassistant/components/unifiprotect/views.py b/homeassistant/components/unifiprotect/views.py index 00128492c67..9bf6ed024f5 100644 --- a/homeassistant/components/unifiprotect/views.py +++ b/homeassistant/components/unifiprotect/views.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import datetime from http import HTTPStatus import logging -from typing import Any +from typing import TYPE_CHECKING, Any from urllib.parse import urlencode from aiohttp import web @@ -30,7 +30,9 @@ def async_generate_thumbnail_url( ) -> str: """Generate URL for event thumbnail.""" - url_format = ThumbnailProxyView.url or "{nvr_id}/{event_id}" + url_format = ThumbnailProxyView.url + if TYPE_CHECKING: + assert url_format is not None url = url_format.format(nvr_id=nvr_id, event_id=event_id) params = {} @@ -50,7 +52,9 @@ def async_generate_event_video_url(event: Event) -> str: if event.start is None or event.end is None: raise ValueError("Event is ongoing") - url_format = VideoProxyView.url or "{nvr_id}/{camera_id}/{start}/{end}" + url_format = VideoProxyView.url + if TYPE_CHECKING: + assert url_format is not None return url_format.format( nvr_id=event.api.bootstrap.nvr.id, camera_id=event.camera_id, @@ -59,6 +63,19 @@ def async_generate_event_video_url(event: Event) -> str: ) +@callback +def async_generate_proxy_event_video_url( + nvr_id: str, + event_id: str, +) -> str: + """Generate proxy URL for event video.""" + + url_format = VideoEventProxyView.url + if TYPE_CHECKING: + assert url_format is not None + return url_format.format(nvr_id=nvr_id, event_id=event_id) + + @callback def _client_error(message: Any, code: HTTPStatus) -> web.Response: _LOGGER.warning("Client error (%s): %s", code.value, message) @@ -107,6 +124,27 @@ class ProtectProxyView(HomeAssistantView): return data return _404("Invalid NVR ID") + @callback + def _async_get_camera(self, data: ProtectData, camera_id: str) -> Camera | None: + if (camera := data.api.bootstrap.cameras.get(camera_id)) is not None: + return camera + + entity_registry = er.async_get(self.hass) + device_registry = dr.async_get(self.hass) + + if (entity := entity_registry.async_get(camera_id)) is None or ( + device := device_registry.async_get(entity.device_id or "") + ) is None: + return None + + macs = [c[1] for c in device.connections if c[0] == dr.CONNECTION_NETWORK_MAC] + for mac in macs: + if (ufp_device := data.api.bootstrap.get_device_from_mac(mac)) is not None: + if isinstance(ufp_device, Camera): + camera = ufp_device + break + return camera + class ThumbnailProxyView(ProtectProxyView): """View to proxy event thumbnails from UniFi Protect.""" @@ -156,27 +194,6 @@ class VideoProxyView(ProtectProxyView): url = "/api/unifiprotect/video/{nvr_id}/{camera_id}/{start}/{end}" name = "api:unifiprotect_thumbnail" - @callback - def _async_get_camera(self, data: ProtectData, camera_id: str) -> Camera | None: - if (camera := data.api.bootstrap.cameras.get(camera_id)) is not None: - return camera - - entity_registry = er.async_get(self.hass) - device_registry = dr.async_get(self.hass) - - if (entity := entity_registry.async_get(camera_id)) is None or ( - device := device_registry.async_get(entity.device_id or "") - ) is None: - return None - - macs = [c[1] for c in device.connections if c[0] == dr.CONNECTION_NETWORK_MAC] - for mac in macs: - if (ufp_device := data.api.bootstrap.get_device_from_mac(mac)) is not None: - if isinstance(ufp_device, Camera): - camera = ufp_device - break - return camera - async 
def get( self, request: web.Request, nvr_id: str, camera_id: str, start: str, end: str ) -> web.StreamResponse: @@ -226,3 +243,56 @@ class VideoProxyView(ProtectProxyView): if response.prepared: await response.write_eof() return response + + +class VideoEventProxyView(ProtectProxyView): + """View to proxy video clips for events from UniFi Protect.""" + + url = "/api/unifiprotect/video/{nvr_id}/{event_id}" + name = "api:unifiprotect_videoEventView" + + async def get( + self, request: web.Request, nvr_id: str, event_id: str + ) -> web.StreamResponse: + """Get Camera Video clip for an event.""" + + data = self._get_data_or_404(nvr_id) + if isinstance(data, web.Response): + return data + + try: + event = await data.api.get_event(event_id) + except ClientError: + return _404(f"Invalid event ID: {event_id}") + if event.start is None or event.end is None: + return _400("Event is still ongoing") + camera = self._async_get_camera(data, str(event.camera_id)) + if camera is None: + return _404(f"Invalid camera ID: {event.camera_id}") + if not camera.can_read_media(data.api.bootstrap.auth_user): + return _403(f"User cannot read media from camera: {camera.id}") + + response = web.StreamResponse( + status=200, + reason="OK", + headers={ + "Content-Type": "video/mp4", + }, + ) + + async def iterator(total: int, chunk: bytes | None) -> None: + if not response.prepared: + response.content_length = total + await response.prepare(request) + + if chunk is not None: + await response.write(chunk) + + try: + await camera.get_video(event.start, event.end, iterator_callback=iterator) + except ClientError as err: + return _404(err) + + if response.prepared: + await response.write_eof() + return response diff --git a/homeassistant/components/upc_connect/manifest.json b/homeassistant/components/upc_connect/manifest.json index 02b852ec3a6..1874e5db028 100644 --- a/homeassistant/components/upc_connect/manifest.json +++ b/homeassistant/components/upc_connect/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/upc_connect", "iot_class": "local_polling", "loggers": ["connect_box"], + "quality_scale": "legacy", "requirements": ["connect-box==0.3.1"] } diff --git a/homeassistant/components/upcloud/config_flow.py b/homeassistant/components/upcloud/config_flow.py index 20860df5553..bb988726ba5 100644 --- a/homeassistant/components/upcloud/config_flow.py +++ b/homeassistant/components/upcloud/config_flow.py @@ -95,16 +95,12 @@ class UpCloudConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> UpCloudOptionsFlow: """Get options flow.""" - return UpCloudOptionsFlow(config_entry) + return UpCloudOptionsFlow() class UpCloudOptionsFlow(OptionsFlow): """UpCloud options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/upnp/config_flow.py b/homeassistant/components/upnp/config_flow.py index 1a40d4b3442..41e481fa58c 100644 --- a/homeassistant/components/upnp/config_flow.py +++ b/homeassistant/components/upnp/config_flow.py @@ -16,7 +16,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import HomeAssistant, callback @@ -94,9 +93,11 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> 
OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> UpnpOptionsFlowHandler: """Get the options flow for this handler.""" - return UpnpOptionsFlowHandler(config_entry) + return UpnpOptionsFlowHandler() @property def _discoveries(self) -> dict[str, SsdpServiceInfo]: @@ -299,7 +300,7 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=title, data=data, options=options) -class UpnpOptionsFlowHandler(OptionsFlowWithConfigEntry): +class UpnpOptionsFlowHandler(OptionsFlow): """Handle an options flow.""" async def async_step_init( @@ -313,7 +314,7 @@ class UpnpOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONFIG_ENTRY_FORCE_POLL, - default=self.options.get( + default=self.config_entry.options.get( CONFIG_ENTRY_FORCE_POLL, DEFAULT_CONFIG_ENTRY_FORCE_POLL ), ): bool, diff --git a/homeassistant/components/uptimerobot/manifest.json b/homeassistant/components/uptimerobot/manifest.json index 254409cff7e..67e57f46986 100644 --- a/homeassistant/components/uptimerobot/manifest.json +++ b/homeassistant/components/uptimerobot/manifest.json @@ -6,6 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/uptimerobot", "iot_class": "cloud_polling", "loggers": ["pyuptimerobot"], - "quality_scale": "platinum", "requirements": ["pyuptimerobot==22.2.0"] } diff --git a/homeassistant/components/usgs_earthquakes_feed/manifest.json b/homeassistant/components/usgs_earthquakes_feed/manifest.json index ffb9412703f..ea68d00e2a9 100644 --- a/homeassistant/components/usgs_earthquakes_feed/manifest.json +++ b/homeassistant/components/usgs_earthquakes_feed/manifest.json @@ -6,5 +6,6 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aio_geojson_usgs_earthquakes"], + "quality_scale": "legacy", "requirements": ["aio-geojson-usgs-earthquakes==0.3"] } diff --git a/homeassistant/components/utility_meter/manifest.json b/homeassistant/components/utility_meter/manifest.json index 25e803e6a2d..31a2d4e9584 100644 --- a/homeassistant/components/utility_meter/manifest.json +++ b/homeassistant/components/utility_meter/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["croniter"], "quality_scale": "internal", - "requirements": ["croniter==2.0.2"] + "requirements": ["cronsim==2.6"] } diff --git a/homeassistant/components/utility_meter/sensor.py b/homeassistant/components/utility_meter/sensor.py index 6b8c07c7ef7..9c13aa1984a 100644 --- a/homeassistant/components/utility_meter/sensor.py +++ b/homeassistant/components/utility_meter/sensor.py @@ -9,7 +9,7 @@ from decimal import Decimal, DecimalException, InvalidOperation import logging from typing import Any, Self -from croniter import croniter +from cronsim import CronSim import voluptuous as vol from homeassistant.components.sensor import ( @@ -27,6 +27,7 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, CONF_NAME, CONF_UNIQUE_ID, + EVENT_CORE_CONFIG_UPDATE, STATE_UNAVAILABLE, STATE_UNKNOWN, ) @@ -379,14 +380,13 @@ class UtilityMeterSensor(RestoreSensor): self.entity_id = suggested_entity_id self._parent_meter = parent_meter self._sensor_source_id = source_entity - self._state = None self._last_period = Decimal(0) self._last_reset = dt_util.utcnow() self._last_valid_state = None self._collecting = None - self._name = name + self._attr_name = name self._input_device_class = None - self._unit_of_measurement = None + self._attr_native_unit_of_measurement = None self._period = meter_type if meter_type is not None: # For backwards compatibility 
reasons we convert the period and offset into a cron pattern @@ -405,12 +405,26 @@ class UtilityMeterSensor(RestoreSensor): self._tariff = tariff self._tariff_entity = tariff_entity self._next_reset = None + self._current_tz = None + self._config_scheduler() + + def _config_scheduler(self): + self.scheduler = ( + CronSim( + self._cron_pattern, + dt_util.now( + dt_util.get_default_time_zone() + ), # we need timezone for DST purposes (see issue #102984) + ) + if self._cron_pattern + else None + ) def start(self, attributes: Mapping[str, Any]) -> None: """Initialize unit and state upon source initial update.""" self._input_device_class = attributes.get(ATTR_DEVICE_CLASS) - self._unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) - self._state = 0 + self._attr_native_unit_of_measurement = attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._attr_native_value = 0 self.async_write_ha_state() @staticmethod @@ -485,13 +499,13 @@ class UtilityMeterSensor(RestoreSensor): ) return - if self._state is None: + if self.native_value is None: # First state update initializes the utility_meter sensors for sensor in self.hass.data[DATA_UTILITY][self._parent_meter][ DATA_TARIFF_SENSORS ]: sensor.start(new_state_attributes) - if self._unit_of_measurement is None: + if self.native_unit_of_measurement is None: _LOGGER.warning( "Source sensor %s has no unit of measurement. Please %s", self._sensor_source_id, @@ -502,10 +516,12 @@ class UtilityMeterSensor(RestoreSensor): adjustment := self.calculate_adjustment(old_state, new_state) ) is not None and (self._sensor_net_consumption or adjustment >= 0): # If net_consumption is off, the adjustment must be non-negative - self._state += adjustment # type: ignore[operator] # self._state will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line + self._attr_native_value += adjustment # type: ignore[operator] # self._attr_native_value will be set to by the start function if it is None, therefore it always has a valid Decimal value at this line self._input_device_class = new_state_attributes.get(ATTR_DEVICE_CLASS) - self._unit_of_measurement = new_state_attributes.get(ATTR_UNIT_OF_MEASUREMENT) + self._attr_native_unit_of_measurement = new_state_attributes.get( + ATTR_UNIT_OF_MEASUREMENT + ) self._last_valid_state = new_state_val self.async_write_ha_state() @@ -534,7 +550,7 @@ class UtilityMeterSensor(RestoreSensor): _LOGGER.debug( "%s - %s - source <%s>", - self._name, + self.name, COLLECTING if self._collecting is not None else PAUSED, self._sensor_source_id, ) @@ -543,11 +559,10 @@ class UtilityMeterSensor(RestoreSensor): async def _program_reset(self): """Program the reset of the utility meter.""" - if self._cron_pattern is not None: - tz = dt_util.get_default_time_zone() - self._next_reset = croniter(self._cron_pattern, dt_util.now(tz)).get_next( - datetime - ) # we need timezone for DST purposes (see issue #102984) + if self.scheduler: + self._next_reset = next(self.scheduler) + + _LOGGER.debug("Next reset of %s is %s", self.entity_id, self._next_reset) self.async_on_remove( async_track_point_in_time( self.hass, @@ -555,6 +570,7 @@ class UtilityMeterSensor(RestoreSensor): self._next_reset, ) ) + self.async_write_ha_state() async def _async_reset_meter(self, event): """Reset the utility meter status.""" @@ -575,20 +591,26 @@ class UtilityMeterSensor(RestoreSensor): return _LOGGER.debug("Reset utility meter <%s>", self.entity_id) self._last_reset = dt_util.utcnow() - self._last_period = Decimal(self._state) if 
self._state else Decimal(0) - self._state = 0 + self._last_period = ( + Decimal(self.native_value) if self.native_value else Decimal(0) + ) + self._attr_native_value = 0 self.async_write_ha_state() async def async_calibrate(self, value): """Calibrate the Utility Meter with a given value.""" - _LOGGER.debug("Calibrate %s = %s type(%s)", self._name, value, type(value)) - self._state = Decimal(str(value)) + _LOGGER.debug("Calibrate %s = %s type(%s)", self.name, value, type(value)) + self._attr_native_value = Decimal(str(value)) self.async_write_ha_state() async def async_added_to_hass(self): """Handle entity which will be added.""" await super().async_added_to_hass() + # track current timezone in case it changes + # and we need to reconfigure the scheduler + self._current_tz = self.hass.config.time_zone + await self._program_reset() self.async_on_remove( @@ -598,10 +620,11 @@ class UtilityMeterSensor(RestoreSensor): ) if (last_sensor_data := await self.async_get_last_sensor_data()) is not None: - # new introduced in 2022.04 - self._state = last_sensor_data.native_value + self._attr_native_value = last_sensor_data.native_value self._input_device_class = last_sensor_data.input_device_class - self._unit_of_measurement = last_sensor_data.native_unit_of_measurement + self._attr_native_unit_of_measurement = ( + last_sensor_data.native_unit_of_measurement + ) self._last_period = last_sensor_data.last_period self._last_reset = last_sensor_data.last_reset self._last_valid_state = last_sensor_data.last_valid_state @@ -609,39 +632,6 @@ class UtilityMeterSensor(RestoreSensor): # Null lambda to allow cancelling the collection on tariff change self._collecting = lambda: None - elif state := await self.async_get_last_state(): - # legacy to be removed on 2022.10 (we are keeping this to avoid utility_meter counter losses) - try: - self._state = Decimal(state.state) - except InvalidOperation: - _LOGGER.error( - "Could not restore state <%s>. 
Resetting utility_meter.%s", - state.state, - self.name, - ) - else: - self._unit_of_measurement = state.attributes.get( - ATTR_UNIT_OF_MEASUREMENT - ) - self._last_period = ( - Decimal(state.attributes[ATTR_LAST_PERIOD]) - if state.attributes.get(ATTR_LAST_PERIOD) - and is_number(state.attributes[ATTR_LAST_PERIOD]) - else Decimal(0) - ) - self._last_valid_state = ( - Decimal(state.attributes[ATTR_LAST_VALID_STATE]) - if state.attributes.get(ATTR_LAST_VALID_STATE) - and is_number(state.attributes[ATTR_LAST_VALID_STATE]) - else None - ) - self._last_reset = dt_util.as_utc( - dt_util.parse_datetime(state.attributes.get(ATTR_LAST_RESET)) - ) - if state.attributes.get(ATTR_STATUS) == COLLECTING: - # Null lambda to allow cancelling the collection on tariff change - self._collecting = lambda: None - @callback def async_source_tracking(event): """Wait for source to be ready, then start meter.""" @@ -666,7 +656,7 @@ class UtilityMeterSensor(RestoreSensor): _LOGGER.debug( "<%s> collecting %s from %s", self.name, - self._unit_of_measurement, + self.native_unit_of_measurement, self._sensor_source_id, ) self._collecting = async_track_state_change_event( @@ -675,28 +665,34 @@ class UtilityMeterSensor(RestoreSensor): self.async_on_remove(async_at_started(self.hass, async_source_tracking)) + async def async_track_time_zone(event): + """Reconfigure Scheduler after time zone changes.""" + + if self._current_tz != self.hass.config.time_zone: + self._current_tz = self.hass.config.time_zone + + self._config_scheduler() + await self._program_reset() + + self.async_on_remove( + self.hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, async_track_time_zone) + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" if self._collecting: self._collecting() self._collecting = None - @property - def name(self): - """Return the name of the sensor.""" - return self._name - - @property - def native_value(self): - """Return the state of the sensor.""" - return self._state - @property def device_class(self): """Return the device class of the sensor.""" if self._input_device_class is not None: return self._input_device_class - if self._unit_of_measurement in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY]: + if ( + self.native_unit_of_measurement + in DEVICE_CLASS_UNITS[SensorDeviceClass.ENERGY] + ): return SensorDeviceClass.ENERGY return None @@ -709,11 +705,6 @@ class UtilityMeterSensor(RestoreSensor): else SensorStateClass.TOTAL_INCREASING ) - @property - def native_unit_of_measurement(self): - """Return the unit the value is expressed in.""" - return self._unit_of_measurement - @property def extra_state_attributes(self): """Return the state attributes of the sensor.""" diff --git a/homeassistant/components/utility_meter/strings.json b/homeassistant/components/utility_meter/strings.json index e05789aece1..4a8ae415a83 100644 --- a/homeassistant/components/utility_meter/strings.json +++ b/homeassistant/components/utility_meter/strings.json @@ -3,7 +3,7 @@ "config": { "step": { "user": { - "title": "Add Utility Meter", + "title": "Create Utility Meter", "description": "Create a sensor which tracks consumption of various utilities (e.g., energy, gas, water, heating) over a configured period of time, typically monthly. 
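
The utility_meter change above swaps croniter for cronsim and seeds the iterator with a timezone-aware now() so scheduled resets stay correct across DST transitions (issue #102984); the scheduler is then rebuilt whenever a core-config update reports a new time zone. A minimal standalone sketch of that scheduling pattern, using the cronsim package directly; the build_scheduler helper and the example pattern and zone are illustrative, not part of the integration:

    from datetime import datetime
    from zoneinfo import ZoneInfo

    from cronsim import CronSim  # same library the integration switches to


    def build_scheduler(cron_pattern: str, tz_name: str) -> CronSim:
        """Return an iterator of timezone-aware reset times for the pattern."""
        # Seeding CronSim with an aware datetime keeps the schedule correct
        # across DST transitions; a naive datetime would drift by an hour.
        return CronSim(cron_pattern, datetime.now(ZoneInfo(tz_name)))


    scheduler = build_scheduler("0 0 1 * *", "Europe/Lisbon")  # monthly reset
    print(next(scheduler))  # first upcoming reset
    print(next(scheduler))  # the reset after that
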
The utility meter sensor optionally supports splitting the consumption by tariffs, in that case one sensor for each tariff is created as well as a select entity to choose the current tariff.", "data": { "always_available": "Sensor always available", diff --git a/homeassistant/components/uvc/manifest.json b/homeassistant/components/uvc/manifest.json index c72b865b5ef..aeb9b6068ea 100644 --- a/homeassistant/components/uvc/manifest.json +++ b/homeassistant/components/uvc/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/uvc", "iot_class": "local_polling", "loggers": ["uvcclient"], + "quality_scale": "legacy", "requirements": ["uvcclient==0.12.1"] } diff --git a/homeassistant/components/vasttrafik/manifest.json b/homeassistant/components/vasttrafik/manifest.json index 336d06e182c..73b773720ad 100644 --- a/homeassistant/components/vasttrafik/manifest.json +++ b/homeassistant/components/vasttrafik/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/vasttrafik", "iot_class": "cloud_polling", "loggers": ["vasttrafik"], + "quality_scale": "legacy", "requirements": ["vtjp==0.2.1"] } diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 5443afeef77..84262ebd61c 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.10.0"], + "requirements": ["velbus-aio==2024.11.1"], "usb": [ { "vid": "10CF", diff --git a/homeassistant/components/vera/config_flow.py b/homeassistant/components/vera/config_flow.py index 08e7640773b..f2b182cc270 100644 --- a/homeassistant/components/vera/config_flow.py +++ b/homeassistant/components/vera/config_flow.py @@ -76,10 +76,6 @@ def options_data(user_input: dict[str, str]) -> dict[str, list[int]]: class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, str] | None = None, @@ -104,7 +100,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/verisure/config_flow.py b/homeassistant/components/verisure/config_flow.py index 42ce7f9e9fe..0f1088ccb80 100644 --- a/homeassistant/components/verisure/config_flow.py +++ b/homeassistant/components/verisure/config_flow.py @@ -43,9 +43,11 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> VerisureOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> VerisureOptionsFlowHandler: """Get the options flow for this handler.""" - return VerisureOptionsFlowHandler(config_entry) + return VerisureOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -290,10 +292,6 @@ class VerisureConfigFlowHandler(ConfigFlow, domain=DOMAIN): class VerisureOptionsFlowHandler(OptionsFlow): """Handle Verisure options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Verisure options flow.""" - self.entry = entry - async def 
async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -310,7 +308,7 @@ class VerisureOptionsFlowHandler(OptionsFlow): vol.Optional( CONF_LOCK_CODE_DIGITS, description={ - "suggested_value": self.entry.options.get( + "suggested_value": self.config_entry.options.get( CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS ) }, diff --git a/homeassistant/components/versasense/manifest.json b/homeassistant/components/versasense/manifest.json index 421a46bc2f6..1f1ee9e6b9c 100644 --- a/homeassistant/components/versasense/manifest.json +++ b/homeassistant/components/versasense/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/versasense", "iot_class": "local_polling", "loggers": ["pyversasense"], + "quality_scale": "legacy", "requirements": ["pyversasense==0.0.6"] } diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 58a262e769f..098a17e90f0 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -94,6 +94,7 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): | FanEntityFeature.TURN_ON ) _attr_name = None + _attr_translation_key = "vesync" _enable_turn_on_off_backwards_compatibility = False def __init__(self, fan) -> None: diff --git a/homeassistant/components/vesync/icons.json b/homeassistant/components/vesync/icons.json index cfdefb2ed09..e4769acc9a5 100644 --- a/homeassistant/components/vesync/icons.json +++ b/homeassistant/components/vesync/icons.json @@ -1,4 +1,20 @@ { + "entity": { + "fan": { + "vesync": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "mdi:fan-auto", + "sleep": "mdi:sleep", + "pet": "mdi:paw", + "turbo": "mdi:weather-tornado" + } + } + } + } + } + }, "services": { "update_devices": { "service": "mdi:update" diff --git a/homeassistant/components/vesync/strings.json b/homeassistant/components/vesync/strings.json index 5ff0aa58722..b6e4e2fd957 100644 --- a/homeassistant/components/vesync/strings.json +++ b/homeassistant/components/vesync/strings.json @@ -42,6 +42,20 @@ "current_voltage": { "name": "Current voltage" } + }, + "fan": { + "vesync": { + "state_attributes": { + "preset_mode": { + "state": { + "auto": "Auto", + "sleep": "Sleep", + "pet": "Pet", + "turbo": "Turbo" + } + } + } + } } }, "services": { diff --git a/homeassistant/components/viaggiatreno/manifest.json b/homeassistant/components/viaggiatreno/manifest.json index 904f9c0bebf..584742c8c59 100644 --- a/homeassistant/components/viaggiatreno/manifest.json +++ b/homeassistant/components/viaggiatreno/manifest.json @@ -3,5 +3,6 @@ "name": "Trenitalia ViaggiaTreno", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/viaggiatreno", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py index b787de20773..1800704a16f 100644 --- a/homeassistant/components/vicare/fan.py +++ b/homeassistant/components/vicare/fan.py @@ -29,6 +29,7 @@ from homeassistant.util.percentage import ( from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity +from .types import ViCareDevice from .utils import get_device_serial _LOGGER = logging.getLogger(__name__) @@ -90,6 +91,17 @@ ORDERED_NAMED_FAN_SPEEDS = [ ] +def _build_entities( + device_list: list[ViCareDevice], +) -> list[ViCareFan]: + """Create ViCare climate entities for a device.""" + return [ + ViCareFan(get_device_serial(device.api), 
device.config, device.api) + for device in device_list + if isinstance(device.api, PyViCareVentilationDevice) + ] + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -100,27 +112,18 @@ async def async_setup_entry( device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] async_add_entities( - [ - ViCareFan(get_device_serial(device.api), device.config, device.api) - for device in device_list - if isinstance(device.api, PyViCareVentilationDevice) - ] + await hass.async_add_executor_job( + _build_entities, + device_list, + ) ) class ViCareFan(ViCareEntity, FanEntity): """Representation of the ViCare ventilation device.""" - _attr_preset_modes = list[str]( - [ - VentilationMode.PERMANENT, - VentilationMode.VENTILATION, - VentilationMode.SENSOR_DRIVEN, - VentilationMode.SENSOR_OVERRIDE, - ] - ) _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + _attr_supported_features = FanEntityFeature.SET_SPEED _attr_translation_key = "ventilation" _enable_turn_on_off_backwards_compatibility = False @@ -134,6 +137,15 @@ class ViCareFan(ViCareEntity, FanEntity): super().__init__( self._attr_translation_key, device_serial, device_config, device ) + # init presets + supported_modes = list[str](self._api.getAvailableModes()) + self._attr_preset_modes = [ + mode + for mode in VentilationMode + if VentilationMode.to_vicare_mode(mode) in supported_modes + ] + if len(self._attr_preset_modes) > 0: + self._attr_supported_features |= FanEntityFeature.PRESET_MODE def update(self) -> None: """Update state of fan.""" @@ -161,6 +173,30 @@ class ViCareFan(ViCareEntity, FanEntity): # Viessmann ventilation unit cannot be turned off return True + @property + def icon(self) -> str | None: + """Return the icon to use in the frontend.""" + if hasattr(self, "_attr_preset_mode"): + if self._attr_preset_mode == VentilationMode.VENTILATION: + return "mdi:fan-clock" + if self._attr_preset_mode in [ + VentilationMode.SENSOR_DRIVEN, + VentilationMode.SENSOR_OVERRIDE, + ]: + return "mdi:fan-auto" + if self._attr_preset_mode == VentilationMode.PERMANENT: + if self._attr_percentage == 0: + return "mdi:fan-off" + if self._attr_percentage is not None: + level = 1 + ORDERED_NAMED_FAN_SPEEDS.index( + percentage_to_ordered_list_item( + ORDERED_NAMED_FAN_SPEEDS, self._attr_percentage + ) + ) + if level < 4: # fan-speed- only supports 1-3 + return f"mdi:fan-speed-{level}" + return "mdi:fan" + def set_percentage(self, percentage: int) -> None: """Set the speed of the fan, as a percentage.""" if self._attr_preset_mode != str(VentilationMode.PERMANENT): diff --git a/homeassistant/components/vicare/quality_scale.yaml b/homeassistant/components/vicare/quality_scale.yaml new file mode 100644 index 00000000000..436e046204f --- /dev/null +++ b/homeassistant/components/vicare/quality_scale.yaml @@ -0,0 +1,49 @@ +rules: + # Bronze + config-flow: + status: todo + comment: data_description is missing. + test-before-configure: done + unique-config-entry: + status: todo + comment: Uniqueness is not checked yet. + config-flow-test-coverage: done + runtime-data: + status: todo + comment: runtime_data is not used yet. + test-before-setup: done + appropriate-polling: done + entity-unique-id: done + has-entity-name: done + entity-event-setup: + status: exempt + comment: Entities of this integration does not explicitly subscribe to events. 
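
The ViCare fan above stops hard-coding _attr_preset_modes: it queries getAvailableModes() on the device at construction time (a blocking call, hence moving entity construction into hass.async_add_executor_job) and only advertises FanEntityFeature.PRESET_MODE when at least one mode is actually supported. A simplified, self-contained sketch of that capability-driven setup; StubDevice, the local VentilationMode enum, and the numeric feature constant are stand-ins for the PyViCare objects:

    from enum import StrEnum


    class VentilationMode(StrEnum):
        """Local stand-in for PyViCare's VentilationMode values."""

        PERMANENT = "permanent"
        VENTILATION = "ventilation"
        SENSOR_DRIVEN = "sensor_driven"
        SENSOR_OVERRIDE = "sensor_override"


    PRESET_MODE_FEATURE = 8  # stand-in for FanEntityFeature.PRESET_MODE


    class StubDevice:
        """Stand-in for the blocking PyViCare ventilation device."""

        def getAvailableModes(self) -> list[str]:
            return ["permanent", "sensor_driven"]


    def derive_presets(device: StubDevice, base_features: int) -> tuple[list[str], int]:
        """Return the preset modes the device supports and the feature bitmask."""
        supported = set(device.getAvailableModes())
        presets = [mode.value for mode in VentilationMode if mode.value in supported]
        features = (base_features | PRESET_MODE_FEATURE) if presets else base_features
        return presets, features


    presets, features = derive_presets(StubDevice(), base_features=1)
    print(presets, features)  # ['permanent', 'sensor_driven'] 9
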
+ dependency-transparency: done + action-setup: + status: todo + comment: service registered in climate async_setup_entry. + common-modules: + status: done + comment: No coordinator is used, data update is centrally handled by the library. + docs-high-level-description: done + docs-installation-instructions: done + docs-removal-instructions: + status: todo + comment: removal instructions missing + docs-actions: done + brands: done + # Silver + integration-owner: done + reauthentication-flow: done + config-entry-unloading: done + # Gold + devices: done + diagnostics: done + entity-category: done + dynamic-devices: done + entity-device-class: done + entity-translations: done + entity-disabled-by-default: done + repair-issues: + status: exempt + comment: This integration does not raise any repairable issues. diff --git a/homeassistant/components/vivotek/manifest.json b/homeassistant/components/vivotek/manifest.json index 5a33ca09908..f0b622afcad 100644 --- a/homeassistant/components/vivotek/manifest.json +++ b/homeassistant/components/vivotek/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/vivotek", "iot_class": "local_polling", "loggers": ["libpyvivotek"], + "quality_scale": "legacy", "requirements": ["libpyvivotek==0.4.0"] } diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index c8f1aaa21cb..49f6a709565 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -108,10 +108,6 @@ def _host_is_same(host1: str, host2: str) -> bool: class VizioOptionsConfigFlow(OptionsFlow): """Handle Vizio options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize vizio options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -184,7 +180,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> VizioOptionsConfigFlow: """Get the options flow for this handler.""" - return VizioOptionsConfigFlow(config_entry) + return VizioOptionsConfigFlow() def __init__(self) -> None: """Initialize config flow.""" diff --git a/homeassistant/components/vizio/coordinator.py b/homeassistant/components/vizio/coordinator.py index 1930828b595..a7ca7d7f9ed 100644 --- a/homeassistant/components/vizio/coordinator.py +++ b/homeassistant/components/vizio/coordinator.py @@ -34,10 +34,9 @@ class VizioAppsDataUpdateCoordinator(DataUpdateCoordinator[list[dict[str, Any]]] self.fail_threshold = 10 self.store = store - async def async_config_entry_first_refresh(self) -> None: + async def _async_setup(self) -> None: """Refresh data for the first time when a config entry is setup.""" self.data = await self.store.async_load() or APPS - await super().async_config_entry_first_refresh() async def _async_update_data(self) -> list[dict[str, Any]]: """Update data via library.""" diff --git a/homeassistant/components/vizio/manifest.json b/homeassistant/components/vizio/manifest.json index e6812ed58b1..91b2ff46495 100644 --- a/homeassistant/components/vizio/manifest.json +++ b/homeassistant/components/vizio/manifest.json @@ -7,7 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["pyvizio"], - "quality_scale": "platinum", "requirements": ["pyvizio==0.1.61"], "zeroconf": ["_viziocast._tcp.local."] } diff --git a/homeassistant/components/vlc/manifest.json 
b/homeassistant/components/vlc/manifest.json index 7e4fb7b2a4f..a31fe49859c 100644 --- a/homeassistant/components/vlc/manifest.json +++ b/homeassistant/components/vlc/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/vlc", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["python-vlc==3.0.18122"] } diff --git a/homeassistant/components/vodafone_station/config_flow.py b/homeassistant/components/vodafone_station/config_flow.py index c373520bc58..7a80244f8d6 100644 --- a/homeassistant/components/vodafone_station/config_flow.py +++ b/homeassistant/components/vodafone_station/config_flow.py @@ -17,7 +17,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback @@ -63,9 +62,11 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> VodafoneStationOptionsFlowHandler: """Get the options flow for this handler.""" - return VodafoneStationOptionsFlowHandler(config_entry) + return VodafoneStationOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -143,7 +144,7 @@ class VodafoneStationConfigFlow(ConfigFlow, domain=DOMAIN): ) -class VodafoneStationOptionsFlowHandler(OptionsFlowWithConfigEntry): +class VodafoneStationOptionsFlowHandler(OptionsFlow): """Handle a option flow.""" async def async_step_init( @@ -158,7 +159,7 @@ class VodafoneStationOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_CONSIDER_HOME, - default=self.options.get( + default=self.config_entry.options.get( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds() ), ): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)) diff --git a/homeassistant/components/vodafone_station/manifest.json b/homeassistant/components/vodafone_station/manifest.json index 29cb3c070ab..4acafc8df3a 100644 --- a/homeassistant/components/vodafone_station/manifest.json +++ b/homeassistant/components/vodafone_station/manifest.json @@ -7,6 +7,5 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["aiovodafone"], - "quality_scale": "silver", "requirements": ["aiovodafone==0.6.1"] } diff --git a/homeassistant/components/voicerss/manifest.json b/homeassistant/components/voicerss/manifest.json index bfc61365dc0..1e7da9d220d 100644 --- a/homeassistant/components/voicerss/manifest.json +++ b/homeassistant/components/voicerss/manifest.json @@ -3,5 +3,6 @@ "name": "VoiceRSS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/voicerss", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/voip/config_flow.py b/homeassistant/components/voip/config_flow.py index 821c7f29a1e..63dcb8f86ee 100644 --- a/homeassistant/components/voip/config_flow.py +++ b/homeassistant/components/voip/config_flow.py @@ -47,16 +47,12 @@ class VoIPConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlow: """Create the options flow.""" - return VoipOptionsFlowHandler(config_entry) + return VoipOptionsFlowHandler() class VoipOptionsFlowHandler(OptionsFlow): """Handle VoIP options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize 
options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/volkszaehler/manifest.json b/homeassistant/components/volkszaehler/manifest.json index e9070d0fa87..1427f330e77 100644 --- a/homeassistant/components/volkszaehler/manifest.json +++ b/homeassistant/components/volkszaehler/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/volkszaehler", "iot_class": "local_polling", "loggers": ["volkszaehler"], + "quality_scale": "legacy", "requirements": ["volkszaehler==0.4.0"] } diff --git a/homeassistant/components/vulcan/manifest.json b/homeassistant/components/vulcan/manifest.json index 47ab7ec53cb..554a82e9c2c 100644 --- a/homeassistant/components/vulcan/manifest.json +++ b/homeassistant/components/vulcan/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/vulcan", "iot_class": "cloud_polling", - "quality_scale": "silver", "requirements": ["vulcan-api==2.3.2"] } diff --git a/homeassistant/components/vultr/manifest.json b/homeassistant/components/vultr/manifest.json index dc3cd3571eb..713485e7931 100644 --- a/homeassistant/components/vultr/manifest.json +++ b/homeassistant/components/vultr/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/vultr", "iot_class": "cloud_polling", "loggers": ["vultr"], + "quality_scale": "legacy", "requirements": ["vultr==0.1.2"] } diff --git a/homeassistant/components/w800rf32/manifest.json b/homeassistant/components/w800rf32/manifest.json index 769eb96b3c0..4d5074e72c2 100644 --- a/homeassistant/components/w800rf32/manifest.json +++ b/homeassistant/components/w800rf32/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/w800rf32", "iot_class": "local_push", "loggers": ["W800rf32"], + "quality_scale": "legacy", "requirements": ["pyW800rf32==0.4"] } diff --git a/homeassistant/components/waterfurnace/manifest.json b/homeassistant/components/waterfurnace/manifest.json index 9e01f7e6a05..2bf72acb047 100644 --- a/homeassistant/components/waterfurnace/manifest.json +++ b/homeassistant/components/waterfurnace/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/waterfurnace", "iot_class": "cloud_polling", "loggers": ["waterfurnace"], + "quality_scale": "legacy", "requirements": ["waterfurnace==1.1.0"] } diff --git a/homeassistant/components/watson_iot/manifest.json b/homeassistant/components/watson_iot/manifest.json index 702c5492246..a457dcc44b1 100644 --- a/homeassistant/components/watson_iot/manifest.json +++ b/homeassistant/components/watson_iot/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/watson_iot", "iot_class": "cloud_push", "loggers": ["ibmiotf", "paho_mqtt"], + "quality_scale": "legacy", "requirements": ["ibmiotf==0.3.4"] } diff --git a/homeassistant/components/watson_tts/manifest.json b/homeassistant/components/watson_tts/manifest.json index f26fc006561..ecc3d97be46 100644 --- a/homeassistant/components/watson_tts/manifest.json +++ b/homeassistant/components/watson_tts/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/watson_tts", "iot_class": "cloud_push", "loggers": ["ibm_cloud_sdk_core", "ibm_watson"], + "quality_scale": "legacy", "requirements": ["ibm-watson==5.2.2"] } diff --git a/homeassistant/components/watttime/config_flow.py 
b/homeassistant/components/watttime/config_flow.py index db68738b302..ad676e166c5 100644 --- a/homeassistant/components/watttime/config_flow.py +++ b/homeassistant/components/watttime/config_flow.py @@ -126,9 +126,11 @@ class WattTimeConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> WattTimeOptionsFlowHandler: """Define the config flow to handle options.""" - return WattTimeOptionsFlowHandler(config_entry) + return WattTimeOptionsFlowHandler() async def async_step_coordinates( self, user_input: dict[str, Any] | None = None @@ -241,10 +243,6 @@ class WattTimeConfigFlow(ConfigFlow, domain=DOMAIN): class WattTimeOptionsFlowHandler(OptionsFlow): """Handle a WattTime options flow.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -258,7 +256,7 @@ class WattTimeOptionsFlowHandler(OptionsFlow): { vol.Required( CONF_SHOW_ON_MAP, - default=self.entry.options.get(CONF_SHOW_ON_MAP, True), + default=self.config_entry.options.get(CONF_SHOW_ON_MAP, True), ): bool } ), diff --git a/homeassistant/components/waze_travel_time/config_flow.py b/homeassistant/components/waze_travel_time/config_flow.py index 1d75adc6c29..6ab6a4b121c 100644 --- a/homeassistant/components/waze_travel_time/config_flow.py +++ b/homeassistant/components/waze_travel_time/config_flow.py @@ -113,10 +113,6 @@ def default_options(hass: HomeAssistant) -> dict[str, str | bool | list[str]]: class WazeOptionsFlow(OptionsFlow): """Handle an options flow for Waze Travel Time.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize waze options flow.""" - self.config_entry = config_entry - async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: @@ -148,7 +144,7 @@ class WazeConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> WazeOptionsFlow: """Get the options flow for this handler.""" - return WazeOptionsFlow(config_entry) + return WazeOptionsFlow() async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/webostv/config_flow.py b/homeassistant/components/webostv/config_flow.py index 24bf89b24a6..45395bd282a 100644 --- a/homeassistant/components/webostv/config_flow.py +++ b/homeassistant/components/webostv/config_flow.py @@ -170,8 +170,6 @@ class OptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.config_entry = config_entry - self.options = config_entry.options self.host = config_entry.data[CONF_HOST] self.key = config_entry.data[CONF_CLIENT_SECRET] @@ -188,7 +186,8 @@ class OptionsFlowHandler(OptionsFlow): if not sources_list: errors["base"] = "cannot_retrieve" - sources = [s for s in self.options.get(CONF_SOURCES, []) if s in sources_list] + option_sources = self.config_entry.options.get(CONF_SOURCES, []) + sources = [s for s in option_sources if s in sources_list] if not sources: sources = sources_list diff --git a/homeassistant/components/webostv/manifest.json b/homeassistant/components/webostv/manifest.json index 679bad9b9f5..6c826c2f997 100644 --- a/homeassistant/components/webostv/manifest.json +++ b/homeassistant/components/webostv/manifest.json @@ -6,7 +6,6 @@ "documentation": 
"https://www.home-assistant.io/integrations/webostv", "iot_class": "local_push", "loggers": ["aiowebostv"], - "quality_scale": "platinum", "requirements": ["aiowebostv==0.4.2"], "ssdp": [ { diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index 29dc6113350..e7d57aebab6 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -36,6 +36,8 @@ from .error import Disconnect from .messages import message_to_json_bytes from .util import describe_request +CLOSE_MSG_TYPES = {WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING} + if TYPE_CHECKING: from .connection import ActiveConnection @@ -328,13 +330,7 @@ class WebSocketHandler: if TYPE_CHECKING: assert writer is not None - # aiohttp 3.11.0 changed the method name from _send_frame to send_frame - if hasattr(writer, "send_frame"): - send_frame = writer.send_frame # pragma: no cover - else: - send_frame = writer._send_frame # noqa: SLF001 - - send_bytes_text = partial(send_frame, opcode=WSMsgType.TEXT) + send_bytes_text = partial(writer.send_frame, opcode=WSMsgType.TEXT) auth = AuthPhase( logger, hass, self._send_message, self._cancel, request, send_bytes_text ) @@ -344,7 +340,7 @@ class WebSocketHandler: try: connection = await self._async_handle_auth_phase(auth, send_bytes_text) self._async_increase_writer_limit(writer) - await self._async_websocket_command_phase(connection, send_bytes_text) + await self._async_websocket_command_phase(connection) except asyncio.CancelledError: logger.debug("%s: Connection cancelled", self.description) raise @@ -454,9 +450,7 @@ class WebSocketHandler: writer._limit = 2**20 # noqa: SLF001 async def _async_websocket_command_phase( - self, - connection: ActiveConnection, - send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], + self, connection: ActiveConnection ) -> None: """Handle the command phase of the websocket connection.""" wsock = self._wsock @@ -467,24 +461,26 @@ class WebSocketHandler: # Command phase while not wsock.closed: msg = await wsock.receive() + msg_type = msg.type + msg_data = msg.data - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + if msg_type in CLOSE_MSG_TYPES: break - if msg.type is WSMsgType.BINARY: - if len(msg.data) < 1: + if msg_type is WSMsgType.BINARY: + if len(msg_data) < 1: raise Disconnect("Received invalid binary message.") - handler = msg.data[0] - payload = msg.data[1:] + handler = msg_data[0] + payload = msg_data[1:] async_handle_binary(handler, payload) continue - if msg.type is not WSMsgType.TEXT: + if msg_type is not WSMsgType.TEXT: raise Disconnect("Received non-Text message.") try: - command_msg_data = json_loads(msg.data) + command_msg_data = json_loads(msg_data) except ValueError as ex: raise Disconnect("Received invalid JSON.") from ex diff --git a/homeassistant/components/weheat/manifest.json b/homeassistant/components/weheat/manifest.json index d32e0ce4047..61d6a110dbd 100644 --- a/homeassistant/components/weheat/manifest.json +++ b/homeassistant/components/weheat/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/weheat", "iot_class": "cloud_polling", - "requirements": ["weheat==2024.09.23"] + "requirements": ["weheat==2024.11.26"] } diff --git a/homeassistant/components/wemo/config_flow.py b/homeassistant/components/wemo/config_flow.py index 10a9bf5604b..361c58953c5 100644 --- a/homeassistant/components/wemo/config_flow.py +++ 
b/homeassistant/components/wemo/config_flow.py @@ -32,16 +32,12 @@ class WemoFlow(DiscoveryFlowHandler, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: """Get the options flow for this handler.""" - return WemoOptionsFlow(config_entry) + return WemoOptionsFlow() class WemoOptionsFlow(OptionsFlow): """Options flow for the WeMo component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/wiffi/config_flow.py b/homeassistant/components/wiffi/config_flow.py index 3fcbef395e6..308923597cd 100644 --- a/homeassistant/components/wiffi/config_flow.py +++ b/homeassistant/components/wiffi/config_flow.py @@ -34,7 +34,7 @@ class WiffiFlowHandler(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Create Wiffi server setup option flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -79,10 +79,6 @@ class WiffiFlowHandler(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Wiffi server setup option flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize options flow.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, int] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/wilight/manifest.json b/homeassistant/components/wilight/manifest.json index 8da0ffd9241..7f7e16d55fb 100644 --- a/homeassistant/components/wilight/manifest.json +++ b/homeassistant/components/wilight/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/wilight", "iot_class": "local_polling", "loggers": ["pywilight"], - "quality_scale": "silver", "requirements": ["pywilight==0.0.74"], "ssdp": [ { diff --git a/homeassistant/components/wirelesstag/manifest.json b/homeassistant/components/wirelesstag/manifest.json index 9735c833453..1ff9403d3bc 100644 --- a/homeassistant/components/wirelesstag/manifest.json +++ b/homeassistant/components/wirelesstag/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/wirelesstag", "iot_class": "cloud_push", "loggers": ["wirelesstagpy"], + "quality_scale": "legacy", "requirements": ["wirelesstagpy==0.8.1"] } diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index f9e8328ae53..57d4bafdc7b 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -8,6 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/withings", "iot_class": "cloud_push", "loggers": ["aiowithings"], - "quality_scale": "platinum", "requirements": ["aiowithings==3.1.3"] } diff --git a/homeassistant/components/wiz/manifest.json b/homeassistant/components/wiz/manifest.json index bb5527bc467..7b1ecdcdb6b 100644 --- a/homeassistant/components/wiz/manifest.json +++ b/homeassistant/components/wiz/manifest.json @@ -26,6 +26,5 @@ ], "documentation": "https://www.home-assistant.io/integrations/wiz", "iot_class": "local_push", - "quality_scale": "platinum", "requirements": ["pywizlight==0.5.14"] } diff --git a/homeassistant/components/wled/config_flow.py b/homeassistant/components/wled/config_flow.py index 2798e0d46d1..812a0500d1a 100644 --- 
a/homeassistant/components/wled/config_flow.py +++ b/homeassistant/components/wled/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_HOST, CONF_MAC from homeassistant.core import callback @@ -30,9 +30,11 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> WLEDOptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> WLEDOptionsFlowHandler: """Get the options flow for this handler.""" - return WLEDOptionsFlowHandler(config_entry) + return WLEDOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -117,7 +119,7 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): return await wled.update() -class WLEDOptionsFlowHandler(OptionsFlowWithConfigEntry): +class WLEDOptionsFlowHandler(OptionsFlow): """Handle WLED options.""" async def async_step_init( @@ -133,7 +135,7 @@ class WLEDOptionsFlowHandler(OptionsFlowWithConfigEntry): { vol.Optional( CONF_KEEP_MAIN_LIGHT, - default=self.options.get( + default=self.config_entry.options.get( CONF_KEEP_MAIN_LIGHT, DEFAULT_KEEP_MAIN_LIGHT ), ): bool, diff --git a/homeassistant/components/wled/coordinator.py b/homeassistant/components/wled/coordinator.py index cb39fde5e5a..8e2855e9f05 100644 --- a/homeassistant/components/wled/coordinator.py +++ b/homeassistant/components/wled/coordinator.py @@ -49,6 +49,7 @@ class WLEDDataUpdateCoordinator(DataUpdateCoordinator[WLEDDevice]): super().__init__( hass, LOGGER, + config_entry=entry, name=DOMAIN, update_interval=SCAN_INTERVAL, ) @@ -133,6 +134,7 @@ class WLEDReleasesDataUpdateCoordinator(DataUpdateCoordinator[Releases]): super().__init__( hass, LOGGER, + config_entry=None, name=DOMAIN, update_interval=RELEASES_SCAN_INTERVAL, ) diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json index 71939127356..c731f8181af 100644 --- a/homeassistant/components/wled/manifest.json +++ b/homeassistant/components/wled/manifest.json @@ -6,7 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/wled", "integration_type": "device", "iot_class": "local_push", - "quality_scale": "platinum", "requirements": ["wled==0.20.2"], "zeroconf": ["_wled._tcp.local."] } diff --git a/homeassistant/components/workday/config_flow.py b/homeassistant/components/workday/config_flow.py index 2552fe849e2..2036d685d31 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_COUNTRY, CONF_LANGUAGE, CONF_NAME from homeassistant.core import callback @@ -67,12 +67,14 @@ def add_province_and_language_to_schema( _country = country_holidays(country=country) if country_default_language := (_country.default_language): selectable_languages = _country.supported_languages - new_selectable_languages = [lang[:2] for lang in selectable_languages] + new_selectable_languages = list(selectable_languages) language_schema = { vol.Optional( CONF_LANGUAGE, default=country_default_language ): LanguageSelector( - LanguageSelectorConfig(languages=new_selectable_languages) + LanguageSelectorConfig( + languages=new_selectable_languages, native_name=True + ) ) } @@ -219,7 +221,7 
@@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> WorkdayOptionsFlowHandler: """Get the options flow for this handler.""" - return WorkdayOptionsFlowHandler(config_entry) + return WorkdayOptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -310,7 +312,7 @@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): ) -class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): +class WorkdayOptionsFlowHandler(OptionsFlow): """Handle Workday options.""" async def async_step_init( @@ -320,7 +322,7 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): errors: dict[str, str] = {} if user_input is not None: - combined_input: dict[str, Any] = {**self.options, **user_input} + combined_input: dict[str, Any] = {**self.config_entry.options, **user_input} if CONF_PROVINCE not in user_input: # Province not present, delete old value (if present) too combined_input.pop(CONF_PROVINCE, None) @@ -340,7 +342,7 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): else: LOGGER.debug("abort_check in options with %s", combined_input) abort_match = { - CONF_COUNTRY: self._config_entry.options.get(CONF_COUNTRY), + CONF_COUNTRY: self.config_entry.options.get(CONF_COUNTRY), CONF_EXCLUDES: combined_input[CONF_EXCLUDES], CONF_OFFSET: combined_input[CONF_OFFSET], CONF_WORKDAYS: combined_input[CONF_WORKDAYS], @@ -357,23 +359,22 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): else: return self.async_create_entry(data=combined_input) + options = self.config_entry.options schema: vol.Schema = await self.hass.async_add_executor_job( add_province_and_language_to_schema, DATA_SCHEMA_OPT, - self.options.get(CONF_COUNTRY), + options.get(CONF_COUNTRY), ) - new_schema = self.add_suggested_values_to_schema( - schema, user_input or self.options - ) + new_schema = self.add_suggested_values_to_schema(schema, user_input or options) LOGGER.debug("Errors have occurred in options %s", errors) return self.async_show_form( step_id="init", data_schema=new_schema, errors=errors, description_placeholders={ - "name": self.options[CONF_NAME], - "country": self.options.get(CONF_COUNTRY), + "name": options[CONF_NAME], + "country": options.get(CONF_COUNTRY, "-"), }, ) diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index ea08bfe1717..842c6f1f1ad 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.61"] + "requirements": ["holidays==0.62"] } diff --git a/homeassistant/components/worldtidesinfo/manifest.json b/homeassistant/components/worldtidesinfo/manifest.json index 962e63617f4..c873f2f08f3 100644 --- a/homeassistant/components/worldtidesinfo/manifest.json +++ b/homeassistant/components/worldtidesinfo/manifest.json @@ -3,5 +3,6 @@ "name": "World Tides", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/worldtidesinfo", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/worxlandroid/manifest.json b/homeassistant/components/worxlandroid/manifest.json index a74228295c8..7a65b3b91b6 100644 --- a/homeassistant/components/worxlandroid/manifest.json +++ b/homeassistant/components/worxlandroid/manifest.json @@ -3,5 +3,6 @@ "name": "Worx Landroid", "codeowners": [], "documentation": 
"https://www.home-assistant.io/integrations/worxlandroid", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/ws66i/config_flow.py b/homeassistant/components/ws66i/config_flow.py index 9f6f4ca59c2..120b7738d2e 100644 --- a/homeassistant/components/ws66i/config_flow.py +++ b/homeassistant/components/ws66i/config_flow.py @@ -130,7 +130,7 @@ class WS66iConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> Ws66iOptionsFlowHandler: """Define the config flow to handle options.""" - return Ws66iOptionsFlowHandler(config_entry) + return Ws66iOptionsFlowHandler() @callback @@ -145,10 +145,6 @@ def _key_for_source( class Ws66iOptionsFlowHandler(OptionsFlow): """Handle a WS66i options flow.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/ws66i/manifest.json b/homeassistant/components/ws66i/manifest.json index d259823d5af..c465a9f9f37 100644 --- a/homeassistant/components/ws66i/manifest.json +++ b/homeassistant/components/ws66i/manifest.json @@ -5,6 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ws66i", "iot_class": "local_polling", - "quality_scale": "silver", "requirements": ["pyws66i==1.1"] } diff --git a/homeassistant/components/wsdot/manifest.json b/homeassistant/components/wsdot/manifest.json index 4444cfbac4a..9b7746eea74 100644 --- a/homeassistant/components/wsdot/manifest.json +++ b/homeassistant/components/wsdot/manifest.json @@ -3,5 +3,6 @@ "name": "Washington State Department of Transportation (WSDOT)", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/wsdot", - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/x10/manifest.json b/homeassistant/components/x10/manifest.json index 258080dc374..517bab07f6c 100644 --- a/homeassistant/components/x10/manifest.json +++ b/homeassistant/components/x10/manifest.json @@ -3,5 +3,6 @@ "name": "Heyu X10", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/x10", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/xeoma/manifest.json b/homeassistant/components/xeoma/manifest.json index d66177ca214..839724cc781 100644 --- a/homeassistant/components/xeoma/manifest.json +++ b/homeassistant/components/xeoma/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/xeoma", "iot_class": "local_polling", "loggers": ["pyxeoma"], + "quality_scale": "legacy", "requirements": ["pyxeoma==1.4.2"] } diff --git a/homeassistant/components/xiaomi/manifest.json b/homeassistant/components/xiaomi/manifest.json index ef7085f2aa4..45540db47f3 100644 --- a/homeassistant/components/xiaomi/manifest.json +++ b/homeassistant/components/xiaomi/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "dependencies": ["ffmpeg"], "documentation": "https://www.home-assistant.io/integrations/xiaomi", - "iot_class": "local_polling" + "iot_class": "local_polling", + "quality_scale": "legacy" } diff --git a/homeassistant/components/xiaomi_miio/config_flow.py b/homeassistant/components/xiaomi_miio/config_flow.py index 7fc84c26235..b068f4a1e61 100644 --- a/homeassistant/components/xiaomi_miio/config_flow.py +++ 
b/homeassistant/components/xiaomi_miio/config_flow.py @@ -63,10 +63,6 @@ DEVICE_CLOUD_CONFIG = vol.Schema( class OptionsFlowHandler(OptionsFlow): """Options for the component.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Init object.""" - self.config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -122,7 +118,7 @@ class XiaomiMiioFlowHandler(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: """Get the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/xiaomi_miio/sensor.py b/homeassistant/components/xiaomi_miio/sensor.py index 3f6f4e9b50b..aafcba97487 100644 --- a/homeassistant/components/xiaomi_miio/sensor.py +++ b/homeassistant/components/xiaomi_miio/sensor.py @@ -24,7 +24,6 @@ from homeassistant.components.sensor import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - AREA_SQUARE_METERS, ATTR_BATTERY_LEVEL, ATTR_TEMPERATURE, CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, @@ -37,6 +36,7 @@ from homeassistant.const import ( PERCENTAGE, REVOLUTIONS_PER_MINUTE, EntityCategory, + UnitOfArea, UnitOfPower, UnitOfPressure, UnitOfTemperature, @@ -622,7 +622,7 @@ VACUUM_SENSORS = { entity_category=EntityCategory.DIAGNOSTIC, ), f"last_clean_{ATTR_LAST_CLEAN_AREA}": XiaomiMiioSensorDescription( - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, icon="mdi:texture-box", key=ATTR_LAST_CLEAN_AREA, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, @@ -639,7 +639,7 @@ VACUUM_SENSORS = { entity_category=EntityCategory.DIAGNOSTIC, ), f"current_{ATTR_LAST_CLEAN_AREA}": XiaomiMiioSensorDescription( - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, icon="mdi:texture-box", key=ATTR_STATUS_CLEAN_AREA, parent_key=VacuumCoordinatorDataAttributes.status, @@ -657,7 +657,7 @@ VACUUM_SENSORS = { entity_category=EntityCategory.DIAGNOSTIC, ), f"clean_history_{ATTR_CLEAN_HISTORY_TOTAL_AREA}": XiaomiMiioSensorDescription( - native_unit_of_measurement=AREA_SQUARE_METERS, + native_unit_of_measurement=UnitOfArea.SQUARE_METERS, icon="mdi:texture-box", key=ATTR_CLEAN_HISTORY_TOTAL_AREA, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, diff --git a/homeassistant/components/xiaomi_miio/strings.json b/homeassistant/components/xiaomi_miio/strings.json index 31fe547b162..bafc1ec543b 100644 --- a/homeassistant/components/xiaomi_miio/strings.json +++ b/homeassistant/components/xiaomi_miio/strings.json @@ -216,22 +216,22 @@ "name": "Air quality index" }, "filter_life_remaining": { - "name": "Filter lifetime remaining" + "name": "Filter life remaining" }, "filter_hours_used": { "name": "Filter use" }, "filter_left_time": { - "name": "Filter lifetime left" + "name": "Filter lifetime remaining" }, "dust_filter_life_remaining": { - "name": "Dust filter lifetime remaining" + "name": "Dust filter life remaining" }, "dust_filter_life_remaining_days": { "name": "Dust filter lifetime remaining days" }, "upper_filter_life_remaining": { - "name": "Upper filter lifetime remaining" + "name": "Upper filter life remaining" }, "upper_filter_life_remaining_days": { "name": "Upper filter lifetime remaining days" @@ -276,16 +276,16 @@ "name": "Total dust collection count" }, 
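
The options-flow edits here repeat across vera, verisure, vizio, vodafone_station, voip, watttime, waze_travel_time, wemo, wiffi, wled, workday, ws66i, xiaomi_miio and the Yale/Yeelight handlers further down: handlers no longer receive or store the config entry themselves (and OptionsFlowWithConfigEntry is dropped) because the OptionsFlow base class is expected to expose self.config_entry, so async_get_options_flow simply returns the handler without arguments. A condensed sketch of the resulting shape; ExampleOptionsFlow, ExampleConfigFlow, the "example" domain and the show_on_map option are illustrative names:

    import voluptuous as vol

    from homeassistant.config_entries import (
        ConfigEntry,
        ConfigFlow,
        ConfigFlowResult,
        OptionsFlow,
    )
    from homeassistant.core import callback


    class ExampleOptionsFlow(OptionsFlow):
        """No __init__ needed; the base class exposes self.config_entry."""

        async def async_step_init(
            self, user_input: dict | None = None
        ) -> ConfigFlowResult:
            if user_input is not None:
                return self.async_create_entry(data=user_input)
            return self.async_show_form(
                step_id="init",
                data_schema=vol.Schema(
                    {
                        vol.Optional(
                            "show_on_map",
                            default=self.config_entry.options.get("show_on_map", True),
                        ): bool
                    }
                ),
            )


    class ExampleConfigFlow(ConfigFlow, domain="example"):
        """Sketch of the matching config flow."""

        VERSION = 1

        @staticmethod
        @callback
        def async_get_options_flow(config_entry: ConfigEntry) -> ExampleOptionsFlow:
            # The entry is no longer passed through; the framework attaches it.
            return ExampleOptionsFlow()
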
"main_brush_left": { - "name": "Main brush left" + "name": "Main brush remaining" }, "side_brush_left": { - "name": "Side brush left" + "name": "Side brush remaining" }, "filter_left": { - "name": "Filter left" + "name": "Filter remaining" }, "sensor_dirty_left": { - "name": "Sensor dirty left" + "name": "Sensor dirty remaining" } }, "switch": { diff --git a/homeassistant/components/xiaomi_tv/manifest.json b/homeassistant/components/xiaomi_tv/manifest.json index 2e913e80fdc..8335adff333 100644 --- a/homeassistant/components/xiaomi_tv/manifest.json +++ b/homeassistant/components/xiaomi_tv/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/xiaomi_tv", "iot_class": "assumed_state", "loggers": ["pymitv"], + "quality_scale": "legacy", "requirements": ["pymitv==1.4.3"] } diff --git a/homeassistant/components/xmpp/manifest.json b/homeassistant/components/xmpp/manifest.json index 308c3d70978..d77d70ff86c 100644 --- a/homeassistant/components/xmpp/manifest.json +++ b/homeassistant/components/xmpp/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/xmpp", "iot_class": "cloud_push", "loggers": ["pyasn1", "slixmpp"], + "quality_scale": "legacy", "requirements": ["slixmpp==1.8.5", "emoji==2.8.0"] } diff --git a/homeassistant/components/xs1/manifest.json b/homeassistant/components/xs1/manifest.json index 9f4c921642d..88a5e4427ae 100644 --- a/homeassistant/components/xs1/manifest.json +++ b/homeassistant/components/xs1/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/xs1", "iot_class": "local_polling", "loggers": ["xs1_api_client"], + "quality_scale": "legacy", "requirements": ["xs1-api-client==3.0.0"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 34f3a7a1728..50c2a0af457 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.0"] + "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.1"] } diff --git a/homeassistant/components/yale_smart_alarm/config_flow.py b/homeassistant/components/yale_smart_alarm/config_flow.py index 7b68a1f5dab..c71b7b33a08 100644 --- a/homeassistant/components/yale_smart_alarm/config_flow.py +++ b/homeassistant/components/yale_smart_alarm/config_flow.py @@ -23,10 +23,8 @@ from .const import ( CONF_AREA_ID, CONF_LOCK_CODE_DIGITS, DEFAULT_AREA_ID, - DEFAULT_LOCK_CODE_DIGITS, DEFAULT_NAME, DOMAIN, - LOGGER, YALE_BASE_ERRORS, ) @@ -44,6 +42,26 @@ DATA_SCHEMA_AUTH = vol.Schema( } ) +OPTIONS_SCHEMA = vol.Schema( + { + vol.Optional( + CONF_LOCK_CODE_DIGITS, + ): int, + } +) + + +def validate_credentials(username: str, password: str) -> dict[str, Any]: + """Validate credentials.""" + errors: dict[str, str] = {} + try: + YaleSmartAlarmClient(username, password) + except AuthenticationError: + errors = {"base": "invalid_auth"} + except YALE_BASE_ERRORS: + errors = {"base": "cannot_connect"} + return errors + class YaleConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Yale integration.""" @@ -54,7 +72,7 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): @callback def async_get_options_flow(config_entry: ConfigEntry) -> YaleOptionsFlowHandler: """Get the options flow for this handler.""" - return YaleOptionsFlowHandler(config_entry) 
+ return YaleOptionsFlowHandler() async def async_step_reauth( self, entry_data: Mapping[str, Any] @@ -66,24 +84,16 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: reauth_entry = self._get_reauth_entry() username = reauth_entry.data[CONF_USERNAME] password = user_input[CONF_PASSWORD] - try: - await self.hass.async_add_executor_job( - YaleSmartAlarmClient, username, password - ) - except AuthenticationError as error: - LOGGER.error("Authentication failed. Check credentials %s", error) - errors = {"base": "invalid_auth"} - except YALE_BASE_ERRORS as error: - LOGGER.error("Connection to API failed %s", error) - errors = {"base": "cannot_connect"} - + errors = await self.hass.async_add_executor_job( + validate_credentials, username, password + ) if not errors: return self.async_update_reload_and_abort( reauth_entry, @@ -96,11 +106,42 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of existing entry.""" + errors: dict[str, str] = {} + + if user_input is not None: + reconfigure_entry = self._get_reconfigure_entry() + username = user_input[CONF_USERNAME] + + errors = await self.hass.async_add_executor_job( + validate_credentials, username, user_input[CONF_PASSWORD] + ) + if ( + username != reconfigure_entry.unique_id + and await self.async_set_unique_id(username) + ): + errors["base"] = "unique_id_exists" + if not errors: + return self.async_update_reload_and_abort( + reconfigure_entry, + unique_id=username, + data_updates=user_input, + ) + + return self.async_show_form( + step_id="reconfigure", + data_schema=DATA_SCHEMA, + errors=errors, + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: username = user_input[CONF_USERNAME] @@ -108,17 +149,9 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): name = DEFAULT_NAME area = user_input.get(CONF_AREA_ID, DEFAULT_AREA_ID) - try: - await self.hass.async_add_executor_job( - YaleSmartAlarmClient, username, password - ) - except AuthenticationError as error: - LOGGER.error("Authentication failed. 
Check credentials %s", error) - errors = {"base": "invalid_auth"} - except YALE_BASE_ERRORS as error: - LOGGER.error("Connection to API failed %s", error) - errors = {"base": "cannot_connect"} - + errors = await self.hass.async_add_executor_job( + validate_credentials, username, password + ) if not errors: await self.async_set_unique_id(username) self._abort_if_unique_id_configured() @@ -143,32 +176,18 @@ class YaleConfigFlow(ConfigFlow, domain=DOMAIN): class YaleOptionsFlowHandler(OptionsFlow): """Handle Yale options.""" - def __init__(self, entry: ConfigEntry) -> None: - """Initialize Yale options flow.""" - self.entry = entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Manage Yale options.""" - errors: dict[str, Any] = {} - if user_input: + if user_input is not None: return self.async_create_entry(data=user_input) return self.async_show_form( step_id="init", - data_schema=vol.Schema( - { - vol.Optional( - CONF_LOCK_CODE_DIGITS, - description={ - "suggested_value": self.entry.options.get( - CONF_LOCK_CODE_DIGITS, DEFAULT_LOCK_CODE_DIGITS - ) - }, - ): int, - } + data_schema=self.add_suggested_values_to_schema( + OPTIONS_SCHEMA, + self.config_entry.options, ), - errors=errors, ) diff --git a/homeassistant/components/yale_smart_alarm/strings.json b/homeassistant/components/yale_smart_alarm/strings.json index cc837d7b7d7..7f940e1139e 100644 --- a/homeassistant/components/yale_smart_alarm/strings.json +++ b/homeassistant/components/yale_smart_alarm/strings.json @@ -2,11 +2,13 @@ "config": { "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unique_id_exists": "Another config entry with this username already exist" }, "step": { "user": { @@ -21,6 +23,13 @@ "data": { "password": "[%key:common::config_flow::data::password%]" } + }, + "reconfigure": { + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]", + "area_id": "[%key:component::yale_smart_alarm::config::step::user::data::area_id%]" + } } } }, diff --git a/homeassistant/components/yalexs_ble/config_flow.py b/homeassistant/components/yalexs_ble/config_flow.py index 191ef5a20b2..6de74759686 100644 --- a/homeassistant/components/yalexs_ble/config_flow.py +++ b/homeassistant/components/yalexs_ble/config_flow.py @@ -312,16 +312,12 @@ class YalexsConfigFlow(ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> YaleXSBLEOptionsFlowHandler: """Get the options flow for this handler.""" - return YaleXSBLEOptionsFlowHandler(config_entry) + return YaleXSBLEOptionsFlowHandler() class YaleXSBLEOptionsFlowHandler(OptionsFlow): """Handle YaleXSBLE options.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize YaleXSBLE options flow.""" - self.entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -343,7 +339,9 @@ class YaleXSBLEOptionsFlowHandler(OptionsFlow): { vol.Optional( CONF_ALWAYS_CONNECTED, - 
default=self.entry.options.get(CONF_ALWAYS_CONNECTED, False), + default=self.config_entry.options.get( + CONF_ALWAYS_CONNECTED, False + ), ): bool, } ), diff --git a/homeassistant/components/yalexs_ble/manifest.json b/homeassistant/components/yalexs_ble/manifest.json index 1baeaeea63f..c3d1a3d97f1 100644 --- a/homeassistant/components/yalexs_ble/manifest.json +++ b/homeassistant/components/yalexs_ble/manifest.json @@ -12,5 +12,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/yalexs_ble", "iot_class": "local_push", - "requirements": ["yalexs-ble==2.5.0"] + "requirements": ["yalexs-ble==2.5.1"] } diff --git a/homeassistant/components/yamaha/manifest.json b/homeassistant/components/yamaha/manifest.json index 8e6ba0b8854..936028330a5 100644 --- a/homeassistant/components/yamaha/manifest.json +++ b/homeassistant/components/yamaha/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/yamaha", "iot_class": "local_polling", "loggers": ["rxv"], + "quality_scale": "legacy", "requirements": ["rxv==0.7.0"] } diff --git a/homeassistant/components/yandex_transport/manifest.json b/homeassistant/components/yandex_transport/manifest.json index 1d1219d5a95..ad31d495253 100644 --- a/homeassistant/components/yandex_transport/manifest.json +++ b/homeassistant/components/yandex_transport/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@rishatik92", "@devbis"], "documentation": "https://www.home-assistant.io/integrations/yandex_transport", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["aioymaps==1.2.5"] } diff --git a/homeassistant/components/yandextts/manifest.json b/homeassistant/components/yandextts/manifest.json index e1ab27272ef..418516a2d09 100644 --- a/homeassistant/components/yandextts/manifest.json +++ b/homeassistant/components/yandextts/manifest.json @@ -3,5 +3,6 @@ "name": "Yandex TTS", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/yandextts", - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "quality_scale": "legacy" } diff --git a/homeassistant/components/yeelight/config_flow.py b/homeassistant/components/yeelight/config_flow.py index 5438414ea61..7a3a0a2f100 100644 --- a/homeassistant/components/yeelight/config_flow.py +++ b/homeassistant/components/yeelight/config_flow.py @@ -58,9 +58,11 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): @staticmethod @callback - def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlowHandler: + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlowHandler: """Return the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() def __init__(self) -> None: """Initialize the config flow.""" @@ -296,16 +298,12 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(OptionsFlow): """Handle a option flow for Yeelight.""" - def __init__(self, config_entry: ConfigEntry) -> None: - """Initialize the option flow.""" - self._config_entry = config_entry - async def async_step_init( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" - data = self._config_entry.data - options = self._config_entry.options + data = self.config_entry.data + options = self.config_entry.options detected_model = data.get(CONF_DETECTED_MODEL) model = options[CONF_MODEL] or detected_model diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json 
index 8d0a2e31185..4da2e0cfc3e 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -16,7 +16,6 @@ }, "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], - "quality_scale": "platinum", "requirements": ["yeelight==0.7.14", "async-upnp-client==0.41.0"], "zeroconf": [ { diff --git a/homeassistant/components/yeelightsunflower/manifest.json b/homeassistant/components/yeelightsunflower/manifest.json index 67746e122cb..bfd185cfa72 100644 --- a/homeassistant/components/yeelightsunflower/manifest.json +++ b/homeassistant/components/yeelightsunflower/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/yeelightsunflower", "iot_class": "local_polling", "loggers": ["yeelightsunflower"], + "quality_scale": "legacy", "requirements": ["yeelightsunflower==0.0.10"] } diff --git a/homeassistant/components/yi/manifest.json b/homeassistant/components/yi/manifest.json index d8514b251cc..24b5aaad758 100644 --- a/homeassistant/components/yi/manifest.json +++ b/homeassistant/components/yi/manifest.json @@ -7,5 +7,6 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["aioftp"], + "quality_scale": "legacy", "requirements": ["aioftp==0.21.3"] } diff --git a/homeassistant/components/youtube/config_flow.py b/homeassistant/components/youtube/config_flow.py index 8d6c7753282..48336422585 100644 --- a/homeassistant/components/youtube/config_flow.py +++ b/homeassistant/components/youtube/config_flow.py @@ -15,7 +15,7 @@ from homeassistant.config_entries import ( SOURCE_REAUTH, ConfigEntry, ConfigFlowResult, - OptionsFlowWithConfigEntry, + OptionsFlow, ) from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.core import callback @@ -54,7 +54,7 @@ class OAuth2FlowHandler( config_entry: ConfigEntry, ) -> YouTubeOptionsFlowHandler: """Get the options flow for this handler.""" - return YouTubeOptionsFlowHandler(config_entry) + return YouTubeOptionsFlowHandler() @property def logger(self) -> logging.Logger: @@ -159,7 +159,7 @@ class OAuth2FlowHandler( ) -class YouTubeOptionsFlowHandler(OptionsFlowWithConfigEntry): +class YouTubeOptionsFlowHandler(OptionsFlow): """YouTube Options flow handler.""" async def async_step_init( @@ -194,6 +194,6 @@ class YouTubeOptionsFlowHandler(OptionsFlowWithConfigEntry): ), } ), - self.options, + self.config_entry.options, ), ) diff --git a/homeassistant/components/zabbix/manifest.json b/homeassistant/components/zabbix/manifest.json index d1823051636..9c7171bea46 100644 --- a/homeassistant/components/zabbix/manifest.json +++ b/homeassistant/components/zabbix/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/zabbix", "iot_class": "local_polling", "loggers": ["pyzabbix"], + "quality_scale": "legacy", "requirements": ["py-zabbix==1.1.7"] } diff --git a/homeassistant/components/zengge/manifest.json b/homeassistant/components/zengge/manifest.json index 5a4525079da..03d989c5f3b 100644 --- a/homeassistant/components/zengge/manifest.json +++ b/homeassistant/components/zengge/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/zengge", "iot_class": "local_polling", "loggers": ["zengge"], + "quality_scale": "legacy", "requirements": ["bluepy==1.3.0", "zengge==0.2"] } diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 98b09f1a251..9ad92bb4bc7 100644 --- 
a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.136.0"] + "requirements": ["zeroconf==0.136.2"] } diff --git a/homeassistant/components/zeroconf/usage.py b/homeassistant/components/zeroconf/usage.py index b9d51cd3c36..8ddfdbd592d 100644 --- a/homeassistant/components/zeroconf/usage.py +++ b/homeassistant/components/zeroconf/usage.py @@ -4,7 +4,7 @@ from typing import Any import zeroconf -from homeassistant.helpers.frame import report +from homeassistant.helpers.frame import ReportBehavior, report_usage from .models import HaZeroconf @@ -16,14 +16,14 @@ def install_multiple_zeroconf_catcher(hass_zc: HaZeroconf) -> None: """ def new_zeroconf_new(self: zeroconf.Zeroconf, *k: Any, **kw: Any) -> HaZeroconf: - report( + report_usage( ( "attempted to create another Zeroconf instance. Please use the shared" " Zeroconf via await" " homeassistant.components.zeroconf.async_get_instance(hass)" ), exclude_integrations={"zeroconf"}, - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) return hass_zc diff --git a/homeassistant/components/zestimate/manifest.json b/homeassistant/components/zestimate/manifest.json index a881adf503d..a787a9b1099 100644 --- a/homeassistant/components/zestimate/manifest.json +++ b/homeassistant/components/zestimate/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/zestimate", "iot_class": "cloud_polling", + "quality_scale": "legacy", "requirements": ["xmltodict==0.13.0"] } diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index 72ff66912d5..9c515c315b7 100644 --- a/homeassistant/components/zha/config_flow.py +++ b/homeassistant/components/zha/config_flow.py @@ -70,8 +70,17 @@ UPLOADED_BACKUP_FILE = "uploaded_backup_file" REPAIR_MY_URL = "https://my.home-assistant.io/redirect/repairs/" -DEFAULT_ZHA_ZEROCONF_PORT = 6638 -ESPHOME_API_PORT = 6053 +LEGACY_ZEROCONF_PORT = 6638 +LEGACY_ZEROCONF_ESPHOME_API_PORT = 6053 + +ZEROCONF_SERVICE_TYPE = "_zigbee-coordinator._tcp.local." +ZEROCONF_PROPERTIES_SCHEMA = vol.Schema( + { + vol.Required("radio_type"): vol.All(str, vol.In([t.name for t in RadioType])), + vol.Required("serial_number"): str, + }, + extra=vol.ALLOW_EXTRA, +) def _format_backup_choice( @@ -617,34 +626,65 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle zeroconf discovery.""" - # Hostname is format: livingroom.local. 
- local_name = discovery_info.hostname[:-1] - port = discovery_info.port or DEFAULT_ZHA_ZEROCONF_PORT + # Transform legacy zeroconf discovery into the new format + if discovery_info.type != ZEROCONF_SERVICE_TYPE: + port = discovery_info.port or LEGACY_ZEROCONF_PORT + name = discovery_info.name - # Fix incorrect port for older TubesZB devices - if "tube" in local_name and port == ESPHOME_API_PORT: - port = DEFAULT_ZHA_ZEROCONF_PORT + # Fix incorrect port for older TubesZB devices + if "tube" in name and port == LEGACY_ZEROCONF_ESPHOME_API_PORT: + port = LEGACY_ZEROCONF_PORT - if "radio_type" in discovery_info.properties: - self._radio_mgr.radio_type = self._radio_mgr.parse_radio_type( - discovery_info.properties["radio_type"] + # Determine the radio type + if "radio_type" in discovery_info.properties: + radio_type = discovery_info.properties["radio_type"] + elif "efr32" in name: + radio_type = RadioType.ezsp.name + elif "zigate" in name: + radio_type = RadioType.zigate.name + else: + radio_type = RadioType.znp.name + + fallback_title = name.split("._", 1)[0] + title = discovery_info.properties.get("name", fallback_title) + + discovery_info = zeroconf.ZeroconfServiceInfo( + ip_address=discovery_info.ip_address, + ip_addresses=discovery_info.ip_addresses, + port=port, + hostname=discovery_info.hostname, + type=ZEROCONF_SERVICE_TYPE, + name=f"{title}.{ZEROCONF_SERVICE_TYPE}", + properties={ + "radio_type": radio_type, + # To maintain backwards compatibility + "serial_number": discovery_info.hostname.removesuffix(".local."), + }, ) - elif "efr32" in local_name: - self._radio_mgr.radio_type = RadioType.ezsp - else: - self._radio_mgr.radio_type = RadioType.znp - node_name = local_name.removesuffix(".local") - device_path = f"socket://{discovery_info.host}:{port}" + try: + discovery_props = ZEROCONF_PROPERTIES_SCHEMA(discovery_info.properties) + except vol.Invalid: + return self.async_abort(reason="invalid_zeroconf_data") + + radio_type = self._radio_mgr.parse_radio_type(discovery_props["radio_type"]) + device_path = f"socket://{discovery_info.host}:{discovery_info.port}" + title = discovery_info.name.removesuffix(f".{ZEROCONF_SERVICE_TYPE}") await self._set_unique_id_and_update_ignored_flow( - unique_id=node_name, + unique_id=discovery_props["serial_number"], device_path=device_path, ) - self.context["title_placeholders"] = {CONF_NAME: node_name} - self._title = device_path + self.context["title_placeholders"] = {CONF_NAME: title} + self._title = title self._radio_mgr.device_path = device_path + self._radio_mgr.radio_type = radio_type + self._radio_mgr.device_settings = { + CONF_DEVICE_PATH: device_path, + CONF_BAUDRATE: 115200, + CONF_FLOW_CONTROL: None, + } return await self.async_step_confirm() @@ -682,8 +722,6 @@ class ZhaOptionsFlowHandler(BaseZhaFlow, OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" super().__init__() - self.config_entry = config_entry - self._radio_mgr.device_path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] self._radio_mgr.device_settings = config_entry.data[CONF_DEVICE] self._radio_mgr.radio_type = RadioType[config_entry.data[CONF_RADIO_TYPE]] diff --git a/homeassistant/components/zha/icons.json b/homeassistant/components/zha/icons.json index 5b3b85ced39..6ba4aab18ab 100644 --- a/homeassistant/components/zha/icons.json +++ b/homeassistant/components/zha/icons.json @@ -118,6 +118,12 @@ }, "exercise_day_of_week": { "default": "mdi:wrench-clock" + }, + "off_led_color": { + "default": "mdi:palette-outline" + }, + 
"on_led_color": { + "default": "mdi:palette" } }, "sensor": { @@ -206,6 +212,9 @@ }, "use_load_balancing": { "default": "mdi:scale-balance" + }, + "double_up_full": { + "default": "mdi:gesture-double-tap" } } }, diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 8736dc89549..1fbbd83bb9c 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.39"], + "requirements": ["universal-silabs-flasher==0.0.25", "zha==0.0.41"], "usb": [ { "vid": "10C4", @@ -130,6 +130,10 @@ { "type": "_czc._tcp.local.", "name": "czc*" + }, + { + "type": "_zigbee-coordinator._tcp.local.", + "name": "*" } ] } diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index d21cd1c5042..4706e204872 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -76,7 +76,8 @@ "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", "not_zha_device": "This device is not a zha device", "usb_probe_failed": "Failed to probe the usb device", - "wrong_firmware_installed": "Your device is running the wrong firmware and cannot be used with ZHA until the correct firmware is installed. [A repair has been created]({repair_url}) with more information and instructions for how to fix this." + "wrong_firmware_installed": "Your device is running the wrong firmware and cannot be used with ZHA until the correct firmware is installed. [A repair has been created]({repair_url}) with more information and instructions for how to fix this.", + "invalid_zeroconf_data": "The coordinator has invalid zeroconf service info and cannot be identified by ZHA" } }, "options": { @@ -599,6 +600,12 @@ }, "self_test": { "name": "Self-test" + }, + "reset_summation_delivered": { + "name": "Reset summation delivered" + }, + "restart_device": { + "name": "Restart device" } }, "climate": { @@ -791,6 +798,30 @@ }, "valve_countdown_2": { "name": "Irrigation time 2" + }, + "on_led_intensity": { + "name": "On LED intensity" + }, + "off_led_intensity": { + "name": "Off LED intensity" + }, + "frost_protection_temperature": { + "name": "Frost protection temperature" + }, + "valve_opening_degree": { + "name": "Valve opening degree" + }, + "valve_closing_degree": { + "name": "Valve closing degree" + }, + "siren_time": { + "name": "Siren time" + }, + "timer_time_left": { + "name": "Timer time left" + }, + "approach_distance": { + "name": "Approach distance" } }, "select": { @@ -886,6 +917,15 @@ }, "weather_delay": { "name": "Weather delay" + }, + "on_led_color": { + "name": "On LED color" + }, + "off_led_color": { + "name": "Off LED color" + }, + "external_trigger_mode": { + "name": "External trigger mode" } }, "sensor": { @@ -1083,6 +1123,15 @@ }, "valve_status_2": { "name": "Status 2" + }, + "timer_state": { + "name": "Timer state" + }, + "last_valve_open_duration": { + "name": "Last valve open duration" + }, + "motion_distance": { + "name": "Motion distance" } }, "switch": { @@ -1193,6 +1242,21 @@ }, "valve_on_off_2": { "name": "Valve 2" + }, + "double_up_full": { + "name": "Double tap on - full" + }, + "open_window": { + "name": "Open window" + }, + "turbo_mode": { + "name": "Turbo mode" + }, + "detach_relay": { + "name": "Detach relay" + }, + "enable_siren": { + "name": "Enable siren" } } } diff --git 
a/homeassistant/components/zha/update.py b/homeassistant/components/zha/update.py index 18b8ed1cca5..cb5c160e7b3 100644 --- a/homeassistant/components/zha/update.py +++ b/homeassistant/components/zha/update.py @@ -36,6 +36,18 @@ from .helpers import ( _LOGGER = logging.getLogger(__name__) +OTA_MESSAGE_BATTERY_POWERED = ( + "Battery powered devices can sometimes take multiple hours to update and you may" + " need to wake the device for the update to begin." +) + +ZHA_DOCS_NETWORK_RELIABILITY = "https://www.home-assistant.io/integrations/zha/#zigbee-interference-avoidance-and-network-rangecoverage-optimization" +OTA_MESSAGE_RELIABILITY = ( + "If you are having issues updating a specific device, make sure that you've" + f" eliminated [common environmental issues]({ZHA_DOCS_NETWORK_RELIABILITY}) that" + " could be affecting network reliability. OTA updates require a reliable network." +) + async def async_setup_entry( hass: HomeAssistant, @@ -149,7 +161,21 @@ class ZHAFirmwareUpdateEntity( This is suitable for a long changelog that does not fit in the release_summary property. The returned string can contain markdown. """ - return self.entity_data.entity.release_notes + + if self.entity_data.device_proxy.device.is_mains_powered: + header = ( + "" + f"{OTA_MESSAGE_RELIABILITY}" + "" + ) + else: + header = ( + "" + f"{OTA_MESSAGE_BATTERY_POWERED} {OTA_MESSAGE_RELIABILITY}" + "" + ) + + return f"{header}\n\n{self.entity_data.entity.release_notes or ''}" @property def release_url(self) -> str | None: diff --git a/homeassistant/components/zhong_hong/manifest.json b/homeassistant/components/zhong_hong/manifest.json index 9da0e9ab72b..3569466fb0a 100644 --- a/homeassistant/components/zhong_hong/manifest.json +++ b/homeassistant/components/zhong_hong/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/zhong_hong", "iot_class": "local_push", "loggers": ["zhong_hong_hvac"], + "quality_scale": "legacy", "requirements": ["zhong-hong-hvac==1.0.13"] } diff --git a/homeassistant/components/ziggo_mediabox_xl/manifest.json b/homeassistant/components/ziggo_mediabox_xl/manifest.json index 81aac99e58d..1ae09c9927d 100644 --- a/homeassistant/components/ziggo_mediabox_xl/manifest.json +++ b/homeassistant/components/ziggo_mediabox_xl/manifest.json @@ -4,5 +4,6 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ziggo_mediabox_xl", "iot_class": "local_polling", + "quality_scale": "legacy", "requirements": ["ziggo-mediabox-xl==1.1.0"] } diff --git a/homeassistant/components/zodiac/manifest.json b/homeassistant/components/zodiac/manifest.json index 88f3d7fadef..f641826ca7b 100644 --- a/homeassistant/components/zodiac/manifest.json +++ b/homeassistant/components/zodiac/manifest.json @@ -4,6 +4,5 @@ "codeowners": ["@JulienTant"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/zodiac", - "iot_class": "calculated", - "quality_scale": "silver" + "iot_class": "calculated" } diff --git a/homeassistant/components/zoneminder/manifest.json b/homeassistant/components/zoneminder/manifest.json index f441a800555..2501aba2cf1 100644 --- a/homeassistant/components/zoneminder/manifest.json +++ b/homeassistant/components/zoneminder/manifest.json @@ -5,5 +5,6 @@ "documentation": "https://www.home-assistant.io/integrations/zoneminder", "iot_class": "local_polling", "loggers": ["zoneminder"], + "quality_scale": "legacy", "requirements": ["zm-py==0.5.4"] } diff --git a/homeassistant/components/zwave_js/__init__.py 
b/homeassistant/components/zwave_js/__init__.py index 06b8214d941..c8503b1f4c6 100644 --- a/homeassistant/components/zwave_js/__init__.py +++ b/homeassistant/components/zwave_js/__init__.py @@ -9,6 +9,7 @@ import logging from typing import Any from awesomeversion import AwesomeVersion +import voluptuous as vol from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import CommandClass, RemoveNodeReason from zwave_js_server.exceptions import BaseZwaveJSServerError, InvalidServerVersion @@ -87,6 +88,7 @@ from .const import ( CONF_ADDON_S2_AUTHENTICATED_KEY, CONF_ADDON_S2_UNAUTHENTICATED_KEY, CONF_DATA_COLLECTION_OPTED_IN, + CONF_INSTALLER_MODE, CONF_INTEGRATION_CREATED_ADDON, CONF_LR_S2_ACCESS_CONTROL_KEY, CONF_LR_S2_AUTHENTICATED_KEY, @@ -132,12 +134,21 @@ DATA_CLIENT_LISTEN_TASK = "client_listen_task" DATA_DRIVER_EVENTS = "driver_events" DATA_START_CLIENT_TASK = "start_client_task" -CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) +CONFIG_SCHEMA = vol.Schema( + { + DOMAIN: vol.Schema( + { + vol.Optional(CONF_INSTALLER_MODE, default=False): cv.boolean, + } + ) + }, + extra=vol.ALLOW_EXTRA, +) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Z-Wave JS component.""" - hass.data[DOMAIN] = {} + hass.data[DOMAIN] = config.get(DOMAIN, {}) for entry in hass.config_entries.async_entries(DOMAIN): if not isinstance(entry.unique_id, str): hass.config_entries.async_update_entry( diff --git a/homeassistant/components/zwave_js/api.py b/homeassistant/components/zwave_js/api.py index 6eb54afb51a..ff0459ddbdd 100644 --- a/homeassistant/components/zwave_js/api.py +++ b/homeassistant/components/zwave_js/api.py @@ -43,6 +43,7 @@ from zwave_js_server.model.controller.firmware import ( ControllerFirmwareUpdateResult, ) from zwave_js_server.model.driver import Driver +from zwave_js_server.model.endpoint import Endpoint from zwave_js_server.model.log_config import LogConfig from zwave_js_server.model.log_message import LogMessage from zwave_js_server.model.node import Node, NodeStatistics @@ -55,6 +56,7 @@ from zwave_js_server.model.utils import ( async_parse_qr_code_string, async_try_parse_dsk_from_qr_code_string, ) +from zwave_js_server.model.value import ConfigurationValueFormat from zwave_js_server.util.node import async_set_config_parameter from homeassistant.components import websocket_api @@ -75,8 +77,15 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from .config_validation import BITMASK_SCHEMA from .const import ( + ATTR_COMMAND_CLASS, + ATTR_ENDPOINT, + ATTR_METHOD_NAME, + ATTR_PARAMETERS, + ATTR_WAIT_FOR_RESULT, CONF_DATA_COLLECTION_OPTED_IN, + CONF_INSTALLER_MODE, DATA_CLIENT, + DOMAIN, EVENT_DEVICE_ADDED_TO_REGISTRY, USER_AGENT, ) @@ -100,6 +109,8 @@ PROPERTY = "property" PROPERTY_KEY = "property_key" ENDPOINT = "endpoint" VALUE = "value" +VALUE_SIZE = "value_size" +VALUE_FORMAT = "value_format" # constants for log config commands CONFIG = "config" @@ -410,6 +421,8 @@ def async_register_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_rebuild_node_routes) websocket_api.async_register_command(hass, websocket_set_config_parameter) websocket_api.async_register_command(hass, websocket_get_config_parameters) + websocket_api.async_register_command(hass, websocket_get_raw_config_parameter) + websocket_api.async_register_command(hass, websocket_set_raw_config_parameter) websocket_api.async_register_command(hass, websocket_subscribe_log_updates) 
websocket_api.async_register_command(hass, websocket_update_log_config) websocket_api.async_register_command(hass, websocket_get_log_config) @@ -437,6 +450,9 @@ def async_register_api(hass: HomeAssistant) -> None: ) websocket_api.async_register_command(hass, websocket_subscribe_node_statistics) websocket_api.async_register_command(hass, websocket_hard_reset_controller) + websocket_api.async_register_command(hass, websocket_node_capabilities) + websocket_api.async_register_command(hass, websocket_invoke_cc_api) + websocket_api.async_register_command(hass, websocket_get_integration_settings) hass.http.register_view(FirmwareUploadView(dr.async_get(hass))) @@ -1752,6 +1768,72 @@ async def websocket_get_config_parameters( ) +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/set_raw_config_parameter", + vol.Required(DEVICE_ID): str, + vol.Required(PROPERTY): int, + vol.Required(VALUE): int, + vol.Required(VALUE_SIZE): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), + vol.Required(VALUE_FORMAT): vol.Coerce(ConfigurationValueFormat), + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_set_raw_config_parameter( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Set a custom config parameter value for a Z-Wave node.""" + result = await node.async_set_raw_config_parameter_value( + msg[VALUE], + msg[PROPERTY], + value_size=msg[VALUE_SIZE], + value_format=msg[VALUE_FORMAT], + ) + + connection.send_result( + msg[ID], + { + STATUS: result.status, + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/get_raw_config_parameter", + vol.Required(DEVICE_ID): str, + vol.Required(PROPERTY): int, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_get_raw_config_parameter( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Get a custom config parameter value for a Z-Wave node.""" + value = await node.async_get_raw_config_parameter_value( + msg[PROPERTY], + ) + + connection.send_result( + msg[ID], + { + VALUE: value, + }, + ) + + def filename_is_present_if_logging_to_file(obj: dict) -> dict: """Validate that filename is provided if log_to_file is True.""" if obj.get(LOG_TO_FILE, False) and FILENAME not in obj: @@ -2525,3 +2607,103 @@ async def websocket_hard_reset_controller( ) ] await driver.async_hard_reset() + + +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/node_capabilities", + vol.Required(DEVICE_ID): str, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_node_capabilities( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Get node endpoints with their support command classes.""" + # consumers expect snake_case at the moment + # remove that addition when consumers are updated + connection.send_result( + msg[ID], + { + idx: [ + command_class.to_dict() | {"is_secure": command_class.is_secure} + for command_class in endpoint.command_classes + ] + for idx, endpoint in node.endpoints.items() + }, + ) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/invoke_cc_api", + vol.Required(DEVICE_ID): str, + vol.Required(ATTR_COMMAND_CLASS): vol.All( + vol.Coerce(int), vol.Coerce(CommandClass) + ), + 
vol.Optional(ATTR_ENDPOINT): vol.Coerce(int), + vol.Required(ATTR_METHOD_NAME): cv.string, + vol.Required(ATTR_PARAMETERS): list, + vol.Optional(ATTR_WAIT_FOR_RESULT): cv.boolean, + } +) +@websocket_api.async_response +@async_handle_failed_command +@async_get_node +async def websocket_invoke_cc_api( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], + node: Node, +) -> None: + """Call invokeCCAPI on the node or provided endpoint.""" + command_class: CommandClass = msg[ATTR_COMMAND_CLASS] + method_name: str = msg[ATTR_METHOD_NAME] + parameters: list[Any] = msg[ATTR_PARAMETERS] + + node_or_endpoint: Node | Endpoint = node + if (endpoint := msg.get(ATTR_ENDPOINT)) is not None: + node_or_endpoint = node.endpoints[endpoint] + + try: + result = await node_or_endpoint.async_invoke_cc_api( + command_class, + method_name, + *parameters, + wait_for_result=msg.get(ATTR_WAIT_FOR_RESULT, False), + ) + except BaseZwaveJSServerError as err: + connection.send_error(msg[ID], err.__class__.__name__, str(err)) + else: + connection.send_result( + msg[ID], + result, + ) + + +@callback +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required(TYPE): "zwave_js/get_integration_settings", + } +) +def websocket_get_integration_settings( + hass: HomeAssistant, + connection: ActiveConnection, + msg: dict[str, Any], +) -> None: + """Get Z-Wave JS integration wide configuration.""" + connection.send_result( + msg[ID], + { + # list explicitly to avoid leaking other keys and to set default + CONF_INSTALLER_MODE: hass.data[DOMAIN].get(CONF_INSTALLER_MODE, False), + }, + ) diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index 7eb887c8dcf..36f208e18d5 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -366,7 +366,7 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): config_entry: ConfigEntry, ) -> OptionsFlowHandler: """Return the options flow.""" - return OptionsFlowHandler(config_entry) + return OptionsFlowHandler() async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -725,10 +725,9 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): class OptionsFlowHandler(BaseZwaveJSFlow, OptionsFlow): """Handle an options flow for Z-Wave JS.""" - def __init__(self, config_entry: ConfigEntry) -> None: + def __init__(self) -> None: """Set up the options flow.""" super().__init__() - self.config_entry = config_entry self.original_addon_config: dict[str, Any] | None = None self.revert_reason: str | None = None diff --git a/homeassistant/components/zwave_js/const.py b/homeassistant/components/zwave_js/const.py index fd81cd7e7de..16cf6f748bb 100644 --- a/homeassistant/components/zwave_js/const.py +++ b/homeassistant/components/zwave_js/const.py @@ -25,6 +25,7 @@ CONF_ADDON_S2_AUTHENTICATED_KEY = "s2_authenticated_key" CONF_ADDON_S2_UNAUTHENTICATED_KEY = "s2_unauthenticated_key" CONF_ADDON_LR_S2_ACCESS_CONTROL_KEY = "lr_s2_access_control_key" CONF_ADDON_LR_S2_AUTHENTICATED_KEY = "lr_s2_authenticated_key" +CONF_INSTALLER_MODE = "installer_mode" CONF_INTEGRATION_CREATED_ADDON = "integration_created_addon" CONF_NETWORK_KEY = "network_key" CONF_S0_LEGACY_KEY = "s0_legacy_key" diff --git a/homeassistant/components/zwave_js/manifest.json b/homeassistant/components/zwave_js/manifest.json index a37b3560526..ad435b97cbc 100644 --- a/homeassistant/components/zwave_js/manifest.json +++ 
b/homeassistant/components/zwave_js/manifest.json @@ -9,8 +9,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["zwave_js_server"], - "quality_scale": "platinum", - "requirements": ["pyserial==3.5", "zwave-js-server-python==0.58.1"], + "requirements": ["pyserial==3.5", "zwave-js-server-python==0.59.1"], "usb": [ { "vid": "0658", diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index 969a235bb41..d1cb66ceafc 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -529,8 +529,15 @@ class ZWaveServices: for node_or_endpoint, result in get_valid_responses_from_results( nodes_or_endpoints_list, _results ): - zwave_value = result[0] - cmd_status = result[1] + if value_size is None: + # async_set_config_parameter still returns (Value, SetConfigParameterResult) + zwave_value = result[0] + cmd_status = result[1] + else: + # async_set_raw_config_parameter_value now returns just SetConfigParameterResult + cmd_status = result + zwave_value = f"parameter {property_or_property_name}" + if cmd_status.status == CommandStatus.ACCEPTED: msg = "Set configuration parameter %s on Node %s with value %s" else: diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index f5063fdfd93..acf6e9a0665 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -51,16 +51,6 @@ set_lock_configuration: min: 0 max: 65535 unit_of_measurement: sec - outside_handles_can_open_door_configuration: - required: false - example: [true, true, true, false] - selector: - object: - inside_handles_can_open_door_configuration: - required: false - example: [true, true, true, false] - selector: - object: auto_relock_time: required: false example: 1 diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index ca7d5153e6e..28789bbf9f4 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -523,10 +523,6 @@ "description": "Duration in seconds the latch stays retracted.", "name": "Hold and release time" }, - "inside_handles_can_open_door_configuration": { - "description": "A list of four booleans which indicate which inside handles can open the door.", - "name": "Inside handles can open door configuration" - }, "lock_timeout": { "description": "Seconds until lock mode times out. 
Should only be used if operation type is `timed`.", "name": "Lock timeout" @@ -535,10 +531,6 @@ "description": "The operation type of the lock.", "name": "Operation Type" }, - "outside_handles_can_open_door_configuration": { - "description": "A list of four booleans which indicate which outside handles can open the door.", - "name": "Outside handles can open door configuration" - }, "twist_assist": { "description": "Enable Twist Assist.", "name": "Twist assist" diff --git a/homeassistant/config.py b/homeassistant/config.py index cab4d0c7aff..e9089f27662 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -814,6 +814,8 @@ def _get_log_message_and_stack_print_pref( "domain": domain, "error": str(exception), "p_name": platform_path, + "config_file": "?", + "line": "?", } show_stack_trace: bool | None = _CONFIG_LOG_SHOW_STACK_TRACE.get( diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index 2781dea529e..ade4cd855ca 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -54,7 +54,12 @@ from .exceptions import ( ConfigEntryNotReady, HomeAssistantError, ) -from .helpers import device_registry, entity_registry, issue_registry as ir, storage +from .helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, + storage, +) from .helpers.debounce import Debouncer from .helpers.discovery_flow import DiscoveryKey from .helpers.dispatcher import SignalType, async_dispatcher_send_internal @@ -63,7 +68,7 @@ from .helpers.event import ( RANDOM_MICROSECOND_MIN, async_call_later, ) -from .helpers.frame import report +from .helpers.frame import ReportBehavior, report_usage from .helpers.json import json_bytes, json_bytes_sorted, json_fragment from .helpers.typing import UNDEFINED, ConfigType, DiscoveryInfoType, UndefinedType from .loader import async_suggest_report_issue @@ -1191,14 +1196,13 @@ class FlowCancelledError(Exception): def _report_non_awaited_platform_forwards(entry: ConfigEntry, what: str) -> None: """Report non awaited platform forwards.""" - report( + report_usage( f"calls {what} for integration {entry.domain} with " f"title: {entry.title} and entry_id: {entry.entry_id}, " f"during setup without awaiting {what}, which can cause " - "the setup lock to be released before the setup is done. 
" - "This will stop working in Home Assistant 2025.1", - error_if_integration=False, - error_if_core=False, + "the setup lock to be released before the setup is done", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.1", ) @@ -1260,11 +1264,21 @@ class ConfigEntriesFlowManager( if not context or "source" not in context: raise KeyError("Context not set or doesn't have a source set") + # reauth/reconfigure flows should be linked to a config entry + if (source := context["source"]) in { + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + } and "entry_id" not in context: + # Deprecated in 2024.12, should fail in 2025.12 + report_usage( + f"initialises a {source} flow without a link to the config entry", + breaks_in_ha_version="2025.12", + ) + flow_id = ulid_util.ulid_now() # Avoid starting a config flow on an integration that only supports # a single config entry, but which already has an entry - source = context["source"] if ( source not in {SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_RECONFIGURE} and ( @@ -1286,7 +1300,7 @@ class ConfigEntriesFlowManager( loop = self.hass.loop - if context["source"] == SOURCE_IMPORT: + if source == SOURCE_IMPORT: self._pending_import_flows[handler][flow_id] = loop.create_future() cancel_init_future = loop.create_future() @@ -1473,8 +1487,6 @@ class ConfigEntriesFlowManager( ) # Unload the entry before setting up the new one. - # We will remove it only after the other one is set up, - # so that device customizations are not getting lost. if existing_entry is not None and existing_entry.state.recoverable: await self.config_entries.async_unload(existing_entry.entry_id) @@ -1496,10 +1508,16 @@ class ConfigEntriesFlowManager( version=result["version"], ) + if existing_entry is not None: + # Unload and remove the existing entry, but don't clean up devices and + # entities until the new entry is added + await self.config_entries._async_remove(existing_entry.entry_id) # noqa: SLF001 await self.config_entries.async_add(entry) if existing_entry is not None: - await self.config_entries.async_remove(existing_entry.entry_id) + # Clean up devices and entities belonging to the existing entry + # which are not present in the new entry + self.config_entries._async_clean_up(existing_entry) # noqa: SLF001 result["result"] = entry return result @@ -1815,6 +1833,16 @@ class ConfigEntries: """Return entry with matching entry_id.""" return self._entries.data.get(entry_id) + @callback + def async_get_known_entry(self, entry_id: str) -> ConfigEntry: + """Return entry with matching entry_id. + + Raises UnknownEntry if entry is not found. 
+ """ + if (entry := self.async_get_entry(entry_id)) is None: + raise UnknownEntry + return entry + @callback def async_entry_ids(self) -> list[str]: """Return entry ids.""" @@ -1889,9 +1917,22 @@ class ConfigEntries: self._async_schedule_save() async def async_remove(self, entry_id: str) -> dict[str, Any]: - """Remove an entry.""" - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + """Remove, unload and clean up after an entry.""" + unload_success, entry = await self._async_remove(entry_id) + self._async_clean_up(entry) + + for discovery_domain in entry.discovery_keys: + async_dispatcher_send_internal( + self.hass, + signal_discovered_config_entry_removed(discovery_domain), + entry, + ) + + return {"require_restart": not unload_success} + + async def _async_remove(self, entry_id: str) -> tuple[bool, ConfigEntry]: + """Remove and unload an entry.""" + entry = self.async_get_known_entry(entry_id) async with entry.setup_lock: if not entry.state.recoverable: @@ -1905,8 +1946,15 @@ class ConfigEntries: self.async_update_issues() self._async_schedule_save() - dev_reg = device_registry.async_get(self.hass) - ent_reg = entity_registry.async_get(self.hass) + return (unload_success, entry) + + @callback + def _async_clean_up(self, entry: ConfigEntry) -> None: + """Clean up after an entry.""" + entry_id = entry.entry_id + + dev_reg = dr.async_get(self.hass) + ent_reg = er.async_get(self.hass) dev_reg.async_clear_config_entry(entry_id) ent_reg.async_clear_config_entry(entry_id) @@ -1923,13 +1971,6 @@ class ConfigEntries: ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) self._async_dispatch(ConfigEntryChange.REMOVED, entry) - for discovery_domain in entry.discovery_keys: - async_dispatcher_send_internal( - self.hass, - signal_discovered_config_entry_removed(discovery_domain), - entry, - ) - return {"require_restart": not unload_success} @callback def _async_shutdown(self, event: Event) -> None: @@ -1984,8 +2025,7 @@ class ConfigEntries: Return True if entry has been successfully loaded. """ - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) if entry.state is not ConfigEntryState.NOT_LOADED: raise OperationNotAllowed( @@ -2016,8 +2056,7 @@ class ConfigEntries: async def async_unload(self, entry_id: str, _lock: bool = True) -> bool: """Unload a config entry.""" - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) if not entry.state.recoverable: raise OperationNotAllowed( @@ -2035,8 +2074,7 @@ class ConfigEntries: @callback def async_schedule_reload(self, entry_id: str) -> None: """Schedule a config entry to be reloaded.""" - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) entry.async_cancel_retry_setup() self.hass.async_create_task( self.async_reload(entry_id), @@ -2054,8 +2092,7 @@ class ConfigEntries: If an entry was not loaded, will just load. """ - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) # Cancel the setup retry task before waiting for the # reload lock to reduce the chance of concurrent reload @@ -2085,8 +2122,7 @@ class ConfigEntries: If disabled_by is changed, the config entry will be reloaded. 
""" - if (entry := self.async_get_entry(entry_id)) is None: - raise UnknownEntry + entry = self.async_get_known_entry(entry_id) _validate_item(disabled_by=disabled_by) if entry.disabled_by is disabled_by: @@ -2095,21 +2131,21 @@ class ConfigEntries: entry.disabled_by = disabled_by self._async_schedule_save() - dev_reg = device_registry.async_get(self.hass) - ent_reg = entity_registry.async_get(self.hass) + dev_reg = dr.async_get(self.hass) + ent_reg = er.async_get(self.hass) if not entry.disabled_by: # The config entry will no longer be disabled, enable devices and entities - device_registry.async_config_entry_disabled_by_changed(dev_reg, entry) - entity_registry.async_config_entry_disabled_by_changed(ent_reg, entry) + dr.async_config_entry_disabled_by_changed(dev_reg, entry) + er.async_config_entry_disabled_by_changed(ent_reg, entry) # Load or unload the config entry reload_result = await self.async_reload(entry_id) if entry.disabled_by: # The config entry has been disabled, disable devices and entities - device_registry.async_config_entry_disabled_by_changed(dev_reg, entry) - entity_registry.async_config_entry_disabled_by_changed(ent_reg, entry) + dr.async_config_entry_disabled_by_changed(dev_reg, entry) + er.async_config_entry_disabled_by_changed(ent_reg, entry) return reload_result @@ -2292,14 +2328,13 @@ class ConfigEntries: multiple platforms at once and is more efficient since it does not require a separate import executor job for each platform. """ - report( + report_usage( "calls async_forward_entry_setup for " f"integration, {entry.domain} with title: {entry.title} " - f"and entry_id: {entry.entry_id}, which is deprecated and " - "will stop working in Home Assistant 2025.6, " + f"and entry_id: {entry.entry_id}, which is deprecated, " "await async_forward_entry_setups instead", - error_if_core=False, - error_if_integration=False, + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.6", ) if not entry.setup_lock.locked(): async with entry.setup_lock: @@ -2861,18 +2896,12 @@ class ConfigFlow(ConfigEntryBaseFlow): ) -> ConfigFlowResult: """Finish config flow and create a config entry.""" if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}: - report_issue = async_suggest_report_issue( - self.hass, integration_domain=self.handler - ) - _LOGGER.warning( - ( - "Detected %s config flow creating a new entry, " - "when it is expected to update an existing entry and abort. 
" - "This will stop working in %s, please %s" - ), - self.source, - "2025.11", - report_issue, + report_usage( + f"creates a new entry in a '{self.source}' flow, " + "when it is expected to update an existing entry and abort", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.11", + integration_domain=self.handler, ) result = super().async_create_entry( title=title, @@ -2941,7 +2970,7 @@ class ConfigFlow(ConfigEntryBaseFlow): step_id: str | None = None, data_schema: vol.Schema | None = None, errors: dict[str, str] | None = None, - description_placeholders: Mapping[str, str | None] | None = None, + description_placeholders: Mapping[str, str] | None = None, last_step: bool | None = None, preview: str | None = None, ) -> ConfigFlowResult: @@ -2980,9 +3009,7 @@ class ConfigFlow(ConfigEntryBaseFlow): @callback def _get_reauth_entry(self) -> ConfigEntry: """Return the reauth config entry linked to the current context.""" - if entry := self.hass.config_entries.async_get_entry(self._reauth_entry_id): - return entry - raise UnknownEntry + return self.hass.config_entries.async_get_known_entry(self._reauth_entry_id) @property def _reconfigure_entry_id(self) -> str: @@ -2994,11 +3021,9 @@ class ConfigFlow(ConfigEntryBaseFlow): @callback def _get_reconfigure_entry(self) -> ConfigEntry: """Return the reconfigure config entry linked to the current context.""" - if entry := self.hass.config_entries.async_get_entry( + return self.hass.config_entries.async_get_known_entry( self._reconfigure_entry_id - ): - return entry - raise UnknownEntry + ) class OptionsFlowManager( @@ -3010,11 +3035,7 @@ class OptionsFlowManager( def _async_get_config_entry(self, config_entry_id: str) -> ConfigEntry: """Return config entry or raise if not found.""" - entry = self.hass.config_entries.async_get_entry(config_entry_id) - if entry is None: - raise UnknownEntry(config_entry_id) - - return entry + return self.hass.config_entries.async_get_known_entry(config_entry_id) async def async_create_flow( self, @@ -3048,9 +3069,8 @@ class OptionsFlowManager( if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: return result - entry = self.hass.config_entries.async_get_entry(flow.handler) - if entry is None: - raise UnknownEntry(flow.handler) + entry = self.hass.config_entries.async_get_known_entry(flow.handler) + if result["data"] is not None: self.hass.config_entries.async_update_entry(entry, options=result["data"]) @@ -3073,6 +3093,9 @@ class OptionsFlow(ConfigEntryBaseFlow): handler: str + _config_entry: ConfigEntry + """For compatibility only - to be removed in 2025.12""" + @callback def _async_abort_entries_match( self, match_dict: dict[str, Any] | None = None @@ -3081,32 +3104,76 @@ class OptionsFlow(ConfigEntryBaseFlow): Requires `already_configured` in strings.json in user visible flows. """ - - config_entry = cast( - ConfigEntry, self.hass.config_entries.async_get_entry(self.handler) - ) _async_abort_entries_match( [ entry - for entry in self.hass.config_entries.async_entries(config_entry.domain) - if entry is not config_entry and entry.source != SOURCE_IGNORE + for entry in self.hass.config_entries.async_entries( + self.config_entry.domain + ) + if entry is not self.config_entry and entry.source != SOURCE_IGNORE ], match_dict, ) + @property + def _config_entry_id(self) -> str: + """Return config entry id. + + Please note that this is not available inside `__init__` method, and + can only be referenced after initialisation. 
+ """ + # This is the same as handler, but that's an implementation detail + if self.handler is None: + raise ValueError( + "The config entry id is not available during initialisation" + ) + return self.handler + + @property + def config_entry(self) -> ConfigEntry: + """Return the config entry linked to the current options flow. + + Please note that this is not available inside `__init__` method, and + can only be referenced after initialisation. + """ + # For compatibility only - to be removed in 2025.12 + if hasattr(self, "_config_entry"): + return self._config_entry + + if self.hass is None: + raise ValueError("The config entry is not available during initialisation") + return self.hass.config_entries.async_get_known_entry(self._config_entry_id) + + @config_entry.setter + def config_entry(self, value: ConfigEntry) -> None: + """Set the config entry value.""" + report_usage( + "sets option flow config_entry explicitly, which is deprecated", + core_behavior=ReportBehavior.ERROR, + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.12", + ) + self._config_entry = value + class OptionsFlowWithConfigEntry(OptionsFlow): - """Base class for options flows with config entry and options.""" + """Base class for options flows with config entry and options. + + This class is being phased out, and should not be referenced in new code. + It is kept only for backward compatibility, and only for custom integrations. + """ def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" self._config_entry = config_entry self._options = deepcopy(dict(config_entry.options)) - - @property - def config_entry(self) -> ConfigEntry: - """Return the config entry.""" - return self._config_entry + report_usage( + "inherits from OptionsFlowWithConfigEntry", + core_behavior=ReportBehavior.ERROR, + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.IGNORE, + ) @property def options(self) -> dict[str, Any]: @@ -3120,7 +3187,7 @@ class EntityRegistryDisabledHandler: def __init__(self, hass: HomeAssistant) -> None: """Initialize the handler.""" self.hass = hass - self.registry: entity_registry.EntityRegistry | None = None + self.registry: er.EntityRegistry | None = None self.changed: set[str] = set() self._remove_call_later: Callable[[], None] | None = None @@ -3128,18 +3195,18 @@ class EntityRegistryDisabledHandler: def async_setup(self) -> None: """Set up the disable handler.""" self.hass.bus.async_listen( - entity_registry.EVENT_ENTITY_REGISTRY_UPDATED, + er.EVENT_ENTITY_REGISTRY_UPDATED, self._handle_entry_updated, event_filter=_handle_entry_updated_filter, ) @callback def _handle_entry_updated( - self, event: Event[entity_registry.EventEntityRegistryUpdatedData] + self, event: Event[er.EventEntityRegistryUpdatedData] ) -> None: """Handle entity registry entry update.""" if self.registry is None: - self.registry = entity_registry.async_get(self.hass) + self.registry = er.async_get(self.hass) entity_entry = self.registry.async_get(event.data["entity_id"]) @@ -3154,10 +3221,9 @@ class EntityRegistryDisabledHandler: ): return - config_entry = self.hass.config_entries.async_get_entry( + config_entry = self.hass.config_entries.async_get_known_entry( entity_entry.config_entry_id ) - assert config_entry is not None if config_entry.entry_id not in self.changed and config_entry.supports_unload: self.changed.add(config_entry.entry_id) @@ -3197,7 +3263,7 @@ class 
EntityRegistryDisabledHandler: @callback def _handle_entry_updated_filter( - event_data: entity_registry.EventEntityRegistryUpdatedData, + event_data: er.EventEntityRegistryUpdatedData, ) -> bool: """Handle entity registry entry update filter. @@ -3207,8 +3273,7 @@ def _handle_entry_updated_filter( return not ( event_data["action"] != "update" or "disabled_by" not in event_data["changes"] - or event_data["changes"]["disabled_by"] - is entity_registry.RegistryEntryDisabler.CONFIG_ENTRY + or event_data["changes"]["disabled_by"] is er.RegistryEntryDisabler.CONFIG_ENTRY ) diff --git a/homeassistant/const.py b/homeassistant/const.py index fd2a55c0a64..c41ab6ec382 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,14 +24,14 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 11 -PATCH_VERSION: Final = "3" +MINOR_VERSION: Final = 12 +PATCH_VERSION: Final = "0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) -REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) +REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0) # Truthy date string triggers showing related deprecation warning messages. -REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "" +REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "2025.2" # Format for platform files PLATFORM_FORMAT: Final = "{platform}.{domain}" @@ -725,6 +725,9 @@ class UnitOfPower(StrEnum): WATT = "W" KILO_WATT = "kW" + MEGA_WATT = "MW" + GIGA_WATT = "GW" + TERA_WATT = "TW" BTU_PER_HOUR = "BTU/h" @@ -770,6 +773,8 @@ class UnitOfEnergy(StrEnum): WATT_HOUR = "Wh" KILO_WATT_HOUR = "kWh" MEGA_WATT_HOUR = "MWh" + GIGA_WATT_HOUR = "GWh" + TERA_WATT_HOUR = "TWh" CALORIE = "cal" KILO_CALORIE = "kcal" MEGA_CALORIE = "Mcal" @@ -817,6 +822,7 @@ _DEPRECATED_ELECTRIC_CURRENT_AMPERE: Final = DeprecatedConstantEnum( class UnitOfElectricPotential(StrEnum): """Electric potential units.""" + MICROVOLT = "µV" MILLIVOLT = "mV" VOLT = "V" @@ -1160,6 +1166,7 @@ class UnitOfVolumeFlowRate(StrEnum): CUBIC_FEET_PER_MINUTE = "ft³/min" LITERS_PER_MINUTE = "L/min" GALLONS_PER_MINUTE = "gal/min" + MILLILITERS_PER_SECOND = "mL/s" _DEPRECATED_VOLUME_FLOW_RATE_CUBIC_METERS_PER_HOUR: Final = DeprecatedConstantEnum( @@ -1173,8 +1180,27 @@ _DEPRECATED_VOLUME_FLOW_RATE_CUBIC_FEET_PER_MINUTE: Final = DeprecatedConstantEn ) """Deprecated: please use UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE""" -# Area units -AREA_SQUARE_METERS: Final = "m²" + +class UnitOfArea(StrEnum): + """Area units.""" + + SQUARE_METERS = "m²" + SQUARE_CENTIMETERS = "cm²" + SQUARE_KILOMETERS = "km²" + SQUARE_MILLIMETERS = "mm²" + SQUARE_INCHES = "in²" + SQUARE_FEET = "ft²" + SQUARE_YARDS = "yd²" + SQUARE_MILES = "mi²" + ACRES = "ac" + HECTARES = "ha" + + +_DEPRECATED_AREA_SQUARE_METERS: Final = DeprecatedConstantEnum( + UnitOfArea.SQUARE_METERS, + "2025.12", +) +"""Deprecated: please use UnitOfArea.SQUARE_METERS""" # Mass units @@ -1353,6 +1379,13 @@ CONCENTRATION_PARTS_PER_MILLION: Final = "ppm" CONCENTRATION_PARTS_PER_BILLION: Final = "ppb" +class UnitOfBloodGlucoseConcentration(StrEnum): + """Blood glucose concentration units.""" + + MILLIGRAMS_PER_DECILITER = "mg/dL" + MILLIMOLE_PER_LITER = "mmol/L" + + # Speed units class UnitOfSpeed(StrEnum): """Speed units.""" @@ -1691,6 +1724,7 @@ RESTART_EXIT_CODE: Final = 100 UNIT_NOT_RECOGNIZED_TEMPLATE: Final = "{} is not a recognized {} unit." 
LENGTH: Final = "length" +AREA: Final = "area" MASS: Final = "mass" PRESSURE: Final = "pressure" VOLUME: Final = "volume" diff --git a/homeassistant/core.py b/homeassistant/core.py index ab852056353..f4c819c1262 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -656,12 +656,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_add_job`, which is deprecated and will be removed in Home " - "Assistant 2025.4; Please review " + frame.report_usage( + "calls `async_add_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.4", ) if target is None: @@ -712,12 +712,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_add_hass_job`, which is deprecated and will be removed in Home " - "Assistant 2025.5; Please review " + frame.report_usage( + "calls `async_add_hass_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/04/07/deprecate_add_hass_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.5", ) return self._async_add_hass_job(hassjob, *args, background=background) @@ -986,12 +986,12 @@ class HomeAssistant: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_run_job`, which is deprecated and will be removed in Home " - "Assistant 2025.4; Please review " + frame.report_usage( + "calls `async_run_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" " for replacement options", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.4", ) if asyncio.iscoroutine(target): @@ -1635,10 +1635,10 @@ class EventBus: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_listen` with run_immediately, which is" - " deprecated and will be removed in Home Assistant 2025.5", - error_if_core=False, + frame.report_usage( + "calls `async_listen` with run_immediately", + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.5", ) if event_filter is not None and not is_callback_check_partial(event_filter): @@ -1705,10 +1705,10 @@ class EventBus: # late import to avoid circular imports from .helpers import frame # pylint: disable=import-outside-toplevel - frame.report( - "calls `async_listen_once` with run_immediately, which is " - "deprecated and will be removed in Home Assistant 2025.5", - error_if_core=False, + frame.report_usage( + "calls `async_listen_once` with run_immediately", + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.5", ) one_time_listener: _OneTimeListener[_DataT] = _OneTimeListener( diff --git a/homeassistant/core_config.py b/homeassistant/core_config.py index 25f745f110c..430a882ecb9 100644 --- a/homeassistant/core_config.py +++ b/homeassistant/core_config.py @@ -60,7 +60,7 @@ from .core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from .generated.currencies import HISTORIC_CURRENCIES from .helpers import config_validation as cv, issue_registry as ir 
from .helpers.entity_values import EntityValues -from .helpers.frame import report +from .helpers.frame import ReportBehavior, report_usage from .helpers.storage import Store from .helpers.typing import UNDEFINED, UndefinedType from .util import dt as dt_util, location @@ -695,11 +695,11 @@ class Config: It will be removed in Home Assistant 2025.6. """ - report( - "set the time zone using set_time_zone instead of async_set_time_zone" - " which will stop working in Home Assistant 2025.6", - error_if_core=True, - error_if_integration=True, + report_usage( + "sets the time zone using set_time_zone instead of async_set_time_zone", + core_integration_behavior=ReportBehavior.ERROR, + custom_integration_behavior=ReportBehavior.ERROR, + breaks_in_ha_version="2025.6", ) if time_zone := dt_util.get_time_zone(time_zone_str): self.time_zone = time_zone_str diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index 1fb6439a8c4..338b5f3992f 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -26,7 +26,7 @@ from .helpers.deprecation import ( check_if_deprecated_constant, dir_with_deprecated_constants, ) -from .helpers.frame import report +from .helpers.frame import ReportBehavior, report_usage from .loader import async_suggest_report_issue from .util import uuid as uuid_util @@ -155,7 +155,7 @@ class FlowResult(TypedDict, Generic[_FlowContextT, _HandlerT], total=False): context: _FlowContextT data_schema: vol.Schema | None data: Mapping[str, Any] - description_placeholders: Mapping[str, str | None] | None + description_placeholders: Mapping[str, str] | None description: str | None errors: dict[str, str] | None extra: str @@ -530,12 +530,10 @@ class FlowManager(abc.ABC, Generic[_FlowContextT, _FlowResultT, _HandlerT]): if not isinstance(result["type"], FlowResultType): result["type"] = FlowResultType(result["type"]) # type: ignore[unreachable] - report( - ( - "does not use FlowResultType enum for data entry flow result type. 
" - "This is deprecated and will stop working in Home Assistant 2025.1" - ), - error_if_core=False, + report_usage( + "does not use FlowResultType enum for data entry flow result type", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.1", ) if ( @@ -705,7 +703,7 @@ class FlowHandler(Generic[_FlowContextT, _FlowResultT, _HandlerT]): step_id: str | None = None, data_schema: vol.Schema | None = None, errors: dict[str, str] | None = None, - description_placeholders: Mapping[str, str | None] | None = None, + description_placeholders: Mapping[str, str] | None = None, last_step: bool | None = None, preview: str | None = None, ) -> _FlowResultT: diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index c4612898cb2..a105efc2685 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -8,6 +8,26 @@ from __future__ import annotations from typing import Final BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ + { + "domain": "acaia", + "manufacturer_id": 16962, + }, + { + "domain": "acaia", + "local_name": "ACAIA*", + }, + { + "domain": "acaia", + "local_name": "PYXIS-*", + }, + { + "domain": "acaia", + "local_name": "LUNAR-*", + }, + { + "domain": "acaia", + "local_name": "PROCHBT001", + }, { "domain": "airthings_ble", "manufacturer_id": 820, diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index e1694f8bc54..ffe61b915c6 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -24,6 +24,7 @@ FLOWS = { ], "integration": [ "abode", + "acaia", "accuweather", "acmeda", "adax", @@ -384,12 +385,14 @@ FLOWS = { "mpd", "mqtt", "mullvad", + "music_assistant", "mutesync", "mysensors", "mystrom", "myuplink", "nam", "nanoleaf", + "nasweb", "neato", "nest", "netatmo", @@ -406,6 +409,7 @@ FLOWS = { "nina", "nmap_tracker", "nobo_hub", + "nordpool", "notion", "nuheat", "nuki", @@ -534,6 +538,7 @@ FLOWS = { "simplefin", "simplepush", "simplisafe", + "sky_remote", "skybell", "slack", "sleepiq", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index 7dd13473d31..e37fb2332b1 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -236,6 +236,10 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "guardian*", "macaddress": "30AEA4*", }, + { + "domain": "homewizard", + "registered_devices": True, + }, { "domain": "hunterdouglas_powerview", "registered_devices": True, @@ -276,6 +280,22 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "polisy*", "macaddress": "000DB9*", }, + { + "domain": "lamarzocco", + "registered_devices": True, + }, + { + "domain": "lamarzocco", + "hostname": "gs[0-9][0-9][0-9][0-9][0-9][0-9]", + }, + { + "domain": "lamarzocco", + "hostname": "lm[0-9][0-9][0-9][0-9][0-9][0-9]", + }, + { + "domain": "lamarzocco", + "hostname": "mr[0-9][0-9][0-9][0-9][0-9][0-9]", + }, { "domain": "lametric", "registered_devices": True, @@ -367,6 +387,15 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "hostname": "gateway*", "macaddress": "F8811A*", }, + { + "domain": "palazzetti", + "hostname": "connbox*", + "macaddress": "40F3857*", + }, + { + "domain": "palazzetti", + "registered_devices": True, + }, { "domain": "powerwall", "hostname": "1118431-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 3ed09c6fb9f..8238a09072b 100644 --- a/homeassistant/generated/integrations.json +++ 
b/homeassistant/generated/integrations.json @@ -9,7 +9,14 @@ "name": "Abode", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_push" + "iot_class": "cloud_push", + "single_config_entry": true + }, + "acaia": { + "name": "Acaia", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_push" }, "accuweather": { "name": "AccuWeather", @@ -865,7 +872,8 @@ "name": "Canary", "integration_type": "hub", "config_flow": true, - "iot_class": "cloud_polling" + "iot_class": "cloud_polling", + "single_config_entry": true }, "ccm15": { "name": "Midea ccm15 AC Controller", @@ -1050,7 +1058,8 @@ "cpuspeed": { "integration_type": "device", "config_flow": true, - "iot_class": "local_push" + "iot_class": "local_push", + "single_config_entry": true }, "cribl": { "name": "Cribl", @@ -3944,6 +3953,12 @@ "iot_class": "cloud_polling", "single_config_entry": true }, + "music_assistant": { + "name": "Music Assistant", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push" + }, "mutesync": { "name": "mutesync", "integration_type": "hub", @@ -4010,6 +4025,12 @@ "config_flow": true, "iot_class": "local_push" }, + "nasweb": { + "name": "NASweb", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_push" + }, "neato": { "name": "Neato Botvac", "integration_type": "hub", @@ -4175,6 +4196,13 @@ "config_flow": true, "iot_class": "local_push" }, + "nordpool": { + "name": "Nord Pool", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "single_config_entry": true + }, "norway_air": { "name": "Om Luftkvalitet i Norge (Norway Air)", "integration_type": "hub", @@ -5589,11 +5617,22 @@ "config_flow": false, "iot_class": "local_push" }, - "sky_hub": { - "name": "Sky Hub", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" + "sky": { + "name": "Sky", + "integrations": { + "sky_hub": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling", + "name": "Sky Hub" + }, + "sky_remote": { + "integration_type": "device", + "config_flow": true, + "iot_class": "assumed_state", + "name": "Sky Remote Control" + } + } }, "skybeacon": { "name": "Skybeacon", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index eb3c1b3a105..5f7161a8245 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -639,6 +639,11 @@ ZEROCONF = { }, }, ], + "_mass._tcp.local.": [ + { + "domain": "music_assistant", + }, + ], "_matter._tcp.local.": [ { "domain": "matter", @@ -867,6 +872,12 @@ ZEROCONF = { "name": "*zigate*", }, ], + "_zigbee-coordinator._tcp.local.": [ + { + "domain": "zha", + "name": "*", + }, + ], "_zigstar_gw._tcp.local.": [ { "domain": "zha", diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index 2f4c1980468..f01ae325875 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -44,11 +44,13 @@ SERVER_SOFTWARE = ( f"aiohttp/{aiohttp.__version__} Python/{sys.version_info[0]}.{sys.version_info[1]}" ) -ENABLE_CLEANUP_CLOSED = not (3, 11, 1) <= sys.version_info < (3, 11, 4) -# Enabling cleanup closed on python 3.11.1+ leaks memory relatively quickly -# see https://github.com/aio-libs/aiohttp/issues/7252 -# aiohttp interacts poorly with https://github.com/python/cpython/pull/98540 -# The issue was fixed in 3.11.4 via https://github.com/python/cpython/pull/104485 +ENABLE_CLEANUP_CLOSED = (3, 13, 0) <= 
sys.version_info < ( + 3, + 13, + 1, +) or sys.version_info < (3, 12, 7) +# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960 +# which first appeared in Python 3.12.7 and 3.13.1 WARN_CLOSE_MSG = "closes the Home Assistant aiohttp session" diff --git a/homeassistant/helpers/condition.py b/homeassistant/helpers/condition.py index 86965f86d40..5952e28a1eb 100644 --- a/homeassistant/helpers/condition.py +++ b/homeassistant/helpers/condition.py @@ -821,9 +821,15 @@ def time( after_entity.attributes.get("minute", 59), after_entity.attributes.get("second", 59), ) - elif after_entity.attributes.get( - ATTR_DEVICE_CLASS - ) == SensorDeviceClass.TIMESTAMP and after_entity.state not in ( + elif after_entity.domain == "time" and after_entity.state not in ( + STATE_UNAVAILABLE, + STATE_UNKNOWN, + ): + after = datetime.strptime(after_entity.state, "%H:%M:%S").time() + elif ( + after_entity.attributes.get(ATTR_DEVICE_CLASS) + == SensorDeviceClass.TIMESTAMP + ) and after_entity.state not in ( STATE_UNAVAILABLE, STATE_UNKNOWN, ): @@ -845,9 +851,15 @@ def time( before_entity.attributes.get("minute", 59), before_entity.attributes.get("second", 59), ) - elif before_entity.attributes.get( - ATTR_DEVICE_CLASS - ) == SensorDeviceClass.TIMESTAMP and before_entity.state not in ( + elif before_entity.domain == "time": + try: + before = datetime.strptime(before_entity.state, "%H:%M:%S").time() + except ValueError: + return False + elif ( + before_entity.attributes.get(ATTR_DEVICE_CLASS) + == SensorDeviceClass.TIMESTAMP + ) and before_entity.state not in ( STATE_UNAVAILABLE, STATE_UNKNOWN, ): diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 81ac10f86cc..3681e941eee 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -719,14 +719,14 @@ def template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value should be a string") if not (hass := _async_get_hass_or_none()): # pylint: disable-next=import-outside-toplevel - from .frame import report + from .frame import ReportBehavior, report_usage - report( + report_usage( ( "validates schema outside the event loop, " "which will stop working in HA Core 2025.10" ), - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) template_value = template_helper.Template(str(value), hass) @@ -748,14 +748,14 @@ def dynamic_template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value does not contain a dynamic template") if not (hass := _async_get_hass_or_none()): # pylint: disable-next=import-outside-toplevel - from .frame import report + from .frame import ReportBehavior, report_usage - report( + report_usage( ( "validates schema outside the event loop, " "which will stop working in HA Core 2025.10" ), - error_if_core=False, + core_behavior=ReportBehavior.LOG, ) template_value = template_helper.Template(str(value), hass) @@ -1574,10 +1574,10 @@ TIME_CONDITION_SCHEMA = vol.All( **CONDITION_BASE_SCHEMA, vol.Required(CONF_CONDITION): "time", vol.Optional("before"): vol.Any( - time, vol.All(str, entity_domain(["input_datetime", "sensor"])) + time, vol.All(str, entity_domain(["input_datetime", "time", "sensor"])) ), vol.Optional("after"): vol.Any( - time, vol.All(str, entity_domain(["input_datetime", "sensor"])) + time, vol.All(str, entity_domain(["input_datetime", "time", "sensor"])) ), vol.Optional("weekday"): weekdays, } diff --git a/homeassistant/helpers/device_registry.py 
b/homeassistant/helpers/device_registry.py index faf4257577d..0e56adc7377 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -38,7 +38,6 @@ from .deprecation import ( check_if_deprecated_constant, dir_with_deprecated_constants, ) -from .frame import report from .json import JSON_DUMP, find_paths_unserializable_data, json_bytes, json_fragment from .registry import BaseRegistry, BaseRegistryItems, RegistryIndexType from .singleton import singleton @@ -827,17 +826,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): else: via_device_id = UNDEFINED - if isinstance(entry_type, str) and not isinstance(entry_type, DeviceEntryType): - report( # type: ignore[unreachable] - ( - "uses str for device registry entry_type. This is deprecated and" - " will stop working in Home Assistant 2022.3, it should be updated" - " to use DeviceEntryType instead" - ), - error_if_core=False, - ) - entry_type = DeviceEntryType(entry_type) - device = self.async_update_device( device.id, allow_collisions=True, @@ -924,19 +912,6 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): "Cannot define both merge_identifiers and new_identifiers" ) - if isinstance(disabled_by, str) and not isinstance( - disabled_by, DeviceEntryDisabler - ): - report( # type: ignore[unreachable] - ( - "uses str for device registry disabled_by. This is deprecated and" - " will stop working in Home Assistant 2022.3, it should be updated" - " to use DeviceEntryDisabler instead" - ), - error_if_core=False, - ) - disabled_by = DeviceEntryDisabler(disabled_by) - if ( suggested_area is not None and suggested_area is not UNDEFINED diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index 1f77dd3f95c..19076c4edc0 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -647,6 +647,22 @@ class Entity( f".{self.translation_key}.name" ) + @cached_property + def _unit_of_measurement_translation_key(self) -> str | None: + """Return translation key for unit of measurement.""" + if self.translation_key is None: + return None + if self.platform is None: + raise ValueError( + f"Entity {type(self)} cannot have a translation key for " + "unit of measurement before being added to the entity platform" + ) + platform = self.platform + return ( + f"component.{platform.platform_name}.entity.{platform.domain}" + f".{self.translation_key}.unit_of_measurement" + ) + def _substitute_name_placeholders(self, name: str) -> str: """Substitute placeholders in entity name.""" try: diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index 62eed213b2a..0d7614c569c 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -145,6 +145,7 @@ class EntityPlatform: self.platform_translations: dict[str, str] = {} self.object_id_component_translations: dict[str, str] = {} self.object_id_platform_translations: dict[str, str] = {} + self.default_language_platform_translations: dict[str, str] = {} self._tasks: list[asyncio.Task[None]] = [] # Stop tracking tasks after setup is completed self._setup_complete = False @@ -480,6 +481,14 @@ class EntityPlatform: self.object_id_platform_translations = await self._async_get_translations( object_id_language, "entity", self.platform_name ) + if config_language == languages.DEFAULT_LANGUAGE: + self.default_language_platform_translations = self.platform_translations + else: + self.default_language_platform_translations 
= ( + await self._async_get_translations( + languages.DEFAULT_LANGUAGE, "entity", self.platform_name + ) + ) def _schedule_add_entities( self, new_entities: Iterable[Entity], update_before_add: bool = False diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 02ea8103192..578132f358f 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -224,10 +224,10 @@ def async_track_state_change( Must be run within the event loop. """ - frame.report( + frame.report_usage( "calls `async_track_state_change` instead of `async_track_state_change_event`" " which is deprecated and will be removed in Home Assistant 2025.5", - error_if_core=False, + core_behavior=frame.ReportBehavior.LOG, ) if from_state is not None: @@ -996,15 +996,10 @@ class TrackTemplateResultInfo: if track_template_.template.hass: continue - # pylint: disable-next=import-outside-toplevel - from .frame import report - - report( - ( - "calls async_track_template_result with template without hass, " - "which will stop working in HA Core 2025.10" - ), - error_if_core=False, + frame.report_usage( + "calls async_track_template_result with template without hass", + core_behavior=frame.ReportBehavior.LOG, + breaks_in_ha_version="2025.10", ) track_template_.template.hass = hass diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index fd7e014b2ff..6d03ae4ffd2 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -5,6 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Callable from dataclasses import dataclass +import enum import functools import linecache import logging @@ -14,9 +15,13 @@ from typing import Any, cast from propcache import cached_property -from homeassistant.core import async_get_hass_or_none +from homeassistant.core import HomeAssistant, async_get_hass_or_none from homeassistant.exceptions import HomeAssistantError -from homeassistant.loader import async_suggest_report_issue +from homeassistant.loader import ( + Integration, + async_get_issue_integration, + async_suggest_report_issue, +) _LOGGER = logging.getLogger(__name__) @@ -144,33 +149,163 @@ def report( If error_if_integration is True, raise instead of log if an integration is found when unwinding the stack frame. 
""" + core_behavior = ReportBehavior.ERROR if error_if_core else ReportBehavior.LOG + core_integration_behavior = ( + ReportBehavior.ERROR if error_if_integration else ReportBehavior.LOG + ) + custom_integration_behavior = core_integration_behavior + + if log_custom_component_only: + if core_behavior is ReportBehavior.LOG: + core_behavior = ReportBehavior.IGNORE + if core_integration_behavior is ReportBehavior.LOG: + core_integration_behavior = ReportBehavior.IGNORE + + report_usage( + what, + core_behavior=core_behavior, + core_integration_behavior=core_integration_behavior, + custom_integration_behavior=custom_integration_behavior, + exclude_integrations=exclude_integrations, + level=level, + ) + + +class ReportBehavior(enum.Enum): + """Enum for behavior on code usage.""" + + IGNORE = enum.auto() + """Ignore the code usage.""" + LOG = enum.auto() + """Log the code usage.""" + ERROR = enum.auto() + """Raise an error on code usage.""" + + +def report_usage( + what: str, + *, + breaks_in_ha_version: str | None = None, + core_behavior: ReportBehavior = ReportBehavior.ERROR, + core_integration_behavior: ReportBehavior = ReportBehavior.LOG, + custom_integration_behavior: ReportBehavior = ReportBehavior.LOG, + exclude_integrations: set[str] | None = None, + integration_domain: str | None = None, + level: int = logging.WARNING, +) -> None: + """Report incorrect code usage. + + :param what: will be wrapped with "Detected that integration 'integration' {what}. + Please create a bug report at https://..." + :param breaks_in_ha_version: if set, the report will be adjusted to specify the + breaking version + :param exclude_integrations: skip specified integration when reviewing the stack. + If no integration is found, the core behavior will be applied + :param integration_domain: fallback for identifying the integration if the + frame is not found + """ try: integration_frame = get_integration_frame( exclude_integrations=exclude_integrations ) except MissingIntegrationFrame as err: - msg = f"Detected code that {what}. Please report this issue." - if error_if_core: + if integration := async_get_issue_integration( + hass := async_get_hass_or_none(), integration_domain + ): + _report_integration_domain( + hass, + what, + breaks_in_ha_version, + integration, + core_integration_behavior, + custom_integration_behavior, + level, + ) + return + msg = f"Detected code that {what}. Please report this issue" + if core_behavior is ReportBehavior.ERROR: raise RuntimeError(msg) from err - if not log_custom_component_only: + if core_behavior is ReportBehavior.LOG: + if breaks_in_ha_version: + msg = ( + f"Detected code that {what}. 
This will stop working in Home " + f"Assistant {breaks_in_ha_version}, please report this issue" + ) _LOGGER.warning(msg, stack_info=True) return - if ( - error_if_integration - or not log_custom_component_only - or integration_frame.custom_integration - ): - _report_integration(what, integration_frame, level, error_if_integration) + integration_behavior = core_integration_behavior + if integration_frame.custom_integration: + integration_behavior = custom_integration_behavior + + if integration_behavior is not ReportBehavior.IGNORE: + _report_integration_frame( + what, + breaks_in_ha_version, + integration_frame, + level, + integration_behavior is ReportBehavior.ERROR, + ) -def _report_integration( +def _report_integration_domain( + hass: HomeAssistant | None, what: str, + breaks_in_ha_version: str | None, + integration: Integration, + core_integration_behavior: ReportBehavior, + custom_integration_behavior: ReportBehavior, + level: int, +) -> None: + """Report incorrect usage in an integration (identified via domain). + + Async friendly. + """ + integration_behavior = core_integration_behavior + if not integration.is_built_in: + integration_behavior = custom_integration_behavior + + if integration_behavior is ReportBehavior.IGNORE: + return + + # Keep track of integrations already reported to prevent flooding + key = f"{integration.domain}:{what}" + if ( + integration_behavior is not ReportBehavior.ERROR + and key in _REPORTED_INTEGRATIONS + ): + return + _REPORTED_INTEGRATIONS.add(key) + + report_issue = async_suggest_report_issue(hass, integration=integration) + integration_type = "" if integration.is_built_in else "custom " + _LOGGER.log( + level, + "Detected that %sintegration '%s' %s. %s %s", + integration_type, + integration.domain, + what, + f"This will stop working in Home Assistant {breaks_in_ha_version}, please" + if breaks_in_ha_version + else "Please", + report_issue, + ) + + if integration_behavior is ReportBehavior.ERROR: + raise RuntimeError( + f"Detected that {integration_type}integration " + f"'{integration.domain}' {what}. Please {report_issue}" + ) + + +def _report_integration_frame( + what: str, + breaks_in_ha_version: str | None, integration_frame: IntegrationFrame, level: int = logging.WARNING, error: bool = False, ) -> None: - """Report incorrect usage in an integration. + """Report incorrect usage in an integration (identified via frame). Async friendly. """ @@ -188,13 +323,16 @@ def _report_integration( integration_type = "custom " if integration_frame.custom_integration else "" _LOGGER.log( level, - "Detected that %sintegration '%s' %s at %s, line %s: %s, please %s", + "Detected that %sintegration '%s' %s at %s, line %s: %s. %s %s", integration_type, integration_frame.integration, what, integration_frame.relative_filename, integration_frame.line_number, integration_frame.line, + f"This will stop working in Home Assistant {breaks_in_ha_version}, please" + if breaks_in_ha_version + else "Please", report_issue, ) if not error: @@ -204,7 +342,7 @@ def _report_integration( f"'{integration_frame.integration}' {what} at " f"{integration_frame.relative_filename}, line " f"{integration_frame.line_number}: {integration_frame.line}. " - f"Please {report_issue}." 
+ f"Please {report_issue}" ) diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index b38f769b302..468539f5a9d 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -49,6 +49,7 @@ INTENT_NEVERMIND = "HassNevermind" INTENT_SET_POSITION = "HassSetPosition" INTENT_START_TIMER = "HassStartTimer" INTENT_CANCEL_TIMER = "HassCancelTimer" +INTENT_CANCEL_ALL_TIMERS = "HassCancelAllTimers" INTENT_INCREASE_TIMER = "HassIncreaseTimer" INTENT_DECREASE_TIMER = "HassDecreaseTimer" INTENT_PAUSE_TIMER = "HassPauseTimer" diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index d322810b0ef..38d80d5649d 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -22,15 +22,13 @@ from homeassistant.components.conversation import ( from homeassistant.components.cover import INTENT_CLOSE_COVER, INTENT_OPEN_COVER from homeassistant.components.homeassistant import async_should_expose from homeassistant.components.intent import async_device_supports_timers -from homeassistant.components.script import ATTR_VARIABLES, DOMAIN as SCRIPT_DOMAIN +from homeassistant.components.script import DOMAIN as SCRIPT_DOMAIN from homeassistant.components.weather import INTENT_GET_WEATHER from homeassistant.const import ( ATTR_DOMAIN, - ATTR_ENTITY_ID, ATTR_SERVICE, EVENT_HOMEASSISTANT_CLOSE, EVENT_SERVICE_REMOVED, - SERVICE_TURN_ON, ) from homeassistant.core import Context, Event, HomeAssistant, callback, split_entity_id from homeassistant.exceptions import HomeAssistantError @@ -416,9 +414,7 @@ class AssistAPI(API): ): continue - script_tool = ScriptTool(self.hass, state.entity_id) - if script_tool.parameters.schema: - tools.append(script_tool) + tools.append(ScriptTool(self.hass, state.entity_id)) return tools @@ -449,17 +445,13 @@ def _get_exposed_entities( entities = {} for state in hass.states.async_all(): - if not async_should_expose(hass, assistant, state.entity_id): + if ( + not async_should_expose(hass, assistant, state.entity_id) + or state.domain == SCRIPT_DOMAIN + ): continue description: str | None = None - if state.domain == SCRIPT_DOMAIN: - description, parameters = _get_cached_script_parameters( - hass, state.entity_id - ) - if parameters.schema: # Only list scripts without input fields here - continue - entity_entry = entity_registry.async_get(state.entity_id) names = [state.name] area_names = [] @@ -702,10 +694,9 @@ class ScriptTool(Tool): script_entity_id: str, ) -> None: """Init the class.""" - self.name = split_entity_id(script_entity_id)[1] + self._object_id = self.name = split_entity_id(script_entity_id)[1] if self.name[0].isdigit(): self.name = "_" + self.name - self._entity_id = script_entity_id self.description, self.parameters = _get_cached_script_parameters( hass, script_entity_id @@ -745,14 +736,13 @@ class ScriptTool(Tool): floor = list(intent.find_floors(floor, floor_reg))[0].floor_id tool_input.tool_args[field] = floor - await hass.services.async_call( + result = await hass.services.async_call( SCRIPT_DOMAIN, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: self._entity_id, - ATTR_VARIABLES: tool_input.tool_args, - }, + self._object_id, + tool_input.tool_args, context=llm_context.context, + blocking=True, + return_response=True, ) - return {"success": True} + return {"success": True, "result": result} diff --git a/homeassistant/helpers/restore_state.py b/homeassistant/helpers/restore_state.py index a2b4b3a9b9a..fd1f84a85ff 100644 --- a/homeassistant/helpers/restore_state.py +++ 
b/homeassistant/helpers/restore_state.py @@ -17,7 +17,6 @@ from homeassistant.util.json import json_loads from . import start from .entity import Entity from .event import async_track_time_interval -from .frame import report from .json import JSONEncoder from .singleton import singleton from .storage import Store @@ -116,21 +115,6 @@ class RestoreStateData: """Dump states now.""" await async_get(hass).async_dump_states() - @classmethod - async def async_get_instance(cls, hass: HomeAssistant) -> RestoreStateData: - """Return the instance of this class.""" - # Nothing should actually be calling this anymore, but we'll keep it - # around for a while to avoid breaking custom components. - # - # In fact they should not be accessing this at all. - report( - "restore_state.RestoreStateData.async_get_instance is deprecated, " - "and not intended to be called by custom components; Please" - "refactor your code to use RestoreEntity instead;" - " restore_state.async_get(hass) can be used in the meantime", - ) - return async_get(hass) - def __init__(self, hass: HomeAssistant) -> None: """Initialize the restore state data class.""" self.hass: HomeAssistant = hass diff --git a/homeassistant/helpers/schema_config_entry_flow.py b/homeassistant/helpers/schema_config_entry_flow.py index 7463c9945b2..af8c4c6402d 100644 --- a/homeassistant/helpers/schema_config_entry_flow.py +++ b/homeassistant/helpers/schema_config_entry_flow.py @@ -16,7 +16,6 @@ from homeassistant.config_entries import ( ConfigFlow, ConfigFlowResult, OptionsFlow, - OptionsFlowWithConfigEntry, ) from homeassistant.core import HomeAssistant, callback, split_entity_id from homeassistant.data_entry_flow import UnknownHandler @@ -403,7 +402,7 @@ class SchemaConfigFlowHandler(ConfigFlow, ABC): ) -class SchemaOptionsFlowHandler(OptionsFlowWithConfigEntry): +class SchemaOptionsFlowHandler(OptionsFlow): """Handle a schema based options flow.""" def __init__( @@ -422,10 +421,8 @@ class SchemaOptionsFlowHandler(OptionsFlowWithConfigEntry): options, which is the union of stored options and user input from the options flow steps. 
""" - super().__init__(config_entry) - self._common_handler = SchemaCommonFlowHandler( - self, options_flow, self._options - ) + self._options = copy.deepcopy(dict(config_entry.options)) + self._common_handler = SchemaCommonFlowHandler(self, options_flow, self.options) self._async_options_flow_finished = async_options_flow_finished for step in options_flow: @@ -438,6 +435,11 @@ class SchemaOptionsFlowHandler(OptionsFlowWithConfigEntry): if async_setup_preview: setattr(self, "async_setup_preview", async_setup_preview) + @property + def options(self) -> dict[str, Any]: + """Return a mutable copy of the config entry options.""" + return self._options + @staticmethod def _async_step( step_id: str, diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index 86dcd858c1b..a67ef60c799 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -473,13 +473,13 @@ class _ScriptRun: script_execution_set("aborted") except _StopScript as err: script_execution_set("finished", err.response) - response = err.response # Let the _StopScript bubble up if this is a sub-script if not self._script.top_level: - # We already consumed the response, do not pass it on - err.response = None raise + + response = err.response + except Exception: script_execution_set("error") raise diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 33e8f3d3d6e..31b2e8e8ac8 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -1277,14 +1277,12 @@ def async_register_entity_service( schema = cv.make_entity_service_schema(schema) elif not cv.is_entity_service_schema(schema): # pylint: disable-next=import-outside-toplevel - from .frame import report + from .frame import ReportBehavior, report_usage - report( - ( - "registers an entity service with a non entity service schema " - "which will stop working in HA Core 2025.9" - ), - error_if_core=False, + report_usage( + "registers an entity service with a non entity service schema", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.9", ) service_func: str | HassJob[..., Any] diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 753464c35d5..57587dc21d6 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -515,18 +515,16 @@ class Template: will be non optional in Home Assistant Core 2025.10. """ # pylint: disable-next=import-outside-toplevel - from .frame import report + from .frame import ReportBehavior, report_usage if not isinstance(template, str): raise TypeError("Expected template to be a string") if not hass: - report( - ( - "creates a template object without passing hass, " - "which will stop working in HA Core 2025.10" - ), - error_if_core=False, + report_usage( + "creates a template object without passing hass", + core_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.10", ) self.template: str = template.strip() diff --git a/homeassistant/helpers/trigger_template_entity.py b/homeassistant/helpers/trigger_template_entity.py index 7f8ad41d7bb..1486e33d6fa 100644 --- a/homeassistant/helpers/trigger_template_entity.py +++ b/homeassistant/helpers/trigger_template_entity.py @@ -30,7 +30,7 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from . 
import config_validation as cv from .entity import Entity -from .template import render_complex +from .template import TemplateStateFromEntityId, render_complex from .typing import ConfigType CONF_AVAILABILITY = "availability" @@ -231,16 +231,14 @@ class ManualTriggerEntity(TriggerBaseEntity): Ex: self._process_manual_data(payload) """ - self.async_write_ha_state() - this = None - if state := self.hass.states.get(self.entity_id): - this = state.as_dict() - run_variables: dict[str, Any] = {"value": value} # Silently try if variable is a json and store result in `value_json` if it is. with contextlib.suppress(*JSON_DECODE_EXCEPTIONS): run_variables["value_json"] = json_loads(run_variables["value"]) - variables = {"this": this, **(run_variables or {})} + variables = { + "this": TemplateStateFromEntityId(self.hass, self.entity_id), + **(run_variables or {}), + } self._render_templates(variables) diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index f5c2a2a1288..6cc4584935e 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -24,12 +24,13 @@ from homeassistant.exceptions import ( ConfigEntryAuthFailed, ConfigEntryError, ConfigEntryNotReady, + HomeAssistantError, ) from homeassistant.util.dt import utcnow from . import entity, event from .debounce import Debouncer -from .frame import report +from .frame import report_usage from .typing import UNDEFINED, UndefinedType REQUEST_REFRESH_DEFAULT_COOLDOWN = 10 @@ -43,7 +44,7 @@ _DataUpdateCoordinatorT = TypeVar( ) -class UpdateFailed(Exception): +class UpdateFailed(HomeAssistantError): """Raised when an update has failed.""" @@ -286,24 +287,20 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): to ensure that multiple retries do not cause log spam. """ if self.config_entry is None: - report( + report_usage( "uses `async_config_entry_first_refresh`, which is only supported " - "for coordinators with a config entry and will stop working in " - "Home Assistant 2025.11", - error_if_core=True, - error_if_integration=False, + "for coordinators with a config entry", + breaks_in_ha_version="2025.11", ) elif ( self.config_entry.state is not config_entries.ConfigEntryState.SETUP_IN_PROGRESS ): - report( + report_usage( "uses `async_config_entry_first_refresh`, which is only supported " f"when entry state is {config_entries.ConfigEntryState.SETUP_IN_PROGRESS}, " - f"but it is in state {self.config_entry.state}, " - "This will stop working in Home Assistant 2025.11", - error_if_core=True, - error_if_integration=False, + f"but it is in state {self.config_entry.state}", + breaks_in_ha_version="2025.11", ) if await self.__wrap_async_setup(): await self._async_refresh( diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 221a2c7ce19..1fa9d0cd49d 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -830,6 +830,9 @@ class Integration: @cached_property def quality_scale(self) -> str | None: """Return Integration Quality Scale.""" + # Custom integrations default to "custom" quality scale. 
+ if not self.is_built_in or self.overwrites_built_in: + return "custom" return self.manifest.get("quality_scale") @cached_property @@ -1556,16 +1559,16 @@ class Components: raise ImportError(f"Unable to load {comp_name}") # Local import to avoid circular dependencies - from .helpers.frame import report # pylint: disable=import-outside-toplevel + # pylint: disable-next=import-outside-toplevel + from .helpers.frame import ReportBehavior, report_usage - report( - ( - f"accesses hass.components.{comp_name}." - " This is deprecated and will stop working in Home Assistant 2025.3, it" - f" should be updated to import functions used from {comp_name} directly" - ), - error_if_core=False, - log_custom_component_only=True, + report_usage( + f"accesses hass.components.{comp_name}, which" + f" should be updated to import functions used from {comp_name} directly", + core_behavior=ReportBehavior.IGNORE, + core_integration_behavior=ReportBehavior.IGNORE, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.3", ) wrapped = ModuleWrapper(self._hass, component) @@ -1585,16 +1588,18 @@ class Helpers: helper = importlib.import_module(f"homeassistant.helpers.{helper_name}") # Local import to avoid circular dependencies - from .helpers.frame import report # pylint: disable=import-outside-toplevel + # pylint: disable-next=import-outside-toplevel + from .helpers.frame import ReportBehavior, report_usage - report( + report_usage( ( - f"accesses hass.helpers.{helper_name}." - " This is deprecated and will stop working in Home Assistant 2025.5, it" + f"accesses hass.helpers.{helper_name}, which" f" should be updated to import functions used from {helper_name} directly" ), - error_if_core=False, - log_custom_component_only=True, + core_behavior=ReportBehavior.IGNORE, + core_integration_behavior=ReportBehavior.IGNORE, + custom_integration_behavior=ReportBehavior.LOG, + breaks_in_ha_version="2025.5", ) wrapped = ModuleWrapper(self._hass, helper) @@ -1681,6 +1686,29 @@ def is_component_module_loaded(hass: HomeAssistant, module: str) -> bool: return module in hass.data[DATA_COMPONENTS] +@callback +def async_get_issue_integration( + hass: HomeAssistant | None, + integration_domain: str | None, +) -> Integration | None: + """Return details of an integration for issue reporting.""" + integration: Integration | None = None + if not hass or not integration_domain: + # We are unable to get the integration + return None + + if (comps_or_future := hass.data.get(DATA_CUSTOM_COMPONENTS)) and not isinstance( + comps_or_future, asyncio.Future + ): + integration = comps_or_future.get(integration_domain) + + if not integration: + with suppress(IntegrationNotLoaded): + integration = async_get_loaded_integration(hass, integration_domain) + + return integration + + @callback def async_get_issue_tracker( hass: HomeAssistant | None, @@ -1694,20 +1722,11 @@ def async_get_issue_tracker( "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue" ) if not integration and not integration_domain and not module: - # If we know nothing about the entity, suggest opening an issue on HA core + # If we know nothing about the integration, suggest opening an issue on HA core return issue_tracker - if ( - not integration - and (hass and integration_domain) - and (comps_or_future := hass.data.get(DATA_CUSTOM_COMPONENTS)) - and not isinstance(comps_or_future, asyncio.Future) - ): - integration = comps_or_future.get(integration_domain) - - if not integration and (hass and integration_domain): - with 
suppress(IntegrationNotLoaded): - integration = async_get_loaded_integration(hass, integration_domain) + if not integration: + integration = async_get_issue_integration(hass, integration_domain) if integration and not integration.is_built_in: return integration.issue_tracker diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1f52c4c8b18..ed7e995408f 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,8 +4,8 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 aiohasupervisor==0.2.1 -aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.11 +aiohttp-fast-zlib==0.2.0 +aiohttp==3.11.9 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 @@ -13,6 +13,8 @@ async-interrupt==1.2.0 async-upnp-client==0.41.0 atomicwrites-homeassistant==1.4.1 attrs==24.2.0 +audioop-lts==0.2.1;python_version>='3.13' +av==13.1.0 awesomeversion==24.6.0 bcrypt==4.2.0 bleak-retry-connector==3.6.0 @@ -27,26 +29,25 @@ cryptography==43.0.1 dbus-fast==2.24.3 fnv-hash-fast==1.0.2 go2rtc-client==0.1.1 -ha-av==10.1.1 ha-ffmpeg==3.2.2 habluetooth==3.6.0 -hass-nabucasa==0.83.0 -hassil==1.7.4 +hass-nabucasa==0.85.0 +hassil==2.0.5 home-assistant-bluetooth==1.13.0 -home-assistant-frontend==20241106.2 -home-assistant-intents==2024.11.6 +home-assistant-frontend==20241127.4 +home-assistant-intents==2024.12.4 httpx==0.27.2 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.10 +orjson==3.10.12 packaging>=23.1 paho-mqtt==1.6.1 -Pillow==10.4.0 -propcache==0.2.0 +Pillow==11.0.0 +propcache==0.2.1 psutil-home-assistant==0.0.1 -PyJWT==2.9.0 +PyJWT==2.10.1 pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.2.1 @@ -57,17 +58,20 @@ PyTurboJPEG==1.7.5 pyudev==0.24.1 PyYAML==6.0.2 requests==2.32.3 -SQLAlchemy==2.0.31 +securetar==2024.11.0 +SQLAlchemy==2.0.36 +standard-aifc==3.13.0;python_version>='3.13' +standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.4.28 +uv==0.5.4 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -webrtc-models==0.2.0 -yarl==1.17.1 -zeroconf==0.136.0 +webrtc-models==0.3.0 +yarl==1.18.3 +zeroconf==0.136.2 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 @@ -80,9 +84,9 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.66.2 -grpcio-status==1.66.2 -grpcio-reflection==1.66.2 +grpcio==1.67.1 +grpcio-status==1.67.1 +grpcio-reflection==1.67.1 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -111,7 +115,8 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==1.26.4 +numpy==2.1.3 +pandas~=2.2.3 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -122,7 +127,7 @@ backoff>=2.0 # Required to avoid breaking (#101042). # v2 has breaking changes (#99218). -pydantic==1.10.18 +pydantic==1.10.19 # Required for Python 3.12.4 compatibility (#119223). 
mashumaro>=3.13.1 @@ -147,10 +152,12 @@ protobuf==5.28.3 # 2.1.18 is the first version that works with our wheel builder faust-cchardet>=2.1.18 -# websockets 11.0 is missing files in the source distribution -# which break wheel builds so we need at least 11.0.1 -# https://github.com/aaugustin/websockets/issues/1329 -websockets>=11.0.1 +# websockets 13.1 is the first version to fully support the new +# asyncio implementation. The legacy implementation is now +# deprecated as of websockets 14.0. +# https://websockets.readthedocs.io/en/13.0.1/howto/upgrade.html#missing-features +# https://websockets.readthedocs.io/en/stable/howto/upgrade.html +websockets>=13.1 # pysnmplib is no longer maintained and does not work with newer # python @@ -163,15 +170,12 @@ get-mac==1000000000.0.0 # We want to skip the binary wheels for the 'charset-normalizer' packages. # They are build with mypyc, but causes issues with our wheel builder. # In order to do so, we need to constrain the version. -charset-normalizer==3.2.0 +charset-normalizer==3.4.0 # dacite: Ensure we have a version that is able to handle type unions for # NAM, Brother, and GIOS. dacite>=1.7.0 -# Musle wheels for pandas 2.2.0 cannot be build for any architecture. -pandas==2.1.4 - # chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x chacha20poly1305-reuseable>=0.13.0 @@ -179,8 +183,8 @@ chacha20poly1305-reuseable>=0.13.0 # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 pycountry>=23.12.11 -# scapy<2.5.0 will not work with python3.12 -scapy>=2.5.0 +# scapy==2.6.0 causes CI failures due to a race condition +scapy>=2.6.1 # tuf isn't updated to deal with breaking changes in securesystemslib==1.0. # Only tuf>=4 includes a constraint to <1.0. @@ -193,3 +197,16 @@ tenacity!=8.4.0 # 5.0.0 breaks Timeout as a context manager # TypeError: 'Timeout' object does not support the context manager protocol async-timeout==4.0.3 + +# aiofiles keeps getting downgraded by custom components +# causing newer methods to not be available and breaking +# some integrations at startup +# https://github.com/home-assistant/core/issues/127529 +# https://github.com/home-assistant/core/issues/122508 +# https://github.com/home-assistant/core/issues/118004 +aiofiles>=24.1.0 + +# 0.22.0 causes CI failures on Python 3.13 +# python3 -X dev -m pytest tests/components/matrix +# python3 -X dev -m pytest tests/components/zha +rpds-py==0.21.0 diff --git a/homeassistant/runner.py b/homeassistant/runner.py index 102dbafe147..59775655854 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -3,10 +3,8 @@ from __future__ import annotations import asyncio -from asyncio import events import dataclasses import logging -import os import subprocess import threading from time import monotonic @@ -58,22 +56,6 @@ class RuntimeConfig: safe_mode: bool = False -def can_use_pidfd() -> bool: - """Check if pidfd_open is available. 
- - Back ported from cpython 3.12 - """ - if not hasattr(os, "pidfd_open"): - return False - try: - pid = os.getpid() - os.close(os.pidfd_open(pid, 0)) - except OSError: - # blocked by security policy like SECCOMP - return False - return True - - class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): """Event loop policy for Home Assistant.""" @@ -81,23 +63,6 @@ class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): """Init the event loop policy.""" super().__init__() self.debug = debug - self._watcher: asyncio.AbstractChildWatcher | None = None - - def _init_watcher(self) -> None: - """Initialize the watcher for child processes. - - Back ported from cpython 3.12 - """ - with events._lock: # type: ignore[attr-defined] # noqa: SLF001 - if self._watcher is None: # pragma: no branch - if can_use_pidfd(): - self._watcher = asyncio.PidfdChildWatcher() - else: - self._watcher = asyncio.ThreadedChildWatcher() - if threading.current_thread() is threading.main_thread(): - self._watcher.attach_loop( - self._local._loop # type: ignore[attr-defined] # noqa: SLF001 - ) @property def loop_name(self) -> str: diff --git a/homeassistant/util/async_.py b/homeassistant/util/async_.py index d010d8cb341..f8901d11114 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -39,7 +39,7 @@ def create_eager_task[_T]( # pylint: disable-next=import-outside-toplevel from homeassistant.helpers import frame - frame.report("attempted to create an asyncio task from a thread") + frame.report_usage("attempted to create an asyncio task from a thread") raise return Task(coro, loop=loop, name=name, eager_start=True) diff --git a/homeassistant/util/color.py b/homeassistant/util/color.py index 0745bc96dfb..18f8182650b 100644 --- a/homeassistant/util/color.py +++ b/homeassistant/util/color.py @@ -377,7 +377,7 @@ def color_hsv_to_RGB(iH: float, iS: float, iV: float) -> tuple[int, int, int]: Val is scaled 0-100 """ fRGB = colorsys.hsv_to_rgb(iH / 360, iS / 100, iV / 100) - return (int(fRGB[0] * 255), int(fRGB[1] * 255), int(fRGB[2] * 255)) + return (round(fRGB[0] * 255), round(fRGB[1] * 255), round(fRGB[2] * 255)) def color_hs_to_RGB(iH: float, iS: float) -> tuple[int, int, int]: diff --git a/homeassistant/util/json.py b/homeassistant/util/json.py index fa67f6b1dcc..968567ae0c9 100644 --- a/homeassistant/util/json.py +++ b/homeassistant/util/json.py @@ -30,32 +30,30 @@ class SerializationError(HomeAssistantError): """Error serializing the data to JSON.""" -def json_loads(__obj: bytes | bytearray | memoryview | str) -> JsonValueType: +def json_loads(obj: bytes | bytearray | memoryview | str, /) -> JsonValueType: """Parse JSON data. This adds a workaround for orjson not handling subclasses of str, https://github.com/ijl/orjson/issues/445. 
""" # Avoid isinstance overhead for the common case - if type(__obj) not in (bytes, bytearray, memoryview, str) and isinstance( - __obj, str - ): - return orjson.loads(str(__obj)) # type:ignore[no-any-return] - return orjson.loads(__obj) # type:ignore[no-any-return] + if type(obj) not in (bytes, bytearray, memoryview, str) and isinstance(obj, str): + return orjson.loads(str(obj)) # type:ignore[no-any-return] + return orjson.loads(obj) # type:ignore[no-any-return] -def json_loads_array(__obj: bytes | bytearray | memoryview | str) -> JsonArrayType: +def json_loads_array(obj: bytes | bytearray | memoryview | str, /) -> JsonArrayType: """Parse JSON data and ensure result is a list.""" - value: JsonValueType = json_loads(__obj) + value: JsonValueType = json_loads(obj) # Avoid isinstance overhead as we are not interested in list subclasses if type(value) is list: # noqa: E721 return value raise ValueError(f"Expected JSON to be parsed as a list got {type(value)}") -def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObjectType: +def json_loads_object(obj: bytes | bytearray | memoryview | str, /) -> JsonObjectType: """Parse JSON data and ensure result is a dictionary.""" - value: JsonValueType = json_loads(__obj) + value: JsonValueType = json_loads(obj) # Avoid isinstance overhead as we are not interested in dict subclasses if type(value) is dict: # noqa: E721 return value diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 6bc595bd487..3cffcb5768e 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -10,6 +10,8 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, UNIT_NOT_RECOGNIZED_TEMPLATE, + UnitOfArea, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -41,6 +43,19 @@ _MILE_TO_M = _YARD_TO_M * 1760 # 1760 yard = 1 mile (1609.344 m) _NAUTICAL_MILE_TO_M = 1852 # 1 nautical mile = 1852 m +# Area constants to square meters +_CM2_TO_M2 = _CM_TO_M**2 # 1 cm² = 0.0001 m² +_MM2_TO_M2 = _MM_TO_M**2 # 1 mm² = 0.000001 m² +_KM2_TO_M2 = _KM_TO_M**2 # 1 km² = 1,000,000 m² + +_IN2_TO_M2 = _IN_TO_M**2 # 1 in² = 0.00064516 m² +_FT2_TO_M2 = _FOOT_TO_M**2 # 1 ft² = 0.092903 m² +_YD2_TO_M2 = _YARD_TO_M**2 # 1 yd² = 0.836127 m² +_MI2_TO_M2 = _MILE_TO_M**2 # 1 mi² = 2,590,000 m² + +_ACRE_TO_M2 = 66 * 660 * _FT2_TO_M2 # 1 acre = 4,046.86 m² +_HECTARE_TO_M2 = 100 * 100 # 1 hectare = 10,000 m² + # Duration conversion constants _MIN_TO_SEC = 60 # 1 min = 60 seconds _HRS_TO_MINUTES = 60 # 1 hr = 60 minutes @@ -145,6 +160,25 @@ class DataRateConverter(BaseUnitConverter): VALID_UNITS = set(UnitOfDataRate) +class AreaConverter(BaseUnitConverter): + """Utility to convert area values.""" + + UNIT_CLASS = "area" + _UNIT_CONVERSION: dict[str | None, float] = { + UnitOfArea.SQUARE_METERS: 1, + UnitOfArea.SQUARE_CENTIMETERS: 1 / _CM2_TO_M2, + UnitOfArea.SQUARE_MILLIMETERS: 1 / _MM2_TO_M2, + UnitOfArea.SQUARE_KILOMETERS: 1 / _KM2_TO_M2, + UnitOfArea.SQUARE_INCHES: 1 / _IN2_TO_M2, + UnitOfArea.SQUARE_FEET: 1 / _FT2_TO_M2, + UnitOfArea.SQUARE_YARDS: 1 / _YD2_TO_M2, + UnitOfArea.SQUARE_MILES: 1 / _MI2_TO_M2, + UnitOfArea.ACRES: 1 / _ACRE_TO_M2, + UnitOfArea.HECTARES: 1 / _HECTARE_TO_M2, + } + VALID_UNITS = set(UnitOfArea) + + class DistanceConverter(BaseUnitConverter): """Utility to convert distance values.""" @@ -173,6 +207,17 @@ class DistanceConverter(BaseUnitConverter): } +class BloodGlucoseConcentrationConverter(BaseUnitConverter): + 
"""Utility to convert blood glucose concentration values.""" + + UNIT_CLASS = "blood_glucose_concentration" + _UNIT_CONVERSION: dict[str | None, float] = { + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER: 18, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER: 1, + } + VALID_UNITS = set(UnitOfBloodGlucoseConcentration) + + class ConductivityConverter(BaseUnitConverter): """Utility to convert electric current values.""" @@ -203,10 +248,12 @@ class ElectricPotentialConverter(BaseUnitConverter): _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricPotential.VOLT: 1, UnitOfElectricPotential.MILLIVOLT: 1e3, + UnitOfElectricPotential.MICROVOLT: 1e6, } VALID_UNITS = { UnitOfElectricPotential.VOLT, UnitOfElectricPotential.MILLIVOLT, + UnitOfElectricPotential.MICROVOLT, } @@ -222,6 +269,8 @@ class EnergyConverter(BaseUnitConverter): UnitOfEnergy.WATT_HOUR: 1e3, UnitOfEnergy.KILO_WATT_HOUR: 1, UnitOfEnergy.MEGA_WATT_HOUR: 1 / 1e3, + UnitOfEnergy.GIGA_WATT_HOUR: 1 / 1e6, + UnitOfEnergy.TERA_WATT_HOUR: 1 / 1e9, UnitOfEnergy.CALORIE: _WH_TO_CAL * 1e3, UnitOfEnergy.KILO_CALORIE: _WH_TO_CAL, UnitOfEnergy.MEGA_CALORIE: _WH_TO_CAL / 1e3, @@ -292,10 +341,16 @@ class PowerConverter(BaseUnitConverter): _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, + UnitOfPower.MEGA_WATT: 1 / 1e6, + UnitOfPower.GIGA_WATT: 1 / 1e9, + UnitOfPower.TERA_WATT: 1 / 1e12, } VALID_UNITS = { UnitOfPower.WATT, UnitOfPower.KILO_WATT, + UnitOfPower.MEGA_WATT, + UnitOfPower.GIGA_WATT, + UnitOfPower.TERA_WATT, } @@ -611,12 +666,15 @@ class VolumeFlowRateConverter(BaseUnitConverter): / (_HRS_TO_MINUTES * _L_TO_CUBIC_METER), UnitOfVolumeFlowRate.GALLONS_PER_MINUTE: 1 / (_HRS_TO_MINUTES * _GALLON_TO_CUBIC_METER), + UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND: 1 + / (_HRS_TO_SECS * _ML_TO_CUBIC_METER), } VALID_UNITS = { UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, UnitOfVolumeFlowRate.LITERS_PER_MINUTE, UnitOfVolumeFlowRate.GALLONS_PER_MINUTE, + UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND, } diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index 7f7c7f2b5fd..c812dd38230 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -9,6 +9,7 @@ import voluptuous as vol from homeassistant.const import ( ACCUMULATED_PRECIPITATION, + AREA, LENGTH, MASS, PRESSURE, @@ -16,6 +17,7 @@ from homeassistant.const import ( UNIT_NOT_RECOGNIZED_TEMPLATE, VOLUME, WIND_SPEED, + UnitOfArea, UnitOfLength, UnitOfMass, UnitOfPrecipitationDepth, @@ -27,6 +29,7 @@ from homeassistant.const import ( ) from .unit_conversion import ( + AreaConverter, DistanceConverter, PressureConverter, SpeedConverter, @@ -41,6 +44,8 @@ _CONF_UNIT_SYSTEM_IMPERIAL: Final = "imperial" _CONF_UNIT_SYSTEM_METRIC: Final = "metric" _CONF_UNIT_SYSTEM_US_CUSTOMARY: Final = "us_customary" +AREA_UNITS = AreaConverter.VALID_UNITS + LENGTH_UNITS = DistanceConverter.VALID_UNITS MASS_UNITS: set[str] = { @@ -66,6 +71,7 @@ _VALID_BY_TYPE: dict[str, set[str] | set[str | None]] = { MASS: MASS_UNITS, VOLUME: VOLUME_UNITS, PRESSURE: PRESSURE_UNITS, + AREA: AREA_UNITS, } @@ -84,6 +90,7 @@ class UnitSystem: name: str, *, accumulated_precipitation: UnitOfPrecipitationDepth, + area: UnitOfArea, conversions: dict[tuple[SensorDeviceClass | str | None, str | None], str], length: UnitOfLength, mass: UnitOfMass, @@ -97,6 +104,7 @@ class UnitSystem: UNIT_NOT_RECOGNIZED_TEMPLATE.format(unit, unit_type) for unit, unit_type in ( 
(accumulated_precipitation, ACCUMULATED_PRECIPITATION), + (area, AREA), (temperature, TEMPERATURE), (length, LENGTH), (wind_speed, WIND_SPEED), @@ -112,10 +120,11 @@ class UnitSystem: self._name = name self.accumulated_precipitation_unit = accumulated_precipitation - self.temperature_unit = temperature + self.area_unit = area self.length_unit = length self.mass_unit = mass self.pressure_unit = pressure + self.temperature_unit = temperature self.volume_unit = volume self.wind_speed_unit = wind_speed self._conversions = conversions @@ -149,6 +158,16 @@ class UnitSystem: precip, from_unit, self.accumulated_precipitation_unit ) + def area(self, area: float | None, from_unit: str) -> float: + """Convert the given area to this unit system.""" + if not isinstance(area, Number): + raise TypeError(f"{area!s} is not a numeric value.") + + # type ignore: https://github.com/python/mypy/issues/7207 + return AreaConverter.convert( # type: ignore[unreachable] + area, from_unit, self.area_unit + ) + def pressure(self, pressure: float | None, from_unit: str) -> float: """Convert the given pressure to this unit system.""" if not isinstance(pressure, Number): @@ -184,6 +203,7 @@ class UnitSystem: return { LENGTH: self.length_unit, ACCUMULATED_PRECIPITATION: self.accumulated_precipitation_unit, + AREA: self.area_unit, MASS: self.mass_unit, PRESSURE: self.pressure_unit, TEMPERATURE: self.temperature_unit, @@ -234,6 +254,12 @@ METRIC_SYSTEM = UnitSystem( for unit in UnitOfPressure if unit != UnitOfPressure.HPA }, + # Convert non-metric area + ("area", UnitOfArea.SQUARE_INCHES): UnitOfArea.SQUARE_CENTIMETERS, + ("area", UnitOfArea.SQUARE_FEET): UnitOfArea.SQUARE_METERS, + ("area", UnitOfArea.SQUARE_MILES): UnitOfArea.SQUARE_KILOMETERS, + ("area", UnitOfArea.SQUARE_YARDS): UnitOfArea.SQUARE_METERS, + ("area", UnitOfArea.ACRES): UnitOfArea.HECTARES, # Convert non-metric distances ("distance", UnitOfLength.FEET): UnitOfLength.METERS, ("distance", UnitOfLength.INCHES): UnitOfLength.MILLIMETERS, @@ -285,6 +311,7 @@ METRIC_SYSTEM = UnitSystem( if unit not in (UnitOfSpeed.KILOMETERS_PER_HOUR, UnitOfSpeed.KNOTS) }, }, + area=UnitOfArea.SQUARE_METERS, length=UnitOfLength.KILOMETERS, mass=UnitOfMass.GRAMS, pressure=UnitOfPressure.PA, @@ -303,6 +330,12 @@ US_CUSTOMARY_SYSTEM = UnitSystem( for unit in UnitOfPressure if unit != UnitOfPressure.INHG }, + # Convert non-USCS areas + ("area", UnitOfArea.SQUARE_METERS): UnitOfArea.SQUARE_FEET, + ("area", UnitOfArea.SQUARE_CENTIMETERS): UnitOfArea.SQUARE_INCHES, + ("area", UnitOfArea.SQUARE_MILLIMETERS): UnitOfArea.SQUARE_INCHES, + ("area", UnitOfArea.SQUARE_KILOMETERS): UnitOfArea.SQUARE_MILES, + ("area", UnitOfArea.HECTARES): UnitOfArea.ACRES, # Convert non-USCS distances ("distance", UnitOfLength.CENTIMETERS): UnitOfLength.INCHES, ("distance", UnitOfLength.KILOMETERS): UnitOfLength.MILES, @@ -356,6 +389,7 @@ US_CUSTOMARY_SYSTEM = UnitSystem( if unit not in (UnitOfSpeed.KNOTS, UnitOfSpeed.MILES_PER_HOUR) }, }, + area=UnitOfArea.SQUARE_FEET, length=UnitOfLength.MILES, mass=UnitOfMass.POUNDS, pressure=UnitOfPressure.PSI, diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index 39ac17d94f9..39d38a8f47d 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -25,7 +25,6 @@ except ImportError: from propcache import cached_property from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.frame import report from .const import SECRET_YAML from .objects import Input, NodeDictClass, NodeListClass, 
NodeStrClass @@ -144,37 +143,6 @@ class FastSafeLoader(FastestAvailableSafeLoader, _LoaderMixin): self.secrets = secrets -class SafeLoader(FastSafeLoader): - """Provided for backwards compatibility. Logs when instantiated.""" - - def __init__(*args: Any, **kwargs: Any) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.__init__(*args, **kwargs) - - @classmethod - def add_constructor(cls, tag: str, constructor: Callable) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.add_constructor(tag, constructor) - - @classmethod - def add_multi_constructor( - cls, tag_prefix: str, multi_constructor: Callable - ) -> None: - """Log a warning and call super.""" - SafeLoader.__report_deprecated() - FastSafeLoader.add_multi_constructor(tag_prefix, multi_constructor) - - @staticmethod - def __report_deprecated() -> None: - """Log deprecation warning.""" - report( - "uses deprecated 'SafeLoader' instead of 'FastSafeLoader', " - "which will stop working in HA Core 2024.6," - ) - - class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): """Python safe loader.""" @@ -184,37 +152,6 @@ class PythonSafeLoader(yaml.SafeLoader, _LoaderMixin): self.secrets = secrets -class SafeLineLoader(PythonSafeLoader): - """Provided for backwards compatibility. Logs when instantiated.""" - - def __init__(*args: Any, **kwargs: Any) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.__init__(*args, **kwargs) - - @classmethod - def add_constructor(cls, tag: str, constructor: Callable) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.add_constructor(tag, constructor) - - @classmethod - def add_multi_constructor( - cls, tag_prefix: str, multi_constructor: Callable - ) -> None: - """Log a warning and call super.""" - SafeLineLoader.__report_deprecated() - PythonSafeLoader.add_multi_constructor(tag_prefix, multi_constructor) - - @staticmethod - def __report_deprecated() -> None: - """Log deprecation warning.""" - report( - "uses deprecated 'SafeLineLoader' instead of 'PythonSafeLoader', " - "which will stop working in HA Core 2024.6," - ) - - type LoaderType = FastSafeLoader | PythonSafeLoader diff --git a/mypy.ini b/mypy.ini index 794579eb48f..a71f980dac9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -11,6 +11,7 @@ follow_imports = normal local_partial_types = true strict_equality = true no_implicit_optional = true +report_deprecated_as_error = true warn_incomplete_stub = true warn_redundant_casts = true warn_unused_configs = true @@ -2995,6 +2996,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.music_assistant.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.my.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3045,6 +3056,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.nasweb.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.neato.*] 
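
The loader.py hunk above deletes the SafeLoader and SafeLineLoader shims, which have only logged deprecation warnings since 2024.6. Callers migrate by using the maintained loader names directly or, preferably, the public helpers. A rough sketch of the replacement mapping, assuming the helper names in homeassistant.util.yaml stay as they are today:

    from homeassistant.util.yaml import load_yaml, parse_yaml

    # SafeLoader     -> FastSafeLoader   (C-accelerated when libyaml is available)
    # SafeLineLoader -> PythonSafeLoader (pure Python, keeps line numbers)
    data = parse_yaml("automation:\n  - alias: demo")  # picks the fastest loader itself
    config = load_yaml("configuration.yaml")
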
check_untyped_defs = true disallow_incomplete_defs = true @@ -3135,6 +3156,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.nordpool.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.notify.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3575,6 +3606,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.reolink.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.repairs.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -4096,6 +4137,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.stookwijzer.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.stream.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pyproject.toml b/pyproject.toml index f25bdbefdf3..2ceb074cc48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.11.3" +version = "2024.12.0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" @@ -19,6 +19,7 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Topic :: Home Automation", ] requires-python = ">=3.12.0" @@ -28,14 +29,15 @@ dependencies = [ # change behavior based on presence of supervisor. Deprecated with #127228 # Lib can be removed with 2025.11 "aiohasupervisor==0.2.1", - "aiohttp==3.10.11", + "aiohttp==3.11.9", "aiohttp_cors==0.7.0", - "aiohttp-fast-zlib==0.1.1", + "aiohttp-fast-zlib==0.2.0", "aiozoneinfo==0.2.1", "astral==2.2", "async-interrupt==1.2.0", "attrs==24.2.0", "atomicwrites-homeassistant==1.4.1", + "audioop-lts==0.2.1;python_version>='3.13'", "awesomeversion==24.6.0", "bcrypt==4.2.0", "certifi>=2021.5.30", @@ -43,7 +45,7 @@ dependencies = [ "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration - "hass-nabucasa==0.83.0", + "hass-nabucasa==0.85.0", # When bumping httpx, please check the version pins of # httpcore, anyio, and h11 in gen_requirements_all "httpx==0.27.2", @@ -51,31 +53,34 @@ dependencies = [ "ifaddr==0.2.0", "Jinja2==3.1.4", "lru-dict==1.3.0", - "PyJWT==2.9.0", + "PyJWT==2.10.1", # PyJWT has loose dependency. We want the latest one. 
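
Several of the new core dependencies above carry a PEP 508 environment marker (audioop-lts here, standard-aifc and standard-telnetlib just below), so the backports are only installed on Python 3.13 where the stdlib modules were removed. A quick way to sanity-check such a marker with the packaging library that pip itself uses; illustrative snippet, not part of the change:

    from packaging.markers import Marker

    marker = Marker("python_version >= '3.13'")
    marker.evaluate()                             # True only on a 3.13+ interpreter
    marker.evaluate({"python_version": "3.12"})   # False - override for testing
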
"cryptography==43.0.1", - "Pillow==10.4.0", - "propcache==0.2.0", + "Pillow==11.0.0", + "propcache==0.2.1", "pyOpenSSL==24.2.1", - "orjson==3.10.10", + "orjson==3.10.12", "packaging>=23.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", "PyYAML==6.0.2", "requests==2.32.3", - "SQLAlchemy==2.0.31", + "securetar==2024.11.0", + "SQLAlchemy==2.0.36", + "standard-aifc==3.13.0;python_version>='3.13'", + "standard-telnetlib==3.13.0;python_version>='3.13'", "typing-extensions>=4.12.2,<5.0", "ulid-transform==1.0.2", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "uv==0.4.28", + "uv==0.5.4", "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.17.1", - "webrtc-models==0.2.0", + "yarl==1.18.3", + "webrtc-models==0.3.0", ] [project.urls] @@ -90,8 +95,6 @@ dependencies = [ hass = "homeassistant.__main__:main" [tool.setuptools] -platforms = ["any"] -zip-safe = false include-package-data = true [tool.setuptools.packages.find] @@ -485,10 +488,13 @@ filterwarnings = [ "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", # -- tracked upstream / open PRs + # - pyOpenSSL v24.2.1 # https://github.com/certbot/certbot/issues/9828 - v2.11.0 + # https://github.com/certbot/certbot/issues/9992 "ignore:X509Extension support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - # https://github.com/beetbox/mediafile/issues/67 - v0.12.0 - "ignore:'imghdr' is deprecated and slated for removal in Python 3.13:DeprecationWarning:mediafile", + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:josepy.util", + # - other # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 # https://github.com/foxel/python_ndms2_client/pull/8 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:ndms2_client.connection", @@ -548,7 +554,7 @@ filterwarnings = [ "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 - 2024-02-24 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", - # https://github.com/lextudio/pysnmp/blob/v7.1.8/pysnmp/smi/compiler.py#L23-L31 - v7.1.8 - 2024-10-15 + # https://github.com/lextudio/pysnmp/blob/v7.1.10/pysnmp/smi/compiler.py#L23-L31 - v7.1.10 - 2024-11-04 "ignore:smiV1Relaxed is deprecated. Please use smi_v1_relaxed instead:DeprecationWarning:pysnmp.smi.compiler", "ignore:getReadersFromUrls is deprecated. 
Please use get_readers_from_urls instead:DeprecationWarning:pysmi.reader.url", # wrong stacklevel # https://github.com/briis/pyweatherflowudp/blob/v1.4.5/pyweatherflowudp/const.py#L20 - v1.4.5 - 2023-10-10 @@ -578,7 +584,7 @@ filterwarnings = [ # - pkg_resources # https://pypi.org/project/aiomusiccast/ - v0.14.8 - 2023-03-20 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:aiomusiccast", - # https://pypi.org/project/habitipy/ - v0.3.1 - 2019-01-14 / 2024-04-28 + # https://pypi.org/project/habitipy/ - v0.3.3 - 2024-10-28 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:habitipy.api", # https://github.com/eavanvalkenburg/pysiaalarm/blob/v3.1.1/src/pysiaalarm/data/data.py#L7 - v3.1.1 - 2023-04-17 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pysiaalarm.data.data", @@ -586,14 +592,6 @@ filterwarnings = [ "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", - # https://pypi.org/project/velbus-aio/ - v2024.7.6 - 2024-07-31 - # https://github.com/Cereal2nd/velbus-aio/blob/2024.7.6/velbusaio/handler.py#L22 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", - # - pyOpenSSL v24.2.1 - # https://pypi.org/project/acme/ - v2.11.0 - 2024-06-06 - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - # https://pypi.org/project/josepy/ - v1.14.0 - 2023-11-01 - "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:josepy.util", # -- Python 3.13 # HomeAssistant @@ -607,7 +605,7 @@ filterwarnings = [ # https://github.com/Uberi/speech_recognition/blob/3.11.0/speech_recognition/__init__.py#L7 "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", # https://pypi.org/project/voip-utils/ - v0.2.0 - 2024-09-06 - # https://github.com/home-assistant-libs/voip-utils/blob/v0.2.0/voip_utils/rtp_audio.py#L3 + # https://github.com/home-assistant-libs/voip-utils/blob/0.2.0/voip_utils/rtp_audio.py#L3 "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:voip_utils.rtp_audio", # -- Python 3.13 - unmaintained projects, last release about 2+ years @@ -619,6 +617,17 @@ filterwarnings = [ # https://github.com/ssaenger/pyws66i/blob/v1.1/pyws66i/__init__.py#L2 "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pyws66i", + # -- New in Python 3.13 + # https://github.com/kurtmckee/feedparser/pull/389 - >6.0.11 + # https://github.com/kurtmckee/feedparser/issues/481 + "ignore:'count' is passed as positional argument:DeprecationWarning:feedparser.html", + # https://github.com/youknowone/python-deadlib - Backports for aifc, telnetlib + "ignore:aifc was removed in Python 3.13.*'standard-aifc':DeprecationWarning:speech_recognition", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:homeassistant.components.hddtemp.sensor", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:ndms2_client.connection", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:plumlightpad.lightpad", + "ignore:telnetlib was removed in Python 3.13.*'standard-telnetlib':DeprecationWarning:pyws66i", + # 
-- unmaintained projects, last release about 2+ years # https://pypi.org/project/agent-py/ - v0.0.23 - 2020-06-04 "ignore:with timeout\\(\\) is deprecated:DeprecationWarning:agent.a", @@ -691,7 +700,7 @@ exclude_lines = [ ] [tool.ruff] -required-version = ">=0.6.8" +required-version = ">=0.8.0" [tool.ruff.lint] select = [ @@ -774,7 +783,7 @@ select = [ "SLOT", # flake8-slots "T100", # Trace found: {name} used "T20", # flake8-print - "TCH", # flake8-type-checking + "TC", # flake8-type-checking "TID", # Tidy imports "TRY", # tryceratops "UP", # pyupgrade @@ -798,7 +807,6 @@ ignore = [ "PLR0915", # Too many statements ({statements} > {max_statements}) "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target - "PT004", # Fixture {fixture} does not return anything, add leading underscore "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception "PT018", # Assertion should be broken down into multiple parts "RUF001", # String contains ambiguous unicode character. @@ -811,9 +819,9 @@ ignore = [ "SIM115", # Use context handler for opening files # Moving imports into type-checking blocks can mess with pytest.patch() - "TCH001", # Move application import {} into a type-checking block - "TCH002", # Move third-party import {} into a type-checking block - "TCH003", # Move standard library import {} into a type-checking block + "TC001", # Move application import {} into a type-checking block + "TC002", # Move third-party import {} into a type-checking block + "TC003", # Move standard library import {} into a type-checking block "TRY003", # Avoid specifying long messages outside the exception class "TRY400", # Use `logging.exception` instead of `logging.error` diff --git a/requirements.txt b/requirements.txt index 67b875943d7..7aadd55c024 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,43 +5,47 @@ # Home Assistant Core aiodns==3.2.0 aiohasupervisor==0.2.1 -aiohttp==3.10.11 +aiohttp==3.11.9 aiohttp_cors==0.7.0 -aiohttp-fast-zlib==0.1.1 +aiohttp-fast-zlib==0.2.0 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.2.0 attrs==24.2.0 atomicwrites-homeassistant==1.4.1 +audioop-lts==0.2.1;python_version>='3.13' awesomeversion==24.6.0 bcrypt==4.2.0 certifi>=2021.5.30 ciso8601==2.3.1 fnv-hash-fast==1.0.2 -hass-nabucasa==0.83.0 +hass-nabucasa==0.85.0 httpx==0.27.2 home-assistant-bluetooth==1.13.0 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -PyJWT==2.9.0 +PyJWT==2.10.1 cryptography==43.0.1 -Pillow==10.4.0 -propcache==0.2.0 +Pillow==11.0.0 +propcache==0.2.1 pyOpenSSL==24.2.1 -orjson==3.10.10 +orjson==3.10.12 packaging>=23.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 PyYAML==6.0.2 requests==2.32.3 -SQLAlchemy==2.0.31 +securetar==2024.11.0 +SQLAlchemy==2.0.36 +standard-aifc==3.13.0;python_version>='3.13' +standard-telnetlib==3.13.0;python_version>='3.13' typing-extensions>=4.12.2,<5.0 ulid-transform==1.0.2 urllib3>=1.26.5,<2 -uv==0.4.28 +uv==0.5.4 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.17.1 -webrtc-models==0.2.0 +yarl==1.18.3 +webrtc-models==0.3.0 diff --git a/requirements_all.txt b/requirements_all.txt index 128f8b5e62f..20f105b7f07 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -4,7 +4,7 @@ -r requirements.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.4 +AEMET-OpenData==0.6.3 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 @@ -33,7 +33,7 
@@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.4.0 +Pillow==11.0.0 # homeassistant.components.plex PlexAPI==4.15.16 @@ -60,7 +60,7 @@ PyFronius==0.7.3 PyLoadAPI==1.3.2 # homeassistant.components.met_eireann -PyMetEireann==2021.8.0 +PyMetEireann==2024.11.0 # homeassistant.components.met # homeassistant.components.norway_air @@ -70,7 +70,7 @@ PyMetno==0.13.0 PyMicroBot==0.0.17 # homeassistant.components.nina -PyNINA==0.3.3 +PyNINA==0.3.4 # homeassistant.components.mobile_app # homeassistant.components.owntracks @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.51.0 +PySwitchbot==0.54.0 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -116,7 +116,7 @@ RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.31 +SQLAlchemy==2.0.36 # homeassistant.components.tami4 Tami4EdgeAPI==3.0 @@ -131,7 +131,7 @@ TwitterAPI==2.7.12 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==3.0.0 +accuweather==4.0.0 # homeassistant.components.adax adax==0.4.0 @@ -155,7 +155,7 @@ afsapi==0.2.7 agent-py==0.0.24 # homeassistant.components.geo_json_events -aio-geojson-generic-client==0.4 +aio-geojson-generic-client==0.5 # homeassistant.components.geonetnz_quakes aio-geojson-geonetnz-quakes==0.16 @@ -172,6 +172,9 @@ aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs aio-georss-gdacs==0.10 +# homeassistant.components.acaia +aioacaia==0.1.10 + # homeassistant.components.airq aioairq==0.4.3 @@ -240,7 +243,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.1 +aioesphomeapi==27.0.3 # homeassistant.components.flo aioflo==2021.11.0 @@ -262,7 +265,7 @@ aioharmony==0.2.10 aiohasupervisor==0.2.1 # homeassistant.components.homekit_controller -aiohomekit==3.2.6 +aiohomekit==3.2.7 # homeassistant.components.hue aiohue==4.7.3 @@ -295,7 +298,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.3 +aiomealie==0.9.4 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -322,7 +325,7 @@ aioopenexchangerates==0.6.8 aiooui==0.1.7 # homeassistant.components.pegel_online -aiopegelonline==0.0.10 +aiopegelonline==0.1.0 # homeassistant.components.acmeda aiopulse==0.4.6 @@ -357,7 +360,7 @@ aioridwell==2024.01.0 aioruckus==0.42 # homeassistant.components.russound_rio -aiorussound==4.0.5 +aiorussound==4.1.0 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -366,7 +369,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==12.0.1 +aioshelly==12.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -381,10 +384,10 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.cambridge_audio -aiostreammagic==2.8.5 +aiostreammagic==2.10.0 # homeassistant.components.switcher_kis -aioswitcher==4.4.0 +aioswitcher==5.0.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 @@ -392,6 +395,9 @@ aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig aiotankerkoenig==0.4.2 +# homeassistant.components.tedee +aiotedee==0.2.20 + # homeassistant.components.tractive aiotractive==0.6.0 @@ -435,19 +441,19 @@ airthings-cloud==0.2.0 airtouch4pyapi==1.0.5 # homeassistant.components.airtouch5 -airtouch5py==0.2.10 +airtouch5py==0.2.11 # homeassistant.components.alpha_vantage alpha-vantage==2.3.1 # homeassistant.components.amberelectric 
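
The utility_meter dependency above moves from croniter to cronsim. The replacement has a deliberately small surface: build an iterator from a cron expression plus a start datetime and pull occurrences from it. A minimal sketch of the upstream-documented API; times are illustrative:

    from datetime import datetime
    from cronsim import CronSim

    it = CronSim("*/15 * * * *", datetime(2024, 12, 1, 0, 7))
    next(it)   # datetime(2024, 12, 1, 0, 15)
    next(it)   # datetime(2024, 12, 1, 0, 30)
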
-amberelectric==1.1.1 +amberelectric==2.0.12 # homeassistant.components.amcrest amcrest==1.9.8 # homeassistant.components.androidtv -androidtv[async]==0.0.73 +androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote androidtvremote2==0.1.2 @@ -526,6 +532,10 @@ autarco==3.1.0 # homeassistant.components.husqvarna_automower_ble automower-ble==0.2.0 +# homeassistant.components.generic +# homeassistant.components.stream +av==13.1.0 + # homeassistant.components.avea # avea==1.5.1 @@ -536,7 +546,7 @@ automower-ble==0.2.0 axis==63 # homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.4.3 +ayla-iot-unofficial==1.4.4 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -572,7 +582,7 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.4 +bimmer-connected[china]==0.17.2 # homeassistant.components.bizkaibus bizkaibus==0.1.1 @@ -698,7 +708,7 @@ connect-box==0.3.1 construct==2.10.68 # homeassistant.components.utility_meter -croniter==2.0.2 +cronsim==2.6 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -728,7 +738,7 @@ debugpy==1.8.6 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.4.0 +deebot-client==9.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -742,7 +752,7 @@ deluge-client==1.10.2 demetriek==0.4.0 # homeassistant.components.denonavr -denonavr==1.0.0 +denonavr==1.0.1 # homeassistant.components.devialet devialet==1.4.5 @@ -853,7 +863,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.2.0 +eq3btsmart==1.4.1 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -921,7 +931,7 @@ fnv-hash-fast==1.0.2 foobot_async==1.0.0 # homeassistant.components.forecast_solar -forecast-solar==3.1.0 +forecast-solar==4.0.0 # homeassistant.components.fortios fortiosapi==1.0.5 @@ -937,7 +947,7 @@ freesms==0.2.0 fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.6.10 +fyta_cli==0.7.0 # homeassistant.components.google_translate gTTS==2.2.4 @@ -1059,15 +1069,11 @@ gspread==5.5.0 gstreamer-player==1.1.2 # homeassistant.components.profiler -guppy3==3.1.4.post1 +guppy3==3.1.4.post1;python_version<'3.13' # homeassistant.components.iaqualink h2==4.1.0 -# homeassistant.components.generic -# homeassistant.components.stream -ha-av==10.1.1 - # homeassistant.components.ffmpeg ha-ffmpeg==3.2.2 @@ -1084,16 +1090,16 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.83.0 +hass-nabucasa==0.85.0 # homeassistant.components.splunk hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==1.7.4 +hassil==2.0.5 # homeassistant.components.jewish_calendar -hdate==0.10.9 +hdate==0.11.1 # homeassistant.components.heatmiser heatmiserV3==2.0.3 @@ -1121,13 +1127,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.61 +holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241106.2 +home-assistant-frontend==20241127.4 # homeassistant.components.conversation -home-assistant-intents==2024.11.6 +home-assistant-intents==2024.12.4 # homeassistant.components.home_connect homeconnect==0.8.0 @@ -1142,10 +1148,10 @@ horimote==0.4.1 httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.9.3 +huawei-lte-api==1.10.0 # homeassistant.components.huum -huum==0.7.10 +huum==0.7.12 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -1244,10 +1250,10 @@ kegtron-ble==0.4.0 
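
The numpy pin above crosses a major version (1.26 to 2.1). That is mostly transparent for code on the public API, but NumPy 2 drops some long-deprecated aliases, which is worth knowing when debugging custom components still targeting 1.x. A tiny, illustrative probe, not part of the diff:

    import numpy as np

    np.__version__           # '2.1.3' after this bump
    np.float64(1.0)          # fine on both major versions
    hasattr(np, "float_")    # False on NumPy 2 - the old alias was removed
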
kiwiki-client==0.1.1 # homeassistant.components.knocki -knocki==0.3.5 +knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2024.9.10.221729 +knx-frontend==2024.11.16.205004 # homeassistant.components.konnected konnected==1.2.0 @@ -1265,7 +1271,7 @@ lakeside==0.13 laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.2.1 +lcn-frontend==0.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1306,9 +1312,6 @@ linear-garage-door==0.2.9 # homeassistant.components.linode linode-api==4.1.9b1 -# homeassistant.components.lamarzocco -lmcloud==1.2.3 - # homeassistant.components.google_maps locationsharinglib==5.0.1 @@ -1367,13 +1370,13 @@ mficlient==0.5.0 micloud==0.5 # homeassistant.components.microbees -microBeesPy==0.3.2 +microBeesPy==0.3.5 # homeassistant.components.mill mill-local==0.3.0 # homeassistant.components.mill -millheater==0.11.8 +millheater==0.12.2 # homeassistant.components.minio minio==7.1.12 @@ -1385,7 +1388,7 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.2 +monzopy==1.4.2 # homeassistant.components.mopeka mopeka-iot-ble==0.8.0 @@ -1394,17 +1397,20 @@ mopeka-iot-ble==0.8.0 motionblinds==0.6.25 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.2 +motionblindsble==0.1.3 # homeassistant.components.motioneye motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==4.1.1.116.0 +mozart-api==4.1.1.116.3 # homeassistant.components.mullvad mullvad-api==1.0.0 +# homeassistant.components.music_assistant +music-assistant-client==1.0.8 + # homeassistant.components.tts mutagen==1.47.0 @@ -1427,7 +1433,7 @@ ndms2-client==0.1.2 nessclient==1.1.2 # homeassistant.components.netdata -netdata==1.1.0 +netdata==1.3.0 # homeassistant.components.nmap_tracker netmap==0.7.0.2 @@ -1448,10 +1454,10 @@ nextcloudmonitor==1.5.1 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.3.0 +nextdns==4.0.0 # homeassistant.components.nibe_heatpump -nibe==2.11.0 +nibe==2.13.0 # homeassistant.components.nice_go nice-go==0.3.10 @@ -1488,7 +1494,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==1.26.4 +numpy==2.1.3 # homeassistant.components.nyt_games nyt_games==0.4.4 @@ -1616,7 +1622,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.4.4 +plugwise==1.6.0 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1648,7 +1654,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.0.0 +psutil==6.1.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 @@ -1666,7 +1672,7 @@ pushover_complete==1.1.1 pvo==2.1.1 # homeassistant.components.aosmith -py-aosmith==1.0.10 +py-aosmith==1.0.11 # homeassistant.components.canary py-canary==0.5.4 @@ -1772,7 +1778,7 @@ pyatag==0.3.5.3 pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.15.1 +pyatv==0.16.0 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 @@ -1886,7 +1892,7 @@ pyeiscp==0.0.7 pyemoncms==0.1.1 # homeassistant.components.enphase_envoy -pyenphase==1.22.0 +pyenphase==1.23.0 # homeassistant.components.envisalink pyenvisalink==4.7 @@ -1901,7 +1907,7 @@ pyeverlights==0.1.0 pyevilgenius==2.0.0 # homeassistant.components.ezviz -pyezviz==0.2.1.2 +pyezviz==0.2.2.3 # homeassistant.components.fibaro pyfibaro==0.8.0 @@ -2006,7 +2012,7 @@ pykmtronic==0.3.0 pykodi==0.2.7 # homeassistant.components.kostal_plenticore -pykoplenti==1.2.2 +pykoplenti==1.3.0 # 
homeassistant.components.kraken pykrakenapi==0.1.8 @@ -2020,6 +2026,9 @@ pykwb==0.0.8 # homeassistant.components.lacrosse pylacrosse==0.4 +# homeassistant.components.lamarzocco +pylamarzocco==1.2.12 + # homeassistant.components.lastfm pylast==5.1.0 @@ -2039,7 +2048,7 @@ pylitejet==0.6.3 pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.21.1 +pylutron-caseta==0.22.0 # homeassistant.components.lutron pylutron==0.2.16 @@ -2081,7 +2090,7 @@ pymsteams==0.1.12 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==0.2.0 +pynecil==0.2.1 # homeassistant.components.netgear pynetgear==0.10.10 @@ -2092,6 +2101,9 @@ pynetio==0.1.9.1 # homeassistant.components.nobo_hub pynobo==1.8.1 +# homeassistant.components.nordpool +pynordpool==0.2.2 + # homeassistant.components.nuki pynuki==1.6.3 @@ -2137,13 +2149,13 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.14.1 +pyoverkiz==1.15.0 # homeassistant.components.onewire pyownet==0.10.0.post1 # homeassistant.components.palazzetti -pypalazzetti==0.1.10 +pypalazzetti==0.1.14 # homeassistant.components.elv pypca==0.0.7 @@ -2209,7 +2221,7 @@ pysabnzbd==1.1.1 pysaj==0.0.16 # homeassistant.components.schlage -pyschlage==2024.8.0 +pyschlage==2024.11.0 # homeassistant.components.sensibo pysensibo==1.1.0 @@ -2257,7 +2269,7 @@ pysmarty2==0.10.1 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.1.3 +pysmlight==0.1.4 # homeassistant.components.snmp pysnmp==6.2.6 @@ -2281,7 +2293,7 @@ pysqueezebox==0.10.0 pystiebeleltron==0.0.1.dev2 # homeassistant.components.suez_water -pysuezV2==0.2.2 +pysuezV2==1.3.2 # homeassistant.components.switchbee pyswitchbee==1.8.3 @@ -2289,9 +2301,6 @@ pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 -# homeassistant.components.tedee -pytedee-async==0.2.20 - # homeassistant.components.thinkingcleaner pythinkingcleaner==0.0.3 @@ -2305,7 +2314,7 @@ python-awair==0.2.4 python-blockchain-api==0.0.2 # homeassistant.components.bsblan -python-bsblan==0.6.4 +python-bsblan==1.2.1 # homeassistant.components.clementine python-clementine-remote==1.0.1 @@ -2338,7 +2347,7 @@ python-gitlab==1.6.0 python-homeassistant-analytics==0.8.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.3.0 +python-homewizard-energy==v7.0.0 # homeassistant.components.hp_ilo python-hpilo==4.4.3 @@ -2353,7 +2362,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.7 +python-kasa[speedups]==0.8.0 # homeassistant.components.linkplay python-linkplay==0.0.20 @@ -2402,7 +2411,7 @@ python-smarttub==0.0.38 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.6 +python-tado==0.17.7 # homeassistant.components.technove python-technove==1.3.1 @@ -2426,7 +2435,7 @@ pytomorrowio==0.3.6 pytouchline==0.7 # homeassistant.components.touchline_sl -pytouchlinesl==0.1.8 +pytouchlinesl==0.3.0 # homeassistant.components.traccar # homeassistant.components.traccar_server @@ -2439,7 +2448,7 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==1.0.0 +pytrafikverket==1.1.1 # homeassistant.components.v2c pytrydan==0.8.0 @@ -2535,7 +2544,7 @@ rapt-ble==0.1.2 raspyrfm-client==1.2.8 # homeassistant.components.refoss -refoss-ha==1.2.4 +refoss-ha==1.2.5 # homeassistant.components.rainmachine regenmaschine==2024.03.0 @@ -2547,7 +2556,7 @@ renault-api==0.2.7 
renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.2 +reolink-aio==0.11.4 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2556,7 +2565,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell==0.9.12 +ring-doorbell==0.9.13 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2601,7 +2610,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.6.0 +samsungtvws[async,encrypted]==2.7.1 # homeassistant.components.sanix sanix==1.0.6 @@ -2616,7 +2625,7 @@ screenlogicpy==0.10.0 scsgate==0.1.0 # homeassistant.components.backup -securetar==2024.2.1 +securetar==2024.11.0 # homeassistant.components.sendgrid sendgrid==6.8.2 @@ -2667,6 +2676,9 @@ simplisafe-python==2024.01.0 # homeassistant.components.sisyphus sisyphus-control==3.1.4 +# homeassistant.components.sky_remote +skyboxremote==0.0.6 + # homeassistant.components.slack slackclient==2.5.0 @@ -2689,10 +2701,10 @@ soco==0.30.6 solaredge-local==0.2.3 # homeassistant.components.solarlog -solarlog_cli==0.3.2 +solarlog_cli==0.4.0 # homeassistant.components.solax -solax==3.1.1 +solax==3.2.1 # homeassistant.components.somfy_mylink somfy-mylink-synergy==1.0.6 @@ -2707,7 +2719,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotifyaio==0.8.8 +spotifyaio==0.8.11 # homeassistant.components.sql sqlparse==0.5.0 @@ -2722,7 +2734,7 @@ starline==0.1.5 starlingbank==3.2 # homeassistant.components.starlink -starlink-grpc-core==1.1.3 +starlink-grpc-core==1.2.0 # homeassistant.components.statsd statsd==3.2.1 @@ -2734,7 +2746,7 @@ steamodd==4.21 stookalert==0.1.4 # homeassistant.components.stookwijzer -stookwijzer==1.3.0 +stookwijzer==1.5.1 # homeassistant.components.streamlabswater streamlabswater==1.0.1 @@ -2825,7 +2837,7 @@ thermopro-ble==0.10.0 thingspeak==1.0.0 # homeassistant.components.lg_thinq -thinqconnect==1.0.0 +thinqconnect==1.0.2 # homeassistant.components.tikteck tikteck==0.4 @@ -2852,7 +2864,7 @@ total-connect-client==2024.5 tp-connected==0.0.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.2 +tplink-omada-client==1.4.3 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2867,7 +2879,7 @@ ttls==1.8.3 ttn_client==1.2.0 # homeassistant.components.tuya -tuya-device-sharing-sdk==0.1.9 +tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu twentemilieu==2.1.0 @@ -2885,7 +2897,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.4.0 +uiprotect==6.6.5 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2894,7 +2906,7 @@ ultraheat-api==0.5.7 unifi-discovery==1.2.0 # homeassistant.components.unifi_direct -unifi_ap==0.0.1 +unifi_ap==0.0.2 # homeassistant.components.unifiled unifiled==0.11 @@ -2926,7 +2938,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.10.0 +velbus-aio==2024.11.1 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2974,11 +2986,14 @@ weatherflow4py==1.0.6 # homeassistant.components.cisco_webex_teams webexpythonsdk==2.0.1 +# homeassistant.components.nasweb +webio-api==0.1.8 + # homeassistant.components.webmin webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.09.23 +weheat==2024.11.26 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -3029,7 +3044,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # 
homeassistant.components.yalexs_ble -yalexs-ble==2.5.0 +yalexs-ble==2.5.1 # homeassistant.components.august # homeassistant.components.yale @@ -3051,7 +3066,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.11.04 +yt-dlp[default]==2024.12.03 # homeassistant.components.zamg zamg==0.3.6 @@ -3060,13 +3075,13 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.136.0 +zeroconf==0.136.2 # homeassistant.components.zeversolar zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.39 +zha==0.0.41 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.13 @@ -3078,7 +3093,7 @@ ziggo-mediabox-xl==1.1.0 zm-py==0.5.4 # homeassistant.components.zwave_js -zwave-js-server-python==0.58.1 +zwave-js-server-python==0.59.1 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test.txt b/requirements_test.txt index c879f0c6621..f9763630767 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -8,19 +8,19 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt astroid==3.3.5 -coverage==7.6.1 +coverage==7.6.8 freezegun==1.5.1 license-expression==30.4.0 mock-open==1.4.0 -mypy-dev==1.13.0a1 +mypy-dev==1.14.0a3 pre-commit==4.0.0 -pydantic==1.10.18 +pydantic==1.10.19 pylint==3.3.1 pylint-per-file-ignores==1.3.2 pipdeptree==2.23.4 pytest-asyncio==0.24.0 pytest-aiohttp==1.0.5 -pytest-cov==5.0.0 +pytest-cov==6.0.0 pytest-freezer==0.4.8 pytest-github-actions-annotate-failures==0.2.0 pytest-socket==0.7.0 @@ -36,15 +36,15 @@ syrupy==4.7.2 tqdm==4.66.5 types-aiofiles==24.1.0.20240626 types-atomicwrites==1.4.5.1 -types-croniter==2.0.0.20240423 -types-beautifulsoup4==4.12.0.20240907 -types-caldav==1.3.0.20240824 +types-croniter==4.0.0.20241030 +types-beautifulsoup4==4.12.0.20241020 +types-caldav==1.3.0.20241107 types-chardet==0.1.5 types-decorator==5.1.8.20240310 types-paho-mqtt==1.6.0.20240321 types-pillow==10.2.0.20240822 -types-protobuf==5.28.0.20240924 -types-psutil==6.0.0.20240901 +types-protobuf==5.28.3.20241030 +types-psutil==6.1.0.20241102 types-python-dateutil==2.9.0.20241003 types-python-slugify==8.0.2.20240310 types-pytz==2024.2.0.20241003 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b44385bbd0b..38440ddcf52 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -4,7 +4,7 @@ -r requirements_test.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.4 +AEMET-OpenData==0.6.3 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 @@ -33,7 +33,7 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.4.0 +Pillow==11.0.0 # homeassistant.components.plex PlexAPI==4.15.16 @@ -57,7 +57,7 @@ PyFronius==0.7.3 PyLoadAPI==1.3.2 # homeassistant.components.met_eireann -PyMetEireann==2021.8.0 +PyMetEireann==2024.11.0 # homeassistant.components.met # homeassistant.components.norway_air @@ -67,7 +67,7 @@ PyMetno==0.13.0 PyMicroBot==0.0.17 # homeassistant.components.nina -PyNINA==0.3.3 +PyNINA==0.3.4 # homeassistant.components.mobile_app # homeassistant.components.owntracks @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.51.0 +PySwitchbot==0.54.0 # homeassistant.components.syncthru PySyncThru==0.7.10 @@ -110,7 +110,7 @@ RtmAPI==0.7.2 # homeassistant.components.recorder # homeassistant.components.sql -SQLAlchemy==2.0.31 +SQLAlchemy==2.0.36 # homeassistant.components.tami4 
Tami4EdgeAPI==3.0 @@ -119,7 +119,7 @@ Tami4EdgeAPI==3.0 WSDiscovery==2.0.0 # homeassistant.components.accuweather -accuweather==3.0.0 +accuweather==4.0.0 # homeassistant.components.adax adax==0.4.0 @@ -143,7 +143,7 @@ afsapi==0.2.7 agent-py==0.0.24 # homeassistant.components.geo_json_events -aio-geojson-generic-client==0.4 +aio-geojson-generic-client==0.5 # homeassistant.components.geonetnz_quakes aio-geojson-geonetnz-quakes==0.16 @@ -160,6 +160,9 @@ aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs aio-georss-gdacs==0.10 +# homeassistant.components.acaia +aioacaia==0.1.10 + # homeassistant.components.airq aioairq==0.4.3 @@ -228,7 +231,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==27.0.1 +aioesphomeapi==27.0.3 # homeassistant.components.flo aioflo==2021.11.0 @@ -247,7 +250,7 @@ aioharmony==0.2.10 aiohasupervisor==0.2.1 # homeassistant.components.homekit_controller -aiohomekit==3.2.6 +aiohomekit==3.2.7 # homeassistant.components.hue aiohue==4.7.3 @@ -277,7 +280,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.3 +aiomealie==0.9.4 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -304,7 +307,7 @@ aioopenexchangerates==0.6.8 aiooui==0.1.7 # homeassistant.components.pegel_online -aiopegelonline==0.0.10 +aiopegelonline==0.1.0 # homeassistant.components.acmeda aiopulse==0.4.6 @@ -339,7 +342,7 @@ aioridwell==2024.01.0 aioruckus==0.42 # homeassistant.components.russound_rio -aiorussound==4.0.5 +aiorussound==4.1.0 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -348,7 +351,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==12.0.1 +aioshelly==12.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -363,10 +366,10 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.cambridge_audio -aiostreammagic==2.8.5 +aiostreammagic==2.10.0 # homeassistant.components.switcher_kis -aioswitcher==4.4.0 +aioswitcher==5.0.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 @@ -374,6 +377,9 @@ aiosyncthing==0.5.1 # homeassistant.components.tankerkoenig aiotankerkoenig==0.4.2 +# homeassistant.components.tedee +aiotedee==0.2.20 + # homeassistant.components.tractive aiotractive==0.6.0 @@ -417,13 +423,13 @@ airthings-cloud==0.2.0 airtouch4pyapi==1.0.5 # homeassistant.components.airtouch5 -airtouch5py==0.2.10 +airtouch5py==0.2.11 # homeassistant.components.amberelectric -amberelectric==1.1.1 +amberelectric==2.0.12 # homeassistant.components.androidtv -androidtv[async]==0.0.73 +androidtv[async]==0.0.75 # homeassistant.components.androidtv_remote androidtvremote2==0.1.2 @@ -481,11 +487,15 @@ autarco==3.1.0 # homeassistant.components.husqvarna_automower_ble automower-ble==0.2.0 +# homeassistant.components.generic +# homeassistant.components.stream +av==13.1.0 + # homeassistant.components.axis axis==63 # homeassistant.components.fujitsu_fglair -ayla-iot-unofficial==1.4.3 +ayla-iot-unofficial==1.4.4 # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -506,7 +516,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.4 +bimmer-connected[china]==0.17.2 # homeassistant.components.eq3btsmart # homeassistant.components.esphome @@ -594,7 +604,7 @@ colorthief==0.2.1 construct==2.10.68 # homeassistant.components.utility_meter -croniter==2.0.2 +cronsim==2.6 # homeassistant.components.crownstone crownstone-cloud==1.4.11 @@ -618,7 +628,7 @@ 
dbus-fast==2.24.3 debugpy==1.8.6 # homeassistant.components.ecovacs -deebot-client==8.4.0 +deebot-client==9.1.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -632,7 +642,7 @@ deluge-client==1.10.2 demetriek==0.4.0 # homeassistant.components.denonavr -denonavr==1.0.0 +denonavr==1.0.1 # homeassistant.components.devialet devialet==1.4.5 @@ -722,7 +732,7 @@ epion==0.0.3 epson-projector==0.5.1 # homeassistant.components.eq3btsmart -eq3btsmart==1.2.0 +eq3btsmart==1.4.1 # homeassistant.components.esphome esphome-dashboard-api==1.2.3 @@ -780,7 +790,7 @@ fnv-hash-fast==1.0.2 foobot_async==1.0.0 # homeassistant.components.forecast_solar -forecast-solar==3.1.0 +forecast-solar==4.0.0 # homeassistant.components.freebox freebox-api==1.1.0 @@ -790,7 +800,7 @@ freebox-api==1.1.0 fritzconnection[qr]==1.14.0 # homeassistant.components.fyta -fyta_cli==0.6.10 +fyta_cli==0.7.0 # homeassistant.components.google_translate gTTS==2.2.4 @@ -897,15 +907,11 @@ growattServer==1.5.0 gspread==5.5.0 # homeassistant.components.profiler -guppy3==3.1.4.post1 +guppy3==3.1.4.post1;python_version<'3.13' # homeassistant.components.iaqualink h2==4.1.0 -# homeassistant.components.generic -# homeassistant.components.stream -ha-av==10.1.1 - # homeassistant.components.ffmpeg ha-ffmpeg==3.2.2 @@ -922,13 +928,13 @@ habitipy==0.3.3 habluetooth==3.6.0 # homeassistant.components.cloud -hass-nabucasa==0.83.0 +hass-nabucasa==0.85.0 # homeassistant.components.conversation -hassil==1.7.4 +hassil==2.0.5 # homeassistant.components.jewish_calendar -hdate==0.10.9 +hdate==0.11.1 # homeassistant.components.here_travel_time here-routing==1.0.1 @@ -947,13 +953,13 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.61 +holidays==0.62 # homeassistant.components.frontend -home-assistant-frontend==20241106.2 +home-assistant-frontend==20241127.4 # homeassistant.components.conversation -home-assistant-intents==2024.11.6 +home-assistant-intents==2024.12.4 # homeassistant.components.home_connect homeconnect==0.8.0 @@ -965,10 +971,10 @@ homematicip==1.1.3 httplib2==0.20.4 # homeassistant.components.huawei_lte -huawei-lte-api==1.9.3 +huawei-lte-api==1.10.0 # homeassistant.components.huum -huum==0.7.10 +huum==0.7.12 # homeassistant.components.hyperion hyperion-py==0.7.5 @@ -1043,10 +1049,10 @@ justnimbus==0.7.4 kegtron-ble==0.4.0 # homeassistant.components.knocki -knocki==0.3.5 +knocki==0.4.2 # homeassistant.components.knx -knx-frontend==2024.9.10.221729 +knx-frontend==2024.11.16.205004 # homeassistant.components.konnected konnected==1.2.0 @@ -1061,7 +1067,7 @@ lacrosse-view==1.0.3 laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.2.1 +lcn-frontend==0.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1087,9 +1093,6 @@ libsoundtouch==0.8 # homeassistant.components.linear_garage_door linear-garage-door==0.2.9 -# homeassistant.components.lamarzocco -lmcloud==1.2.3 - # homeassistant.components.london_underground london-tube-status==0.5 @@ -1136,13 +1139,13 @@ mficlient==0.5.0 micloud==0.5 # homeassistant.components.microbees -microBeesPy==0.3.2 +microBeesPy==0.3.5 # homeassistant.components.mill mill-local==0.3.0 # homeassistant.components.mill -millheater==0.11.8 +millheater==0.12.2 # homeassistant.components.minio minio==7.1.12 @@ -1154,7 +1157,7 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.2 +monzopy==1.4.2 # homeassistant.components.mopeka mopeka-iot-ble==0.8.0 @@ -1163,17 +1166,20 @@ mopeka-iot-ble==0.8.0 
motionblinds==0.6.25 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.2 +motionblindsble==0.1.3 # homeassistant.components.motioneye motioneye-client==0.3.14 # homeassistant.components.bang_olufsen -mozart-api==4.1.1.116.0 +mozart-api==4.1.1.116.3 # homeassistant.components.mullvad mullvad-api==1.0.0 +# homeassistant.components.music_assistant +music-assistant-client==1.0.8 + # homeassistant.components.tts mutagen==1.47.0 @@ -1208,10 +1214,10 @@ nextcloudmonitor==1.5.1 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.3.0 +nextdns==4.0.0 # homeassistant.components.nibe_heatpump -nibe==2.11.0 +nibe==2.13.0 # homeassistant.components.nice_go nice-go==0.3.10 @@ -1236,7 +1242,7 @@ numato-gpio==0.13.0 # homeassistant.components.stream # homeassistant.components.tensorflow # homeassistant.components.trend -numpy==1.26.4 +numpy==2.1.3 # homeassistant.components.nyt_games nyt_games==0.4.4 @@ -1323,7 +1329,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==1.4.4 +plugwise==1.6.0 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 @@ -1346,7 +1352,7 @@ prometheus-client==0.21.0 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==6.0.0 +psutil==6.1.0 # homeassistant.components.androidtv pure-python-adb[async]==0.3.0.dev0 @@ -1361,7 +1367,7 @@ pushover_complete==1.1.1 pvo==2.1.1 # homeassistant.components.aosmith -py-aosmith==1.0.10 +py-aosmith==1.0.11 # homeassistant.components.canary py-canary==0.5.4 @@ -1443,7 +1449,7 @@ pyatag==0.3.5.3 pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.15.1 +pyatv==0.16.0 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 @@ -1521,7 +1527,7 @@ pyeiscp==0.0.7 pyemoncms==0.1.1 # homeassistant.components.enphase_envoy -pyenphase==1.22.0 +pyenphase==1.23.0 # homeassistant.components.everlights pyeverlights==0.1.0 @@ -1530,7 +1536,7 @@ pyeverlights==0.1.0 pyevilgenius==2.0.0 # homeassistant.components.ezviz -pyezviz==0.2.1.2 +pyezviz==0.2.2.3 # homeassistant.components.fibaro pyfibaro==0.8.0 @@ -1617,7 +1623,7 @@ pykmtronic==0.3.0 pykodi==0.2.7 # homeassistant.components.kostal_plenticore -pykoplenti==1.2.2 +pykoplenti==1.3.0 # homeassistant.components.kraken pykrakenapi==0.1.8 @@ -1625,6 +1631,9 @@ pykrakenapi==0.1.8 # homeassistant.components.kulersky pykulersky==0.5.2 +# homeassistant.components.lamarzocco +pylamarzocco==1.2.12 + # homeassistant.components.lastfm pylast==5.1.0 @@ -1644,7 +1653,7 @@ pylitejet==0.6.3 pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.21.1 +pylutron-caseta==0.22.0 # homeassistant.components.lutron pylutron==0.2.16 @@ -1677,7 +1686,7 @@ pymonoprice==0.4 pymysensors==0.24.0 # homeassistant.components.iron_os -pynecil==0.2.0 +pynecil==0.2.1 # homeassistant.components.netgear pynetgear==0.10.10 @@ -1685,6 +1694,9 @@ pynetgear==0.10.10 # homeassistant.components.nobo_hub pynobo==1.8.1 +# homeassistant.components.nordpool +pynordpool==0.2.2 + # homeassistant.components.nuki pynuki==1.6.3 @@ -1724,13 +1736,13 @@ pyotgw==2.2.2 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.14.1 +pyoverkiz==1.15.0 # homeassistant.components.onewire pyownet==0.10.0.post1 # homeassistant.components.palazzetti -pypalazzetti==0.1.10 +pypalazzetti==0.1.14 # homeassistant.components.lcn pypck==0.7.24 @@ -1778,7 +1790,7 @@ pyrympro==0.0.8 pysabnzbd==1.1.1 # homeassistant.components.schlage -pyschlage==2024.8.0 +pyschlage==2024.11.0 # homeassistant.components.sensibo pysensibo==1.1.0 @@ -1817,7 
+1829,7 @@ pysmarty2==0.10.1 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.1.3 +pysmlight==0.1.4 # homeassistant.components.snmp pysnmp==6.2.6 @@ -1838,7 +1850,7 @@ pyspeex-noise==1.0.2 pysqueezebox==0.10.0 # homeassistant.components.suez_water -pysuezV2==0.2.2 +pysuezV2==1.3.2 # homeassistant.components.switchbee pyswitchbee==1.8.3 @@ -1846,9 +1858,6 @@ pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 -# homeassistant.components.tedee -pytedee-async==0.2.20 - # homeassistant.components.motionmount python-MotionMount==2.2.0 @@ -1856,7 +1865,7 @@ python-MotionMount==2.2.0 python-awair==0.2.4 # homeassistant.components.bsblan -python-bsblan==0.6.4 +python-bsblan==1.2.1 # homeassistant.components.ecobee python-ecobee-api==0.2.20 @@ -1871,7 +1880,7 @@ python-fullykiosk==0.0.14 python-homeassistant-analytics==0.8.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.3.0 +python-homewizard-energy==v7.0.0 # homeassistant.components.izone python-izone==1.2.9 @@ -1880,7 +1889,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.7 +python-kasa[speedups]==0.8.0 # homeassistant.components.linkplay python-linkplay==0.0.20 @@ -1923,7 +1932,7 @@ python-smarttub==0.0.38 python-songpal==0.16.2 # homeassistant.components.tado -python-tado==0.17.6 +python-tado==0.17.7 # homeassistant.components.technove python-technove==1.3.1 @@ -1938,7 +1947,7 @@ pytile==2023.12.0 pytomorrowio==0.3.6 # homeassistant.components.touchline_sl -pytouchlinesl==0.1.8 +pytouchlinesl==0.3.0 # homeassistant.components.traccar # homeassistant.components.traccar_server @@ -1951,7 +1960,7 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==1.0.0 +pytrafikverket==1.1.1 # homeassistant.components.v2c pytrydan==0.8.0 @@ -2026,7 +2035,7 @@ radiotherm==2.1.0 rapt-ble==0.1.2 # homeassistant.components.refoss -refoss-ha==1.2.4 +refoss-ha==1.2.5 # homeassistant.components.rainmachine regenmaschine==2024.03.0 @@ -2038,13 +2047,13 @@ renault-api==0.2.7 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.11.2 +reolink-aio==0.11.4 # homeassistant.components.rflink rflink==0.0.66 # homeassistant.components.ring -ring-doorbell==0.9.12 +ring-doorbell==0.9.13 # homeassistant.components.roku rokuecp==0.19.3 @@ -2077,7 +2086,7 @@ rxv==0.7.0 samsungctl[websocket]==0.7.1 # homeassistant.components.samsungtv -samsungtvws[async,encrypted]==2.6.0 +samsungtvws[async,encrypted]==2.7.1 # homeassistant.components.sanix sanix==1.0.6 @@ -2086,7 +2095,7 @@ sanix==1.0.6 screenlogicpy==0.10.0 # homeassistant.components.backup -securetar==2024.2.1 +securetar==2024.11.0 # homeassistant.components.emulated_kasa # homeassistant.components.sense @@ -2125,6 +2134,9 @@ simplepush==2.2.3 # homeassistant.components.simplisafe simplisafe-python==2024.01.0 +# homeassistant.components.sky_remote +skyboxremote==0.0.6 + # homeassistant.components.slack slackclient==2.5.0 @@ -2141,10 +2153,10 @@ snapcast==2.3.6 soco==0.30.6 # homeassistant.components.solarlog -solarlog_cli==0.3.2 +solarlog_cli==0.4.0 # homeassistant.components.solax -solax==3.1.1 +solax==3.2.1 # homeassistant.components.somfy_mylink somfy-mylink-synergy==1.0.6 @@ -2159,7 +2171,7 @@ speak2mary==1.4.0 speedtest-cli==2.1.3 # homeassistant.components.spotify -spotifyaio==0.8.8 +spotifyaio==0.8.11 # homeassistant.components.sql sqlparse==0.5.0 
@@ -2171,7 +2183,7 @@ srpenergy==1.3.6 starline==0.1.5 # homeassistant.components.starlink -starlink-grpc-core==1.1.3 +starlink-grpc-core==1.2.0 # homeassistant.components.statsd statsd==3.2.1 @@ -2183,7 +2195,7 @@ steamodd==4.21 stookalert==0.1.4 # homeassistant.components.stookwijzer -stookwijzer==1.3.0 +stookwijzer==1.5.1 # homeassistant.components.streamlabswater streamlabswater==1.0.1 @@ -2247,7 +2259,7 @@ thermobeacon-ble==0.7.0 thermopro-ble==0.10.0 # homeassistant.components.lg_thinq -thinqconnect==1.0.0 +thinqconnect==1.0.2 # homeassistant.components.tilt_ble tilt-ble==0.2.3 @@ -2265,7 +2277,7 @@ toonapi==0.3.0 total-connect-client==2024.5 # homeassistant.components.tplink_omada -tplink-omada-client==1.4.2 +tplink-omada-client==1.4.3 # homeassistant.components.transmission transmission-rpc==7.0.3 @@ -2280,7 +2292,7 @@ ttls==1.8.3 ttn_client==1.2.0 # homeassistant.components.tuya -tuya-device-sharing-sdk==0.1.9 +tuya-device-sharing-sdk==0.2.1 # homeassistant.components.twentemilieu twentemilieu==2.1.0 @@ -2298,7 +2310,7 @@ typedmonarchmoney==0.3.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.4.0 +uiprotect==6.6.5 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2333,7 +2345,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.10.0 +velbus-aio==2024.11.1 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2369,11 +2381,14 @@ watchdog==2.3.1 # homeassistant.components.weatherflow_cloud weatherflow4py==1.0.6 +# homeassistant.components.nasweb +webio-api==0.1.8 + # homeassistant.components.webmin webmin-xmlrpc==0.0.2 # homeassistant.components.weheat -weheat==2024.09.23 +weheat==2024.11.26 # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2418,7 +2433,7 @@ yalesmartalarmclient==0.4.3 # homeassistant.components.august # homeassistant.components.yale # homeassistant.components.yalexs_ble -yalexs-ble==2.5.0 +yalexs-ble==2.5.1 # homeassistant.components.august # homeassistant.components.yale @@ -2437,22 +2452,22 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp[default]==2024.11.04 +yt-dlp[default]==2024.12.03 # homeassistant.components.zamg zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.136.0 +zeroconf==0.136.2 # homeassistant.components.zeversolar zeversolar==0.3.2 # homeassistant.components.zha -zha==0.0.39 +zha==0.0.41 # homeassistant.components.zwave_js -zwave-js-server-python==0.58.1 +zwave-js-server-python==0.59.1 # homeassistant.components.zwave_me zwave-me-ws==0.4.3 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index a1c6304220c..6523c4d0e43 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.7.1 +ruff==0.8.0 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 8730acb3867..450469096ea 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -58,8 +58,16 @@ INCLUDED_REQUIREMENTS_WHEELS = { # will be included in requirements_all_{action}.txt OVERRIDDEN_REQUIREMENTS_ACTIONS = { - "pytest": {"exclude": set(), "include": {"python-gammu"}}, - "wheels_aarch64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, + "pytest": { + "exclude": set(), + "include": {"python-gammu"}, + "markers": {}, + }, + "wheels_aarch64": { + 
"exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, # Pandas has issues building on armhf, it is expected they # will drop the platform in the near future (they consider it # "flimsy" on 386). The following packages depend on pandas, @@ -67,10 +75,23 @@ OVERRIDDEN_REQUIREMENTS_ACTIONS = { "wheels_armhf": { "exclude": {"env-canada", "noaa-coops", "pyezviz", "pykrakenapi"}, "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_armv7": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_amd64": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, + }, + "wheels_i386": { + "exclude": set(), + "include": INCLUDED_REQUIREMENTS_WHEELS, + "markers": {}, }, - "wheels_armv7": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, - "wheels_amd64": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, - "wheels_i386": {"exclude": set(), "include": INCLUDED_REQUIREMENTS_WHEELS}, } IGNORE_PIN = ("colorlog>2.1,<3", "urllib3") @@ -96,9 +117,9 @@ httplib2>=0.19.0 # gRPC is an implicit dependency that we want to make explicit so we manage # upgrades intentionally. It is a large package to build from source and we # want to ensure we have wheels built. -grpcio==1.66.2 -grpcio-status==1.66.2 -grpcio-reflection==1.66.2 +grpcio==1.67.1 +grpcio-status==1.67.1 +grpcio-reflection==1.67.1 # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -127,7 +148,8 @@ httpcore==1.0.5 hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env -numpy==1.26.4 +numpy==2.1.3 +pandas~=2.2.3 # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 @@ -138,7 +160,7 @@ backoff>=2.0 # Required to avoid breaking (#101042). # v2 has breaking changes (#99218). -pydantic==1.10.18 +pydantic==1.10.19 # Required for Python 3.12.4 compatibility (#119223). mashumaro>=3.13.1 @@ -163,10 +185,12 @@ protobuf==5.28.3 # 2.1.18 is the first version that works with our wheel builder faust-cchardet>=2.1.18 -# websockets 11.0 is missing files in the source distribution -# which break wheel builds so we need at least 11.0.1 -# https://github.com/aaugustin/websockets/issues/1329 -websockets>=11.0.1 +# websockets 13.1 is the first version to fully support the new +# asyncio implementation. The legacy implementation is now +# deprecated as of websockets 14.0. +# https://websockets.readthedocs.io/en/13.0.1/howto/upgrade.html#missing-features +# https://websockets.readthedocs.io/en/stable/howto/upgrade.html +websockets>=13.1 # pysnmplib is no longer maintained and does not work with newer # python @@ -179,15 +203,12 @@ get-mac==1000000000.0.0 # We want to skip the binary wheels for the 'charset-normalizer' packages. # They are build with mypyc, but causes issues with our wheel builder. # In order to do so, we need to constrain the version. -charset-normalizer==3.2.0 +charset-normalizer==3.4.0 # dacite: Ensure we have a version that is able to handle type unions for # NAM, Brother, and GIOS. dacite>=1.7.0 -# Musle wheels for pandas 2.2.0 cannot be build for any architecture. 
-pandas==2.1.4
-
 # chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x
 chacha20poly1305-reuseable>=0.13.0

@@ -195,8 +216,8 @@ chacha20poly1305-reuseable>=0.13.0
 # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39
 pycountry>=23.12.11

-# scapy<2.5.0 will not work with python3.12
-scapy>=2.5.0
+# scapy==2.6.0 causes CI failures due to a race condition
+scapy>=2.6.1

 # tuf isn't updated to deal with breaking changes in securesystemslib==1.0.
 # Only tuf>=4 includes a constraint to <1.0.
@@ -209,6 +230,19 @@ tenacity!=8.4.0
 # 5.0.0 breaks Timeout as a context manager
 # TypeError: 'Timeout' object does not support the context manager protocol
 async-timeout==4.0.3
+
+# aiofiles keeps getting downgraded by custom components
+# causing newer methods to not be available and breaking
+# some integrations at startup
+# https://github.com/home-assistant/core/issues/127529
+# https://github.com/home-assistant/core/issues/122508
+# https://github.com/home-assistant/core/issues/118004
+aiofiles>=24.1.0
+
+# 0.22.0 causes CI failures on Python 3.13
+# python3 -X dev -m pytest tests/components/matrix
+# python3 -X dev -m pytest tests/components/zha
+rpds-py==0.21.0
 """

 GENERATED_MESSAGE = (
@@ -313,6 +347,10 @@ def process_action_requirement(req: str, action: str) -> str:
         return req
     if normalized_package_name in EXCLUDED_REQUIREMENTS_ALL:
         return f"# {req}"
+    if markers := OVERRIDDEN_REQUIREMENTS_ACTIONS[action]["markers"].get(
+        normalized_package_name, None
+    ):
+        return f"{req};{markers}"
     return req


@@ -325,8 +363,8 @@ def gather_modules() -> dict[str, list[str]] | None:
     gather_requirements_from_manifests(errors, reqs)
     gather_requirements_from_modules(errors, reqs)

-    for key in reqs:
-        reqs[key] = sorted(reqs[key], key=lambda name: (len(name.split(".")), name))
+    for reqs_list in reqs.values():
+        reqs_list.sort(key=lambda name: (len(name.split(".")), name))

     if errors:
         print("******* ERROR")
diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py
index f0b9ad25dd0..81670de5afd 100644
--- a/script/hassfest/__main__.py
+++ b/script/hassfest/__main__.py
@@ -23,6 +23,7 @@ from . import (
     metadata,
     mqtt,
     mypy_config,
+    quality_scale,
     requirements,
     services,
     ssdp,
@@ -43,6 +44,7 @@ INTEGRATION_PLUGINS = [
     json,
     manifest,
     mqtt,
+    quality_scale,
     requirements,
     services,
     ssdp,
diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py
index 0eb72b91c02..57d86bc4def 100644
--- a/script/hassfest/docker.py
+++ b/script/hassfest/docker.py
@@ -80,7 +80,7 @@ WORKDIR /config
 _HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest.
 #
 # To update, run python3 -m script.hassfest -p docker
-FROM python:3.12-alpine
+FROM python:3.13-alpine

 ENV \
     UV_SYSTEM_PYTHON=true \
diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile
index 3128b0f3bbd..100be4fdec9 100644
--- a/script/hassfest/docker/Dockerfile
+++ b/script/hassfest/docker/Dockerfile
@@ -1,7 +1,7 @@
 # Automatically generated by hassfest.
 #
 # To update, run python3 -m script.hassfest -p docker
-FROM python:3.12-alpine
+FROM python:3.13-alpine

 ENV \
     UV_SYSTEM_PYTHON=true \
@@ -14,7 +14,7 @@ WORKDIR "/github/workspace"
 COPY .
/usr/src/homeassistant # Uv is only needed during build -RUN --mount=from=ghcr.io/astral-sh/uv:0.4.28,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv:0.5.4,source=/uv,target=/bin/uv \ # Required for PyTurboJPEG apk add --no-cache libturbojpeg \ && uv pip install \ @@ -22,8 +22,8 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.4.28,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.7.1 \ - PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==1.7.4 home-assistant-intents==2024.11.6 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 + stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.0 \ + PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.5 home-assistant-intents==2024.12.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 6d2f4087f59..fdbcf5bcb78 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from enum import IntEnum +from enum import StrEnum, auto import json from pathlib import Path import subprocess @@ -20,7 +20,7 @@ from voluptuous.humanize import humanize_error from homeassistant.const import Platform from homeassistant.helpers import config_validation as cv -from .model import Config, Integration +from .model import Config, Integration, ScaledQualityScaleTiers DOCUMENTATION_URL_SCHEMA = "https" DOCUMENTATION_URL_HOST = "www.home-assistant.io" @@ -28,16 +28,20 @@ DOCUMENTATION_URL_PATH_PREFIX = "/integrations/" DOCUMENTATION_URL_EXCEPTIONS = {"https://www.home-assistant.io/hassio"} -class QualityScale(IntEnum): +class NonScaledQualityScaleTiers(StrEnum): """Supported manifest quality scales.""" - INTERNAL = -1 - SILVER = 1 - GOLD = 2 - PLATINUM = 3 + CUSTOM = auto() + NO_SCORE = auto() + INTERNAL = auto() + LEGACY = auto() -SUPPORTED_QUALITY_SCALES = [enum.name.lower() for enum in QualityScale] +SUPPORTED_QUALITY_SCALES = [ + value.name.lower() + for enum in [ScaledQualityScaleTiers, NonScaledQualityScaleTiers] + for value in enum +] SUPPORTED_IOT_CLASSES = [ "assumed_state", "calculated", @@ -111,19 +115,6 @@ NO_IOT_CLASS = [ "websocket_api", "zone", ] -# Grandfather rule for older integrations -# https://github.com/home-assistant/developers.home-assistant/pull/1512 -NO_DIAGNOSTICS = [ - "dlna_dms", - "hyperion", - "nightscout", - "pvpc_hourly_pricing", - "risco", - "smarttub", - "songpal", - "vizio", - "yeelight", -] def documentation_url(value: str) -> str: @@ -268,7 +259,6 @@ INTEGRATION_MANIFEST_SCHEMA = vol.Schema( ) ], vol.Required("documentation"): vol.All(vol.Url(), documentation_url), - vol.Optional("issue_tracker"): vol.Url(), vol.Optional("quality_scale"): vol.In(SUPPORTED_QUALITY_SCALES), vol.Optional("requirements"): [str], vol.Optional("dependencies"): [str], @@ -304,6 +294,7 @@ def manifest_schema(value: dict[str, Any]) -> vol.Schema: CUSTOM_INTEGRATION_MANIFEST_SCHEMA = INTEGRATION_MANIFEST_SCHEMA.extend( { vol.Optional("version"): vol.All(str, verify_version), + vol.Optional("issue_tracker"): vol.Url(), vol.Optional("import_executor"): bool, } ) @@ -359,36 +350,17 @@ def validate_manifest(integration: Integration, core_components_dir: Path) -> No "Virtual integration points to non-existing supported_by integration", ) - if 
(quality_scale := integration.manifest.get("quality_scale")) and QualityScale[
-        quality_scale.upper()
-    ] > QualityScale.SILVER:
+    if (
+        (quality_scale := integration.manifest.get("quality_scale"))
+        and quality_scale.upper() in ScaledQualityScaleTiers.__members__
+        and ScaledQualityScaleTiers[quality_scale.upper()]
+        >= ScaledQualityScaleTiers.SILVER
+    ):
         if not integration.manifest.get("codeowners"):
             integration.add_error(
                 "manifest",
                 f"{quality_scale} integration does not have a code owner",
             )
-        if (
-            domain not in NO_DIAGNOSTICS
-            and not (integration.path / "diagnostics.py").exists()
-        ):
-            integration.add_error(
-                "manifest",
-                f"{quality_scale} integration does not implement diagnostics",
-            )
-
-    if domain in NO_DIAGNOSTICS:
-        if quality_scale and QualityScale[quality_scale.upper()] < QualityScale.GOLD:
-            integration.add_error(
-                "manifest",
-                "{quality_scale} integration should be "
-                "removed from NO_DIAGNOSTICS in script/hassfest/manifest.py",
-            )
-        elif (integration.path / "diagnostics.py").exists():
-            integration.add_error(
-                "manifest",
-                "Implements diagnostics and can be "
-                "removed from NO_DIAGNOSTICS in script/hassfest/manifest.py",
-            )

     if not integration.core:
         validate_version(integration)
diff --git a/script/hassfest/model.py b/script/hassfest/model.py
index 63e9b025ed4..377f82b0d5c 100644
--- a/script/hassfest/model.py
+++ b/script/hassfest/model.py
@@ -3,6 +3,7 @@ from __future__ import annotations

 from dataclasses import dataclass, field
+from enum import IntEnum
 import json
 import pathlib
 from typing import Any, Literal
@@ -230,3 +231,12 @@ class Integration:

         self._manifest = manifest
         self.manifest_path = manifest_path
+
+
+class ScaledQualityScaleTiers(IntEnum):
+    """Supported manifest quality scales."""
+
+    BRONZE = 1
+    SILVER = 2
+    GOLD = 3
+    PLATINUM = 4
diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py
index de42c964ddf..25fe875e437 100644
--- a/script/hassfest/mypy_config.py
+++ b/script/hassfest/mypy_config.py
@@ -43,6 +43,7 @@ GENERAL_SETTINGS: Final[dict[str, str]] = {
     "local_partial_types": "true",
     "strict_equality": "true",
     "no_implicit_optional": "true",
+    "report_deprecated_as_error": "true",
     "warn_incomplete_stub": "true",
     "warn_redundant_casts": "true",
     "warn_unused_configs": "true",
diff --git a/script/hassfest/quality_scale.py b/script/hassfest/quality_scale.py
new file mode 100644
index 00000000000..980d659b03e
--- /dev/null
+++ b/script/hassfest/quality_scale.py
@@ -0,0 +1,1383 @@
+"""Validate integration quality scale files."""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+import voluptuous as vol
+from voluptuous.humanize import humanize_error
+
+from homeassistant.const import Platform
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.util.yaml import load_yaml_dict
+
+from .model import Config, Integration, ScaledQualityScaleTiers
+from .quality_scale_validation import RuleValidationProtocol, config_entry_unloading
+
+QUALITY_SCALE_TIERS = {value.name.lower(): value for value in ScaledQualityScaleTiers}
+
+
+@dataclass
+class Rule:
+    """Quality scale rules."""
+
+    name: str
+    tier: ScaledQualityScaleTiers
+    validator: RuleValidationProtocol | None = None
+
+
+ALL_RULES = [
+    # BRONZE
+    Rule("action-setup", ScaledQualityScaleTiers.BRONZE),
+    Rule("appropriate-polling", ScaledQualityScaleTiers.BRONZE),
+    Rule("brands", ScaledQualityScaleTiers.BRONZE),
+    Rule("common-modules", ScaledQualityScaleTiers.BRONZE),
+    Rule("config-flow", ScaledQualityScaleTiers.BRONZE),
+
Rule("config-flow-test-coverage", ScaledQualityScaleTiers.BRONZE),
+    Rule("dependency-transparency", ScaledQualityScaleTiers.BRONZE),
+    Rule("docs-actions", ScaledQualityScaleTiers.BRONZE),
+    Rule("docs-high-level-description", ScaledQualityScaleTiers.BRONZE),
+    Rule("docs-installation-instructions", ScaledQualityScaleTiers.BRONZE),
+    Rule("docs-removal-instructions", ScaledQualityScaleTiers.BRONZE),
+    Rule("entity-event-setup", ScaledQualityScaleTiers.BRONZE),
+    Rule("entity-unique-id", ScaledQualityScaleTiers.BRONZE),
+    Rule("has-entity-name", ScaledQualityScaleTiers.BRONZE),
+    Rule("runtime-data", ScaledQualityScaleTiers.BRONZE),
+    Rule("test-before-configure", ScaledQualityScaleTiers.BRONZE),
+    Rule("test-before-setup", ScaledQualityScaleTiers.BRONZE),
+    Rule("unique-config-entry", ScaledQualityScaleTiers.BRONZE),
+    # SILVER
+    Rule("action-exceptions", ScaledQualityScaleTiers.SILVER),
+    Rule(
+        "config-entry-unloading", ScaledQualityScaleTiers.SILVER, config_entry_unloading
+    ),
+    Rule("docs-configuration-parameters", ScaledQualityScaleTiers.SILVER),
+    Rule("docs-installation-parameters", ScaledQualityScaleTiers.SILVER),
+    Rule("entity-unavailable", ScaledQualityScaleTiers.SILVER),
+    Rule("integration-owner", ScaledQualityScaleTiers.SILVER),
+    Rule("log-when-unavailable", ScaledQualityScaleTiers.SILVER),
+    Rule("parallel-updates", ScaledQualityScaleTiers.SILVER),
+    Rule("reauthentication-flow", ScaledQualityScaleTiers.SILVER),
+    Rule("test-coverage", ScaledQualityScaleTiers.SILVER),
+    # GOLD
+    Rule("devices", ScaledQualityScaleTiers.GOLD),
+    Rule("diagnostics", ScaledQualityScaleTiers.GOLD),
+    Rule("discovery", ScaledQualityScaleTiers.GOLD),
+    Rule("discovery-update-info", ScaledQualityScaleTiers.GOLD),
+    Rule("docs-data-update", ScaledQualityScaleTiers.GOLD),
+    Rule("docs-examples", ScaledQualityScaleTiers.GOLD),
+    Rule("docs-known-limitations", ScaledQualityScaleTiers.GOLD),
+    Rule("docs-supported-devices", ScaledQualityScaleTiers.GOLD),
+    Rule("docs-supported-functions", ScaledQualityScaleTiers.GOLD),
+    Rule("docs-troubleshooting", ScaledQualityScaleTiers.GOLD),
+    Rule("docs-use-cases", ScaledQualityScaleTiers.GOLD),
+    Rule("dynamic-devices", ScaledQualityScaleTiers.GOLD),
+    Rule("entity-category", ScaledQualityScaleTiers.GOLD),
+    Rule("entity-device-class", ScaledQualityScaleTiers.GOLD),
+    Rule("entity-disabled-by-default", ScaledQualityScaleTiers.GOLD),
+    Rule("entity-translations", ScaledQualityScaleTiers.GOLD),
+    Rule("exception-translations", ScaledQualityScaleTiers.GOLD),
+    Rule("icon-translations", ScaledQualityScaleTiers.GOLD),
+    Rule("reconfiguration-flow", ScaledQualityScaleTiers.GOLD),
+    Rule("repair-issues", ScaledQualityScaleTiers.GOLD),
+    Rule("stale-devices", ScaledQualityScaleTiers.GOLD),
+    # PLATINUM
+    Rule("async-dependency", ScaledQualityScaleTiers.PLATINUM),
+    Rule("inject-websession", ScaledQualityScaleTiers.PLATINUM),
+    Rule("strict-typing", ScaledQualityScaleTiers.PLATINUM),
+]
+
+SCALE_RULES = {
+    tier: [rule.name for rule in ALL_RULES if rule.tier == tier]
+    for tier in ScaledQualityScaleTiers
+}
+
+VALIDATORS = {rule.name: rule.validator for rule in ALL_RULES if rule.validator}
+
+INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE = [
+    "abode",
+    "accuweather",
+    "acer_projector",
+    "acmeda",
+    "actiontec",
+    "adax",
+    "adguard",
+    "ads",
+    "advantage_air",
+    "aemet",
+    "aftership",
+    "agent_dvr",
+    "airly",
+    "airnow",
+    "airq",
+    "airthings",
+    "airthings_ble",
+    "airtouch4",
+    "airtouch5",
+    "airvisual",
+    "airvisual_pro",
+    "airzone",
+    "airzone_cloud",
+
"aladdin_connect", + "alarmdecoder", + "alert", + "alexa", + "alpha_vantage", + "amazon_polly", + "amberelectric", + "ambient_network", + "ambient_station", + "amcrest", + "ampio", + "analytics", + "analytics_insights", + "android_ip_webcam", + "androidtv", + "androidtv_remote", + "anel_pwrctrl", + "anova", + "anthemav", + "anthropic", + "aosmith", + "apache_kafka", + "apcupsd", + "apple_tv", + "apprise", + "aprilaire", + "aprs", + "apsystems", + "aquacell", + "aqualogic", + "aquostv", + "aranet", + "arcam_fmj", + "arest", + "arris_tg2492lg", + "aruba", + "arve", + "arwn", + "aseko_pool_live", + "assist_pipeline", + "asterisk_mbox", + "asuswrt", + "atag", + "aten_pe", + "atome", + "august", + "aurora", + "aurora_abb_powerone", + "aussie_broadband", + "avea", + "avion", + "awair", + "aws", + "axis", + "azure_data_explorer", + "azure_devops", + "azure_event_hub", + "azure_service_bus", + "backup", + "baf", + "baidu", + "balboa", + "bang_olufsen", + "bayesian", + "bbox", + "beewi_smartclim", + "bitcoin", + "bizkaibus", + "blackbird", + "blebox", + "blink", + "blinksticklight", + "blockchain", + "blue_current", + "bluemaestro", + "bluesound", + "bluetooth", + "bluetooth_adapters", + "bluetooth_le_tracker", + "bluetooth_tracker", + "bmw_connected_drive", + "bond", + "bosch_shc", + "braviatv", + "bring", + "broadlink", + "brother", + "brottsplatskartan", + "browser", + "brunt", + "bryant_evolution", + "bsblan", + "bt_home_hub_5", + "bt_smarthub", + "bthome", + "buienradar", + "caldav", + "cambridge_audio", + "canary", + "cast", + "ccm15", + "cert_expiry", + "chacon_dio", + "channels", + "circuit", + "cisco_ios", + "cisco_mobility_express", + "cisco_webex_teams", + "citybikes", + "clementine", + "clickatell", + "clicksend", + "clicksend_tts", + "climacell", + "cloud", + "cloudflare", + "cmus", + "co2signal", + "coinbase", + "color_extractor", + "comed_hourly_pricing", + "comelit", + "comfoconnect", + "command_line", + "compensation", + "concord232", + "control4", + "coolmaster", + "cppm_tracker", + "cpuspeed", + "crownstone", + "cups", + "currencylayer", + "daikin", + "danfoss_air", + "datadog", + "ddwrt", + "deako", + "debugpy", + "deconz", + "decora", + "decora_wifi", + "delijn", + "deluge", + "demo", + "denon", + "denonavr", + "derivative", + "devialet", + "device_sun_light_trigger", + "devolo_home_control", + "devolo_home_network", + "dexcom", + "dhcp", + "dialogflow", + "digital_ocean", + "directv", + "discogs", + "discord", + "dlib_face_detect", + "dlib_face_identify", + "dlink", + "dlna_dmr", + "dlna_dms", + "dnsip", + "dominos", + "doods", + "doorbird", + "dormakaba_dkey", + "dovado", + "downloader", + "dremel_3d_printer", + "drop_connect", + "dsmr", + "dsmr_reader", + "dte_energy_bridge", + "dublin_bus_transport", + "duckdns", + "duke_energy", + "dunehd", + "duotecno", + "dwd_weather_warnings", + "dweet", + "dynalite", + "eafm", + "easyenergy", + "ebox", + "ebusd", + "ecoal_boiler", + "ecobee", + "ecoforest", + "econet", + "ecovacs", + "ecowitt", + "eddystone_temperature", + "edimax", + "edl21", + "efergy", + "egardia", + "eight_sleep", + "electrasmart", + "electric_kiwi", + "elevenlabs", + "eliqonline", + "elkm1", + "elmax", + "elv", + "elvia", + "emby", + "emoncms", + "emoncms_history", + "emonitor", + "emulated_hue", + "emulated_kasa", + "emulated_roku", + "energenie_power_sockets", + "energy", + "energyzero", + "enigma2", + "enocean", + "enphase_envoy", + "entur_public_transport", + "environment_canada", + "envisalink", + "ephember", + "epic_games_store", + "epion", + "epson", + 
"eq3btsmart", + "escea", + "esphome", + "etherscan", + "eufy", + "eufylife_ble", + "everlights", + "evil_genius_labs", + "evohome", + "ezviz", + "faa_delays", + "facebook", + "fail2ban", + "familyhub", + "fastdotcom", + "feedreader", + "ffmpeg_motion", + "ffmpeg_noise", + "fibaro", + "fido", + "file", + "filesize", + "filter", + "fints", + "fireservicerota", + "firmata", + "fitbit", + "fivem", + "fixer", + "fjaraskupan", + "fleetgo", + "flexit", + "flexit_bacnet", + "flic", + "flick_electric", + "flipr", + "flo", + "flock", + "flume", + "flux", + "flux_led", + "folder", + "folder_watcher", + "foobot", + "forecast_solar", + "forked_daapd", + "fortios", + "foscam", + "foursquare", + "free_mobile", + "freebox", + "freedns", + "freedompro", + "fritzbox", + "fritzbox_callmonitor", + "fronius", + "frontier_silicon", + "fujitsu_fglair", + "fujitsu_hvac", + "futurenow", + "fyta", + "garadget", + "garages_amsterdam", + "gardena_bluetooth", + "gc100", + "gdacs", + "generic", + "generic_hygrostat", + "generic_thermostat", + "geniushub", + "geo_json_events", + "geo_rss_events", + "geocaching", + "geofency", + "geonetnz_quakes", + "geonetnz_volcano", + "gios", + "github", + "gitlab_ci", + "gitter", + "glances", + "go2rtc", + "goalzero", + "gogogate2", + "goodwe", + "google", + "google_assistant", + "google_assistant_sdk", + "google_cloud", + "google_domains", + "google_generative_ai_conversation", + "google_mail", + "google_maps", + "google_photos", + "google_pubsub", + "google_sheets", + "google_tasks", + "google_translate", + "google_travel_time", + "google_wifi", + "govee_ble", + "govee_light_local", + "gpsd", + "gpslogger", + "graphite", + "gree", + "greeneye_monitor", + "greenwave", + "group", + "growatt_server", + "gstreamer", + "gtfs", + "guardian", + "habitica", + "harman_kardon_avr", + "harmony", + "hassio", + "haveibeenpwned", + "hddtemp", + "hdmi_cec", + "heatmiser", + "heos", + "here_travel_time", + "hikvision", + "hikvisioncam", + "hisense_aehw4a1", + "history_stats", + "hitron_coda", + "hive", + "hko", + "hlk_sw16", + "holiday", + "home_connect", + "homekit", + "homekit_controller", + "homematic", + "homematicip_cloud", + "homeworks", + "honeywell", + "horizon", + "hp_ilo", + "html5", + "http", + "huawei_lte", + "hue", + "huisbaasje", + "hunterdouglas_powerview", + "husqvarna_automower", + "husqvarna_automower_ble", + "huum", + "hvv_departures", + "hydrawise", + "hyperion", + "ialarm", + "iammeter", + "iaqualink", + "ibeacon", + "icloud", + "idasen_desk", + "idteck_prox", + "ifttt", + "iglo", + "ign_sismologia", + "ihc", + "imgw_pib", + "improv_ble", + "incomfort", + "influxdb", + "inkbird", + "insteon", + "integration", + "intellifire", + "intesishome", + "ios", + "iotawatt", + "iotty", + "iperf3", + "ipma", + "ipp", + "iqvia", + "irish_rail_transport", + "iron_os", + "isal", + "iskra", + "islamic_prayer_times", + "israel_rail", + "iss", + "ista_ecotrend", + "isy994", + "itach", + "itunes", + "izone", + "jellyfin", + "jewish_calendar", + "joaoapps_join", + "juicenet", + "justnimbus", + "jvc_projector", + "kaiterra", + "kaleidescape", + "kankun", + "keba", + "keenetic_ndms2", + "kef", + "kegtron", + "keyboard", + "keyboard_remote", + "keymitt_ble", + "kira", + "kitchen_sink", + "kiwi", + "kmtronic", + "knocki", + "knx", + "kodi", + "konnected", + "kostal_plenticore", + "kraken", + "kulersky", + "kwb", + "lacrosse", + "lacrosse_view", + "lametric", + "landisgyr_heat_meter", + "lannouncer", + "lastfm", + "launch_library", + "laundrify", + "lcn", + "ld2410_ble", + "leaone", + "led_ble", + 
"lektrico", + "lg_netcast", + "lg_soundbar", + "lg_thinq", + "lidarr", + "life360", + "lifx", + "lifx_cloud", + "lightwave", + "limitlessled", + "linear_garage_door", + "linkplay", + "linksys_smart", + "linode", + "linux_battery", + "lirc", + "litejet", + "litterrobot", + "livisi", + "llamalab_automate", + "local_calendar", + "local_file", + "local_ip", + "local_todo", + "location", + "locative", + "logentries", + "logi_circle", + "london_air", + "london_underground", + "lookin", + "loqed", + "luci", + "luftdaten", + "lupusec", + "lutron", + "lutron_caseta", + "lw12wifi", + "lyric", + "madvr", + "mailbox", + "mailgun", + "manual", + "manual_mqtt", + "map", + "marytts", + "mastodon", + "matrix", + "matter", + "maxcube", + "mazda", + "mealie", + "meater", + "medcom_ble", + "media_extractor", + "mediaroom", + "melcloud", + "melissa", + "melnor", + "meraki", + "message_bird", + "met", + "met_eireann", + "meteo_france", + "meteoalarm", + "meteoclimatic", + "metoffice", + "mfi", + "microbees", + "microsoft", + "microsoft_face", + "microsoft_face_detect", + "microsoft_face_identify", + "mikrotik", + "mill", + "min_max", + "minecraft_server", + "minio", + "mjpeg", + "moat", + "mobile_app", + "mochad", + "modbus", + "modem_callerid", + "modern_forms", + "moehlenhoff_alpha2", + "mold_indicator", + "monarch_money", + "monoprice", + "monzo", + "moon", + "mopeka", + "motion_blinds", + "motionblinds_ble", + "motioneye", + "motionmount", + "mpd", + "mqtt_eventstream", + "mqtt_json", + "mqtt_room", + "mqtt_statestream", + "msteams", + "mullvad", + "music_assistant", + "mutesync", + "mvglive", + "mycroft", + "myq", + "mysensors", + "mystrom", + "mythicbeastsdns", + "myuplink", + "nad", + "nam", + "namecheapdns", + "nanoleaf", + "nasweb", + "neato", + "nederlandse_spoorwegen", + "ness_alarm", + "nest", + "netatmo", + "netdata", + "netgear", + "netgear_lte", + "netio", + "network", + "neurio_energy", + "nexia", + "nextbus", + "nextcloud", + "nextdns", + "nfandroidtv", + "nibe_heatpump", + "nice_go", + "nightscout", + "niko_home_control", + "nilu", + "nina", + "nissan_leaf", + "nmap_tracker", + "nmbs", + "no_ip", + "noaa_tides", + "nobo_hub", + "nordpool", + "norway_air", + "notify_events", + "notion", + "nsw_fuel_station", + "nsw_rural_fire_service_feed", + "nuheat", + "nuki", + "numato", + "nut", + "nws", + "nx584", + "nyt_games", + "nzbget", + "oasa_telematics", + "obihai", + "octoprint", + "oem", + "ohmconnect", + "ollama", + "ombi", + "omnilogic", + "oncue", + "ondilo_ico", + "onewire", + "onkyo", + "onvif", + "open_meteo", + "openai_conversation", + "openalpr_cloud", + "openerz", + "openevse", + "openexchangerates", + "opengarage", + "openhardwaremonitor", + "openhome", + "opensensemap", + "opensky", + "opentherm_gw", + "openuv", + "openweathermap", + "opnsense", + "opower", + "opple", + "oralb", + "oru", + "orvibo", + "osoenergy", + "osramlightify", + "otbr", + "otp", + "ourgroceries", + "overkiz", + "ovo_energy", + "owntracks", + "p1_monitor", + "panasonic_bluray", + "panasonic_viera", + "pandora", + "panel_iframe", + "peco", + "pegel_online", + "pencom", + "permobil", + "persistent_notification", + "person", + "philips_js", + "pi_hole", + "picnic", + "picotts", + "pilight", + "ping", + "pioneer", + "pjlink", + "plaato", + "plant", + "plex", + "plugwise", + "plum_lightpad", + "pocketcasts", + "point", + "poolsense", + "powerwall", + "private_ble_device", + "profiler", + "progettihwsw", + "proliphix", + "prometheus", + "prosegur", + "prowl", + "proximity", + "proxmoxve", + "prusalink", + "ps4", + 
"pulseaudio_loopback", + "pure_energie", + "purpleair", + "push", + "pushbullet", + "pushover", + "pushsafer", + "pvoutput", + "pvpc_hourly_pricing", + "pyload", + "qbittorrent", + "qingping", + "qld_bushfire", + "qnap", + "qnap_qsw", + "qrcode", + "quantum_gateway", + "qvr_pro", + "qwikswitch", + "rabbitair", + "rachio", + "radarr", + "radio_browser", + "radiotherm", + "raincloud", + "rainforest_eagle", + "rainforest_raven", + "rainmachine", + "random", + "rapt_ble", + "raspyrfm", + "rdw", + "recollect_waste", + "recorder", + "recswitch", + "reddit", + "refoss", + "rejseplanen", + "remember_the_milk", + "remote_rpi_gpio", + "renson", + "reolink", + "repetier", + "rest", + "rest_command", + "rflink", + "rfxtrx", + "rhasspy", + "ridwell", + "ring", + "ripple", + "risco", + "rituals_perfume_genie", + "rmvtransport", + "roborock", + "rocketchat", + "roku", + "romy", + "roomba", + "roon", + "route53", + "rova", + "rpi_camera", + "rpi_power", + "rss_feed_template", + "rtorrent", + "rtsp_to_webrtc", + "ruckus_unleashed", + "russound_rio", + "russound_rnet", + "ruuvi_gateway", + "ruuvitag_ble", + "rympro", + "sabnzbd", + "saj", + "samsungtv", + "sanix", + "satel_integra", + "schlage", + "schluter", + "scrape", + "screenlogic", + "scsgate", + "season", + "sendgrid", + "sense", + "sensibo", + "sensirion_ble", + "sensorpro", + "sensorpush", + "sensoterra", + "sentry", + "senz", + "serial", + "serial_pm", + "sesame", + "seven_segments", + "seventeentrack", + "sfr_box", + "sharkiq", + "shell_command", + "shelly", + "shodan", + "shopping_list", + "sia", + "sigfox", + "sighthound", + "signal_messenger", + "simplefin", + "simplepush", + "simplisafe", + "simulated", + "sinch", + "sisyphus", + "sky_hub", + "sky_remote", + "skybeacon", + "skybell", + "slack", + "sleepiq", + "slide", + "slimproto", + "sma", + "smappee", + "smart_meter_texas", + "smartthings", + "smarttub", + "smarty", + "smhi", + "smlight", + "sms", + "smtp", + "snapcast", + "snips", + "snmp", + "snooz", + "solaredge", + "solaredge_local", + "solax", + "soma", + "somfy_mylink", + "sonarr", + "songpal", + "sonos", + "sony_projector", + "soundtouch", + "spaceapi", + "spc", + "speedtestdotnet", + "spider", + "splunk", + "spotify", + "sql", + "squeezebox", + "srp_energy", + "ssdp", + "starline", + "starlingbank", + "starlink", + "startca", + "statistics", + "statsd", + "steam_online", + "steamist", + "stiebel_eltron", + "stookalert", + "stream", + "streamlabswater", + "subaru", + "suez_water", + "sun", + "sunweg", + "supervisord", + "supla", + "surepetcare", + "swiss_hydrological_data", + "swiss_public_transport", + "swisscom", + "switch_as_x", + "switchbee", + "switchbot", + "switchbot_cloud", + "switcher_kis", + "switchmate", + "syncthing", + "syncthru", + "synology_chat", + "synology_dsm", + "synology_srm", + "syslog", + "system_bridge", + "systemmonitor", + "tado", + "tailscale", + "tailwind", + "tami4", + "tank_utility", + "tankerkoenig", + "tapsaff", + "tasmota", + "tautulli", + "tcp", + "technove", + "ted5000", + "telegram", + "telegram_bot", + "tellduslive", + "tellstick", + "telnet", + "temper", + "template", + "tensorflow", + "tesla_fleet", + "tesla_wall_connector", + "teslemetry", + "tessie", + "tfiac", + "thermobeacon", + "thermopro", + "thermoworks_smoke", + "thethingsnetwork", + "thingspeak", + "thinkingcleaner", + "thomson", + "thread", + "threshold", + "tibber", + "tikteck", + "tile", + "tilt_ble", + "time_date", + "tmb", + "tod", + "todoist", + "tolo", + "tomato", + "tomorrowio", + "toon", + "torque", + "totalconnect", + 
"touchline", + "touchline_sl", + "tplink", + "tplink_lte", + "tplink_omada", + "traccar", + "traccar_server", + "tractive", + "tradfri", + "trafikverket_camera", + "trafikverket_ferry", + "trafikverket_train", + "trafikverket_weatherstation", + "transmission", + "transport_nsw", + "travisci", + "trend", + "triggercmd", + "tuya", + "twilio", + "twilio_call", + "twilio_sms", + "twinkly", + "twitch", + "twitter", + "ubus", + "uk_transport", + "ukraine_alarm", + "unifi", + "unifi_direct", + "unifiled", + "unifiprotect", + "universal", + "upb", + "upc_connect", + "upcloud", + "upnp", + "uptime", + "uptimerobot", + "usb", + "usgs_earthquakes_feed", + "utility_meter", + "uvc", + "v2c", + "vallox", + "vasttrafik", + "velbus", + "velux", + "venstar", + "vera", + "verisure", + "versasense", + "version", + "vesync", + "viaggiatreno", + "vilfo", + "vivotek", + "vizio", + "vlc", + "vlc_telnet", + "vodafone_station", + "voicerss", + "voip", + "volkszaehler", + "volumio", + "volvooncall", + "vulcan", + "vultr", + "w800rf32", + "wake_on_lan", + "wallbox", + "waqi", + "waterfurnace", + "watson_iot", + "watson_tts", + "watttime", + "waze_travel_time", + "weatherflow", + "weatherflow_cloud", + "weatherkit", + "webmin", + "webostv", + "weheat", + "wemo", + "whirlpool", + "whois", + "wiffi", + "wilight", + "wirelesstag", + "withings", + "wiz", + "wled", + "wmspro", + "wolflink", + "workday", + "worldclock", + "worldtidesinfo", + "worxlandroid", + "ws66i", + "wsdot", + "wyoming", + "x10", + "xbox", + "xeoma", + "xiaomi", + "xiaomi_aqara", + "xiaomi_ble", + "xiaomi_miio", + "xiaomi_tv", + "xmpp", + "xs1", + "yale", + "yale_smart_alarm", + "yalexs_ble", + "yamaha", + "yamaha_musiccast", + "yandex_transport", + "yandextts", + "yardian", + "yeelight", + "yeelightsunflower", + "yi", + "yolink", + "youless", + "youtube", + "zabbix", + "zamg", + "zengge", + "zeroconf", + "zerproc", + "zestimate", + "zeversolar", + "zha", + "zhong_hong", + "ziggo_mediabox_xl", + "zodiac", + "zoneminder", + "zwave_js", + "zwave_me", +] + +NO_QUALITY_SCALE = [ + *{platform.value for platform in Platform}, + "api", + "application_credentials", + "auth", + "automation", + "blueprint", + "config", + "configurator", + "counter", + "default_config", + "device_automation", + "device_tracker", + "diagnostics", + "ffmpeg", + "file_upload", + "frontend", + "hardkernel", + "hardware", + "history", + "homeassistant", + "homeassistant_alerts", + "homeassistant_green", + "homeassistant_hardware", + "homeassistant_sky_connect", + "homeassistant_yellow", + "image_upload", + "input_boolean", + "input_button", + "input_datetime", + "input_number", + "input_select", + "input_text", + "intent_script", + "intent", + "logbook", + "logger", + "lovelace", + "media_source", + "my", + "onboarding", + "panel_custom", + "proxy", + "python_script", + "raspberry_pi", + "recovery_mode", + "repairs", + "schedule", + "script", + "search", + "system_health", + "system_log", + "tag", + "timer", + "trace", + "webhook", + "websocket_api", + "zone", +] + +SCHEMA = vol.Schema( + { + vol.Required("rules"): vol.Schema( + { + vol.Optional(rule.name): vol.Any( + vol.In(["todo", "done"]), + vol.Schema( + { + vol.Required("status"): vol.In(["todo", "done"]), + vol.Optional("comment"): str, + } + ), + vol.Schema( + { + vol.Required("status"): "exempt", + vol.Required("comment"): str, + } + ), + ) + for rule in ALL_RULES + } + ) + } +) + + +def validate_iqs_file(config: Config, integration: Integration) -> None: + """Validate quality scale file for integration.""" + if not 
integration.core: + return + + declared_quality_scale = QUALITY_SCALE_TIERS.get(integration.quality_scale) + + iqs_file = integration.path / "quality_scale.yaml" + has_file = iqs_file.is_file() + if not has_file: + if ( + integration.domain not in INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE + and integration.domain not in NO_QUALITY_SCALE + and integration.integration_type != "virtual" + ): + integration.add_error( + "quality_scale", + "Quality scale definition not found. New integrations are required to at least reach the Bronze tier.", + ) + return + if declared_quality_scale is not None: + integration.add_error( + "quality_scale", + "Quality scale definition not found. Integrations that set a manifest quality scale must have a quality scale definition.", + ) + return + return + if integration.integration_type == "virtual": + integration.add_error( + "quality_scale", + "Virtual integrations are not allowed to have a quality scale file.", + ) + return + if integration.domain in NO_QUALITY_SCALE: + integration.add_error( + "quality_scale", + "This integration is not supposed to have a quality scale file.", + ) + return + if integration.domain in INTEGRATIONS_WITHOUT_QUALITY_SCALE_FILE: + integration.add_error( + "quality_scale", + "Quality scale file found! Please remove from script/hassfest/quality_scale.py", + ) + return + name = str(iqs_file) + + try: + data = load_yaml_dict(name) + except HomeAssistantError: + integration.add_error("quality_scale", "Invalid quality_scale.yaml") + return + + try: + SCHEMA(data) + except vol.Invalid as err: + integration.add_error( + "quality_scale", f"Invalid {name}: {humanize_error(data, err)}" + ) + + rules_met = set[str]() + for rule_name, rule_value in data.get("rules", {}).items(): + status = rule_value["status"] if isinstance(rule_value, dict) else rule_value + if status not in {"done", "exempt"}: + continue + rules_met.add(rule_name) + if ( + status == "done" + and (validator := VALIDATORS.get(rule_name)) + and (errors := validator.validate(integration)) + ): + for error in errors: + integration.add_error("quality_scale", f"[{rule_name}] {error}") + + # An integration must have all the necessary rules for the declared + # quality scale, and all the rules below. + if declared_quality_scale is None: + return + + for scale in ScaledQualityScaleTiers: + if scale > declared_quality_scale: + break + required_rules = set(SCALE_RULES[scale]) + if missing_rules := (required_rules - rules_met): + friendly_rule_str = "\n".join( + f" {rule}: todo" for rule in sorted(missing_rules) + ) + integration.add_error( + "quality_scale", + f"Quality scale tier {scale.name.lower()} requires quality scale rules to be met:\n{friendly_rule_str}", + ) + + +def validate(integrations: dict[str, Integration], config: Config) -> None: + """Handle YAML files inside integrations.""" + for integration in integrations.values(): + validate_iqs_file(config, integration) diff --git a/script/hassfest/quality_scale_validation/__init__.py b/script/hassfest/quality_scale_validation/__init__.py new file mode 100644 index 00000000000..836c1082763 --- /dev/null +++ b/script/hassfest/quality_scale_validation/__init__.py @@ -0,0 +1,15 @@ +"""Integration quality scale rules.""" + +from typing import Protocol + +from script.hassfest.model import Integration + + +class RuleValidationProtocol(Protocol): + """Protocol for rule validation.""" + + def validate(self, integration: Integration) -> list[str] | None: + """Validate a quality scale rule. + + Returns error (if any). 
+        """
diff --git a/script/hassfest/quality_scale_validation/config_entry_unloading.py b/script/hassfest/quality_scale_validation/config_entry_unloading.py
new file mode 100644
index 00000000000..42134e0391e
--- /dev/null
+++ b/script/hassfest/quality_scale_validation/config_entry_unloading.py
@@ -0,0 +1,26 @@
+"""Enforce that the integration implements entry unloading."""
+
+import ast
+
+from script.hassfest.model import Integration
+
+
+def _has_async_function(module: ast.Module, name: str) -> bool:
+    """Test if the module defines a function."""
+    return any(
+        type(item) is ast.AsyncFunctionDef and item.name == name for item in module.body
+    )
+
+
+def validate(integration: Integration) -> list[str] | None:
+    """Validate that the integration supports config entry unloading."""
+
+    init_file = integration.path / "__init__.py"
+    init = ast.parse(init_file.read_text())
+
+    if not _has_async_function(init, "async_unload_entry"):
+        return [
+            "Integration does not support config entry unloading "
+            "(is missing `async_unload_entry` in __init__.py)"
+        ]
+    return None
diff --git a/script/hassfest/services.py b/script/hassfest/services.py
index 92fca14d373..8c9ab5c0c0b 100644
--- a/script/hassfest/services.py
+++ b/script/hassfest/services.py
@@ -75,6 +75,14 @@ CUSTOM_INTEGRATION_FIELD_SCHEMA = CORE_INTEGRATION_FIELD_SCHEMA.extend(
     }
 )

+CUSTOM_INTEGRATION_SECTION_SCHEMA = vol.Schema(
+    {
+        vol.Optional("collapsed"): bool,
+        vol.Required("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}),
+    }
+)
+
+
 CORE_INTEGRATION_SERVICE_SCHEMA = vol.Any(
     vol.Schema(
         {
@@ -105,7 +113,17 @@ CUSTOM_INTEGRATION_SERVICE_SCHEMA = vol.Any(
             vol.Optional("target"): vol.Any(
                 selector.TargetSelector.CONFIG_SCHEMA, None
             ),
-            vol.Optional("fields"): vol.Schema({str: CUSTOM_INTEGRATION_FIELD_SCHEMA}),
+            vol.Optional("fields"): vol.All(
+                vol.Schema(
+                    {
+                        str: vol.Any(
+                            CUSTOM_INTEGRATION_FIELD_SCHEMA,
+                            CUSTOM_INTEGRATION_SECTION_SCHEMA,
+                        )
+                    }
+                ),
+                unique_field_validator,
+            ),
         }
     ),
     None,
diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py
index 2c3b9b4d99b..2fb70b6e0be 100644
--- a/script/hassfest/translations.py
+++ b/script/hassfest/translations.py
@@ -172,6 +172,9 @@ def gen_data_entry_schema(
                 vol.Optional("sections"): {
                     str: {
                         vol.Optional("data"): {str: translation_value_validator},
+                        vol.Optional("data_description"): {
+                            str: translation_value_validator
+                        },
                         vol.Optional("description"): translation_value_validator,
                         vol.Optional("name"): translation_value_validator,
                     },
@@ -368,6 +371,9 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema:
                         },
                         slug_validator=translation_key_validator,
                     ),
+                    vol.Optional(
+                        "unit_of_measurement"
+                    ): translation_value_validator,
                 },
                 slug_validator=translation_key_validator,
             ),
diff --git a/script/hassfest/zeroconf.py b/script/hassfest/zeroconf.py
index 48fcc0a4589..fe3e5bb3875 100644
--- a/script/hassfest/zeroconf.py
+++ b/script/hassfest/zeroconf.py
@@ -55,19 +55,19 @@ def generate_and_validate(integrations: dict[str, Integration]) -> str:

     # HomeKit models are matched on starting string, make sure none overlap.
warned = set() - for key in homekit_dict: + for key, value in homekit_dict.items(): if key in warned: continue # n^2 yoooo - for key_2 in homekit_dict: + for key_2, value_2 in homekit_dict.items(): if key == key_2 or key_2 in warned: continue if key.startswith(key_2) or key_2.startswith(key): integration.add_error( "zeroconf", - f"Integrations {homekit_dict[key]} and {homekit_dict[key_2]} " + f"Integrations {value} and {value_2} " "have overlapping HomeKit models", ) warned.add(key) diff --git a/script/json_schemas/manifest_schema.json b/script/json_schemas/manifest_schema.json index 40f08fd2c85..7349f12b55a 100644 --- a/script/json_schemas/manifest_schema.json +++ b/script/json_schemas/manifest_schema.json @@ -308,7 +308,7 @@ "quality_scale": { "description": "The quality scale of the integration.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#integration-quality-scale", "type": "string", - "enum": ["internal", "silver", "gold", "platinum"] + "enum": ["bronze", "silver", "gold", "platinum", "internal", "legacy"] }, "requirements": { "description": "The PyPI package requirements for the integration. The package has to be pinned to a specific version.\nhttps://developers.home-assistant.io/docs/creating_integration_manifest/#requirements", diff --git a/script/licenses.py b/script/licenses.py index 4f5432ad519..464a2fc456b 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -84,6 +84,7 @@ OSI_APPROVED_LICENSES_SPDX = { "LGPL-3.0-only", "LGPL-3.0-or-later", "MIT", + "MIT-CMU", "MPL-1.1", "MPL-2.0", "PSF-2.0", @@ -188,7 +189,6 @@ EXCEPTIONS = { "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 "enocean", # https://github.com/kipe/enocean/pull/142 - "huum", # https://github.com/frwickst/pyhuum/pull/8 "imutils", # https://github.com/PyImageSearch/imutils/pull/292 "iso4217", # Public domain "kiwiki_client", # https://github.com/c7h/kiwiki_client/pull/6 diff --git a/script/split_tests.py b/script/split_tests.py index e124f722552..c64de46a068 100755 --- a/script/split_tests.py +++ b/script/split_tests.py @@ -49,16 +49,27 @@ class BucketHolder: test_folder.get_all_flatten(), reverse=True, key=lambda x: x.total_tests ) for tests in sorted_tests: - print(f"{tests.total_tests:>{digits}} tests in {tests.path}") if tests.added_to_bucket: # Already added to bucket continue + print(f"{tests.total_tests:>{digits}} tests in {tests.path}") smallest_bucket = min(self._buckets, key=lambda x: x.total_tests) + is_file = isinstance(tests, TestFile) if ( smallest_bucket.total_tests + tests.total_tests < self._tests_per_bucket - ) or isinstance(tests, TestFile): + ) or is_file: smallest_bucket.add(tests) + # Ensure all files from the same folder are in the same bucket + # to ensure that syrupy correctly identifies unused snapshots + if is_file: + for other_test in tests.parent.children.values(): + if other_test is tests or isinstance(other_test, TestFolder): + continue + print( + f"{other_test.total_tests:>{digits}} tests in {other_test.path} (same bucket)" + ) + smallest_bucket.add(other_test) # verify that all tests are added to a bucket if not test_folder.added_to_bucket: @@ -79,6 +90,7 @@ class TestFile: total_tests: int path: Path added_to_bucket: bool = field(default=False, init=False) + parent: TestFolder | None = field(default=None, init=False) def add_to_bucket(self) -> None: """Add test file to bucket.""" @@ -125,6 +137,7 @@ class TestFolder: def add_test_file(self, file: 
TestFile) -> None: """Add test file to folder.""" path = file.path + file.parent = self relative_path = path.relative_to(self.path) if not relative_path.parts: raise ValueError("Path is not a child of this folder") diff --git a/script/translations/deduplicate.py b/script/translations/deduplicate.py index 8cc4cee3b10..f92f90115ce 100644 --- a/script/translations/deduplicate.py +++ b/script/translations/deduplicate.py @@ -7,8 +7,7 @@ from pathlib import Path from homeassistant.const import Platform from . import upload -from .develop import flatten_translations -from .util import get_base_arg_parser, load_json_from_path +from .util import flatten_translations, get_base_arg_parser, load_json_from_path def get_arguments() -> argparse.Namespace: diff --git a/script/translations/develop.py b/script/translations/develop.py index 00465e1bc24..9e3a2ded046 100644 --- a/script/translations/develop.py +++ b/script/translations/develop.py @@ -9,7 +9,7 @@ import sys from . import download, upload from .const import INTEGRATIONS_DIR -from .util import get_base_arg_parser +from .util import flatten_translations, get_base_arg_parser def valid_integration(integration): @@ -32,29 +32,6 @@ def get_arguments() -> argparse.Namespace: return parser.parse_args() -def flatten_translations(translations): - """Flatten all translations.""" - stack = [iter(translations.items())] - key_stack = [] - flattened_translations = {} - while stack: - for k, v in stack[-1]: - key_stack.append(k) - if isinstance(v, dict): - stack.append(iter(v.items())) - break - if isinstance(v, str): - common_key = "::".join(key_stack) - flattened_translations[common_key] = v - key_stack.pop() - else: - stack.pop() - if key_stack: - key_stack.pop() - - return flattened_translations - - def substitute_translation_references(integration_strings, flattened_translations): """Recursively processes all translation strings for the integration.""" result = {} diff --git a/script/translations/download.py b/script/translations/download.py index 756de46fb61..3fa7065d058 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -7,10 +7,11 @@ import json from pathlib import Path import re import subprocess +from typing import Any from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR from .error import ExitApp -from .util import get_lokalise_token, load_json_from_path +from .util import flatten_translations, get_lokalise_token, load_json_from_path FILENAME_FORMAT = re.compile(r"strings\.(?P\w+)\.json") DOWNLOAD_DIR = Path("build/translations-download").absolute() @@ -103,7 +104,15 @@ def save_language_translations(lang, translations): f"Skipping {lang} for {component}, as the integration doesn't seem to exist." ) continue + if not ( + Path("homeassistant") / "components" / component / "strings.json" + ).exists(): + print( + f"Skipping {lang} for {component}, as the integration doesn't have a strings.json file." 
+ ) + continue path.parent.mkdir(parents=True, exist_ok=True) + base_translations = pick_keys(component, base_translations) save_json(path, base_translations) if "platform" not in component_translations: @@ -131,6 +140,32 @@ def delete_old_translations(): fil.unlink() +def get_current_keys(component: str) -> dict[str, Any]: + """Get the current keys for a component.""" + strings_path = Path("homeassistant") / "components" / component / "strings.json" + return load_json_from_path(strings_path) + + +def pick_keys(component: str, translations: dict[str, Any]) -> dict[str, Any]: + """Pick the keys that are in the current strings.""" + flat_translations = flatten_translations(translations) + flat_current_keys = flatten_translations(get_current_keys(component)) + flatten_result = {} + for key in flat_current_keys: + if key in flat_translations: + flatten_result[key] = flat_translations[key] + result = {} + for key, value in flatten_result.items(): + parts = key.split("::") + d = result + for part in parts[:-1]: + if part not in d: + d[part] = {} + d = d[part] + d[parts[-1]] = value + return result + + def run(): """Run the script.""" DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True) diff --git a/script/translations/util.py b/script/translations/util.py index 8892bb46b7a..d78b2c4faff 100644 --- a/script/translations/util.py +++ b/script/translations/util.py @@ -66,3 +66,26 @@ def load_json_from_path(path: pathlib.Path) -> Any: return json.loads(path.read_text()) except json.JSONDecodeError as err: raise JSONDecodeErrorWithPath(err.msg, err.doc, err.pos, path) from err + + +def flatten_translations(translations): + """Flatten all translations.""" + stack = [iter(translations.items())] + key_stack = [] + flattened_translations = {} + while stack: + for k, v in stack[-1]: + key_stack.append(k) + if isinstance(v, dict): + stack.append(iter(v.items())) + break + if isinstance(v, str): + common_key = "::".join(key_stack) + flattened_translations[common_key] = v + key_stack.pop() + else: + stack.pop() + if key_stack: + key_stack.pop() + + return flattened_translations diff --git a/tests/auth/test_jwt_wrapper.py b/tests/auth/test_jwt_wrapper.py index 297d4dd5d7f..f9295a7791c 100644 --- a/tests/auth/test_jwt_wrapper.py +++ b/tests/auth/test_jwt_wrapper.py @@ -6,6 +6,12 @@ import pytest from homeassistant.auth import jwt_wrapper +async def test_all_default_options_are_in_verify_options() -> None: + """Test that all default options in _VERIFY_OPTIONS.""" + for option in jwt_wrapper._PyJWTWithVerify._get_default_options(): + assert option in jwt_wrapper._VERIFY_OPTIONS + + async def test_reject_access_token_with_impossible_large_size() -> None: """Test rejecting access tokens with impossible sizes.""" with pytest.raises(jwt.DecodeError): diff --git a/tests/common.py b/tests/common.py index 8bd45e4d7f8..3ec3f6d844c 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1815,3 +1815,20 @@ async def snapshot_platform( state = hass.states.get(entity_entry.entity_id) assert state, f"State not found for {entity_entry.entity_id}" assert state == snapshot(name=f"{entity_entry.entity_id}-state") + + +def reset_translation_cache(hass: HomeAssistant, components: list[str]) -> None: + """Reset translation cache for specified components. + + Use this if you are mocking a core component (for example via + mock_integration), to ensure that the mocked translations are not + persisted in the shared session cache. 
+ """ + translations_cache = translation._async_get_translations_cache(hass) + for loaded_components in translations_cache.cache_data.loaded.values(): + for component_to_unload in components: + loaded_components.discard(component_to_unload) + for loaded_categories in translations_cache.cache_data.cache.values(): + for loaded_components in loaded_categories.values(): + for component_to_unload in components: + loaded_components.pop(component_to_unload, None) diff --git a/tests/components/abode/test_init.py b/tests/components/abode/test_init.py index 9fca6dcbdd3..ed71cb550a7 100644 --- a/tests/components/abode/test_init.py +++ b/tests/components/abode/test_init.py @@ -13,7 +13,6 @@ from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType from .common import setup_platform @@ -63,25 +62,23 @@ async def test_unload_entry(hass: HomeAssistant) -> None: async def test_invalid_credentials(hass: HomeAssistant) -> None: """Test Abode credentials changing.""" - with ( - patch( - "homeassistant.components.abode.Abode", - side_effect=AbodeAuthenticationException( - (HTTPStatus.BAD_REQUEST, "auth error") - ), + with patch( + "homeassistant.components.abode.Abode", + side_effect=AbodeAuthenticationException( + (HTTPStatus.BAD_REQUEST, "auth error") ), - patch( - "homeassistant.components.abode.config_flow.AbodeFlowHandler.async_step_reauth", - return_value={ - "type": FlowResultType.FORM, - "flow_id": "mock_flow", - "step_id": "reauth_confirm", - }, - ) as mock_async_step_reauth, ): - await setup_platform(hass, ALARM_DOMAIN) + config_entry = await setup_platform(hass, ALARM_DOMAIN) + await hass.async_block_till_done() - mock_async_step_reauth.assert_called_once() + assert config_entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" + + hass.config_entries.flow.async_abort(flows[0]["flow_id"]) + assert not hass.config_entries.flow.async_progress() async def test_raise_config_entry_not_ready_when_offline(hass: HomeAssistant) -> None: diff --git a/tests/components/abode/test_light.py b/tests/components/abode/test_light.py index fc9000a39f8..d556a20fa90 100644 --- a/tests/components/abode/test_light.py +++ b/tests/components/abode/test_light.py @@ -45,7 +45,7 @@ async def test_attributes(hass: HomeAssistant) -> None: state = hass.states.get(DEVICE_ID) assert state.state == STATE_ON assert state.attributes.get(ATTR_BRIGHTNESS) == 204 - assert state.attributes.get(ATTR_RGB_COLOR) == (0, 63, 255) + assert state.attributes.get(ATTR_RGB_COLOR) == (0, 64, 255) assert state.attributes.get(ATTR_COLOR_TEMP) is None assert state.attributes.get(ATTR_DEVICE_ID) == "ZB:db5b1a" assert not state.attributes.get("battery_low") diff --git a/tests/components/acaia/__init__.py b/tests/components/acaia/__init__.py new file mode 100644 index 00000000000..f4eaa39e615 --- /dev/null +++ b/tests/components/acaia/__init__.py @@ -0,0 +1,14 @@ +"""Common test tools for the acaia integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Set up the acaia integration for testing.""" + mock_config_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/acaia/conftest.py b/tests/components/acaia/conftest.py new file mode 100644 index 00000000000..ff151f3b096 --- /dev/null +++ b/tests/components/acaia/conftest.py @@ -0,0 +1,84 @@ +"""Common fixtures for the acaia tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from aioacaia.acaiascale import AcaiaDeviceState +from aioacaia.const import UnitMass as AcaiaUnitOfMass +import pytest + +from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.acaia.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_verify() -> Generator[AsyncMock]: + """Override is_new_scale check.""" + with patch( + "homeassistant.components.acaia.config_flow.is_new_scale", return_value=True + ) as mock_verify: + yield mock_verify + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="LUNAR-DDEEFF", + domain=DOMAIN, + version=1, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_IS_NEW_STYLE_SCALE: True, + }, + unique_id="aa:bb:cc:dd:ee:ff", + ) + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_scale: MagicMock +) -> MockConfigEntry: + """Set up the acaia integration for testing.""" + await setup_integration(hass, mock_config_entry) + return mock_config_entry + + +@pytest.fixture +def mock_scale() -> Generator[MagicMock]: + """Return a mocked acaia scale client.""" + with ( + patch( + "homeassistant.components.acaia.coordinator.AcaiaScale", + autospec=True, + ) as scale_mock, + ): + scale = scale_mock.return_value + scale.connected = True + scale.mac = "aa:bb:cc:dd:ee:ff" + scale.model = "Lunar" + scale.last_disconnect_time = "1732181388.1895587" + scale.timer_running = True + scale.heartbeat_task = None + scale.process_queue_task = None + scale.device_state = AcaiaDeviceState( + battery_level=42, units=AcaiaUnitOfMass.OUNCES + ) + scale.weight = 123.45 + scale.timer = 23 + scale.flow_rate = 1.23 + yield scale diff --git a/tests/components/acaia/snapshots/test_binary_sensor.ambr b/tests/components/acaia/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..113b5f1501e --- /dev/null +++ b/tests/components/acaia/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_binary_sensors[binary_sensor.lunar_ddeeff_timer_running-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.lunar_ddeeff_timer_running', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timer running', + 'platform': 'acaia', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'timer_running', + 'unique_id': 'aa:bb:cc:dd:ee:ff_timer_running', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.lunar_ddeeff_timer_running-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'LUNAR-DDEEFF Timer running', + }), + 'context': , + 'entity_id': 'binary_sensor.lunar_ddeeff_timer_running', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/acaia/snapshots/test_button.ambr b/tests/components/acaia/snapshots/test_button.ambr new file mode 100644 index 00000000000..cd91ca1a17a --- /dev/null +++ b/tests/components/acaia/snapshots/test_button.ambr @@ -0,0 +1,139 @@ +# serializer version: 1 +# name: test_buttons[button.lunar_ddeeff_reset_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.lunar_ddeeff_reset_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset timer', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_timer', + 'unique_id': 'aa:bb:cc:dd:ee:ff_reset_timer', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.lunar_ddeeff_reset_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LUNAR-DDEEFF Reset timer', + }), + 'context': , + 'entity_id': 'button.lunar_ddeeff_reset_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[button.lunar_ddeeff_start_stop_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.lunar_ddeeff_start_stop_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Start/stop timer', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'start_stop', + 'unique_id': 'aa:bb:cc:dd:ee:ff_start_stop', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.lunar_ddeeff_start_stop_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LUNAR-DDEEFF Start/stop timer', + }), + 'context': , + 'entity_id': 'button.lunar_ddeeff_start_stop_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_buttons[button.lunar_ddeeff_tare-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.lunar_ddeeff_tare', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tare', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tare', + 'unique_id': 'aa:bb:cc:dd:ee:ff_tare', + 'unit_of_measurement': None, + }) +# --- +# name: test_buttons[button.lunar_ddeeff_tare-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'LUNAR-DDEEFF Tare', + }), + 'context': , + 'entity_id': 'button.lunar_ddeeff_tare', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/acaia/snapshots/test_diagnostics.ambr b/tests/components/acaia/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..df5e4d36555 --- /dev/null +++ b/tests/components/acaia/snapshots/test_diagnostics.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'device_state': dict({ + 'auto_off_time': 0, + 'battery_level': 42, + 'beeps': True, + 'units': 'ounces', + }), + 'last_disconnect_time': '1732181388.1895587', + 'mac': 'aa:bb:cc:dd:ee:ff', + 'model': 'Lunar', + 'timer': 23, + 'weight': 123.45, + }) +# --- diff --git a/tests/components/acaia/snapshots/test_init.ambr b/tests/components/acaia/snapshots/test_init.ambr new file mode 100644 index 00000000000..7011b20f68c --- /dev/null +++ b/tests/components/acaia/snapshots/test_init.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_device + DeviceRegistryEntrySnapshot({ + 'area_id': 'kitchen', + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + tuple( + 'bluetooth', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'acaia', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Acaia', + 'model': 'Lunar', + 'model_id': None, + 'name': 'LUNAR-DDEEFF', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'Kitchen', + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/acaia/snapshots/test_sensor.ambr b/tests/components/acaia/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c3c8ce966ee --- /dev/null +++ b/tests/components/acaia/snapshots/test_sensor.ambr @@ -0,0 +1,157 @@ +# serializer version: 1 +# name: test_sensors[sensor.lunar_ddeeff_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lunar_ddeeff_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff_battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'LUNAR-DDEEFF Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.lunar_ddeeff_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '42', + }) +# --- +# 
name: test_sensors[sensor.lunar_ddeeff_volume_flow_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lunar_ddeeff_volume_flow_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Volume flow rate', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff_flow_rate', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_volume_flow_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volume_flow_rate', + 'friendly_name': 'LUNAR-DDEEFF Volume flow rate', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.lunar_ddeeff_volume_flow_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.23', + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_weight-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.lunar_ddeeff_weight', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weight', + 'platform': 'acaia', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'aa:bb:cc:dd:ee:ff_weight', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.lunar_ddeeff_weight-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'weight', + 'friendly_name': 'LUNAR-DDEEFF Weight', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.lunar_ddeeff_weight', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '123.45', + }) +# --- diff --git a/tests/components/acaia/test_binary_sensor.py b/tests/components/acaia/test_binary_sensor.py new file mode 100644 index 00000000000..a7aa7034d8d --- /dev/null +++ b/tests/components/acaia/test_binary_sensor.py @@ -0,0 +1,28 @@ +"""Test binary sensors for acaia integration.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_binary_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the acaia binary sensors.""" + + with patch("homeassistant.components.acaia.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/acaia/test_button.py b/tests/components/acaia/test_button.py new file mode 100644 index 00000000000..f68f85e253d --- /dev/null +++ b/tests/components/acaia/test_button.py @@ -0,0 +1,90 @@ +"""Tests for the acaia buttons.""" + +from datetime import timedelta +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +BUTTONS = ( + "tare", + "reset_timer", + "start_stop_timer", +) + + +async def test_buttons( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the acaia buttons.""" + + with patch("homeassistant.components.acaia.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_button_presses( + hass: HomeAssistant, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the acaia button presses.""" + + await setup_integration(hass, mock_config_entry) + + for button in BUTTONS: + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.lunar_ddeeff_{button}", + }, + blocking=True, + ) + + function = getattr(mock_scale, button) + function.assert_called_once() + + +async def test_buttons_unavailable_on_disconnected_scale( + hass: HomeAssistant, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the acaia buttons are unavailable when the scale is disconnected.""" + + await setup_integration(hass, mock_config_entry) + + for button in BUTTONS: + state = hass.states.get(f"button.lunar_ddeeff_{button}") + assert state + assert state.state == STATE_UNKNOWN + + mock_scale.connected = False + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + for button in BUTTONS: + state = hass.states.get(f"button.lunar_ddeeff_{button}") + assert state + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/acaia/test_config_flow.py b/tests/components/acaia/test_config_flow.py new file mode 100644 index 00000000000..2bf4b1dbe8a --- /dev/null +++ b/tests/components/acaia/test_config_flow.py @@ -0,0 +1,242 @@ +"""Test the acaia config flow.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, 
AcaiaUnknownDevice +import pytest + +from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN +from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER +from homeassistant.const import CONF_ADDRESS +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo + +from tests.common import MockConfigEntry + +service_info = BluetoothServiceInfo( + name="LUNAR-DDEEFF", + address="aa:bb:cc:dd:ee:ff", + rssi=-63, + manufacturer_data={}, + service_data={}, + service_uuids=[], + source="local", +) + + +@pytest.fixture +def mock_discovered_service_info() -> Generator[AsyncMock]: + """Override getting Bluetooth service info.""" + with patch( + "homeassistant.components.acaia.config_flow.async_discovered_service_info", + return_value=[service_info], + ) as mock_discovered_service_info: + yield mock_discovered_service_info + + +async def test_form( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_verify: AsyncMock, + mock_discovered_service_info: AsyncMock, +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + } + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "LUNAR-DDEEFF" + assert result2["data"] == { + **user_input, + CONF_IS_NEW_STYLE_SCALE: True, + } + + +async def test_bluetooth_discovery( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_verify: AsyncMock, +) -> None: + """Test we can discover a device.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "bluetooth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={}, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == service_info.name + assert result2["data"] == { + CONF_ADDRESS: service_info.address, + CONF_IS_NEW_STYLE_SCALE: True, + } + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AcaiaDeviceNotFound("Error"), "device_not_found"), + (AcaiaError, "unknown"), + (AcaiaUnknownDevice, "unsupported_device"), + ], +) +async def test_bluetooth_discovery_errors( + hass: HomeAssistant, + mock_verify: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test abortions of Bluetooth discovery.""" + mock_verify.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == error + + +async def test_already_configured( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_verify: AsyncMock, + mock_discovered_service_info: AsyncMock, +) -> None: + """Ensure we can't add the same device twice.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +async def test_already_configured_bluetooth_discovery( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure configure device is not discovered again.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AcaiaDeviceNotFound("Error"), "device_not_found"), + (AcaiaError, "unknown"), + ], +) +async def test_recoverable_config_flow_errors( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_verify: AsyncMock, + mock_discovered_service_info: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test recoverable errors.""" + mock_verify.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": error} + + # recover + mock_verify.side_effect = None + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + }, + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + + +async def test_unsupported_device( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_verify: AsyncMock, + mock_discovered_service_info: AsyncMock, +) -> None: + """Test flow aborts on unsupported device.""" + mock_verify.side_effect = AcaiaUnknownDevice + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "unsupported_device" + + +async def test_no_bluetooth_devices( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_discovered_service_info: AsyncMock, +) -> None: + """Test flow aborts on unsupported device.""" + mock_discovered_service_info.return_value = [] + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices_found" diff --git a/tests/components/acaia/test_diagnostics.py b/tests/components/acaia/test_diagnostics.py new file mode 100644 index 00000000000..77f6306b068 --- /dev/null +++ b/tests/components/acaia/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Tests for the diagnostics data provided by the Acaia integration.""" + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + 
"""Test diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) diff --git a/tests/components/acaia/test_init.py b/tests/components/acaia/test_init.py new file mode 100644 index 00000000000..8ad988d3b9b --- /dev/null +++ b/tests/components/acaia/test_init.py @@ -0,0 +1,65 @@ +"""Test init of acaia integration.""" + +from datetime import timedelta +from unittest.mock import MagicMock + +from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.acaia.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from tests.common import MockConfigEntry, async_fire_time_changed + +pytestmark = pytest.mark.usefixtures("init_integration") + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test loading and unloading the integration.""" + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + "exception", [AcaiaError, AcaiaDeviceNotFound("Boom"), TimeoutError] +) +async def test_update_exception_leads_to_active_disconnect( + hass: HomeAssistant, + mock_scale: MagicMock, + freezer: FrozenDateTimeFactory, + exception: Exception, +) -> None: + """Test scale gets disconnected on exception.""" + + mock_scale.connect.side_effect = exception + mock_scale.connected = False + + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_scale.device_disconnected_handler.assert_called_once() + + +async def test_device( + mock_scale: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Snapshot the device from registry.""" + + device = device_registry.async_get_device({(DOMAIN, mock_scale.mac)}) + assert device + assert device == snapshot diff --git a/tests/components/acaia/test_sensor.py b/tests/components/acaia/test_sensor.py new file mode 100644 index 00000000000..2f5a851121c --- /dev/null +++ b/tests/components/acaia/test_sensor.py @@ -0,0 +1,63 @@ +"""Test sensors for acaia integration.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import PERCENTAGE, Platform +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import ( + MockConfigEntry, + mock_restore_cache_with_extra_data, + snapshot_platform, +) + + +async def test_sensors( + hass: HomeAssistant, + mock_scale: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the Acaia sensors.""" + with patch("homeassistant.components.acaia.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_restore_state( + hass: HomeAssistant, + mock_scale: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test battery sensor restore state.""" + mock_scale.device_state = None + entity_id = "sensor.lunar_ddeeff_battery" + + mock_restore_cache_with_extra_data( + hass, + ( + ( + State( + entity_id, + "1", + ), + { + "native_value": 65, + "native_unit_of_measurement": PERCENTAGE, + }, + ), + ), + ) + + await setup_integration(hass, mock_config_entry) + + state = hass.states.get(entity_id) + assert state + assert state.state == "65" diff --git a/tests/components/alarm_control_panel/__init__.py b/tests/components/alarm_control_panel/__init__.py index 1ef1161edd0..1f43c567844 100644 --- a/tests/components/alarm_control_panel/__init__.py +++ b/tests/components/alarm_control_panel/__init__.py @@ -1 +1,27 @@ """The tests for Alarm control panel platforms.""" + +from homeassistant.components.alarm_control_panel import ( + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + + +async def help_async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [ALARM_CONTROL_PANEL_DOMAIN] + ) + return True + + +async def help_async_unload_entry( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Unload test config entry.""" + return await hass.config_entries.async_unload_platforms( + config_entry, [Platform.ALARM_CONTROL_PANEL] + ) diff --git a/tests/components/alarm_control_panel/conftest.py b/tests/components/alarm_control_panel/conftest.py index 3e82b935493..ddf67b27860 100644 --- a/tests/components/alarm_control_panel/conftest.py +++ b/tests/components/alarm_control_panel/conftest.py @@ -1,7 +1,7 @@ """Fixturs for Alarm Control Panel tests.""" -from collections.abc import Generator -from unittest.mock import MagicMock +from collections.abc import AsyncGenerator, Generator +from unittest.mock import MagicMock, patch import pytest @@ -13,7 +13,7 @@ from homeassistant.components.alarm_control_panel import ( from homeassistant.components.alarm_control_panel.const import CodeFormat from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_registry as er, frame from homeassistant.helpers.entity_platform import AddEntitiesCallback from .common import MockAlarm @@ -107,6 +107,22 @@ class MockFlow(ConfigFlow): """Test flow.""" +@pytest.fixture(name="mock_as_custom_component") +async def mock_frame(hass: HomeAssistant) -> AsyncGenerator[None]: + """Mock frame.""" + with patch( + "homeassistant.helpers.frame.get_integration_frame", + return_value=frame.IntegrationFrame( 
custom_integration=True, + integration="alarm_control_panel", + module="test_init.py", + relative_filename="test_init.py", + frame=frame.get_current_frame(), + ), + ): + yield + + @pytest.fixture(autouse=True) def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: """Mock config flow.""" diff --git a/tests/components/alarm_control_panel/test_init.py b/tests/components/alarm_control_panel/test_init.py index 89a2a2a2b1a..58f585b40ea 100644 --- a/tests/components/alarm_control_panel/test_init.py +++ b/tests/components/alarm_control_panel/test_init.py @@ -12,7 +12,6 @@ from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntityFeature, CodeFormat, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_CODE, SERVICE_ALARM_ARM_AWAY, @@ -25,20 +24,19 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers import entity_registry as er, frame from homeassistant.helpers.typing import UNDEFINED, UndefinedType -from .conftest import TEST_DOMAIN, MockAlarmControlPanel +from . import help_async_setup_entry_init, help_async_unload_entry +from .conftest import MockAlarmControlPanel from tests.common import ( MockConfigEntry, MockModule, - MockPlatform, help_test_all, import_and_test_deprecated_constant_enum, mock_integration, - mock_platform, + setup_test_component_platform, ) @@ -297,6 +295,7 @@ async def test_alarm_control_panel_with_default_code( mock_alarm_control_panel_entity.calls_disarm.assert_called_with("1234") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_alarm_control_panel_not_log_deprecated_state_warning( hass: HomeAssistant, mock_alarm_control_panel_entity: MockAlarmControlPanel, @@ -305,9 +304,14 @@ async def test_alarm_control_panel_not_log_deprecated_state_warning( """Test correctly using alarm_state doesn't log issue or raise repair.""" state = hass.states.get(mock_alarm_control_panel_entity.entity_id) assert state is not None - assert "Entities should implement the 'alarm_state' property and" not in caplog.text + assert ( + "the 'alarm_state' property and return its state using the AlarmControlPanelState enum" + not in caplog.text + ) +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_alarm_control_panel_log_deprecated_state_warning_using_state_prop( hass: HomeAssistant, code_format: CodeFormat | None, @@ -317,23 +321,6 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_state_prop ) -> None: """Test incorrectly using state property does log issue and raise repair.""" - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [ALARM_CONTROL_PANEL_DOMAIN] - ) - return True - - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - ), - ) - class MockLegacyAlarmControlPanel(MockAlarmControlPanel): """Mocked alarm control entity.""" @@ -358,37 +345,38 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_state_prop code_format=code_format, code_arm_required=code_arm_required, ) - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: 
ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test alarm control panel platform via config entry.""" - async_add_entities([entity]) - - mock_platform( + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + mock_integration( hass, - f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + built_in=False, ) - - with patch.object( - MockLegacyAlarmControlPanel, - "__module__", - "tests.custom_components.test.alarm_control_panel", - ): - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + setup_test_component_platform( + hass, ALARM_CONTROL_PANEL_DOMAIN, [entity], from_config_entry=True + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) state = hass.states.get(entity.entity_id) assert state is not None - assert "Entities should implement the 'alarm_state' property and" in caplog.text + assert ( + "Detected that custom integration 'alarm_control_panel' is setting state" + " directly. Entity None (.MockLegacyAlarmControlPanel'>) should implement" + " the 'alarm_state' property and return its state using the AlarmControlPanelState" + " enum at test_init.py, line 123: yield. This will stop working in Home Assistant" + " 2025.11, please create a bug report at" in caplog.text + ) +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state_attr( hass: HomeAssistant, code_format: CodeFormat | None, @@ -398,23 +386,6 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state ) -> None: """Test incorrectly using _attr_state attribute does log issue and raise repair.""" - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [ALARM_CONTROL_PANEL_DOMAIN] - ) - return True - - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - ), - ) - class MockLegacyAlarmControlPanel(MockAlarmControlPanel): """Mocked alarm control entity.""" @@ -438,59 +409,56 @@ async def test_alarm_control_panel_log_deprecated_state_warning_using_attr_state code_format=code_format, code_arm_required=code_arm_required, ) - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test alarm control panel platform via config entry.""" - async_add_entities([entity]) - - mock_platform( + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + mock_integration( hass, - f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), ) - - with patch.object( - MockLegacyAlarmControlPanel, - "__module__", - "tests.custom_components.test.alarm_control_panel", - ): - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await 
hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + setup_test_component_platform( + hass, ALARM_CONTROL_PANEL_DOMAIN, [entity], from_config_entry=True + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) state = hass.states.get(entity.entity_id) assert state is not None - assert "Entities should implement the 'alarm_state' property and" not in caplog.text + assert ( + "Detected that custom integration 'alarm_control_panel' is setting state directly." + not in caplog.text + ) - with patch.object( - MockLegacyAlarmControlPanel, - "__module__", - "tests.custom_components.test.alarm_control_panel", - ): - await help_test_async_alarm_control_panel_service( - hass, entity.entity_id, SERVICE_ALARM_DISARM - ) + await help_test_async_alarm_control_panel_service( + hass, entity.entity_id, SERVICE_ALARM_DISARM + ) - assert "Entities should implement the 'alarm_state' property and" in caplog.text + assert ( + "Detected that custom integration 'alarm_control_panel' is setting state directly." + " Entity alarm_control_panel.test_alarm_control_panel" + " (.MockLegacyAlarmControlPanel'>) should implement the 'alarm_state' property" + " and return its state using the AlarmControlPanelState enum at test_init.py, line 123:" + " yield. This will stop working in Home Assistant 2025.11," + " please create a bug report at" in caplog.text + ) caplog.clear() - with patch.object( - MockLegacyAlarmControlPanel, - "__module__", - "tests.custom_components.test.alarm_control_panel", - ): - await help_test_async_alarm_control_panel_service( - hass, entity.entity_id, SERVICE_ALARM_DISARM - ) + await help_test_async_alarm_control_panel_service( + hass, entity.entity_id, SERVICE_ALARM_DISARM + ) # Test we only log once - assert "Entities should implement the 'alarm_state' property and" not in caplog.text + assert ( + "Detected that custom integration 'alarm_control_panel' is setting state directly." 
+ not in caplog.text + ) +@pytest.mark.usefixtures("mock_as_custom_component") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_alarm_control_panel_deprecated_state_does_not_break_state( hass: HomeAssistant, code_format: CodeFormat | None, @@ -500,23 +468,6 @@ async def test_alarm_control_panel_deprecated_state_does_not_break_state( ) -> None: """Test using _attr_state attribute does not break state.""" - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups( - config_entry, [ALARM_CONTROL_PANEL_DOMAIN] - ) - return True - - mock_integration( - hass, - MockModule( - TEST_DOMAIN, - async_setup_entry=async_setup_entry_init, - ), - ) - class MockLegacyAlarmControlPanel(MockAlarmControlPanel): """Mocked alarm control entity.""" @@ -541,43 +492,28 @@ async def test_alarm_control_panel_deprecated_state_does_not_break_state( code_format=code_format, code_arm_required=code_arm_required, ) - - async def async_setup_entry_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test alarm control panel platform via config entry.""" - async_add_entities([entity]) - - mock_platform( + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + mock_integration( hass, - f"{TEST_DOMAIN}.{ALARM_CONTROL_PANEL_DOMAIN}", - MockPlatform(async_setup_entry=async_setup_entry_platform), + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), ) - - with patch.object( - MockLegacyAlarmControlPanel, - "__module__", - "tests.custom_components.test.alarm_control_panel", - ): - config_entry = MockConfigEntry(domain=TEST_DOMAIN) - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + setup_test_component_platform( + hass, ALARM_CONTROL_PANEL_DOMAIN, [entity], from_config_entry=True + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) state = hass.states.get(entity.entity_id) assert state is not None assert state.state == "armed_away" - with patch.object( - MockLegacyAlarmControlPanel, - "__module__", - "tests.custom_components.test.alarm_control_panel", - ): - await help_test_async_alarm_control_panel_service( - hass, entity.entity_id, SERVICE_ALARM_DISARM - ) + await help_test_async_alarm_control_panel_service( + hass, entity.entity_id, SERVICE_ALARM_DISARM + ) state = hass.states.get(entity.entity_id) assert state is not None diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index 68010a6a711..e4a46db7d34 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -4546,6 +4546,7 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: "tilt_position_attr_in_service_call", "supported_features", "service_call", + "stop_feature_enabled", ), [ ( @@ -4556,6 +4557,7 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: | CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.STOP_TILT, "cover.set_cover_tilt_position", + True, ), ( 0, @@ -4565,6 +4567,7 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: | CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.STOP_TILT, "cover.close_cover_tilt", + True, ), ( 99, @@ -4574,6 +4577,7 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: | 
CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.STOP_TILT, "cover.set_cover_tilt_position", + True, ), ( 100, @@ -4583,36 +4587,42 @@ async def test_presence_sensor(hass: HomeAssistant) -> None: | CoverEntityFeature.CLOSE_TILT | CoverEntityFeature.STOP_TILT, "cover.open_cover_tilt", + True, ), ( 0, 0, CoverEntityFeature.SET_TILT_POSITION, "cover.set_cover_tilt_position", + False, ), ( 60, 60, CoverEntityFeature.SET_TILT_POSITION, "cover.set_cover_tilt_position", + False, ), ( 100, 100, CoverEntityFeature.SET_TILT_POSITION, "cover.set_cover_tilt_position", + False, ), ( 0, 0, CoverEntityFeature.SET_TILT_POSITION | CoverEntityFeature.OPEN_TILT, "cover.set_cover_tilt_position", + False, ), ( 100, 100, CoverEntityFeature.SET_TILT_POSITION | CoverEntityFeature.CLOSE_TILT, "cover.set_cover_tilt_position", + False, ), ], ids=[ @@ -4633,6 +4643,7 @@ async def test_cover_tilt_position( tilt_position_attr_in_service_call: int | None, supported_features: CoverEntityFeature, service_call: str, + stop_feature_enabled: bool, ) -> None: """Test cover discovery and tilt position using rangeController.""" device = ( @@ -4651,12 +4662,24 @@ async def test_cover_tilt_position( assert appliance["displayCategories"][0] == "INTERIOR_BLIND" assert appliance["friendlyName"] == "Test cover tilt range" + expected_interfaces: dict[bool, list[str]] = { + False: [ + "Alexa.PowerController", + "Alexa.RangeController", + "Alexa.EndpointHealth", + "Alexa", + ], + True: [ + "Alexa.PowerController", + "Alexa.RangeController", + "Alexa.PlaybackController", + "Alexa.EndpointHealth", + "Alexa", + ], + } + capabilities = assert_endpoint_capabilities( - appliance, - "Alexa.PowerController", - "Alexa.RangeController", - "Alexa.EndpointHealth", - "Alexa", + appliance, *expected_interfaces[stop_feature_enabled] ) range_capability = get_capability(capabilities, "Alexa.RangeController") @@ -4713,6 +4736,7 @@ async def test_cover_tilt_position_range(hass: HomeAssistant) -> None: appliance, "Alexa.PowerController", "Alexa.RangeController", + "Alexa.PlaybackController", "Alexa.EndpointHealth", "Alexa", ) @@ -4767,6 +4791,66 @@ async def test_cover_tilt_position_range(hass: HomeAssistant) -> None: ) +@pytest.mark.parametrize( + ("supported_stop_features", "cover_stop_calls", "cover_stop_tilt_calls"), + [ + (CoverEntityFeature(0), 0, 0), + (CoverEntityFeature.STOP, 1, 0), + (CoverEntityFeature.STOP_TILT, 0, 1), + (CoverEntityFeature.STOP | CoverEntityFeature.STOP_TILT, 1, 1), + ], + ids=["no_stop", "stop_cover", "stop_cover_tilt", "stop_cover_and_stop_cover_tilt"], +) +async def test_cover_stop( + hass: HomeAssistant, + supported_stop_features: CoverEntityFeature, + cover_stop_calls: int, + cover_stop_tilt_calls: int, +) -> None: + """Test cover and cover tilt can be stopped.""" + + base_features = ( + CoverEntityFeature.OPEN + | CoverEntityFeature.CLOSE + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.CLOSE_TILT + | CoverEntityFeature.SET_POSITION + | CoverEntityFeature.SET_TILT_POSITION + ) + + device = ( + "cover.test_semantics", + "open", + { + "friendly_name": "Test cover semantics", + "device_class": "blind", + "supported_features": int(base_features | supported_stop_features), + "current_position": 30, + "tilt_position": 30, + }, + ) + appliance = await discovery_test(device, hass) + + assert appliance["endpointId"] == "cover#test_semantics" + assert appliance["displayCategories"][0] == "INTERIOR_BLIND" + assert appliance["friendlyName"] == "Test cover semantics" + + calls_stop = async_mock_service(hass, "cover", 
"stop_cover") + calls_stop_tilt = async_mock_service(hass, "cover", "stop_cover_tilt") + + context = Context() + request = get_new_request( + "Alexa.PlaybackController", "Stop", "cover#test_semantics" + ) + await smart_home.async_handle_message( + hass, get_default_config(hass), request, context + ) + await hass.async_block_till_done() + + assert len(calls_stop) == cover_stop_calls + assert len(calls_stop_tilt) == cover_stop_tilt_calls + + async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None: """Test cover discovery and semantics with position and tilt support.""" device = ( @@ -4790,10 +4874,30 @@ async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None: appliance, "Alexa.PowerController", "Alexa.RangeController", + "Alexa.PlaybackController", "Alexa.EndpointHealth", "Alexa", ) + playback_controller_capability = get_capability( + capabilities, "Alexa.PlaybackController" + ) + assert playback_controller_capability is not None + assert playback_controller_capability["supportedOperations"] == ["Stop"] + + # Assert both the cover and tilt stop calls are invoked + stop_cover_tilt_calls = async_mock_service(hass, "cover", "stop_cover_tilt") + await assert_request_calls_service( + "Alexa.PlaybackController", + "Stop", + "cover#test_semantics", + "cover.stop_cover", + hass, + ) + assert len(stop_cover_tilt_calls) == 1 + call = stop_cover_tilt_calls[0] + assert call.data == {"entity_id": "cover.test_semantics"} + # Assert for Position Semantics position_capability = get_capability( capabilities, "Alexa.RangeController", "cover.position" diff --git a/tests/components/amberelectric/helpers.py b/tests/components/amberelectric/helpers.py index 2bc65fdd558..971f3690a0d 100644 --- a/tests/components/amberelectric/helpers.py +++ b/tests/components/amberelectric/helpers.py @@ -2,73 +2,82 @@ from datetime import datetime, timedelta -from amberelectric.model.actual_interval import ActualInterval -from amberelectric.model.channel import ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.forecast_interval import ForecastInterval -from amberelectric.model.interval import Descriptor, SpikeStatus +from amberelectric.models.actual_interval import ActualInterval +from amberelectric.models.channel import ChannelType +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.forecast_interval import ForecastInterval +from amberelectric.models.interval import Interval +from amberelectric.models.price_descriptor import PriceDescriptor +from amberelectric.models.spike_status import SpikeStatus from dateutil import parser -def generate_actual_interval( - channel_type: ChannelType, end_time: datetime -) -> ActualInterval: +def generate_actual_interval(channel_type: ChannelType, end_time: datetime) -> Interval: """Generate a mock actual interval.""" start_time = end_time - timedelta(minutes=30) - return ActualInterval( - duration=30, - spot_per_kwh=1.0, - per_kwh=8.0, - date=start_time.date(), - nem_time=end_time, - start_time=start_time, - end_time=end_time, - renewables=50, - channel_type=channel_type.value, - spike_status=SpikeStatus.NO_SPIKE.value, - descriptor=Descriptor.LOW.value, + return Interval( + ActualInterval( + type="ActualInterval", + duration=30, + spot_per_kwh=1.0, + per_kwh=8.0, + date=start_time.date(), + nem_time=end_time, + start_time=start_time, + end_time=end_time, + renewables=50, + channel_type=channel_type, + spike_status=SpikeStatus.NONE, + 
descriptor=PriceDescriptor.LOW, + ) ) def generate_current_interval( channel_type: ChannelType, end_time: datetime -) -> CurrentInterval: +) -> Interval: """Generate a mock current price.""" start_time = end_time - timedelta(minutes=30) - return CurrentInterval( - duration=30, - spot_per_kwh=1.0, - per_kwh=8.0, - date=start_time.date(), - nem_time=end_time, - start_time=start_time, - end_time=end_time, - renewables=50.6, - channel_type=channel_type.value, - spike_status=SpikeStatus.NO_SPIKE.value, - descriptor=Descriptor.EXTREMELY_LOW.value, - estimate=True, + return Interval( + CurrentInterval( + type="CurrentInterval", + duration=30, + spot_per_kwh=1.0, + per_kwh=8.0, + date=start_time.date(), + nem_time=end_time, + start_time=start_time, + end_time=end_time, + renewables=50.6, + channel_type=channel_type, + spike_status=SpikeStatus.NONE, + descriptor=PriceDescriptor.EXTREMELYLOW, + estimate=True, + ) ) def generate_forecast_interval( channel_type: ChannelType, end_time: datetime -) -> ForecastInterval: +) -> Interval: """Generate a mock forecast interval.""" start_time = end_time - timedelta(minutes=30) - return ForecastInterval( - duration=30, - spot_per_kwh=1.1, - per_kwh=8.8, - date=start_time.date(), - nem_time=end_time, - start_time=start_time, - end_time=end_time, - renewables=50, - channel_type=channel_type.value, - spike_status=SpikeStatus.NO_SPIKE.value, - descriptor=Descriptor.VERY_LOW.value, - estimate=True, + return Interval( + ForecastInterval( + type="ForecastInterval", + duration=30, + spot_per_kwh=1.1, + per_kwh=8.8, + date=start_time.date(), + nem_time=end_time, + start_time=start_time, + end_time=end_time, + renewables=50, + channel_type=channel_type, + spike_status=SpikeStatus.NONE, + descriptor=PriceDescriptor.VERYLOW, + estimate=True, + ) ) @@ -94,31 +103,31 @@ GENERAL_CHANNEL = [ CONTROLLED_LOAD_CHANNEL = [ generate_current_interval( - ChannelType.CONTROLLED_LOAD, parser.parse("2021-09-21T08:30:00+10:00") + ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T08:30:00+10:00") ), generate_forecast_interval( - ChannelType.CONTROLLED_LOAD, parser.parse("2021-09-21T09:00:00+10:00") + ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T09:00:00+10:00") ), generate_forecast_interval( - ChannelType.CONTROLLED_LOAD, parser.parse("2021-09-21T09:30:00+10:00") + ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T09:30:00+10:00") ), generate_forecast_interval( - ChannelType.CONTROLLED_LOAD, parser.parse("2021-09-21T10:00:00+10:00") + ChannelType.CONTROLLEDLOAD, parser.parse("2021-09-21T10:00:00+10:00") ), ] FEED_IN_CHANNEL = [ generate_current_interval( - ChannelType.FEED_IN, parser.parse("2021-09-21T08:30:00+10:00") + ChannelType.FEEDIN, parser.parse("2021-09-21T08:30:00+10:00") ), generate_forecast_interval( - ChannelType.FEED_IN, parser.parse("2021-09-21T09:00:00+10:00") + ChannelType.FEEDIN, parser.parse("2021-09-21T09:00:00+10:00") ), generate_forecast_interval( - ChannelType.FEED_IN, parser.parse("2021-09-21T09:30:00+10:00") + ChannelType.FEEDIN, parser.parse("2021-09-21T09:30:00+10:00") ), generate_forecast_interval( - ChannelType.FEED_IN, parser.parse("2021-09-21T10:00:00+10:00") + ChannelType.FEEDIN, parser.parse("2021-09-21T10:00:00+10:00") ), ] diff --git a/tests/components/amberelectric/test_binary_sensor.py b/tests/components/amberelectric/test_binary_sensor.py index 2c1ee22b644..6a6ca372bc2 100644 --- a/tests/components/amberelectric/test_binary_sensor.py +++ b/tests/components/amberelectric/test_binary_sensor.py @@ -5,10 +5,10 @@ from __future__ import 
annotations from collections.abc import AsyncGenerator from unittest.mock import Mock, patch -from amberelectric.model.channel import ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.interval import SpikeStatus -from amberelectric.model.tariff_information import TariffInformation +from amberelectric.models.channel import ChannelType +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.spike_status import SpikeStatus +from amberelectric.models.tariff_information import TariffInformation from dateutil import parser import pytest @@ -42,10 +42,10 @@ async def setup_no_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: - instance.get_current_price = Mock(return_value=GENERAL_CHANNEL) + instance.get_current_prices = Mock(return_value=GENERAL_CHANNEL) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -65,7 +65,7 @@ async def setup_potential_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: general_channel: list[CurrentInterval] = [ @@ -73,8 +73,8 @@ async def setup_potential_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") ), ] - general_channel[0].spike_status = SpikeStatus.POTENTIAL - instance.get_current_price = Mock(return_value=general_channel) + general_channel[0].actual_instance.spike_status = SpikeStatus.POTENTIAL + instance.get_current_prices = Mock(return_value=general_channel) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -94,7 +94,7 @@ async def setup_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: general_channel: list[CurrentInterval] = [ @@ -102,8 +102,8 @@ async def setup_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") ), ] - general_channel[0].spike_status = SpikeStatus.SPIKE - instance.get_current_price = Mock(return_value=general_channel) + general_channel[0].actual_instance.spike_status = SpikeStatus.SPIKE + instance.get_current_prices = Mock(return_value=general_channel) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -156,7 +156,7 @@ async def setup_inactive_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mo instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: general_channel: list[CurrentInterval] = [ @@ -164,8 +164,10 @@ async def setup_inactive_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mo ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") ), ] - general_channel[0].tariff_information = TariffInformation(demandWindow=False) - instance.get_current_price = Mock(return_value=general_channel) + general_channel[0].actual_instance.tariff_information = TariffInformation( + demandWindow=False + ) + instance.get_current_prices = Mock(return_value=general_channel) assert await async_setup_component(hass, DOMAIN, {}) 
await hass.async_block_till_done() yield mock_update.return_value @@ -185,7 +187,7 @@ async def setup_active_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: general_channel: list[CurrentInterval] = [ @@ -193,8 +195,10 @@ async def setup_active_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") ), ] - general_channel[0].tariff_information = TariffInformation(demandWindow=True) - instance.get_current_price = Mock(return_value=general_channel) + general_channel[0].actual_instance.tariff_information = TariffInformation( + demandWindow=True + ) + instance.get_current_prices = Mock(return_value=general_channel) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value diff --git a/tests/components/amberelectric/test_config_flow.py b/tests/components/amberelectric/test_config_flow.py index 030b82d3596..b394977b0e8 100644 --- a/tests/components/amberelectric/test_config_flow.py +++ b/tests/components/amberelectric/test_config_flow.py @@ -5,7 +5,8 @@ from datetime import date from unittest.mock import Mock, patch from amberelectric import ApiException -from amberelectric.model.site import Site, SiteStatus +from amberelectric.models.site import Site +from amberelectric.models.site_status import SiteStatus import pytest from homeassistant.components.amberelectric.config_flow import filter_sites @@ -28,7 +29,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") def mock_invalid_key_api() -> Generator: """Return an authentication error.""" - with patch("amberelectric.api.AmberApi.create") as mock: + with patch("amberelectric.AmberApi") as mock: mock.return_value.get_sites.side_effect = ApiException(status=403) yield mock @@ -36,7 +37,7 @@ def mock_invalid_key_api() -> Generator: @pytest.fixture(name="api_error") def mock_api_error() -> Generator: """Return an authentication error.""" - with patch("amberelectric.api.AmberApi.create") as mock: + with patch("amberelectric.AmberApi") as mock: mock.return_value.get_sites.side_effect = ApiException(status=500) yield mock @@ -45,16 +46,36 @@ def mock_api_error() -> Generator: def mock_single_site_api() -> Generator: """Return a single site.""" site = Site( - "01FG0AGP818PXK0DWHXJRRT2DH", - "11111111111", - [], - "Jemena", - SiteStatus.ACTIVE, - date(2002, 1, 1), - None, + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.ACTIVE, + active_from=date(2002, 1, 1), + closed_on=None, + interval_length=30, ) - with patch("amberelectric.api.AmberApi.create") as mock: + with patch("amberelectric.AmberApi") as mock: + mock.return_value.get_sites.return_value = [site] + yield mock + + +@pytest.fixture(name="single_site_closed_no_close_date_api") +def single_site_closed_no_close_date_api() -> Generator: + """Return a single closed site with no closed date.""" + site = Site( + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.CLOSED, + active_from=None, + closed_on=None, + interval_length=30, + ) + + with patch("amberelectric.AmberApi") as mock: mock.return_value.get_sites.return_value = [site] yield mock @@ -63,16 +84,17 @@ def mock_single_site_api() -> Generator: def mock_single_site_pending_api() -> Generator: """Return a single site.""" site = Site( - 
"01FG0AGP818PXK0DWHXJRRT2DH", - "11111111111", - [], - "Jemena", - SiteStatus.PENDING, - None, - None, + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.PENDING, + active_from=None, + closed_on=None, + interval_length=30, ) - with patch("amberelectric.api.AmberApi.create") as mock: + with patch("amberelectric.AmberApi") as mock: mock.return_value.get_sites.return_value = [site] yield mock @@ -82,35 +104,38 @@ def mock_single_site_rejoin_api() -> Generator: """Return a single site.""" instance = Mock() site_1 = Site( - "01HGD9QB72HB3DWQNJ6SSCGXGV", - "11111111111", - [], - "Jemena", - SiteStatus.CLOSED, - date(2002, 1, 1), - date(2002, 6, 1), + id="01HGD9QB72HB3DWQNJ6SSCGXGV", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.CLOSED, + active_from=date(2002, 1, 1), + closed_on=date(2002, 6, 1), + interval_length=30, ) site_2 = Site( - "01FG0AGP818PXK0DWHXJRRT2DH", - "11111111111", - [], - "Jemena", - SiteStatus.ACTIVE, - date(2003, 1, 1), - None, + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111111", + channels=[], + network="Jemena", + status=SiteStatus.ACTIVE, + active_from=date(2003, 1, 1), + closed_on=None, + interval_length=30, ) site_3 = Site( - "01FG0AGP818PXK0DWHXJRRT2DH", - "11111111112", - [], - "Jemena", - SiteStatus.CLOSED, - date(2003, 1, 1), - date(2003, 6, 1), + id="01FG0AGP818PXK0DWHXJRRT2DH", + nmi="11111111112", + channels=[], + network="Jemena", + status=SiteStatus.CLOSED, + active_from=date(2003, 1, 1), + closed_on=date(2003, 6, 1), + interval_length=30, ) instance.get_sites.return_value = [site_1, site_2, site_3] - with patch("amberelectric.api.AmberApi.create", return_value=instance): + with patch("amberelectric.AmberApi", return_value=instance): yield instance @@ -120,7 +145,7 @@ def mock_no_site_api() -> Generator: instance = Mock() instance.get_sites.return_value = [] - with patch("amberelectric.api.AmberApi.create", return_value=instance): + with patch("amberelectric.AmberApi", return_value=instance): yield instance @@ -188,6 +213,39 @@ async def test_single_site(hass: HomeAssistant, single_site_api: Mock) -> None: assert data[CONF_SITE_ID] == "01FG0AGP818PXK0DWHXJRRT2DH" +async def test_single_closed_site_no_closed_date( + hass: HomeAssistant, single_site_closed_no_close_date_api: Mock +) -> None: + """Test single closed site with no closed date.""" + initial_result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert initial_result.get("type") is FlowResultType.FORM + assert initial_result.get("step_id") == "user" + + # Test filling in API key + enter_api_key_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_API_TOKEN: API_KEY}, + ) + assert enter_api_key_result.get("type") is FlowResultType.FORM + assert enter_api_key_result.get("step_id") == "site" + + select_site_result = await hass.config_entries.flow.async_configure( + enter_api_key_result["flow_id"], + {CONF_SITE_ID: "01FG0AGP818PXK0DWHXJRRT2DH", CONF_SITE_NAME: "Home"}, + ) + + # Show available sites + assert select_site_result.get("type") is FlowResultType.CREATE_ENTRY + assert select_site_result.get("title") == "Home" + data = select_site_result.get("data") + assert data + assert data[CONF_API_TOKEN] == API_KEY + assert data[CONF_SITE_ID] == "01FG0AGP818PXK0DWHXJRRT2DH" + + async def test_single_site_rejoin( hass: HomeAssistant, single_site_rejoin_api: Mock ) -> None: diff --git 
a/tests/components/amberelectric/test_coordinator.py b/tests/components/amberelectric/test_coordinator.py index cb3912cb5ac..0a8f5b874fa 100644 --- a/tests/components/amberelectric/test_coordinator.py +++ b/tests/components/amberelectric/test_coordinator.py @@ -7,10 +7,12 @@ from datetime import date from unittest.mock import Mock, patch from amberelectric import ApiException -from amberelectric.model.channel import Channel, ChannelType -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.interval import Descriptor, SpikeStatus -from amberelectric.model.site import Site, SiteStatus +from amberelectric.models.channel import Channel, ChannelType +from amberelectric.models.interval import Interval +from amberelectric.models.price_descriptor import PriceDescriptor +from amberelectric.models.site import Site +from amberelectric.models.site_status import SiteStatus +from amberelectric.models.spike_status import SpikeStatus from dateutil import parser import pytest @@ -38,37 +40,40 @@ def mock_api_current_price() -> Generator: instance = Mock() general_site = Site( - GENERAL_ONLY_SITE_ID, - "11111111111", - [Channel(identifier="E1", type=ChannelType.GENERAL, tariff="A100")], - "Jemena", - SiteStatus.ACTIVE, - date(2021, 1, 1), - None, + id=GENERAL_ONLY_SITE_ID, + nmi="11111111111", + channels=[Channel(identifier="E1", type=ChannelType.GENERAL, tariff="A100")], + network="Jemena", + status=SiteStatus("active"), + activeFrom=date(2021, 1, 1), + closedOn=None, + interval_length=30, ) general_and_controlled_load = Site( - GENERAL_AND_CONTROLLED_SITE_ID, - "11111111112", - [ + id=GENERAL_AND_CONTROLLED_SITE_ID, + nmi="11111111112", + channels=[ Channel(identifier="E1", type=ChannelType.GENERAL, tariff="A100"), - Channel(identifier="E2", type=ChannelType.CONTROLLED_LOAD, tariff="A180"), + Channel(identifier="E2", type=ChannelType.CONTROLLEDLOAD, tariff="A180"), ], - "Jemena", - SiteStatus.ACTIVE, - date(2021, 1, 1), - None, + network="Jemena", + status=SiteStatus("active"), + activeFrom=date(2021, 1, 1), + closedOn=None, + interval_length=30, ) general_and_feed_in = Site( - GENERAL_AND_FEED_IN_SITE_ID, - "11111111113", - [ + id=GENERAL_AND_FEED_IN_SITE_ID, + nmi="11111111113", + channels=[ Channel(identifier="E1", type=ChannelType.GENERAL, tariff="A100"), - Channel(identifier="E2", type=ChannelType.FEED_IN, tariff="A100"), + Channel(identifier="E2", type=ChannelType.FEEDIN, tariff="A100"), ], - "Jemena", - SiteStatus.ACTIVE, - date(2021, 1, 1), - None, + network="Jemena", + status=SiteStatus("active"), + activeFrom=date(2021, 1, 1), + closedOn=None, + interval_length=30, ) instance.get_sites.return_value = [ general_site, @@ -76,44 +81,46 @@ def mock_api_current_price() -> Generator: general_and_feed_in, ] - with patch("amberelectric.api.AmberApi.create", return_value=instance): + with patch("amberelectric.AmberApi", return_value=instance): yield instance def test_normalize_descriptor() -> None: """Test normalizing descriptors works correctly.""" assert normalize_descriptor(None) is None - assert normalize_descriptor(Descriptor.NEGATIVE) == "negative" - assert normalize_descriptor(Descriptor.EXTREMELY_LOW) == "extremely_low" - assert normalize_descriptor(Descriptor.VERY_LOW) == "very_low" - assert normalize_descriptor(Descriptor.LOW) == "low" - assert normalize_descriptor(Descriptor.NEUTRAL) == "neutral" - assert normalize_descriptor(Descriptor.HIGH) == "high" - assert normalize_descriptor(Descriptor.SPIKE) == "spike" + assert 
normalize_descriptor(PriceDescriptor.NEGATIVE) == "negative" + assert normalize_descriptor(PriceDescriptor.EXTREMELYLOW) == "extremely_low" + assert normalize_descriptor(PriceDescriptor.VERYLOW) == "very_low" + assert normalize_descriptor(PriceDescriptor.LOW) == "low" + assert normalize_descriptor(PriceDescriptor.NEUTRAL) == "neutral" + assert normalize_descriptor(PriceDescriptor.HIGH) == "high" + assert normalize_descriptor(PriceDescriptor.SPIKE) == "spike" async def test_fetch_general_site(hass: HomeAssistant, current_price_api: Mock) -> None: """Test fetching a site with only a general channel.""" - current_price_api.get_current_price.return_value = GENERAL_CHANNEL + current_price_api.get_current_prices.return_value = GENERAL_CHANNEL data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) result = await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_ONLY_SITE_ID, next=48 ) - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] assert result["current"].get("controlled_load") is None assert result["forecasts"].get("controlled_load") is None assert result["current"].get("feed_in") is None assert result["forecasts"].get("feed_in") is None - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) assert result["grid"]["price_spike"] == "none" @@ -122,12 +129,12 @@ async def test_fetch_no_general_site( ) -> None: """Test fetching a site with no general channel.""" - current_price_api.get_current_price.return_value = CONTROLLED_LOAD_CHANNEL + current_price_api.get_current_prices.return_value = CONTROLLED_LOAD_CHANNEL data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) with pytest.raises(UpdateFailed): await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_ONLY_SITE_ID, next=48 ) @@ -135,41 +142,45 @@ async def test_fetch_no_general_site( async def test_fetch_api_error(hass: HomeAssistant, current_price_api: Mock) -> None: """Test that the old values are maintained if a second call fails.""" - current_price_api.get_current_price.return_value = GENERAL_CHANNEL + current_price_api.get_current_prices.return_value = GENERAL_CHANNEL data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) result = await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_ONLY_SITE_ID, next=48 ) - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] assert result["current"].get("controlled_load") is None assert result["forecasts"].get("controlled_load") is None 
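# Illustrative aside (not part of this patch): the updated amberelectric client
# returns wrapper objects, and the coordinator tests above now reach through
# `.actual_instance` to get the concrete interval before comparing fields such
# as `renewables` or `spike_status`. A minimal sketch of that unwrapping
# pattern follows; the helper name `unwrap_intervals` and the plain-list types
# are assumptions made purely for illustration.
from typing import Any


def unwrap_intervals(intervals: list[Any]) -> list[Any]:
    """Return the concrete interval objects wrapped inside the API response."""
    return [interval.actual_instance for interval in intervals]


# Usage mirrors the assertions in these tests, e.g.:
#     current = unwrap_intervals(GENERAL_CHANNEL)[0]
#     assert result["current"].get("general") == current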
assert result["current"].get("feed_in") is None assert result["forecasts"].get("feed_in") is None - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) - current_price_api.get_current_price.side_effect = ApiException(status=403) + current_price_api.get_current_prices.side_effect = ApiException(status=403) with pytest.raises(UpdateFailed): await data_service._async_update_data() - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] assert result["current"].get("controlled_load") is None assert result["forecasts"].get("controlled_load") is None assert result["current"].get("feed_in") is None assert result["forecasts"].get("feed_in") is None - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) assert result["grid"]["price_spike"] == "none" @@ -178,7 +189,7 @@ async def test_fetch_general_and_controlled_load_site( ) -> None: """Test fetching a site with a general and controlled load channel.""" - current_price_api.get_current_price.return_value = ( + current_price_api.get_current_prices.return_value = ( GENERAL_CHANNEL + CONTROLLED_LOAD_CHANNEL ) data_service = AmberUpdateCoordinator( @@ -186,25 +197,30 @@ async def test_fetch_general_and_controlled_load_site( ) result = await data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_AND_CONTROLLED_SITE_ID, next=48 ) - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] - assert result["current"].get("controlled_load") is CONTROLLED_LOAD_CHANNEL[0] + assert ( + result["current"].get("controlled_load") + is CONTROLLED_LOAD_CHANNEL[0].actual_instance + ) assert result["forecasts"].get("controlled_load") == [ - CONTROLLED_LOAD_CHANNEL[1], - CONTROLLED_LOAD_CHANNEL[2], - CONTROLLED_LOAD_CHANNEL[3], + CONTROLLED_LOAD_CHANNEL[1].actual_instance, + CONTROLLED_LOAD_CHANNEL[2].actual_instance, + CONTROLLED_LOAD_CHANNEL[3].actual_instance, ] assert result["current"].get("feed_in") is None assert result["forecasts"].get("feed_in") is None - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) assert result["grid"]["price_spike"] == "none" @@ -213,31 +229,35 @@ async def test_fetch_general_and_feed_in_site( ) -> None: """Test fetching a site with a general and feed_in channel.""" - current_price_api.get_current_price.return_value = GENERAL_CHANNEL + FEED_IN_CHANNEL + current_price_api.get_current_prices.return_value = ( + GENERAL_CHANNEL + FEED_IN_CHANNEL + ) data_service = AmberUpdateCoordinator( hass, current_price_api, GENERAL_AND_FEED_IN_SITE_ID ) result = await 
data_service._async_update_data() - current_price_api.get_current_price.assert_called_with( + current_price_api.get_current_prices.assert_called_with( GENERAL_AND_FEED_IN_SITE_ID, next=48 ) - assert result["current"].get("general") == GENERAL_CHANNEL[0] + assert result["current"].get("general") == GENERAL_CHANNEL[0].actual_instance assert result["forecasts"].get("general") == [ - GENERAL_CHANNEL[1], - GENERAL_CHANNEL[2], - GENERAL_CHANNEL[3], + GENERAL_CHANNEL[1].actual_instance, + GENERAL_CHANNEL[2].actual_instance, + GENERAL_CHANNEL[3].actual_instance, ] assert result["current"].get("controlled_load") is None assert result["forecasts"].get("controlled_load") is None - assert result["current"].get("feed_in") is FEED_IN_CHANNEL[0] + assert result["current"].get("feed_in") is FEED_IN_CHANNEL[0].actual_instance assert result["forecasts"].get("feed_in") == [ - FEED_IN_CHANNEL[1], - FEED_IN_CHANNEL[2], - FEED_IN_CHANNEL[3], + FEED_IN_CHANNEL[1].actual_instance, + FEED_IN_CHANNEL[2].actual_instance, + FEED_IN_CHANNEL[3].actual_instance, ] - assert result["grid"]["renewables"] == round(GENERAL_CHANNEL[0].renewables) + assert result["grid"]["renewables"] == round( + GENERAL_CHANNEL[0].actual_instance.renewables + ) assert result["grid"]["price_spike"] == "none" @@ -246,13 +266,13 @@ async def test_fetch_potential_spike( ) -> None: """Test fetching a site with only a general channel.""" - general_channel: list[CurrentInterval] = [ + general_channel: list[Interval] = [ generate_current_interval( ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") - ), + ) ] - general_channel[0].spike_status = SpikeStatus.POTENTIAL - current_price_api.get_current_price.return_value = general_channel + general_channel[0].actual_instance.spike_status = SpikeStatus.POTENTIAL + current_price_api.get_current_prices.return_value = general_channel data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) result = await data_service._async_update_data() assert result["grid"]["price_spike"] == "potential" @@ -261,13 +281,13 @@ async def test_fetch_potential_spike( async def test_fetch_spike(hass: HomeAssistant, current_price_api: Mock) -> None: """Test fetching a site with only a general channel.""" - general_channel: list[CurrentInterval] = [ + general_channel: list[Interval] = [ generate_current_interval( ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") - ), + ) ] - general_channel[0].spike_status = SpikeStatus.SPIKE - current_price_api.get_current_price.return_value = general_channel + general_channel[0].actual_instance.spike_status = SpikeStatus.SPIKE + current_price_api.get_current_prices.return_value = general_channel data_service = AmberUpdateCoordinator(hass, current_price_api, GENERAL_ONLY_SITE_ID) result = await data_service._async_update_data() assert result["grid"]["price_spike"] == "spike" diff --git a/tests/components/amberelectric/test_sensor.py b/tests/components/amberelectric/test_sensor.py index 3a5626d14d5..203b65d6df6 100644 --- a/tests/components/amberelectric/test_sensor.py +++ b/tests/components/amberelectric/test_sensor.py @@ -3,8 +3,9 @@ from collections.abc import AsyncGenerator from unittest.mock import Mock, patch -from amberelectric.model.current_interval import CurrentInterval -from amberelectric.model.range import Range +from amberelectric.models.current_interval import CurrentInterval +from amberelectric.models.interval import Interval +from amberelectric.models.range import Range import pytest from 
homeassistant.components.amberelectric.const import ( @@ -44,10 +45,10 @@ async def setup_general(hass: HomeAssistant) -> AsyncGenerator[Mock]: instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: - instance.get_current_price = Mock(return_value=GENERAL_CHANNEL) + instance.get_current_prices = Mock(return_value=GENERAL_CHANNEL) assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() yield mock_update.return_value @@ -68,10 +69,10 @@ async def setup_general_and_controlled_load( instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: - instance.get_current_price = Mock( + instance.get_current_prices = Mock( return_value=GENERAL_CHANNEL + CONTROLLED_LOAD_CHANNEL ) assert await async_setup_component(hass, DOMAIN, {}) @@ -92,10 +93,10 @@ async def setup_general_and_feed_in(hass: HomeAssistant) -> AsyncGenerator[Mock] instance = Mock() with patch( - "amberelectric.api.AmberApi.create", + "amberelectric.AmberApi", return_value=instance, ) as mock_update: - instance.get_current_price = Mock( + instance.get_current_prices = Mock( return_value=GENERAL_CHANNEL + FEED_IN_CHANNEL ) assert await async_setup_component(hass, DOMAIN, {}) @@ -126,7 +127,7 @@ async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) -> assert attributes.get("range_max") is None with_range: list[CurrentInterval] = GENERAL_CHANNEL - with_range[0].range = Range(7.8, 12.4) + with_range[0].actual_instance.range = Range(min=7.8, max=12.4) setup_general.get_current_price.return_value = with_range config_entry = hass.config_entries.async_entries(DOMAIN)[0] @@ -211,8 +212,8 @@ async def test_general_forecast_sensor( assert first_forecast.get("range_min") is None assert first_forecast.get("range_max") is None - with_range: list[CurrentInterval] = GENERAL_CHANNEL - with_range[1].range = Range(7.8, 12.4) + with_range: list[Interval] = GENERAL_CHANNEL + with_range[1].actual_instance.range = Range(min=7.8, max=12.4) setup_general.get_current_price.return_value = with_range config_entry = hass.config_entries.async_entries(DOMAIN)[0] diff --git a/tests/components/apsystems/test_init.py b/tests/components/apsystems/test_init.py new file mode 100644 index 00000000000..c85c4094e30 --- /dev/null +++ b/tests/components/apsystems/test_init.py @@ -0,0 +1,25 @@ +"""Test the APSystem setup.""" + +from unittest.mock import AsyncMock + +from APsystemsEZ1 import InverterReturnedError + +from homeassistant.components.apsystems.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_update_failed( + hass: HomeAssistant, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test update failed.""" + mock_apsystems.get_output_data.side_effect = InverterReturnedError + await setup_integration(hass, mock_config_entry) + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index e14bbac1839..3b829e0e14a 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -37,6 +37,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }), 'type': , }), @@ -60,6 +61,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -77,7 +79,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }), 'type': , @@ -126,6 +128,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en-US', + 'prefer_local_intents': False, }), 'type': , }), @@ -139,7 +142,7 @@ 'data': dict({ 'code': 'no_intent_match', }), - 'language': 'en-US', + 'language': 'en', 'response_type': 'error', 'speech': dict({ 'plain': dict({ @@ -149,6 +152,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -166,7 +170,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_2657c1a8ee_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }), 'type': , @@ -215,6 +219,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en-US', + 'prefer_local_intents': False, }), 'type': , }), @@ -228,7 +233,7 @@ 'data': dict({ 'code': 'no_intent_match', }), - 'language': 'en-US', + 'language': 'en', 'response_type': 'error', 'speech': dict({ 'plain': dict({ @@ -238,6 +243,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -255,7 +261,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22Arnold+Schwarzenegger%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_2657c1a8ee_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }), 'type': , @@ -328,6 +334,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }), 'type': , }), @@ -351,6 +358,7 @@ }), }), }), + 'processed_locally': True, }), 'type': , }), @@ -368,7 +376,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }), 
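The snapshot URLs above stay stable only because the tests pin the otherwise random TTS token: the hunks further down wrap each pipeline run in a patch of homeassistant.components.tts.secrets.token_urlsafe returning "test_token", which is why the expected URL becomes /api/tts_proxy/test_token.mp3. A minimal sketch of that pattern is below; the fixture name and the placeholder test body are assumptions for illustration, while the patch target is taken directly from the patch itself.

from unittest.mock import patch

import pytest


@pytest.fixture
def fixed_tts_token():
    """Make generated TTS proxy URLs deterministic for snapshot comparisons."""
    with patch(
        "homeassistant.components.tts.secrets.token_urlsafe",
        return_value="test_token",
    ):
        yield


def test_example(fixed_tts_token) -> None:
    """Any TTS output produced here would be served at /api/tts_proxy/test_token.mp3."""
    # ... run the assist pipeline and compare the emitted events against the snapshot ...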
'type': , diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index 131444c17ac..41747a50eb6 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -36,6 +36,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline.4 @@ -58,6 +59,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline.5 @@ -73,7 +75,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }) # --- @@ -117,6 +119,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_debug.4 @@ -139,6 +142,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_debug.5 @@ -154,7 +158,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }) # --- @@ -210,6 +214,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_with_enhancements.4 @@ -232,6 +237,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_with_enhancements.5 @@ -247,7 +253,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }) # --- @@ -313,6 +319,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'test transcript', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_audio_pipeline_with_wake_word_no_timeout.6 @@ -335,6 +342,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_audio_pipeline_with_wake_word_no_timeout.7 @@ -350,7 +358,7 @@ 'tts_output': dict({ 'media_id': "media-source://tts/test?message=Sorry,+I+couldn't+understand+that&language=en-US&tts_options=%7B%22voice%22:%22james_earl_jones%22%7D", 'mime_type': 'audio/mpeg', - 'url': '/api/tts_proxy/dae2cdcb27a1d1c3b07ba2c7db91480f9d4bfd8f_en-us_031e2ec052_test.mp3', + 'url': '/api/tts_proxy/test_token.mp3', }), }) # --- @@ -519,6 +527,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_intent_failed.2 @@ -541,6 +550,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_intent_timeout.2 @@ -569,6 +579,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'never mind', 'language': 'en', + 'prefer_local_intents': False, }) # 
--- # name: test_pipeline_empty_tts_output.2 @@ -592,6 +603,7 @@ }), }), }), + 'processed_locally': True, }) # --- # name: test_pipeline_empty_tts_output.3 @@ -680,6 +692,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_text_only_pipeline[extra_msg0].2 @@ -697,11 +710,12 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called are', + 'speech': 'Sorry, I am not aware of any area called Are', }), }), }), }), + 'processed_locally': True, }) # --- # name: test_text_only_pipeline[extra_msg0].3 @@ -724,6 +738,7 @@ 'engine': 'conversation.home_assistant', 'intent_input': 'Are the lights on?', 'language': 'en', + 'prefer_local_intents': False, }) # --- # name: test_text_only_pipeline[extra_msg1].2 @@ -741,11 +756,12 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any area called are', + 'speech': 'Sorry, I am not aware of any area called Are', }), }), }), }), + 'processed_locally': True, }) # --- # name: test_text_only_pipeline[extra_msg1].3 diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index c4696573bad..b177530219e 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -11,13 +11,20 @@ import wave import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components import assist_pipeline, media_source, stt, tts +from homeassistant.components import ( + assist_pipeline, + conversation, + media_source, + stt, + tts, +) from homeassistant.components.assist_pipeline.const import ( BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DOMAIN, ) from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers import intent from homeassistant.setup import async_setup_component from .conftest import ( @@ -63,21 +70,24 @@ async def test_pipeline_from_audio_stream_auto( yield make_10ms_chunk(b"part2") yield b"" - await assist_pipeline.async_pipeline_from_audio_stream( - hass, - context=Context(), - event_callback=events.append, - stt_metadata=stt.SpeechMetadata( - language="", - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=audio_data(), - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) assert process_events(events) == snapshot assert len(mock_stt_provider_entity.received) == 2 @@ -126,23 +136,26 @@ async def test_pipeline_from_audio_stream_legacy( assert msg["success"] pipeline_id = msg["result"]["id"] - # Use the created pipeline - await assist_pipeline.async_pipeline_from_audio_stream( - hass, - context=Context(), - event_callback=events.append, - stt_metadata=stt.SpeechMetadata( - language="en-UK", - 
format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=audio_data(), - pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + # Use the created pipeline + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="en-UK", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + pipeline_id=pipeline_id, + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) assert process_events(events) == snapshot assert len(mock_stt_provider.received) == 2 @@ -191,23 +204,26 @@ async def test_pipeline_from_audio_stream_entity( assert msg["success"] pipeline_id = msg["result"]["id"] - # Use the created pipeline - await assist_pipeline.async_pipeline_from_audio_stream( - hass, - context=Context(), - event_callback=events.append, - stt_metadata=stt.SpeechMetadata( - language="en-UK", - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=audio_data(), - pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + # Use the created pipeline + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="en-UK", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + pipeline_id=pipeline_id, + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) assert process_events(events) == snapshot assert len(mock_stt_provider_entity.received) == 2 @@ -355,25 +371,28 @@ async def test_pipeline_from_audio_stream_wake_word( yield b"" - await assist_pipeline.async_pipeline_from_audio_stream( - hass, - context=Context(), - event_callback=events.append, - stt_metadata=stt.SpeechMetadata( - language="", - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=audio_data(), - start_stage=assist_pipeline.PipelineStage.WAKE_WORD, - wake_word_settings=assist_pipeline.WakeWordSettings( - audio_seconds_to_buffer=1.5 - ), - audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await assist_pipeline.async_pipeline_from_audio_stream( + hass, + context=Context(), + event_callback=events.append, + stt_metadata=stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + 
sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_data(), + start_stage=assist_pipeline.PipelineStage.WAKE_WORD, + wake_word_settings=assist_pipeline.WakeWordSettings( + audio_seconds_to_buffer=1.5 + ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), + ) assert process_events(events) == snapshot @@ -927,3 +946,155 @@ async def test_tts_dict_preferred_format( assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_RATE)) == 48000 assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS)) == 2 assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_BYTES)) == 2 + + +async def test_sentence_trigger_overrides_conversation_agent( + hass: HomeAssistant, + init_components, + pipeline_data: assist_pipeline.pipeline.PipelineData, +) -> None: + """Test that sentence triggers are checked before a non-default conversation agent.""" + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": { + "platform": "conversation", + "command": [ + "test trigger sentence", + ], + }, + "action": { + "set_conversation_response": "test trigger response", + }, + } + }, + ) + + events: list[assist_pipeline.PipelineEvent] = [] + + pipeline_store = pipeline_data.pipeline_store + pipeline_id = pipeline_store.async_get_preferred_item() + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="test trigger sentence", + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + intent_agent="test-agent", # not the default agent + ), + ) + + # Ensure prepare succeeds + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_get_agent_info", + return_value=conversation.AgentInfo(id="test-agent", name="Test Agent"), + ): + await pipeline_input.validate() + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse" + ) as mock_async_converse: + await pipeline_input.execute() + + # Sentence trigger should have been handled + mock_async_converse.assert_not_called() + + # Verify sentence trigger response + intent_end_event = next( + ( + e + for e in events + if e.type == assist_pipeline.PipelineEventType.INTENT_END + ), + None, + ) + assert (intent_end_event is not None) and intent_end_event.data + assert ( + intent_end_event.data["intent_output"]["response"]["speech"]["plain"][ + "speech" + ] + == "test trigger response" + ) + + +async def test_prefer_local_intents( + hass: HomeAssistant, + init_components, + pipeline_data: assist_pipeline.pipeline.PipelineData, +) -> None: + """Test that the default agent is checked first when local intents are preferred.""" + events: list[assist_pipeline.PipelineEvent] = [] + + # Reuse custom sentences in test config + class OrderBeerIntentHandler(intent.IntentHandler): + intent_type = "OrderBeer" + + async def async_handle( + self, intent_obj: intent.Intent + ) -> intent.IntentResponse: + response = intent_obj.create_response() + response.async_set_speech("Order confirmed") + return response + + handler = OrderBeerIntentHandler() + intent.async_register(hass, handler) + + # Fake a test agent and prefer local intents + pipeline_store = pipeline_data.pipeline_store + pipeline_id = pipeline_store.async_get_preferred_item() + pipeline = 
assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + await assist_pipeline.pipeline.async_update_pipeline( + hass, pipeline, conversation_engine="test-agent", prefer_local_intents=True + ) + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + intent_input="I'd like to order a stout please", + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.INTENT, + end_stage=assist_pipeline.PipelineStage.INTENT, + event_callback=events.append, + ), + ) + + # Ensure prepare succeeds + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_get_agent_info", + return_value=conversation.AgentInfo(id="test-agent", name="Test Agent"), + ): + await pipeline_input.validate() + + with patch( + "homeassistant.components.assist_pipeline.pipeline.conversation.async_converse" + ) as mock_async_converse: + await pipeline_input.execute() + + # Test agent should not have been called + mock_async_converse.assert_not_called() + + # Verify local intent response + intent_end_event = next( + ( + e + for e in events + if e.type == assist_pipeline.PipelineEventType.INTENT_END + ), + None, + ) + assert (intent_end_event is not None) and intent_end_event.data + assert ( + intent_end_event.data["intent_output"]["response"]["speech"]["plain"][ + "speech" + ] + == "Order confirmed" + ) diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index 50d0fc9bed8..d52e2a762ee 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -574,6 +574,7 @@ async def test_update_pipeline( "tts_voice": "test_voice", "wake_word_entity": "wake_work.test_1", "wake_word_id": "wake_word_id_1", + "prefer_local_intents": False, } await async_update_pipeline( @@ -617,6 +618,7 @@ async def test_update_pipeline( "tts_voice": "test_voice", "wake_word_entity": "wake_work.test_1", "wake_word_id": "wake_word_id_1", + "prefer_local_intents": False, } diff --git a/tests/components/assist_pipeline/test_select.py b/tests/components/assist_pipeline/test_select.py index 9fb02e228d8..5ce3b1020d0 100644 --- a/tests/components/assist_pipeline/test_select.py +++ b/tests/components/assist_pipeline/test_select.py @@ -184,7 +184,7 @@ async def test_select_entity_changing_vad_sensitivity( hass: HomeAssistant, init_select: MockConfigEntry, ) -> None: - """Test entity tracking pipeline changes.""" + """Test entity tracking vad sensitivity changes.""" config_entry = init_select # nicer naming config_entry.mock_state(hass, ConfigEntryState.LOADED) @@ -192,7 +192,7 @@ async def test_select_entity_changing_vad_sensitivity( assert state is not None assert state.state == VadSensitivity.DEFAULT.value - # Change select to new pipeline + # Change select to new sensitivity await hass.services.async_call( "select", "select_option", diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index fda26d2fb94..bd07601cd5d 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -16,7 +16,7 @@ def test_silence() -> None: segmenter = VoiceCommandSegmenter() # True return value indicates voice command has not finished - assert segmenter.process(_ONE_SECOND * 3, False) + assert segmenter.process(_ONE_SECOND * 3, 0.0) assert not segmenter.in_command @@ -26,15 
+26,15 @@ def test_speech() -> None: segmenter = VoiceCommandSegmenter() # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # silence # False return value indicates voice command is finished - assert not segmenter.process(_ONE_SECOND, False) + assert not segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command @@ -112,19 +112,19 @@ def test_silence_seconds() -> None: segmenter = VoiceCommandSegmenter(silence_seconds=1.0) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # exactly enough silence now - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.in_command @@ -134,27 +134,27 @@ def test_silence_reset() -> None: segmenter = VoiceCommandSegmenter(silence_seconds=1.0, reset_seconds=0.5) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # "speech" - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # speech should reset silence detection - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.in_command # not enough silence to end - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.in_command # exactly enough silence now - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.in_command @@ -166,23 +166,23 @@ def test_speech_reset() -> None: ) # silence - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # not enough speech to start voice command - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.in_command # silence should reset speech detection - assert segmenter.process(_ONE_SECOND, False) + assert segmenter.process(_ONE_SECOND, 0.0) assert not segmenter.in_command # not enough speech to start voice command - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.in_command # exactly enough speech now - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.in_command @@ -193,18 +193,18 @@ def test_timeout() -> None: # not enough to time out assert not segmenter.timed_out - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) assert not segmenter.timed_out # enough to time out - assert not segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.process(_ONE_SECOND * 0.5, 1.0) assert segmenter.timed_out # flag 
resets with more audio - assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.process(_ONE_SECOND * 0.5, 1.0) assert not segmenter.timed_out - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) assert segmenter.timed_out @@ -215,14 +215,38 @@ def test_command_seconds() -> None: command_seconds=3, speech_seconds=1, silence_seconds=1, reset_seconds=1 ) - assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND, 1.0) # Silence counts towards total command length - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) # Enough to finish command now - assert segmenter.process(_ONE_SECOND, True) - assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.process(_ONE_SECOND, 1.0) + assert segmenter.process(_ONE_SECOND * 0.5, 0.0) # Silence to finish - assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.process(_ONE_SECOND * 0.5, 0.0) + + +def test_speech_thresholds() -> None: + """Test before/in command speech thresholds.""" + + segmenter = VoiceCommandSegmenter( + before_command_speech_threshold=0.2, + in_command_speech_threshold=0.5, + command_seconds=2, + speech_seconds=1, + silence_seconds=1, + ) + + # Not high enough probability to trigger command + assert segmenter.process(_ONE_SECOND, 0.1) + assert not segmenter.in_command + + # Triggers command + assert segmenter.process(_ONE_SECOND, 0.3) + assert segmenter.in_command + + # Now that same probability is considered silence. + # Finishes command. + assert not segmenter.process(_ONE_SECOND, 0.3) diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index e339ee74fbb..c1caf6f86a4 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -119,85 +119,88 @@ async def test_audio_pipeline( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "stt", - "end_stage": "tts", - "input": { - "sample_rate": 44100, - }, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "tts", + "input": { + "sample_rate": 44100, + }, + } + ) - # result - msg = await client.receive_json() - assert msg["success"] + # result + msg = await client.receive_json() + assert msg["success"] - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] - events.append(msg["event"]) + # run start + msg = await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + events.append(msg["event"]) - # stt - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # stt + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # End of audio stream (handler id + empty payload) - await 
client.send_bytes(bytes([handler_id])) + # End of audio stream (handler id + empty payload) + await client.send_bytes(bytes([handler_id])) - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # intent - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # intent + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # text-to-speech - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - pipeline_data: PipelineData = hass.data[DOMAIN] - pipeline_id = list(pipeline_data.pipeline_debug)[0] - pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] + pipeline_data: PipelineData = hass.data[DOMAIN] + pipeline_id = list(pipeline_data.pipeline_debug)[0] + pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline_debug/get", + "pipeline_id": pipeline_id, + "pipeline_run_id": pipeline_run_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"events": events} async def test_audio_pipeline_with_wake_word_timeout( @@ -210,49 +213,52 @@ async def test_audio_pipeline_with_wake_word_timeout( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "wake_word", - "end_stage": "tts", - "input": { - "sample_rate": SAMPLE_RATE, - "timeout": 1, - }, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "wake_word", + "end_stage": "tts", + "input": { + 
"sample_rate": SAMPLE_RATE, + "timeout": 1, + }, + } + ) - # result - msg = await client.receive_json() - assert msg["success"], msg + # result + msg = await client.receive_json() + assert msg["success"], msg - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run start + msg = await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # wake_word - msg = await client.receive_json() - assert msg["event"]["type"] == "wake_word-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # wake_word + msg = await client.receive_json() + assert msg["event"]["type"] == "wake_word-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # 2 seconds of silence - await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) + # 2 seconds of silence + await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) - # Time out error - msg = await client.receive_json() - assert msg["event"]["type"] == "error" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # Time out error + msg = await client.receive_json() + assert msg["event"]["type"] == "error" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) async def test_audio_pipeline_with_wake_word_no_timeout( @@ -265,98 +271,101 @@ async def test_audio_pipeline_with_wake_word_no_timeout( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "wake_word", - "end_stage": "tts", - "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "wake_word", + "end_stage": "tts", + "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, + } + ) - # result - msg = await client.receive_json() - assert msg["success"], msg - - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] - events.append(msg["event"]) - - # wake_word - msg = await client.receive_json() - assert msg["event"]["type"] == "wake_word-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) - - # "audio" - await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake word")) - - async with asyncio.timeout(1): + # result msg = await client.receive_json() - assert msg["event"]["type"] == "wake_word-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + assert msg["success"], msg - # stt - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run start + msg = 
await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + events.append(msg["event"]) - # End of audio stream (handler id + empty payload) - await client.send_bytes(bytes([handler_id])) + # wake_word + msg = await client.receive_json() + assert msg["event"]["type"] == "wake_word-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # "audio" + await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake word")) - # intent - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + async with asyncio.timeout(1): + msg = await client.receive_json() + assert msg["event"]["type"] == "wake_word-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # stt + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # text-to-speech - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # End of audio stream (handler id + empty payload) + await client.send_bytes(bytes([handler_id])) - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # intent + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - pipeline_data: PipelineData = hass.data[DOMAIN] - pipeline_id = list(pipeline_data.pipeline_debug)[0] - pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) + + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) + + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) + + pipeline_data: PipelineData = hass.data[DOMAIN] + pipeline_id = list(pipeline_data.pipeline_debug)[0] + 
pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] + + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline_debug/get", + "pipeline_id": pipeline_id, + "pipeline_run_id": pipeline_run_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"events": events} async def test_audio_pipeline_no_wake_word_engine( @@ -974,6 +983,7 @@ async def test_add_pipeline( "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": "wakeword_entity_1", "wake_word_id": "wakeword_id_1", + "prefer_local_intents": True, } ) msg = await client.receive_json() @@ -991,6 +1001,7 @@ async def test_add_pipeline( "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": "wakeword_entity_1", "wake_word_id": "wakeword_id_1", + "prefer_local_intents": True, } assert len(pipeline_store.data) == 2 @@ -1008,6 +1019,7 @@ async def test_add_pipeline( tts_voice="Arnold Schwarzenegger", wake_word_entity="wakeword_entity_1", wake_word_id="wakeword_id_1", + prefer_local_intents=True, ) await client.send_json_auto_id( @@ -1195,6 +1207,7 @@ async def test_get_pipeline( "tts_voice": "james_earl_jones", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, } # Get conversation agent as pipeline @@ -1220,6 +1233,7 @@ async def test_get_pipeline( "tts_voice": "james_earl_jones", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, } await client.send_json_auto_id( @@ -1249,6 +1263,7 @@ async def test_get_pipeline( "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": "wakeword_entity_1", "wake_word_id": "wakeword_id_1", + "prefer_local_intents": False, } ) msg = await client.receive_json() @@ -1277,6 +1292,7 @@ async def test_get_pipeline( "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": "wakeword_entity_1", "wake_word_id": "wakeword_id_1", + "prefer_local_intents": False, } @@ -1304,6 +1320,7 @@ async def test_list_pipelines( "tts_voice": "james_earl_jones", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, } ], "preferred_pipeline": ANY, @@ -1395,6 +1412,7 @@ async def test_update_pipeline( "tts_voice": "new_tts_voice", "wake_word_entity": "new_wakeword_entity", "wake_word_id": "new_wakeword_id", + "prefer_local_intents": False, } assert len(pipeline_store.data) == 2 @@ -1446,6 +1464,7 @@ async def test_update_pipeline( "tts_voice": None, "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, } pipeline = pipeline_store.data[pipeline_id] @@ -1530,99 +1549,102 @@ async def test_audio_pipeline_debug( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "stt", - "end_stage": "tts", - "input": { - "sample_rate": 44100, - }, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "tts", + "input": { + "sample_rate": 44100, + }, + } + ) - # result - msg = await client.receive_json() - assert msg["success"] + # result + msg = await client.receive_json() + assert msg["success"] - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] - events.append(msg["event"]) + # run start + msg = await 
client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + events.append(msg["event"]) - # stt - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # stt + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # End of audio stream (handler id + empty payload) - await client.send_bytes(bytes([handler_id])) + # End of audio stream (handler id + empty payload) + await client.send_bytes(bytes([handler_id])) - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # intent - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # intent + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # text-to-speech - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # Get the id of the pipeline - await client.send_json_auto_id({"type": "assist_pipeline/pipeline/list"}) - msg = await client.receive_json() - assert msg["success"] - assert len(msg["result"]["pipelines"]) == 1 + # Get the id of the pipeline + await client.send_json_auto_id({"type": "assist_pipeline/pipeline/list"}) + msg = await client.receive_json() + assert msg["success"] + assert len(msg["result"]["pipelines"]) == 1 - pipeline_id = msg["result"]["pipelines"][0]["id"] + pipeline_id = msg["result"]["pipelines"][0]["id"] - # Get the id for the run - await client.send_json_auto_id( - {"type": "assist_pipeline/pipeline_debug/list", "pipeline_id": pipeline_id} - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"pipeline_runs": [ANY]} + # Get the id for the run + await client.send_json_auto_id( + {"type": 
"assist_pipeline/pipeline_debug/list", "pipeline_id": pipeline_id} + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"pipeline_runs": [ANY]} - pipeline_run_id = msg["result"]["pipeline_runs"][0]["pipeline_run_id"] + pipeline_run_id = msg["result"]["pipeline_runs"][0]["pipeline_run_id"] - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline_debug/get", + "pipeline_id": pipeline_id, + "pipeline_run_id": pipeline_run_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"events": events} async def test_pipeline_debug_list_runs_wrong_pipeline( @@ -1777,94 +1799,97 @@ async def test_audio_pipeline_with_enhancements( events = [] client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "assist_pipeline/run", - "start_stage": "stt", - "end_stage": "tts", - "input": { - "sample_rate": SAMPLE_RATE, - # Enhancements - "noise_suppression_level": 2, - "auto_gain_dbfs": 15, - "volume_multiplier": 2.0, - }, - } - ) + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + await client.send_json_auto_id( + { + "type": "assist_pipeline/run", + "start_stage": "stt", + "end_stage": "tts", + "input": { + "sample_rate": SAMPLE_RATE, + # Enhancements + "noise_suppression_level": 2, + "auto_gain_dbfs": 15, + "volume_multiplier": 2.0, + }, + } + ) - # result - msg = await client.receive_json() - assert msg["success"] + # result + msg = await client.receive_json() + assert msg["success"] - # run start - msg = await client.receive_json() - assert msg["event"]["type"] == "run-start" - msg["event"]["data"]["pipeline"] = ANY - assert msg["event"]["data"] == snapshot - handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] - events.append(msg["event"]) + # run start + msg = await client.receive_json() + assert msg["event"]["type"] == "run-start" + msg["event"]["data"]["pipeline"] = ANY + assert msg["event"]["data"] == snapshot + handler_id = msg["event"]["data"]["runner_data"]["stt_binary_handler_id"] + events.append(msg["event"]) - # stt - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # stt + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # One second of silence. - # This will pass through the audio enhancement pipeline, but we don't test - # the actual output. - await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) + # One second of silence. + # This will pass through the audio enhancement pipeline, but we don't test + # the actual output. 
+ await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) - # End of audio stream (handler id + empty payload) - await client.send_bytes(bytes([handler_id])) + # End of audio stream (handler id + empty payload) + await client.send_bytes(bytes([handler_id])) - msg = await client.receive_json() - assert msg["event"]["type"] == "stt-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "stt-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # intent - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # intent + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "intent-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "intent-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # text-to-speech - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-start" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # text-to-speech + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-start" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - msg = await client.receive_json() - assert msg["event"]["type"] == "tts-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + msg = await client.receive_json() + assert msg["event"]["type"] == "tts-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - # run end - msg = await client.receive_json() - assert msg["event"]["type"] == "run-end" - assert msg["event"]["data"] == snapshot - events.append(msg["event"]) + # run end + msg = await client.receive_json() + assert msg["event"]["type"] == "run-end" + assert msg["event"]["data"] == snapshot + events.append(msg["event"]) - pipeline_data: PipelineData = hass.data[DOMAIN] - pipeline_id = list(pipeline_data.pipeline_debug)[0] - pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] + pipeline_data: PipelineData = hass.data[DOMAIN] + pipeline_id = list(pipeline_data.pipeline_debug)[0] + pipeline_run_id = list(pipeline_data.pipeline_debug[pipeline_id])[0] - await client.send_json_auto_id( - { - "type": "assist_pipeline/pipeline_debug/get", - "pipeline_id": pipeline_id, - "pipeline_run_id": pipeline_run_id, - } - ) - msg = await client.receive_json() - assert msg["success"] - assert msg["result"] == {"events": events} + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline_debug/get", + "pipeline_id": pipeline_id, + "pipeline_run_id": pipeline_run_id, + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == {"events": events} async def test_wake_word_cooldown_same_id( diff --git a/tests/components/backup/snapshots/test_websocket.ambr b/tests/components/backup/snapshots/test_websocket.ambr index 07e099561b1..096df37d704 100644 --- a/tests/components/backup/snapshots/test_websocket.ambr +++ b/tests/components/backup/snapshots/test_websocket.ambr @@ -269,3 +269,22 @@ 'type': 'result', }) # --- +# name: test_restore[with_hassio] + dict({ + 'error': dict({ + 'code': 'unknown_command', + 'message': 
'Unknown command.', + }), + 'id': 1, + 'success': False, + 'type': 'result', + }) +# --- +# name: test_restore[without_hassio] + dict({ + 'id': 1, + 'result': None, + 'success': True, + 'type': 'result', + }) +# --- diff --git a/tests/components/backup/test_http.py b/tests/components/backup/test_http.py index 93ecb27bc97..76b1f76b55b 100644 --- a/tests/components/backup/test_http.py +++ b/tests/components/backup/test_http.py @@ -1,8 +1,11 @@ """Tests for the Backup integration.""" +import asyncio +from io import StringIO from unittest.mock import patch from aiohttp import web +import pytest from homeassistant.core import HomeAssistant @@ -49,12 +52,12 @@ async def test_downloading_backup_not_found( assert resp.status == 404 -async def test_non_admin( +async def test_downloading_as_non_admin( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_admin_user: MockUser, ) -> None: - """Test downloading a backup file that does not exist.""" + """Test downloading a backup file when you are not an admin.""" hass_admin_user.groups = [] await setup_backup_integration(hass) @@ -62,3 +65,53 @@ async def test_non_admin( resp = await client.get("/api/backup/download/abc123") assert resp.status == 401 + + +async def test_uploading_a_backup_file( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test uploading a backup file.""" + await setup_backup_integration(hass) + + client = await hass_client() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_receive_backup", + ) as async_receive_backup_mock: + resp = await client.post( + "/api/backup/upload", + data={"file": StringIO("test")}, + ) + assert resp.status == 201 + assert async_receive_backup_mock.called + + +@pytest.mark.parametrize( + ("error", "message"), + [ + (OSError("Boom!"), "Can't write backup file Boom!"), + (asyncio.CancelledError("Boom!"), ""), + ], +) +async def test_error_handling_uploading_a_backup_file( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + error: Exception, + message: str, +) -> None: + """Test error handling when uploading a backup file.""" + await setup_backup_integration(hass) + + client = await hass_client() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_receive_backup", + side_effect=error, + ): + resp = await client.post( + "/api/backup/upload", + data={"file": StringIO("test")}, + ) + assert resp.status == 500 + assert await resp.text() == message diff --git a/tests/components/backup/test_manager.py b/tests/components/backup/test_manager.py index 1bf801a0fcf..a3f70267643 100644 --- a/tests/components/backup/test_manager.py +++ b/tests/components/backup/test_manager.py @@ -3,8 +3,10 @@ from __future__ import annotations from pathlib import Path -from unittest.mock import AsyncMock, MagicMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, mock_open, patch +import aiohttp +from multidict import CIMultiDict, CIMultiDictProxy import pytest from homeassistant.components.backup import BackupManager @@ -333,3 +335,65 @@ async def test_loading_platforms_when_running_async_post_backup_actions( assert len(manager.platforms) == 1 assert "Loaded 1 platforms" in caplog.text + + +async def test_async_receive_backup( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test receiving a backup file.""" + manager = BackupManager(hass) + + size = 2 * 2**16 + protocol = Mock(_reading_paused=False) + stream = aiohttp.StreamReader(protocol, 2**16) + stream.feed_data(b"0" * size + 
b"\r\n--:--") + stream.feed_eof() + + open_mock = mock_open() + + with patch("pathlib.Path.open", open_mock), patch("shutil.move") as mover_mock: + await manager.async_receive_backup( + contents=aiohttp.BodyPartReader( + b"--:", + CIMultiDictProxy( + CIMultiDict( + { + aiohttp.hdrs.CONTENT_DISPOSITION: "attachment; filename=abc123.tar" + } + ) + ), + stream, + ) + ) + assert open_mock.call_count == 1 + assert mover_mock.call_count == 1 + assert mover_mock.mock_calls[0].args[1].name == "abc123.tar" + + +async def test_async_trigger_restore( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test trigger restore.""" + manager = BackupManager(hass) + manager.loaded_backups = True + manager.backups = {TEST_BACKUP.slug: TEST_BACKUP} + + with ( + patch("pathlib.Path.exists", return_value=True), + patch("pathlib.Path.write_text") as mocked_write_text, + patch("homeassistant.core.ServiceRegistry.async_call") as mocked_service_call, + ): + await manager.async_restore_backup(TEST_BACKUP.slug) + assert mocked_write_text.call_args[0][0] == '{"path": "abc123.tar"}' + assert mocked_service_call.called + + +async def test_async_trigger_restore_missing_backup(hass: HomeAssistant) -> None: + """Test trigger restore.""" + manager = BackupManager(hass) + manager.loaded_backups = True + + with pytest.raises(HomeAssistantError, match="Backup abc123 not found"): + await manager.async_restore_backup(TEST_BACKUP.slug) diff --git a/tests/components/backup/test_websocket.py b/tests/components/backup/test_websocket.py index 805182391da..125ba8adaad 100644 --- a/tests/components/backup/test_websocket.py +++ b/tests/components/backup/test_websocket.py @@ -141,6 +141,32 @@ async def test_generate( assert snapshot == await client.receive_json() +@pytest.mark.parametrize( + "with_hassio", + [ + pytest.param(True, id="with_hassio"), + pytest.param(False, id="without_hassio"), + ], +) +async def test_restore( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + with_hassio: bool, +) -> None: + """Test calling the restore command.""" + await setup_backup_integration(hass, with_hassio=with_hassio) + + client = await hass_ws_client(hass) + await hass.async_block_till_done() + + with patch( + "homeassistant.components.backup.manager.BackupManager.async_restore_backup", + ): + await client.send_json_auto_id({"type": "backup/restore", "slug": "abc123"}) + assert await client.receive_json() == snapshot + + @pytest.mark.parametrize( "access_token_fixture_name", ["hass_access_token", "hass_supervisor_access_token"], diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index 6c19a29c1da..cbde856ff89 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -35,13 +35,13 @@ from .const import ( TEST_DATA_CREATE_ENTRY, TEST_DATA_CREATE_ENTRY_2, TEST_FRIENDLY_NAME, - TEST_FRIENDLY_NAME_2, TEST_FRIENDLY_NAME_3, - TEST_HOST_2, + TEST_FRIENDLY_NAME_4, TEST_HOST_3, + TEST_HOST_4, TEST_JID_1, - TEST_JID_2, TEST_JID_3, + TEST_JID_4, TEST_NAME, TEST_NAME_2, TEST_SERIAL_NUMBER, @@ -267,29 +267,29 @@ def mock_mozart_client() -> Generator[AsyncMock]: } client.get_beolink_peers = AsyncMock() client.get_beolink_peers.return_value = [ - BeolinkPeer( - friendly_name=TEST_FRIENDLY_NAME_2, - jid=TEST_JID_2, - ip_address=TEST_HOST_2, - ), BeolinkPeer( friendly_name=TEST_FRIENDLY_NAME_3, jid=TEST_JID_3, ip_address=TEST_HOST_3, ), + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_4, + 
jid=TEST_JID_4, + ip_address=TEST_HOST_4, + ), ] client.get_beolink_listeners = AsyncMock() client.get_beolink_listeners.return_value = [ - BeolinkPeer( - friendly_name=TEST_FRIENDLY_NAME_2, - jid=TEST_JID_2, - ip_address=TEST_HOST_2, - ), BeolinkPeer( friendly_name=TEST_FRIENDLY_NAME_3, jid=TEST_JID_3, ip_address=TEST_HOST_3, ), + BeolinkPeer( + friendly_name=TEST_FRIENDLY_NAME_4, + jid=TEST_JID_4, + ip_address=TEST_HOST_4, + ), ] client.get_listening_mode_set = AsyncMock() diff --git a/tests/components/bang_olufsen/const.py b/tests/components/bang_olufsen/const.py index 3769aef5cd3..6602a898eb6 100644 --- a/tests/components/bang_olufsen/const.py +++ b/tests/components/bang_olufsen/const.py @@ -16,6 +16,7 @@ from mozart_api.models import ( PlayQueueItemType, RenderingState, SceneProperties, + Source, UserFlow, VolumeLevel, VolumeMute, @@ -125,7 +126,10 @@ TEST_DATA_ZEROCONF_IPV6 = ZeroconfServiceInfo( }, ) -TEST_AUDIO_SOURCES = [BangOlufsenSource.TIDAL.name, BangOlufsenSource.LINE_IN.name] +TEST_SOURCE = Source( + name="Tidal", id="tidal", is_seekable=True, is_enabled=True, is_playable=True +) +TEST_AUDIO_SOURCES = [TEST_SOURCE.name, BangOlufsenSource.LINE_IN.name] TEST_VIDEO_SOURCES = ["HDMI A"] TEST_SOURCES = TEST_AUDIO_SOURCES + TEST_VIDEO_SOURCES TEST_FALLBACK_SOURCES = [ diff --git a/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e9540b5cec6 --- /dev/null +++ b/tests/components/bang_olufsen/snapshots/test_diagnostics.ambr @@ -0,0 +1,67 @@ +# serializer version: 1 +# name: test_async_get_config_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '192.168.0.1', + 'jid': '1111.1111111.11111111@products.bang-olufsen.com', + 'model': 'Beosound Balance', + 'name': 'Beosound Balance-11111111', + }), + 'disabled_by': None, + 'discovery_keys': dict({ + }), + 'domain': 'bang_olufsen', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Beosound Balance-11111111', + 'unique_id': '11111111', + 'version': 1, + }), + 'media_player': dict({ + 'attributes': dict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': 'music', + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': 2095933, + }), + 'entity_id': 'media_player.beosound_balance_11111111', + 'state': 'playing', + }), + 'websocket_connected': False, + }) +# --- diff --git a/tests/components/bang_olufsen/snapshots/test_media_player.ambr 
b/tests/components/bang_olufsen/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..36fcc72aa22 --- /dev/null +++ b/tests/components/bang_olufsen/snapshots/test_media_player.ambr @@ -0,0 +1,856 @@ +# serializer version: 1 +# name: test_async_beolink_allstandby + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[all_discovered-True-None-log_messages0-2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[all_discovered-True-expand_side_effect1-log_messages1-2] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': 
'1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[beolink_jids-parameter_value2-None-log_messages2-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_expand[beolink_jids-parameter_value3-expand_side_effect3-log_messages3-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': 
list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_join + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_beolink_unexpand + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members0-1-0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': 
'1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members0-1-0].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members1-0-1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode 
(234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players[group_members1-0-1].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'media_position': 0, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Line-In', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source0-group_members0-expected_result0-invalid_source].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': 
'1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source': 'Tidal', + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_join_players_invalid[source1-group_members1-expected_result1-invalid_grouping_entity].1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening 
Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_unjoin_player + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_11111111', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_update_beolink_listener + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'leader': dict({ + 'Laundry room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.11111111@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'media_player.beosound_balance_11111111', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_11111111', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_async_update_beolink_listener.1 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'beolink': dict({ + 'listeners': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'peers': dict({ + 'Lego room Balance': '1111.1111111.33333333@products.bang-olufsen.com', + 'Lounge room Balance': '1111.1111111.44444444@products.bang-olufsen.com', + }), + 'self': dict({ + 'Living room Balance': '1111.1111111.22222222@products.bang-olufsen.com', + }), + }), + 'device_class': 'speaker', + 'entity_picture_local': None, + 
'friendly_name': 'Living room Balance', + 'group_members': list([ + 'media_player.beosound_balance_22222222', + 'listener_not_in_hass-1111.1111111.33333333@products.bang-olufsen.com', + 'listener_not_in_hass-1111.1111111.44444444@products.bang-olufsen.com', + ]), + 'media_content_type': , + 'sound_mode': 'Test Listening Mode (123)', + 'sound_mode_list': list([ + 'Test Listening Mode (123)', + 'Test Listening Mode (234)', + 'Test Listening Mode 2 (345)', + ]), + 'source_list': list([ + 'Tidal', + 'Line-In', + 'HDMI A', + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.beosound_balance_22222222', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- diff --git a/tests/components/bang_olufsen/test_diagnostics.py b/tests/components/bang_olufsen/test_diagnostics.py new file mode 100644 index 00000000000..7c99648ace4 --- /dev/null +++ b/tests/components/bang_olufsen/test_diagnostics.py @@ -0,0 +1,41 @@ +"""Test bang_olufsen config entry diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_async_get_config_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot( + exclude=props( + "created_at", + "entry_id", + "id", + "last_changed", + "last_reported", + "last_updated", + "media_position_updated_at", + "modified_at", + ) + ) diff --git a/tests/components/bang_olufsen/test_init.py b/tests/components/bang_olufsen/test_init.py index 5b809488ed8..c8e4c05f9ab 100644 --- a/tests/components/bang_olufsen/test_init.py +++ b/tests/components/bang_olufsen/test_init.py @@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceRegistry -from .const import TEST_MODEL_BALANCE, TEST_NAME, TEST_SERIAL_NUMBER +from .const import TEST_FRIENDLY_NAME, TEST_MODEL_BALANCE, TEST_SERIAL_NUMBER from tests.common import MockConfigEntry @@ -35,7 +35,8 @@ async def test_setup_entry( identifiers={(DOMAIN, TEST_SERIAL_NUMBER)} ) assert device is not None - assert device.name == TEST_NAME + # Is usually TEST_NAME, but is updated to the device's friendly name by _update_name_and_beolink + assert device.name == TEST_FRIENDLY_NAME assert device.model == TEST_MODEL_BALANCE # Ensure that the connection has been checked WebSocket connection has been initialized diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 8f23af9e04a..aa35b0265dc 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -4,8 +4,10 @@ from contextlib import AbstractContextManager, nullcontext as does_not_raise import logging from unittest.mock import AsyncMock, patch +from mozart_api.exceptions import NotFoundException from 
mozart_api.models import ( BeolinkLeader, + BeolinkSelf, PlaybackContentMetadata, PlayQueueSettings, RenderingState, @@ -14,6 +16,8 @@ from mozart_api.models import ( WebsocketNotificationTag, ) import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.bang_olufsen.const import ( BANG_OLUFSEN_REPEAT_FROM_HA, @@ -46,24 +50,29 @@ from homeassistant.components.media_player import ( ATTR_SOUND_MODE_LIST, DOMAIN as MEDIA_PLAYER_DOMAIN, SERVICE_CLEAR_PLAYLIST, + SERVICE_JOIN, SERVICE_MEDIA_NEXT_TRACK, SERVICE_MEDIA_PLAY_PAUSE, SERVICE_MEDIA_PREVIOUS_TRACK, SERVICE_MEDIA_SEEK, SERVICE_MEDIA_STOP, SERVICE_PLAY_MEDIA, + SERVICE_REPEAT_SET, SERVICE_SELECT_SOUND_MODE, SERVICE_SELECT_SOURCE, + SERVICE_SHUFFLE_SET, SERVICE_TURN_OFF, + SERVICE_UNJOIN, SERVICE_VOLUME_MUTE, SERVICE_VOLUME_SET, MediaPlayerState, MediaType, RepeatMode, ) -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_REPEAT_SET, SERVICE_SHUFFLE_SET +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.setup import async_setup_component from .const import ( @@ -76,7 +85,10 @@ from .const import ( TEST_DEEZER_TRACK, TEST_FALLBACK_SOURCES, TEST_FRIENDLY_NAME_2, + TEST_JID_1, TEST_JID_2, + TEST_JID_3, + TEST_JID_4, TEST_LISTENING_MODE_REF, TEST_MEDIA_PLAYER_ENTITY_ID, TEST_MEDIA_PLAYER_ENTITY_ID_2, @@ -93,6 +105,7 @@ from .const import ( TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, TEST_SOUND_MODE_2, TEST_SOUND_MODES, + TEST_SOURCE, TEST_SOURCES, TEST_VIDEO_SOURCES, TEST_VOLUME, @@ -136,6 +149,9 @@ async def test_initialization( mock_mozart_client.get_remote_menu.assert_called_once() mock_mozart_client.get_listening_mode_set.assert_called_once() mock_mozart_client.get_active_listening_mode.assert_called_once() + mock_mozart_client.get_beolink_self.assert_called_once() + mock_mozart_client.get_beolink_peers.assert_called_once() + mock_mozart_client.get_beolink_listeners.assert_called_once() async def test_async_update_sources_audio_only( @@ -216,7 +232,7 @@ async def test_async_update_sources_availability( # Add a source that is available and playable mock_mozart_client.get_available_sources.return_value = SourceArray( - items=[BangOlufsenSource.TIDAL] + items=[TEST_SOURCE] ) # Send playback_source. 
The source is not actually used, so its attributes don't matter @@ -224,7 +240,7 @@ async def test_async_update_sources_availability( assert mock_mozart_client.get_available_sources.call_count == 2 assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE_LIST] == [BangOlufsenSource.TIDAL.name] + assert states.attributes[ATTR_INPUT_SOURCE_LIST] == [TEST_SOURCE.name] async def test_async_update_playback_metadata( @@ -342,19 +358,17 @@ async def test_async_update_playback_state( @pytest.mark.parametrize( - ("reported_source", "real_source", "content_type", "progress", "metadata"), + ("source", "content_type", "progress", "metadata"), [ - # Normal source, music mediatype expected, no progress expected + # Normal source, music mediatype expected ( - BangOlufsenSource.TIDAL, - BangOlufsenSource.TIDAL, + TEST_SOURCE, MediaType.MUSIC, TEST_PLAYBACK_PROGRESS.progress, PlaybackContentMetadata(), ), - # URI source, url media type expected, no progress expected + # URI source, url media type expected ( - BangOlufsenSource.URI_STREAMER, BangOlufsenSource.URI_STREAMER, MediaType.URL, TEST_PLAYBACK_PROGRESS.progress, @@ -363,44 +377,17 @@ async def test_async_update_playback_state( # Line-In source,media type expected, progress 0 expected ( BangOlufsenSource.LINE_IN, - BangOlufsenSource.CHROMECAST, MediaType.MUSIC, 0, PlaybackContentMetadata(), ), - # Chromecast as source, but metadata says Line-In. - # Progress is not set to 0 as the source is Chromecast first - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.LINE_IN, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(title=BangOlufsenSource.LINE_IN.name), - ), - # Chromecast as source, but metadata says Bluetooth - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.BLUETOOTH, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(title=BangOlufsenSource.BLUETOOTH.name), - ), - # Chromecast as source, but metadata says Bluetooth in another way - ( - BangOlufsenSource.CHROMECAST, - BangOlufsenSource.BLUETOOTH, - MediaType.MUSIC, - TEST_PLAYBACK_PROGRESS.progress, - PlaybackContentMetadata(art=[]), - ), ], ) async def test_async_update_source_change( hass: HomeAssistant, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, - reported_source: Source, - real_source: Source, + source: Source, content_type: MediaType, progress: int, metadata: PlaybackContentMetadata, @@ -429,10 +416,10 @@ async def test_async_update_source_change( # Simulate metadata playback_metadata_callback(metadata) - source_change_callback(reported_source) + source_change_callback(source) assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) - assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name + assert states.attributes[ATTR_INPUT_SOURCE] == source.name assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type assert states.attributes[ATTR_MEDIA_POSITION] == progress @@ -530,11 +517,14 @@ async def test_async_update_beolink_line_in( assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes["group_members"] == [] - assert mock_mozart_client.get_beolink_listeners.call_count == 1 + # Called once during _initialize and once during _async_update_beolink + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + assert mock_mozart_client.get_beolink_peers.call_count == 2 async def test_async_update_beolink_listener( hass: HomeAssistant, + snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, 
mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -567,7 +557,56 @@ async def test_async_update_beolink_listener( TEST_MEDIA_PLAYER_ENTITY_ID, ] - assert mock_mozart_client.get_beolink_listeners.call_count == 0 + # Called once for each entity during _initialize + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + # Called once for each entity during _initialize and + # once more during _async_update_beolink for the entity that has the callback associated with it. + assert mock_mozart_client.get_beolink_peers.call_count == 3 + + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_update_name_and_beolink( + hass: HomeAssistant, + device_registry: DeviceRegistry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test _async_update_name_and_beolink.""" + # Change response to ensure device name is changed + mock_mozart_client.get_beolink_self.return_value = BeolinkSelf( + friendly_name=TEST_FRIENDLY_NAME_2, jid=TEST_JID_1 + ) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + configuration_callback = ( + mock_mozart_client.get_notification_notifications.call_args[0][0] + ) + # Trigger callback + configuration_callback(WebsocketNotificationTag(value="configuration")) + + await hass.async_block_till_done() + + assert mock_mozart_client.get_beolink_self.call_count == 2 + assert mock_mozart_client.get_beolink_peers.call_count == 2 + assert mock_mozart_client.get_beolink_listeners.call_count == 2 + + # Check that device name has been changed + assert mock_config_entry.unique_id + assert ( + device := device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + ) + assert device.name == TEST_FRIENDLY_NAME_2 async def test_async_mute_volume( @@ -707,7 +746,7 @@ async def test_async_media_next_track( ("source", "expected_result", "seek_called_times"), [ # Seekable source, seek expected - (BangOlufsenSource.DEEZER, does_not_raise(), 1), + (TEST_SOURCE, does_not_raise(), 1), # Non seekable source, seek shouldn't work (BangOlufsenSource.LINE_IN, pytest.raises(HomeAssistantError), 0), # Malformed source, seek shouldn't work @@ -795,7 +834,7 @@ async def test_async_clear_playlist( # Invalid source ("Test source", pytest.raises(ServiceValidationError), 0, 0), # Valid audio source - (BangOlufsenSource.TIDAL.name, does_not_raise(), 1, 0), + (TEST_SOURCE.name, does_not_raise(), 1, 0), # Valid video source (TEST_VIDEO_SOURCES[0], does_not_raise(), 0, 1), ], @@ -1343,6 +1382,7 @@ async def test_async_browse_media( ) async def test_async_join_players( hass: HomeAssistant, + snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -1364,11 +1404,11 @@ async def test_async_join_players( await hass.config_entries.async_setup(mock_config_entry_2.entry_id) # Set the source to a beolink expandable source - source_change_callback(BangOlufsenSource.TIDAL) + source_change_callback(TEST_SOURCE) await hass.services.async_call( - "media_player", - "join", + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_GROUP_MEMBERS: group_members, @@ -1379,6 +1419,14 
@@ async def test_async_join_players( assert mock_mozart_client.post_beolink_expand.call_count == expand_count assert mock_mozart_client.join_latest_beolink_experience.call_count == join_count + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + @pytest.mark.parametrize( ("source", "group_members", "expected_result", "error_type"), @@ -1392,7 +1440,7 @@ async def test_async_join_players( ), # Invalid media_player entity ( - BangOlufsenSource.TIDAL, + TEST_SOURCE, [TEST_MEDIA_PLAYER_ENTITY_ID_3], pytest.raises(ServiceValidationError), "invalid_grouping_entity", @@ -1401,6 +1449,7 @@ async def test_async_join_players( ) async def test_async_join_players_invalid( hass: HomeAssistant, + snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, mock_config_entry_2: MockConfigEntry, @@ -1425,8 +1474,8 @@ async def test_async_join_players_invalid( with expected_result as exc_info: await hass.services.async_call( - "media_player", - "join", + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_GROUP_MEMBERS: group_members, @@ -1441,9 +1490,18 @@ async def test_async_join_players_invalid( assert mock_mozart_client.post_beolink_expand.call_count == 0 assert mock_mozart_client.join_latest_beolink_experience.call_count == 0 + # Main entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + # Secondary entity + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID_2)) + assert states == snapshot(exclude=props("media_position_updated_at")) + async def test_async_unjoin_player( hass: HomeAssistant, + snapshot: SnapshotAssertion, mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: @@ -1453,14 +1511,181 @@ async def test_async_unjoin_player( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "unjoin", + MEDIA_PLAYER_DOMAIN, + SERVICE_UNJOIN, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) mock_mozart_client.post_beolink_leave.assert_called_once() + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_beolink_join( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_beolink_join with defined JID.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_join", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + "beolink_jid": TEST_JID_2, + }, + blocking=True, + ) + + mock_mozart_client.join_beolink_peer.assert_called_once_with(jid=TEST_JID_2) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +@pytest.mark.parametrize( + ( + "parameter", + "parameter_value", + "expand_side_effect", + "log_messages", + "peers_call_count", + ), + [ + # All discovered + # Valid peers + ("all_discovered", True, None, [], 2), + # Invalid peers + ( + "all_discovered", + True, + 
NotFoundException(), + [f"Unable to expand to {TEST_JID_3}", f"Unable to expand to {TEST_JID_4}"], + 2, + ), + # Beolink JIDs + # Valid peer + ("beolink_jids", [TEST_JID_3, TEST_JID_4], None, [], 1), + # Invalid peer + ( + "beolink_jids", + [TEST_JID_3, TEST_JID_4], + NotFoundException(), + [ + f"Unable to expand to {TEST_JID_3}. Is the device available on the network?", + f"Unable to expand to {TEST_JID_4}. Is the device available on the network?", + ], + 1, + ), + ], +) +async def test_async_beolink_expand( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + parameter: str, + parameter_value: bool | list[str], + expand_side_effect: NotFoundException | None, + log_messages: list[str], + peers_call_count: int, +) -> None: + """Test async_beolink_expand.""" + mock_mozart_client.post_beolink_expand.side_effect = expand_side_effect + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] + ) + + # Set the source to a beolink expandable source + source_change_callback(TEST_SOURCE) + + await hass.services.async_call( + DOMAIN, + "beolink_expand", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + parameter: parameter_value, + }, + blocking=True, + ) + + # Check log messages + for log_message in log_messages: + assert log_message in caplog.text + + # Called once during _initialize and once during async_beolink_expand for all_discovered + assert mock_mozart_client.get_beolink_peers.call_count == peers_call_count + + assert mock_mozart_client.post_beolink_expand.call_count == len( + await mock_mozart_client.get_beolink_peers() + ) + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_beolink_unexpand( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_beolink_unexpand.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_unexpand", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + "beolink_jids": [TEST_JID_3, TEST_JID_4], + }, + blocking=True, + ) + + assert mock_mozart_client.post_beolink_unexpand.call_count == 2 + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + + +async def test_async_beolink_allstandby( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test async_beolink_allstandby.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + DOMAIN, + "beolink_allstandby", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_beolink_allstandby.assert_called_once() + + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert states == snapshot(exclude=props("media_position_updated_at")) + @pytest.mark.parametrize( ("repeat"), diff --git a/tests/components/bang_olufsen/test_websocket.py b/tests/components/bang_olufsen/test_websocket.py index b17859a4f4e..ecf5b2d011e 100644 ---
a/tests/components/bang_olufsen/test_websocket.py +++ b/tests/components/bang_olufsen/test_websocket.py @@ -135,7 +135,6 @@ async def test_on_all_notifications_raw( }, "eventType": "WebSocketEventVolume", } - raw_notification_full = raw_notification # Get device ID for the modified notification that is sent as an event and in the log assert mock_config_entry.unique_id @@ -144,12 +143,11 @@ async def test_on_all_notifications_raw( identifiers={(DOMAIN, mock_config_entry.unique_id)} ) ) - raw_notification_full.update( - { - "device_id": device.id, - "serial_number": mock_config_entry.unique_id, - } - ) + raw_notification_full = { + "device_id": device.id, + "serial_number": int(mock_config_entry.unique_id), + **raw_notification, + } caplog.set_level(logging.DEBUG) diff --git a/tests/components/blink/test_config_flow.py b/tests/components/blink/test_config_flow.py index c89ab65ea1d..ec1a8b95e0d 100644 --- a/tests/components/blink/test_config_flow.py +++ b/tests/components/blink/test_config_flow.py @@ -55,6 +55,35 @@ async def test_form(hass: HomeAssistant) -> None: } assert len(mock_setup_entry.mock_calls) == 1 + # Now check for duplicates + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch("homeassistant.components.blink.config_flow.Auth.startup"), + patch( + "homeassistant.components.blink.config_flow.Auth.check_key_required", + return_value=False, + ), + patch( + "homeassistant.components.blink.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "blink@example.com", "password": "example"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + assert len(mock_setup_entry.mock_calls) == 0 + async def test_form_2fa(hass: HomeAssistant) -> None: """Test we get the 2fa form.""" diff --git a/tests/components/bluesound/test_media_player.py b/tests/components/bluesound/test_media_player.py index 0bf615de3da..217225628f2 100644 --- a/tests/components/bluesound/test_media_player.py +++ b/tests/components/bluesound/test_media_player.py @@ -325,17 +325,17 @@ async def test_attr_bluesound_group( setup_config_entry_secondary: None, player_mocks: PlayerMocks, ) -> None: - """Test the media player grouping.""" + """Test the media player grouping for leader.""" attr_bluesound_group = hass.states.get( "media_player.player_name1111" ).attributes.get("bluesound_group") assert attr_bluesound_group is None - updated_status = dataclasses.replace( - player_mocks.player_data.status_long_polling_mock.get(), - group_name="player-name1111+player-name2222", + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + slaves=[PairedPlayer("2.2.2.2", 11000)], ) - player_mocks.player_data.status_long_polling_mock.set(updated_status) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) # give the long polling loop a chance to update the state; this could be any async call await hass.async_block_till_done() @@ -347,6 +347,45 @@ async def test_attr_bluesound_group( assert attr_bluesound_group == ["player-name1111", "player-name2222"] +async def test_attr_bluesound_group_for_follower( + hass: HomeAssistant, + setup_config_entry: None, + setup_config_entry_secondary: None, + 
player_mocks: PlayerMocks, +) -> None: + """Test the media player grouping for follower.""" + attr_bluesound_group = hass.states.get( + "media_player.player_name2222" + ).attributes.get("bluesound_group") + assert attr_bluesound_group is None + + updated_sync_status = dataclasses.replace( + player_mocks.player_data.sync_status_long_polling_mock.get(), + slaves=[PairedPlayer("2.2.2.2", 11000)], + ) + player_mocks.player_data.sync_status_long_polling_mock.set(updated_sync_status) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + updated_sync_status = dataclasses.replace( + player_mocks.player_data_secondary.sync_status_long_polling_mock.get(), + master=PairedPlayer("1.1.1.1", 11000), + ) + player_mocks.player_data_secondary.sync_status_long_polling_mock.set( + updated_sync_status + ) + + # give the long polling loop a chance to update the state; this could be any async call + await hass.async_block_till_done() + + attr_bluesound_group = hass.states.get( + "media_player.player_name2222" + ).attributes.get("bluesound_group") + + assert attr_bluesound_group == ["player-name1111", "player-name2222"] + + async def test_volume_up_from_6_to_7( hass: HomeAssistant, setup_config_entry: None, diff --git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index 4d280a1d0e5..f490b854749 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -9,6 +9,7 @@ import respx from homeassistant import config_entries from homeassistant.components.bmw_connected_drive.const import ( + CONF_CAPTCHA_TOKEN, CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, @@ -24,8 +25,12 @@ FIXTURE_USER_INPUT = { CONF_PASSWORD: "p4ssw0rd", CONF_REGION: "rest_of_world", } -FIXTURE_REFRESH_TOKEN = "SOME_REFRESH_TOKEN" -FIXTURE_GCID = "SOME_GCID" +FIXTURE_CAPTCHA_INPUT = { + CONF_CAPTCHA_TOKEN: "captcha_token", +} +FIXTURE_USER_INPUT_W_CAPTCHA = FIXTURE_USER_INPUT | FIXTURE_CAPTCHA_INPUT +FIXTURE_REFRESH_TOKEN = "another_token_string" +FIXTURE_GCID = "DUMMY" FIXTURE_CONFIG_ENTRY = { "entry_id": "1", diff --git a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr index 81ef1220069..b87da22a332 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr @@ -4833,7 +4833,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', @@ -7202,7 +7202,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', @@ -8925,7 +8925,7 @@ }), ]), 'info': dict({ - 'gcid': 'SOME_GCID', + 'gcid': 'DUMMY', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', 'region': 'rest_of_world', diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index f57f1a304ac..8fa9d9be22b 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -4,17 +4,14 @@ from copy import deepcopy from unittest.mock import patch from bimmer_connected.api.authentication import MyBMWAuthentication -from bimmer_connected.models import ( - MyBMWAPIError, - 
MyBMWAuthError, - MyBMWCaptchaMissingError, -) +from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError from httpx import RequestError import pytest from homeassistant import config_entries from homeassistant.components.bmw_connected_drive.config_flow import DOMAIN from homeassistant.components.bmw_connected_drive.const import ( + CONF_CAPTCHA_TOKEN, CONF_READ_ONLY, CONF_REFRESH_TOKEN, ) @@ -23,10 +20,12 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . import ( + FIXTURE_CAPTCHA_INPUT, FIXTURE_CONFIG_ENTRY, FIXTURE_GCID, FIXTURE_REFRESH_TOKEN, FIXTURE_USER_INPUT, + FIXTURE_USER_INPUT_W_CAPTCHA, ) from tests.common import MockConfigEntry @@ -61,7 +60,7 @@ async def test_authentication_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -79,7 +78,7 @@ async def test_connection_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=FIXTURE_USER_INPUT, + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -97,7 +96,7 @@ async def test_api_error(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, - data=deepcopy(FIXTURE_USER_INPUT), + data=deepcopy(FIXTURE_USER_INPUT_W_CAPTCHA), ) assert result["type"] is FlowResultType.FORM @@ -105,6 +104,28 @@ async def test_api_error(hass: HomeAssistant) -> None: assert result["errors"] == {"base": "cannot_connect"} +@pytest.mark.usefixtures("bmw_fixture") +async def test_captcha_flow_missing_error(hass: HomeAssistant) -> None: + """Test the external flow with captcha failing once and succeeding the second time.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data=deepcopy(FIXTURE_USER_INPUT), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_CAPTCHA_TOKEN: " "} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "missing_captcha"} + + async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: """Test registering an integration and finishing flow works.""" with ( @@ -118,14 +139,22 @@ async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: return_value=True, ) as mock_setup_entry, ): - result2 = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data=deepcopy(FIXTURE_USER_INPUT), ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == FIXTURE_COMPLETE_ENTRY[CONF_USERNAME] - assert result2["data"] == FIXTURE_COMPLETE_ENTRY + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == FIXTURE_COMPLETE_ENTRY[CONF_USERNAME] + assert result["data"] == FIXTURE_COMPLETE_ENTRY assert 
len(mock_setup_entry.mock_calls) == 1 @@ -206,13 +235,20 @@ async def test_reauth(hass: HomeAssistant) -> None: assert suggested_values[CONF_PASSWORD] == wrong_password assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], FIXTURE_USER_INPUT + result = await hass.config_entries.flow.async_configure( + result["flow_id"], deepcopy(FIXTURE_USER_INPUT) ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY assert len(mock_setup_entry.mock_calls) == 2 @@ -243,13 +279,13 @@ async def test_reauth_unique_id_abort(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], {**FIXTURE_USER_INPUT, CONF_REGION: "north_america"} ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "account_mismatch" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "account_mismatch" assert config_entry.data == config_entry_with_wrong_password["data"] @@ -279,13 +315,20 @@ async def test_reconfigure(hass: HomeAssistant) -> None: assert suggested_values[CONF_PASSWORD] == FIXTURE_USER_INPUT[CONF_PASSWORD] assert suggested_values[CONF_REGION] == FIXTURE_USER_INPUT[CONF_REGION] - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], FIXTURE_USER_INPUT ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "captcha" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], FIXTURE_CAPTCHA_INPUT + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY @@ -307,40 +350,12 @@ async def test_reconfigure_unique_id_abort(hass: HomeAssistant) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], {**FIXTURE_USER_INPUT, CONF_USERNAME: "somebody@email.com"}, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "account_mismatch" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "account_mismatch" assert config_entry.data == FIXTURE_COMPLETE_ENTRY - - -@pytest.mark.usefixtures("bmw_fixture") -async def test_captcha_flow_not_set(hass: HomeAssistant) -> None: - """Test the external flow with captcha failing once and succeeding the second time.""" - - TEST_REGION = "north_america" - - # Start flow and open form - # Start flow and open form - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - 
assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - # Add login data - with patch( - "bimmer_connected.api.authentication.MyBMWAuthentication._login_row_na", - side_effect=MyBMWCaptchaMissingError( - "Missing hCaptcha token for North America login" - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={**FIXTURE_USER_INPUT, CONF_REGION: TEST_REGION}, - ) - assert result["errors"]["base"] == "missing_captcha" diff --git a/tests/components/brother/snapshots/test_sensor.ambr b/tests/components/brother/snapshots/test_sensor.ambr index a27c5addd61..4de85859461 100644 --- a/tests/components/brother/snapshots/test_sensor.ambr +++ b/tests/components/brother/snapshots/test_sensor.ambr @@ -31,7 +31,7 @@ 'supported_features': 0, 'translation_key': 'bw_pages', 'unique_id': '0123456789_bw_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_b_w_pages-state] @@ -39,7 +39,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW B/W pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_b_w_pages', @@ -131,7 +131,7 @@ 'supported_features': 0, 'translation_key': 'black_drum_page_counter', 'unique_id': '0123456789_black_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_black_drum_page_counter-state] @@ -139,7 +139,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Black drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_black_drum_page_counter', @@ -231,7 +231,7 @@ 'supported_features': 0, 'translation_key': 'black_drum_remaining_pages', 'unique_id': '0123456789_black_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_black_drum_remaining_pages-state] @@ -239,7 +239,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Black drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_black_drum_remaining_pages', @@ -331,7 +331,7 @@ 'supported_features': 0, 'translation_key': 'color_pages', 'unique_id': '0123456789_color_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_color_pages-state] @@ -339,7 +339,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Color pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_color_pages', @@ -381,7 +381,7 @@ 'supported_features': 0, 'translation_key': 'cyan_drum_page_counter', 'unique_id': '0123456789_cyan_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_cyan_drum_page_counter-state] @@ -389,7 +389,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Cyan drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_cyan_drum_page_counter', @@ -481,7 +481,7 @@ 'supported_features': 0, 'translation_key': 'cyan_drum_remaining_pages', 'unique_id': '0123456789_cyan_drum_remaining_pages', - 'unit_of_measurement': 'p', + 
'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_cyan_drum_remaining_pages-state] @@ -489,7 +489,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Cyan drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_cyan_drum_remaining_pages', @@ -581,7 +581,7 @@ 'supported_features': 0, 'translation_key': 'drum_page_counter', 'unique_id': '0123456789_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_drum_page_counter-state] @@ -589,7 +589,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_drum_page_counter', @@ -681,7 +681,7 @@ 'supported_features': 0, 'translation_key': 'drum_remaining_pages', 'unique_id': '0123456789_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_drum_remaining_pages-state] @@ -689,7 +689,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_drum_remaining_pages', @@ -731,7 +731,7 @@ 'supported_features': 0, 'translation_key': 'duplex_unit_page_counter', 'unique_id': '0123456789_duplex_unit_pages_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_duplex_unit_page_counter-state] @@ -739,7 +739,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Duplex unit page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_duplex_unit_page_counter', @@ -878,7 +878,7 @@ 'supported_features': 0, 'translation_key': 'magenta_drum_page_counter', 'unique_id': '0123456789_magenta_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_magenta_drum_page_counter-state] @@ -886,7 +886,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Magenta drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_magenta_drum_page_counter', @@ -978,7 +978,7 @@ 'supported_features': 0, 'translation_key': 'magenta_drum_remaining_pages', 'unique_id': '0123456789_magenta_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_magenta_drum_remaining_pages-state] @@ -986,7 +986,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Magenta drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_magenta_drum_remaining_pages', @@ -1078,7 +1078,7 @@ 'supported_features': 0, 'translation_key': 'page_counter', 'unique_id': '0123456789_page_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_page_counter-state] @@ -1086,7 +1086,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 
'sensor.hl_l2340dw_page_counter', @@ -1224,7 +1224,7 @@ 'supported_features': 0, 'translation_key': 'yellow_drum_page_counter', 'unique_id': '0123456789_yellow_drum_counter', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_yellow_drum_page_counter-state] @@ -1232,7 +1232,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Yellow drum page counter', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_yellow_drum_page_counter', @@ -1324,7 +1324,7 @@ 'supported_features': 0, 'translation_key': 'yellow_drum_remaining_pages', 'unique_id': '0123456789_yellow_drum_remaining_pages', - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }) # --- # name: test_sensors[sensor.hl_l2340dw_yellow_drum_remaining_pages-state] @@ -1332,7 +1332,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'HL-L2340DW Yellow drum remaining pages', 'state_class': , - 'unit_of_measurement': 'p', + 'unit_of_measurement': 'pages', }), 'context': , 'entity_id': 'sensor.hl_l2340dw_yellow_drum_remaining_pages', diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index e46cdd75f2d..7d2db2f8b46 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from bsblan import Device, Info, Sensor, State, StaticState +from bsblan import Device, HotWaterState, Info, Sensor, State, StaticState import pytest from homeassistant.components.bsblan.const import CONF_PASSKEY, DOMAIN @@ -58,6 +58,11 @@ def mock_bsblan() -> Generator[MagicMock]: bsblan.sensor.return_value = Sensor.from_json( load_fixture("sensor.json", DOMAIN) ) + bsblan.hot_water_state.return_value = HotWaterState.from_json( + load_fixture("dhw_state.json", DOMAIN) + ) + # mock get_temperature_unit property + bsblan.get_temperature_unit = "°C" yield bsblan diff --git a/tests/components/bsblan/fixtures/dhw_state.json b/tests/components/bsblan/fixtures/dhw_state.json new file mode 100644 index 00000000000..41b8c7beda5 --- /dev/null +++ b/tests/components/bsblan/fixtures/dhw_state.json @@ -0,0 +1,110 @@ +{ + "operating_mode": { + "name": "DHW operating mode", + "error": 0, + "value": "On", + "desc": "On", + "dataType": 1, + "readonly": 0, + "unit": "" + }, + "nominal_setpoint": { + "name": "DHW nominal setpoint", + "error": 0, + "value": "50.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "nominal_setpoint_max": { + "name": "DHW nominal setpoint maximum", + "error": 0, + "value": "65.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "reduced_setpoint": { + "name": "DHW reduced setpoint", + "error": 0, + "value": "40.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "release": { + "name": "DHW release programme", + "error": 0, + "value": "1", + "desc": "Released", + "dataType": 1, + "readonly": 0, + "unit": "" + }, + "legionella_function": { + "name": "Legionella function fixed weekday", + "error": 0, + "value": "0", + "desc": "Off", + "dataType": 1, + "readonly": 0, + "unit": "" + }, + "legionella_setpoint": { + "name": "Legionella function setpoint", + "error": 0, + "value": "60.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "legionella_periodicity": { + "name": "Legionella function periodicity", + "error": 0, + "value": "7", + "desc": "Weekly", + 
"dataType": 0, + "readonly": 0, + "unit": "days" + }, + "legionella_function_day": { + "name": "Legionella function day", + "error": 0, + "value": "6", + "desc": "Saturday", + "dataType": 1, + "readonly": 0, + "unit": "" + }, + "legionella_function_time": { + "name": "Legionella function time", + "error": 0, + "value": "12:00", + "desc": "", + "dataType": 2, + "readonly": 0, + "unit": "" + }, + "dhw_actual_value_top_temperature": { + "name": "DHW temperature actual value", + "error": 0, + "value": "48.5", + "desc": "", + "dataType": 0, + "readonly": 1, + "unit": "°C" + }, + "state_dhw_pump": { + "name": "State DHW circulation pump", + "error": 0, + "value": "0", + "desc": "Off", + "dataType": 1, + "readonly": 1, + "unit": "" + } +} diff --git a/tests/components/bsblan/snapshots/test_climate.ambr b/tests/components/bsblan/snapshots/test_climate.ambr index 4eb70fe2658..16828fea752 100644 --- a/tests/components/bsblan/snapshots/test_climate.ambr +++ b/tests/components/bsblan/snapshots/test_climate.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_celsius_fahrenheit[static.json][climate.bsb_lan-entry] +# name: test_celsius_fahrenheit[climate.bsb_lan-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -44,7 +44,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_celsius_fahrenheit[static.json][climate.bsb_lan-state] +# name: test_celsius_fahrenheit[climate.bsb_lan-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'current_temperature': 18.6, @@ -72,79 +72,6 @@ 'state': 'heat', }) # --- -# name: test_celsius_fahrenheit[static_F.json][climate.bsb_lan-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': -6.7, - 'min_temp': -13.3, - 'preset_modes': list([ - 'eco', - 'none', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'climate', - 'entity_category': None, - 'entity_id': 'climate.bsb_lan', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'bsblan', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:80:41:19:69:90-climate', - 'unit_of_measurement': None, - }) -# --- -# name: test_celsius_fahrenheit[static_F.json][climate.bsb_lan-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'current_temperature': -7.4, - 'friendly_name': 'BSB-LAN', - 'hvac_modes': list([ - , - , - , - ]), - 'max_temp': -6.7, - 'min_temp': -13.3, - 'preset_mode': 'none', - 'preset_modes': list([ - 'eco', - 'none', - ]), - 'supported_features': , - 'temperature': -7.5, - }), - 'context': , - 'entity_id': 'climate.bsb_lan', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'heat', - }) -# --- # name: test_climate_entity_properties[climate.bsb_lan-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/bsblan/snapshots/test_diagnostics.ambr b/tests/components/bsblan/snapshots/test_diagnostics.ambr index e033b2417d2..9fabd373205 100644 --- a/tests/components/bsblan/snapshots/test_diagnostics.ambr +++ b/tests/components/bsblan/snapshots/test_diagnostics.ambr @@ -6,67 +6,103 @@ 'current_temperature': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Room temp 1 actual value', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': 
'°C', - 'value': '18.6', + 'value': 18.6, }), 'outside_temperature': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Outside temp sensor local', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '6.1', + 'value': 6.1, }), }), 'state': dict({ 'current_temperature': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Room temp 1 actual value', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': '°C', - 'value': '18.6', + 'value': 18.6, }), 'hvac_action': dict({ 'data_type': 1, 'desc': 'Raumtemp’begrenzung', + 'error': 0, 'name': 'Status heating circuit 1', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': '', - 'value': '122', + 'value': 122, }), 'hvac_mode': dict({ 'data_type': 1, 'desc': 'Komfort', + 'error': 0, 'name': 'Operating mode', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', 'value': 'heat', }), 'hvac_mode2': dict({ 'data_type': 1, 'desc': 'Reduziert', + 'error': 0, 'name': 'Operating mode', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', - 'value': '2', + 'value': 2, }), 'room1_temp_setpoint_boost': dict({ 'data_type': 1, 'desc': 'Boost', + 'error': 0, 'name': 'Room 1 Temp Setpoint Boost', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': '°C', 'value': '22.5', }), 'room1_thermostat_mode': dict({ 'data_type': 1, 'desc': 'Kein Bedarf', + 'error': 0, 'name': 'Raumthermostat 1', + 'precision': None, + 'readonly': 1, + 'readwrite': 0, 'unit': '', - 'value': '0', + 'value': 0, }), 'target_temperature': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Room temperature Comfort setpoint', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '18.5', + 'value': 18.5, }), }), }), @@ -80,21 +116,33 @@ 'controller_family': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Device family', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', - 'value': '211', + 'value': 211, }), 'controller_variant': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Device variant', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', - 'value': '127', + 'value': 127, }), 'device_identification': dict({ 'data_type': 7, 'desc': '', + 'error': 0, 'name': 'Gerte-Identifikation', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '', 'value': 'RVS21.831F/127', }), @@ -103,16 +151,24 @@ 'max_temp': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Summer/winter changeover temp heat circuit 1', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '20.0', + 'value': 20.0, }), 'min_temp': dict({ 'data_type': 0, 'desc': '', + 'error': 0, 'name': 'Room temp frost protection setpoint', + 'precision': None, + 'readonly': 0, + 'readwrite': 0, 'unit': '°C', - 'value': '8.0', + 'value': 8.0, }), }), }) diff --git a/tests/components/bsblan/snapshots/test_water_heater.ambr b/tests/components/bsblan/snapshots/test_water_heater.ambr new file mode 100644 index 00000000000..c1a13b764c0 --- /dev/null +++ b/tests/components/bsblan/snapshots/test_water_heater.ambr @@ -0,0 +1,68 @@ +# serializer version: 1 +# name: test_water_heater_states[dhw_state.json][water_heater.bsb_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 65.0, + 'min_temp': 40.0, + 'operation_list': list([ + 'eco', + 'off', + 'on', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.bsb_lan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:80:41:19:69:90', + 'unit_of_measurement': None, + }) +# --- +# name: test_water_heater_states[dhw_state.json][water_heater.bsb_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 48.5, + 'friendly_name': 'BSB-LAN', + 'max_temp': 65.0, + 'min_temp': 40.0, + 'operation_list': list([ + 'eco', + 'off', + 'on', + ]), + 'operation_mode': 'on', + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': 50.0, + }), + 'context': , + 'entity_id': 'water_heater.bsb_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/bsblan/test_climate.py b/tests/components/bsblan/test_climate.py index c519c3043da..7ee12c5fa1a 100644 --- a/tests/components/bsblan/test_climate.py +++ b/tests/components/bsblan/test_climate.py @@ -3,12 +3,11 @@ from datetime import timedelta from unittest.mock import AsyncMock, MagicMock -from bsblan import BSBLANError, StaticState +from bsblan import BSBLANError from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.bsblan.const import DOMAIN from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_PRESET_MODE, @@ -27,37 +26,19 @@ import homeassistant.helpers.entity_registry as er from . 
import setup_with_selected_platforms -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - load_json_object_fixture, - snapshot_platform, -) +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform ENTITY_ID = "climate.bsb_lan" -@pytest.mark.parametrize( - ("static_file"), - [ - ("static.json"), - ("static_F.json"), - ], -) async def test_celsius_fahrenheit( hass: HomeAssistant, mock_bsblan: AsyncMock, mock_config_entry: MockConfigEntry, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - static_file: str, ) -> None: """Test Celsius and Fahrenheit temperature units.""" - - static_data = load_json_object_fixture(static_file, DOMAIN) - - mock_bsblan.static_values.return_value = StaticState.from_dict(static_data) - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) @@ -75,21 +56,9 @@ async def test_climate_entity_properties( await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - # Test when current_temperature is "---" - mock_current_temp = MagicMock() - mock_current_temp.value = "---" - mock_bsblan.state.return_value.current_temperature = mock_current_temp - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - state = hass.states.get(ENTITY_ID) - assert state.attributes["current_temperature"] is None - # Test target_temperature mock_target_temp = MagicMock() - mock_target_temp.value = "23.5" + mock_target_temp.value = 23.5 mock_bsblan.state.return_value.target_temperature = mock_target_temp freezer.tick(timedelta(minutes=1)) diff --git a/tests/components/bsblan/test_sensor.py b/tests/components/bsblan/test_sensor.py index dc22574168d..c95671a1a6b 100644 --- a/tests/components/bsblan/test_sensor.py +++ b/tests/components/bsblan/test_sensor.py @@ -1,19 +1,17 @@ """Tests for the BSB-Lan sensor platform.""" -from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory -import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.const import Platform from homeassistant.core import HomeAssistant import homeassistant.helpers.entity_registry as er from . 
import setup_with_selected_platforms -from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +from tests.common import MockConfigEntry, snapshot_platform ENTITY_CURRENT_TEMP = "sensor.bsb_lan_current_temperature" ENTITY_OUTSIDE_TEMP = "sensor.bsb_lan_outside_temperature" @@ -30,37 +28,3 @@ async def test_sensor_entity_properties( """Test the sensor entity properties.""" await setup_with_selected_platforms(hass, mock_config_entry, [Platform.SENSOR]) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.parametrize( - ("value", "expected_state"), - [ - (18.6, "18.6"), - (None, STATE_UNKNOWN), - ("---", STATE_UNKNOWN), - ], -) -async def test_current_temperature_scenarios( - hass: HomeAssistant, - mock_bsblan: AsyncMock, - mock_config_entry: MockConfigEntry, - freezer: FrozenDateTimeFactory, - value, - expected_state, -) -> None: - """Test various scenarios for current temperature sensor.""" - await setup_with_selected_platforms(hass, mock_config_entry, [Platform.SENSOR]) - - # Set up the mock value - mock_current_temp = MagicMock() - mock_current_temp.value = value - mock_bsblan.sensor.return_value.current_temperature = mock_current_temp - - # Trigger an update - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - # Check the state - state = hass.states.get(ENTITY_CURRENT_TEMP) - assert state.state == expected_state diff --git a/tests/components/bsblan/test_water_heater.py b/tests/components/bsblan/test_water_heater.py new file mode 100644 index 00000000000..ed920774aa5 --- /dev/null +++ b/tests/components/bsblan/test_water_heater.py @@ -0,0 +1,210 @@ +"""Tests for the BSB-Lan water heater platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock + +from bsblan import BSBLANError +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.water_heater import ( + ATTR_OPERATION_MODE, + DOMAIN as WATER_HEATER_DOMAIN, + SERVICE_SET_OPERATION_MODE, + SERVICE_SET_TEMPERATURE, + STATE_ECO, + STATE_OFF, + STATE_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +import homeassistant.helpers.entity_registry as er + +from . 
import setup_with_selected_platforms + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +ENTITY_ID = "water_heater.bsb_lan" + + +@pytest.mark.parametrize( + ("dhw_file"), + [ + ("dhw_state.json"), + ], +) +async def test_water_heater_states( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + dhw_file: str, +) -> None: + """Test water heater states with different configurations.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_water_heater_entity_properties( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the water heater entity properties.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + state = hass.states.get(ENTITY_ID) + assert state is not None + + # Test when nominal setpoint is "10" + mock_setpoint = MagicMock() + mock_setpoint.value = 10 + mock_bsblan.hot_water_state.return_value.nominal_setpoint = mock_setpoint + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes.get("temperature") == 10 + + +@pytest.mark.parametrize( + ("mode", "bsblan_mode"), + [ + (STATE_ECO, "Eco"), + (STATE_OFF, "Off"), + (STATE_ON, "On"), + ], +) +async def test_set_operation_mode( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + mode: str, + bsblan_mode: str, +) -> None: + """Test setting operation mode.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_OPERATION_MODE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_OPERATION_MODE: mode, + }, + blocking=True, + ) + + mock_bsblan.set_hot_water.assert_called_once_with(operating_mode=bsblan_mode) + + +async def test_set_invalid_operation_mode( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting invalid operation mode.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + with pytest.raises( + HomeAssistantError, + match=r"Operation mode invalid_mode is not valid for water_heater\.bsb_lan\. 
Valid operation modes are: eco, off, on", + ): + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_OPERATION_MODE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_OPERATION_MODE: "invalid_mode", + }, + blocking=True, + ) + + +async def test_set_temperature( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting temperature.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_TEMPERATURE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_TEMPERATURE: 50, + }, + blocking=True, + ) + + mock_bsblan.set_hot_water.assert_called_once_with(nominal_setpoint=50) + + +async def test_set_temperature_failure( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting temperature with API failure.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + mock_bsblan.set_hot_water.side_effect = BSBLANError("Test error") + + with pytest.raises( + HomeAssistantError, match="An error occurred while setting the temperature" + ): + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_TEMPERATURE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_TEMPERATURE: 50, + }, + blocking=True, + ) + + +async def test_operation_mode_error( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test operation mode setting with API failure.""" + await setup_with_selected_platforms( + hass, mock_config_entry, [Platform.WATER_HEATER] + ) + + mock_bsblan.set_hot_water.side_effect = BSBLANError("Test error") + + with pytest.raises( + HomeAssistantError, match="An error occurred while setting the operation mode" + ): + await hass.services.async_call( + domain=WATER_HEATER_DOMAIN, + service=SERVICE_SET_OPERATION_MODE, + service_data={ + ATTR_ENTITY_ID: ENTITY_ID, + ATTR_OPERATION_MODE: STATE_ECO, + }, + blocking=True, + ) diff --git a/tests/components/cambridge_audio/__init__.py b/tests/components/cambridge_audio/__init__.py index f6b5f48d39d..4e11a728f41 100644 --- a/tests/components/cambridge_audio/__init__.py +++ b/tests/components/cambridge_audio/__init__.py @@ -1,5 +1,9 @@ """Tests for the Cambridge Audio integration.""" +from unittest.mock import AsyncMock + +from aiostreammagic.models import CallbackType + from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -11,3 +15,11 @@ async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + + +async def mock_state_update( + client: AsyncMock, callback_type: CallbackType = CallbackType.STATE +) -> None: + """Trigger a callback in the media player.""" + for callback in client.register_state_update_callbacks.call_args_list: + await callback[0][0](client, callback_type) diff --git a/tests/components/cambridge_audio/test_init.py b/tests/components/cambridge_audio/test_init.py index 4a8c1b668e2..a058f7c8b6c 100644 --- a/tests/components/cambridge_audio/test_init.py +++ b/tests/components/cambridge_audio/test_init.py @@ -1,8 +1,10 @@ """Tests for the Cambridge Audio integration.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, Mock from aiostreammagic import StreamMagicError +from 
aiostreammagic.models import CallbackType +import pytest from syrupy import SnapshotAssertion from homeassistant.components.cambridge_audio.const import DOMAIN @@ -10,7 +12,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from . import setup_integration +from . import mock_state_update, setup_integration from tests.common import MockConfigEntry @@ -43,3 +45,23 @@ ) assert device_entry is not None assert device_entry == snapshot + + +async def test_disconnect_reconnect_log( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_stream_magic_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test disconnect and reconnect log messages.""" + await setup_integration(hass, mock_config_entry) + + mock_stream_magic_client.is_connected = Mock(return_value=False) + await mock_state_update(mock_stream_magic_client, CallbackType.CONNECTION) + assert "Disconnected from device at 192.168.20.218" in caplog.text + + mock_stream_magic_client.is_connected = Mock(return_value=True) + await mock_state_update(mock_stream_magic_client, CallbackType.CONNECTION) + assert "Reconnected to device at 192.168.20.218" in caplog.text diff --git a/tests/components/cambridge_audio/test_media_player.py b/tests/components/cambridge_audio/test_media_player.py index b857e61c235..bb2ccd1aec4 100644 --- a/tests/components/cambridge_audio/test_media_player.py +++ b/tests/components/cambridge_audio/test_media_player.py @@ -7,7 +7,6 @@ from aiostreammagic import ( ShuffleMode, TransportControl, ) -from aiostreammagic.models import CallbackType import pytest from homeassistant.components.media_player import ( @@ -49,18 +48,12 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from . import setup_integration +from . import mock_state_update, setup_integration from .const import ENTITY_ID from tests.common import MockConfigEntry -async def mock_state_update(client: AsyncMock) -> None: - """Trigger a callback in the media player.""" - for callback in client.register_state_update_callbacks.call_args_list: - await callback[0][0](client, CallbackType.STATE) - - async def test_entity_supported_features( hass: HomeAssistant, mock_stream_magic_client: AsyncMock, diff --git a/tests/components/camera/common.py b/tests/components/camera/common.py index 569756c2640..19ac2cc168b 100644 --- a/tests/components/camera/common.py +++ b/tests/components/camera/common.py @@ -6,7 +6,7 @@ components. Instead call the service directly.
from unittest.mock import Mock -from webrtc_models import RTCIceCandidate +from webrtc_models import RTCIceCandidateInit from homeassistant.components.camera import ( Camera, @@ -66,7 +66,7 @@ class SomeTestProvider(CameraWebRTCProvider): send_message(WebRTCAnswer(answer="answer")) async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Handle the WebRTC candidate.""" diff --git a/tests/components/camera/conftest.py b/tests/components/camera/conftest.py index f0c418711c7..b529ee3e9b9 100644 --- a/tests/components/camera/conftest.py +++ b/tests/components/camera/conftest.py @@ -4,7 +4,7 @@ from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, Mock, PropertyMock, patch import pytest -from webrtc_models import RTCIceCandidate +from webrtc_models import RTCIceCandidateInit from homeassistant.components import camera from homeassistant.components.camera.const import StreamType @@ -62,32 +62,17 @@ async def mock_camera_fixture(hass: HomeAssistant) -> AsyncGenerator[None]: def mock_camera_hls_fixture(mock_camera: None) -> Generator[None]: """Initialize a demo camera platform with HLS.""" with patch( - "homeassistant.components.camera.Camera.frontend_stream_type", - new_callable=PropertyMock(return_value=StreamType.HLS), - ): - yield - - -@pytest.fixture -async def mock_camera_webrtc_frontendtype_only( - hass: HomeAssistant, -) -> AsyncGenerator[None]: - """Initialize a demo camera platform with WebRTC.""" - assert await async_setup_component( - hass, "camera", {camera.DOMAIN: {"platform": "demo"}} - ) - await hass.async_block_till_done() - - with patch( - "homeassistant.components.camera.Camera.frontend_stream_type", - new_callable=PropertyMock(return_value=StreamType.WEB_RTC), + "homeassistant.components.camera.Camera.camera_capabilities", + new_callable=PropertyMock( + return_value=camera.CameraCapabilities({StreamType.HLS}) + ), ): yield @pytest.fixture async def mock_camera_webrtc( - mock_camera_webrtc_frontendtype_only: None, + mock_camera: None, ) -> AsyncGenerator[None]: """Initialize a demo camera platform with WebRTC.""" @@ -96,9 +81,17 @@ async def mock_camera_webrtc( ) -> None: send_message(WebRTCAnswer(WEBRTC_ANSWER)) - with patch( - "homeassistant.components.camera.Camera.async_handle_async_webrtc_offer", - side_effect=async_handle_async_webrtc_offer, + with ( + patch( + "homeassistant.components.camera.Camera.async_handle_async_webrtc_offer", + side_effect=async_handle_async_webrtc_offer, + ), + patch( + "homeassistant.components.camera.Camera.camera_capabilities", + new_callable=PropertyMock( + return_value=camera.CameraCapabilities({StreamType.WEB_RTC}) + ), + ), ): yield @@ -168,7 +161,6 @@ async def mock_test_webrtc_cameras(hass: HomeAssistant) -> None: _attr_supported_features: camera.CameraEntityFeature = ( camera.CameraEntityFeature.STREAM ) - _attr_frontend_stream_type: camera.StreamType = camera.StreamType.WEB_RTC async def stream_source(self) -> str | None: return STREAM_SOURCE @@ -192,7 +184,7 @@ async def mock_test_webrtc_cameras(hass: HomeAssistant) -> None: send_message(WebRTCAnswer(WEBRTC_ANSWER)) async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Handle a WebRTC candidate.""" # Do nothing diff --git a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index 32024694b7e..f9d30c240db 100644 --- 
a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -7,7 +7,7 @@ from unittest.mock import ANY, AsyncMock, Mock, PropertyMock, mock_open, patch import pytest from syrupy.assertion import SnapshotAssertion -from webrtc_models import RTCIceCandidate +from webrtc_models import RTCIceCandidateInit from homeassistant.components import camera from homeassistant.components.camera import ( @@ -27,6 +27,7 @@ from homeassistant.components.camera.helper import get_camera_from_entity_id from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.const import ( ATTR_ENTITY_ID, + CONF_PLATFORM, EVENT_HOMEASSISTANT_STARTED, STATE_UNAVAILABLE, ) @@ -954,7 +955,7 @@ async def _test_capabilities( send_message(WebRTCAnswer("answer")) async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Handle the WebRTC candidate.""" @@ -1054,3 +1055,27 @@ async def test_camera_capabilities_changing_native_support( await hass.async_block_till_done() await _test_capabilities(hass, hass_ws_client, cam.entity_id, set(), set()) + + +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_deprecated_frontend_stream_type_logs( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test using (_attr_)frontend_stream_type will log.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}}) + await hass.async_block_till_done() + + for entity_id in ( + "camera.property_frontend_stream_type", + "camera.attr_frontend_stream_type", + ): + camera_obj = get_camera_from_entity_id(hass, entity_id) + assert camera_obj.frontend_stream_type == StreamType.WEB_RTC + + assert ( + "Detected that custom integration 'test' is overwriting the 'frontend_stream_type' property in the PropertyFrontendStreamTypeCamera class, which is deprecated and will be removed in Home Assistant 2025.6," + ) in caplog.text + assert ( + "Detected that custom integration 'test' is setting the '_attr_frontend_stream_type' attribute in the AttrFrontendStreamTypeCamera class, which is deprecated and will be removed in Home Assistant 2025.6," + ) in caplog.text diff --git a/tests/components/camera/test_media_source.py b/tests/components/camera/test_media_source.py index 85f876d4e81..bd92010d242 100644 --- a/tests/components/camera/test_media_source.py +++ b/tests/components/camera/test_media_source.py @@ -5,6 +5,7 @@ from unittest.mock import PropertyMock, patch import pytest from homeassistant.components import media_source +from homeassistant.components.camera import CameraCapabilities from homeassistant.components.camera.const import StreamType from homeassistant.components.stream import FORMAT_CONTENT_TYPE from homeassistant.core import HomeAssistant @@ -91,7 +92,7 @@ async def test_browsing_webrtc(hass: HomeAssistant) -> None: assert item.children[0].media_content_type == FORMAT_CONTENT_TYPE["hls"] -@pytest.mark.usefixtures("mock_camera_hls") +@pytest.mark.usefixtures("mock_camera") async def test_resolving(hass: HomeAssistant) -> None: """Test resolving.""" # Adding stream enables HLS camera @@ -109,7 +110,7 @@ async def test_resolving(hass: HomeAssistant) -> None: assert item.mime_type == FORMAT_CONTENT_TYPE["hls"] -@pytest.mark.usefixtures("mock_camera_hls") +@pytest.mark.usefixtures("mock_camera") async def test_resolving_errors(hass: HomeAssistant) -> None: """Test resolving.""" @@ -130,8 +131,10 @@ async def test_resolving_errors(hass: 
HomeAssistant) -> None: with ( pytest.raises(media_source.Unresolvable) as exc_info, patch( - "homeassistant.components.camera.Camera.frontend_stream_type", - new_callable=PropertyMock(return_value=StreamType.WEB_RTC), + "homeassistant.components.camera.Camera.camera_capabilities", + new_callable=PropertyMock( + return_value=CameraCapabilities({StreamType.WEB_RTC}) + ), ), ): await media_source.async_resolve_media( diff --git a/tests/components/camera/test_webrtc.py b/tests/components/camera/test_webrtc.py index 7a1df556c20..a7c6d889409 100644 --- a/tests/components/camera/test_webrtc.py +++ b/tests/components/camera/test_webrtc.py @@ -6,7 +6,7 @@ from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from webrtc_models import RTCIceCandidate, RTCIceServer +from webrtc_models import RTCIceCandidate, RTCIceCandidateInit, RTCIceServer from homeassistant.components.camera import ( DATA_ICE_SERVERS, @@ -65,7 +65,6 @@ class MockCamera(Camera): _attr_name = "Test" _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM - _attr_frontend_stream_type: StreamType = StreamType.WEB_RTC def __init__(self) -> None: """Initialize the mock entity.""" @@ -139,42 +138,46 @@ async def init_test_integration( return test_camera -@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") async def test_async_register_webrtc_provider( hass: HomeAssistant, ) -> None: """Test registering a WebRTC provider.""" - await async_setup_component(hass, "camera", {}) - camera = get_camera_from_entity_id(hass, "camera.demo_camera") - assert camera.frontend_stream_type is StreamType.HLS + assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} provider = SomeTestProvider() unregister = async_register_webrtc_provider(hass, provider) await hass.async_block_till_done() - assert camera.frontend_stream_type is StreamType.WEB_RTC + assert camera.camera_capabilities.frontend_stream_types == { + StreamType.HLS, + StreamType.WEB_RTC, + } # Mark stream as unsupported provider._is_supported = False # Manually refresh the provider await camera.async_refresh_providers() - assert camera.frontend_stream_type is StreamType.HLS + assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} # Mark stream as supported provider._is_supported = True # Manually refresh the provider await camera.async_refresh_providers() - assert camera.frontend_stream_type is StreamType.WEB_RTC + assert camera.camera_capabilities.frontend_stream_types == { + StreamType.HLS, + StreamType.WEB_RTC, + } unregister() await hass.async_block_till_done() - assert camera.frontend_stream_type is StreamType.HLS + assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} -@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") +@pytest.mark.usefixtures("mock_camera", "mock_stream_source") async def test_async_register_webrtc_provider_twice( hass: HomeAssistant, register_test_provider: SomeTestProvider, @@ -192,13 +195,11 @@ async def test_async_register_webrtc_provider_camera_not_loaded( async_register_webrtc_provider(hass, SomeTestProvider()) -@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source") +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def test_async_register_ice_server( hass: HomeAssistant, ) -> None: """Test registering an ICE server.""" - await async_setup_component(hass, "camera", {}) - # Clear any existing ICE servers 
hass.data[DATA_ICE_SERVERS].clear() @@ -216,7 +217,7 @@ async def test_async_register_ice_server( unregister = async_register_ice_servers(hass, get_ice_servers) assert not called - camera = get_camera_from_entity_id(hass, "camera.demo_camera") + camera = get_camera_from_entity_id(hass, "camera.async") config = camera.async_get_webrtc_client_configuration() assert config.configuration.ice_servers == [ @@ -277,7 +278,7 @@ async def test_async_register_ice_server( assert config.configuration.ice_servers == [] -@pytest.mark.usefixtures("mock_camera_webrtc") +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def test_ws_get_client_config( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -286,7 +287,7 @@ async def test_ws_get_client_config( client = await hass_ws_client(hass) await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} ) msg = await client.receive_json() @@ -296,8 +297,12 @@ async def test_ws_get_client_config( assert msg["result"] == { "configuration": { "iceServers": [ - {"urls": "stun:stun.home-assistant.io:80"}, - {"urls": "stun:stun.home-assistant.io:3478"}, + { + "urls": [ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + }, ], }, "getCandidatesUpfront": False, @@ -316,7 +321,7 @@ async def test_ws_get_client_config( async_register_ice_servers(hass, get_ice_server) await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} ) msg = await client.receive_json() @@ -326,8 +331,12 @@ async def test_ws_get_client_config( assert msg["result"] == { "configuration": { "iceServers": [ - {"urls": "stun:stun.home-assistant.io:80"}, - {"urls": "stun:stun.home-assistant.io:3478"}, + { + "urls": [ + "stun:stun.home-assistant.io:80", + "stun:stun.home-assistant.io:3478", + ] + }, { "urls": ["stun:example2.com", "turn:example2.com"], "username": "user", @@ -362,7 +371,7 @@ async def test_ws_get_client_config_sync_offer( } -@pytest.mark.usefixtures("mock_camera_webrtc") +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def test_ws_get_client_config_custom_config( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -376,7 +385,7 @@ async def test_ws_get_client_config_custom_config( client = await hass_ws_client(hass) await client.send_json_auto_id( - {"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"} + {"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"} ) msg = await client.receive_json() @@ -389,7 +398,7 @@ async def test_ws_get_client_config_custom_config( } -@pytest.mark.usefixtures("mock_camera_hls") +@pytest.mark.usefixtures("mock_camera") async def test_ws_get_client_config_no_rtc_camera( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -407,7 +416,7 @@ async def test_ws_get_client_config_no_rtc_camera( assert not msg["success"] assert msg["error"] == { "code": "webrtc_get_client_config_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", + "message": "Camera does not support WebRTC, frontend_stream_types={}", } @@ -419,15 +428,21 @@ async def provide_webrtc_answer(stream_source: str, offer: str, stream_id: str) @pytest.fixture(name="mock_rtsp_to_webrtc") -def mock_rtsp_to_webrtc_fixture(hass: HomeAssistant) -> Generator[Mock]: +def 
mock_rtsp_to_webrtc_fixture( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> Generator[Mock]: """Fixture that registers a mock rtsp to webrtc provider.""" mock_provider = Mock(side_effect=provide_webrtc_answer) unsub = async_register_rtsp_to_web_rtc_provider(hass, "mock_domain", mock_provider) + assert ( + "async_register_rtsp_to_web_rtc_provider is a deprecated function which will" + " be removed in HA Core 2025.6. Use async_register_webrtc_provider instead" + ) in caplog.text yield mock_provider unsub() -@pytest.mark.usefixtures("mock_camera_webrtc") +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def test_websocket_webrtc_offer( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -436,7 +451,7 @@ async def test_websocket_webrtc_offer( await client.send_json_auto_id( { "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", + "entity_id": "camera.async", "offer": WEBRTC_OFFER, } ) @@ -471,12 +486,34 @@ async def test_websocket_webrtc_offer( assert msg["success"] +@pytest.mark.filterwarnings( + "ignore:Using RTCIceCandidate is deprecated. Use RTCIceCandidateInit instead" +) +@pytest.mark.usefixtures("mock_stream_source", "mock_camera") +async def test_websocket_webrtc_offer_webrtc_provider_deprecated( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + register_test_provider: SomeTestProvider, +) -> None: + """Test initiating a WebRTC stream with a webrtc provider with the deprecated class.""" + await _test_websocket_webrtc_offer_webrtc_provider( + hass, + hass_ws_client, + register_test_provider, + WebRTCCandidate(RTCIceCandidate("candidate")), + {"type": "candidate", "candidate": {"candidate": "candidate"}}, + ) + + @pytest.mark.parametrize( ("message", "expected_frontend_message"), [ ( - WebRTCCandidate(RTCIceCandidate("candidate")), - {"type": "candidate", "candidate": "candidate"}, + WebRTCCandidate(RTCIceCandidateInit("candidate")), + { + "type": "candidate", + "candidate": {"candidate": "candidate", "sdpMLineIndex": 0}, + }, ), ( WebRTCError("webrtc_offer_failed", "error"), @@ -493,6 +530,23 @@ async def test_websocket_webrtc_offer_webrtc_provider( register_test_provider: SomeTestProvider, message: WebRTCMessage, expected_frontend_message: dict[str, Any], +) -> None: + """Test initiating a WebRTC stream with a webrtc provider.""" + await _test_websocket_webrtc_offer_webrtc_provider( + hass, + hass_ws_client, + register_test_provider, + message, + expected_frontend_message, + ) + + +async def _test_websocket_webrtc_offer_webrtc_provider( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + register_test_provider: SomeTestProvider, + message: WebRTCMessage, + expected_frontend_message: dict[str, Any], ) -> None: """Test initiating a WebRTC stream with a webrtc provider.""" client = await hass_ws_client(hass) @@ -547,11 +601,11 @@ async def test_websocket_webrtc_offer_webrtc_provider( mock_async_close_session.assert_called_once_with(session_id) -@pytest.mark.usefixtures("mock_camera_webrtc") async def test_websocket_webrtc_offer_invalid_entity( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test WebRTC with a camera entity that does not exist.""" + await async_setup_component(hass, "camera", {}) client = await hass_ws_client(hass) await client.send_json_auto_id( { @@ -570,7 +624,7 @@ async def test_websocket_webrtc_offer_invalid_entity( } -@pytest.mark.usefixtures("mock_camera_webrtc") +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def 
test_websocket_webrtc_offer_missing_offer( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -597,7 +651,6 @@ async def test_websocket_webrtc_offer_missing_offer( (TimeoutError(), "Timeout handling WebRTC offer"), ], ) -@pytest.mark.usefixtures("mock_camera_webrtc_frontendtype_only") async def test_websocket_webrtc_offer_failure( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, @@ -639,24 +692,33 @@ async def test_websocket_webrtc_offer_failure( } +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def test_websocket_webrtc_offer_sync( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - init_test_integration: MockCamera, + caplog: pytest.LogCaptureFixture, ) -> None: """Test sync WebRTC stream offer.""" client = await hass_ws_client(hass) - init_test_integration.set_sync_answer(WEBRTC_ANSWER) await client.send_json_auto_id( { "type": "camera/webrtc/offer", - "entity_id": "camera.test", + "entity_id": "camera.sync", "offer": WEBRTC_OFFER, } ) response = await client.receive_json() + assert ( + "tests.components.camera.conftest", + logging.WARNING, + ( + "async_handle_web_rtc_offer was called from camera, this is a deprecated " + "function which will be removed in HA Core 2025.6. Use " + "async_handle_async_webrtc_offer instead" + ), + ) in caplog.record_tuples assert response["type"] == TYPE_RESULT assert response["success"] subscription_id = response["id"] @@ -738,7 +800,7 @@ async def test_websocket_webrtc_offer_invalid_stream_type( assert not response["success"] assert response["error"] == { "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", + "message": "Camera does not support WebRTC, frontend_stream_types={}", } @@ -791,45 +853,6 @@ async def mock_hls_stream_source_fixture() -> AsyncGenerator[AsyncMock]: yield mock_hls_stream_source -@pytest.mark.usefixtures( - "mock_camera", - "mock_hls_stream_source", # Not an RTSP stream source - "mock_camera_webrtc_frontendtype_only", -) -async def test_unsupported_rtsp_to_webrtc_stream_type( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator -) -> None: - """Test rtsp-to-webrtc is not registered for non-RTSP streams.""" - client = await hass_ws_client(hass) - await client.send_json_auto_id( - { - "type": "camera/webrtc/offer", - "entity_id": "camera.demo_camera", - "offer": WEBRTC_OFFER, - } - ) - response = await client.receive_json() - assert response["type"] == TYPE_RESULT - assert response["success"] - subscription_id = response["id"] - - # Session id - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"]["type"] == "session" - - # Answer - response = await client.receive_json() - assert response["id"] == subscription_id - assert response["type"] == "event" - assert response["event"] == { - "type": "error", - "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC", - } - - @pytest.mark.usefixtures("mock_camera", "mock_stream_source") async def test_rtsp_to_webrtc_provider_unregistered( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -885,7 +908,7 @@ async def test_rtsp_to_webrtc_provider_unregistered( assert not response["success"] assert response["error"] == { "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", + "message": "Camera does not support WebRTC, frontend_stream_types={}", } assert not mock_provider.called @@ -941,34 +964,103 @@ async def 
test_rtsp_to_webrtc_offer_not_accepted( unsub() -@pytest.mark.usefixtures("mock_camera_webrtc") +@pytest.mark.parametrize( + ("frontend_candidate", "expected_candidate"), + [ + ( + {"candidate": "candidate", "sdpMLineIndex": 0}, + RTCIceCandidateInit("candidate"), + ), + ( + {"candidate": "candidate", "sdpMLineIndex": 1}, + RTCIceCandidateInit("candidate", sdp_m_line_index=1), + ), + ( + {"candidate": "candidate", "sdpMid": "1"}, + RTCIceCandidateInit("candidate", sdp_mid="1"), + ), + ], + ids=["candidate", "candidate-mline-index", "candidate-mid"], +) +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def test_ws_webrtc_candidate( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + frontend_candidate: dict[str, Any], + expected_candidate: RTCIceCandidateInit, ) -> None: """Test ws webrtc candidate command.""" client = await hass_ws_client(hass) session_id = "session_id" - candidate = "candidate" - with patch( - "homeassistant.components.camera.Camera.async_on_webrtc_candidate" + with patch.object( + get_camera_from_entity_id(hass, "camera.async"), "async_on_webrtc_candidate" ) as mock_on_webrtc_candidate: await client.send_json_auto_id( { "type": "camera/webrtc/candidate", - "entity_id": "camera.demo_camera", + "entity_id": "camera.async", "session_id": session_id, - "candidate": candidate, + "candidate": frontend_candidate, } ) response = await client.receive_json() assert response["type"] == TYPE_RESULT assert response["success"] - mock_on_webrtc_candidate.assert_called_once_with( - session_id, RTCIceCandidate(candidate) + mock_on_webrtc_candidate.assert_called_once_with(session_id, expected_candidate) + + +@pytest.mark.parametrize( + ("message", "expected_error_msg"), + [ + ( + {"sdpMLineIndex": 0}, + ( + 'Field "candidate" of type str is missing in RTCIceCandidateInit instance' + " for dictionary value @ data['candidate']. Got {'sdpMLineIndex': 0}" + ), + ), + ( + {"candidate": "candidate", "sdpMLineIndex": -1}, + ( + "sdpMLineIndex must be greater than or equal to 0 for dictionary value @ " + "data['candidate']. 
Got {'candidate': 'candidate', 'sdpMLineIndex': -1}" + ), + ), + ], + ids=[ + "candidate missing", + "spd_mline_index smaller than 0", + ], +) +@pytest.mark.usefixtures("mock_test_webrtc_cameras") +async def test_ws_webrtc_candidate_invalid_candidate_message( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + message: dict, + expected_error_msg: str, +) -> None: + """Test ws WebRTC candidate command for a camera with a different stream_type.""" + client = await hass_ws_client(hass) + with patch("homeassistant.components.camera.Camera.async_on_webrtc_candidate"): + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.async", + "session_id": "session_id", + "candidate": message, + } ) + response = await client.receive_json() + + assert response["type"] == TYPE_RESULT + assert not response["success"] + assert response["error"] == { + "code": "invalid_format", + "message": expected_error_msg, + } -@pytest.mark.usefixtures("mock_camera_webrtc") +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def test_ws_webrtc_candidate_not_supported( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -977,9 +1069,9 @@ async def test_ws_webrtc_candidate_not_supported( await client.send_json_auto_id( { "type": "camera/webrtc/candidate", - "entity_id": "camera.demo_camera", + "entity_id": "camera.sync", "session_id": "session_id", - "candidate": "candidate", + "candidate": {"candidate": "candidate"}, } ) response = await client.receive_json() @@ -1009,29 +1101,29 @@ async def test_ws_webrtc_candidate_webrtc_provider( "type": "camera/webrtc/candidate", "entity_id": "camera.demo_camera", "session_id": session_id, - "candidate": candidate, + "candidate": {"candidate": candidate, "sdpMLineIndex": 1}, } ) response = await client.receive_json() assert response["type"] == TYPE_RESULT assert response["success"] mock_on_webrtc_candidate.assert_called_once_with( - session_id, RTCIceCandidate(candidate) + session_id, RTCIceCandidateInit(candidate, sdp_m_line_index=1) ) -@pytest.mark.usefixtures("mock_camera_webrtc") async def test_ws_webrtc_candidate_invalid_entity( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test ws WebRTC candidate command with a camera entity that does not exist.""" + await async_setup_component(hass, "camera", {}) client = await hass_ws_client(hass) await client.send_json_auto_id( { "type": "camera/webrtc/candidate", "entity_id": "camera.does_not_exist", "session_id": "session_id", - "candidate": "candidate", + "candidate": {"candidate": "candidate"}, } ) response = await client.receive_json() @@ -1044,7 +1136,7 @@ async def test_ws_webrtc_candidate_invalid_entity( } -@pytest.mark.usefixtures("mock_camera_webrtc") +@pytest.mark.usefixtures("mock_test_webrtc_cameras") async def test_ws_webrtc_canidate_missing_candidate( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1053,7 +1145,7 @@ async def test_ws_webrtc_canidate_missing_candidate( await client.send_json_auto_id( { "type": "camera/webrtc/candidate", - "entity_id": "camera.demo_camera", + "entity_id": "camera.async", "session_id": "session_id", } ) @@ -1075,7 +1167,7 @@ async def test_ws_webrtc_candidate_invalid_stream_type( "type": "camera/webrtc/candidate", "entity_id": "camera.demo_camera", "session_id": "session_id", - "candidate": "candidate", + "candidate": {"candidate": "candidate"}, } ) response = await client.receive_json() @@ -1084,7 +1176,7 @@ async def test_ws_webrtc_candidate_invalid_stream_type( assert 
not response["success"] assert response["error"] == { "code": "webrtc_candidate_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", + "message": "Camera does not support WebRTC, frontend_stream_types={}", } @@ -1118,7 +1210,7 @@ async def test_webrtc_provider_optional_interface(hass: HomeAssistant) -> None: send_message(WebRTCAnswer(answer="answer")) async def async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Handle the WebRTC candidate.""" @@ -1128,7 +1220,9 @@ async def test_webrtc_provider_optional_interface(hass: HomeAssistant) -> None: await provider.async_handle_async_webrtc_offer( Mock(), "offer_sdp", "session_id", Mock() ) - await provider.async_on_webrtc_candidate("session_id", RTCIceCandidate("candidate")) + await provider.async_on_webrtc_candidate( + "session_id", RTCIceCandidateInit("candidate") + ) provider.async_close_session("session_id") diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index aa162e0b683..254fb26a471 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -24,6 +24,7 @@ from homeassistant.components.climate.const import ( ATTR_MAX_TEMP, ATTR_MIN_TEMP, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -31,8 +32,11 @@ from homeassistant.components.climate.const import ( SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_HORIZONTAL_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, + SWING_HORIZONTAL_OFF, + SWING_HORIZONTAL_ON, ClimateEntityFeature, ) from homeassistant.config_entries import ConfigEntry @@ -104,6 +108,7 @@ class MockClimateEntity(MockEntity, ClimateEntity): ClimateEntityFeature.FAN_MODE | ClimateEntityFeature.PRESET_MODE | ClimateEntityFeature.SWING_MODE + | ClimateEntityFeature.SWING_HORIZONTAL_MODE ) _attr_preset_mode = "home" _attr_preset_modes = ["home", "away"] @@ -111,6 +116,8 @@ class MockClimateEntity(MockEntity, ClimateEntity): _attr_fan_modes = ["auto", "off"] _attr_swing_mode = "auto" _attr_swing_modes = ["auto", "off"] + _attr_swing_horizontal_mode = "on" + _attr_swing_horizontal_modes = [SWING_HORIZONTAL_ON, SWING_HORIZONTAL_OFF] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_target_temperature = 20 _attr_target_temperature_high = 25 @@ -144,6 +151,10 @@ class MockClimateEntity(MockEntity, ClimateEntity): """Set swing mode.""" self._attr_swing_mode = swing_mode + def set_swing_horizontal_mode(self, swing_horizontal_mode: str) -> None: + """Set horizontal swing mode.""" + self._attr_swing_horizontal_mode = swing_horizontal_mode + def set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target hvac mode.""" self._attr_hvac_mode = hvac_mode @@ -194,7 +205,11 @@ def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, s (enum_field, constant_prefix) for enum_field in enum if enum_field - not in [ClimateEntityFeature.TURN_ON, ClimateEntityFeature.TURN_OFF] + not in [ + ClimateEntityFeature.TURN_ON, + ClimateEntityFeature.TURN_OFF, + ClimateEntityFeature.SWING_HORIZONTAL_MODE, + ] ] @@ -339,6 +354,7 @@ async def test_mode_validation( assert state.attributes.get(ATTR_PRESET_MODE) == "home" assert state.attributes.get(ATTR_FAN_MODE) == "auto" assert state.attributes.get(ATTR_SWING_MODE) == "auto" + assert state.attributes.get(ATTR_SWING_HORIZONTAL_MODE) == "on" await 
hass.services.async_call( DOMAIN, @@ -358,6 +374,15 @@ async def test_mode_validation( }, blocking=True, ) + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SWING_HORIZONTAL_MODE, + { + "entity_id": "climate.test", + "swing_horizontal_mode": "off", + }, + blocking=True, + ) await hass.services.async_call( DOMAIN, SERVICE_SET_FAN_MODE, @@ -371,6 +396,7 @@ async def test_mode_validation( assert state.attributes.get(ATTR_PRESET_MODE) == "away" assert state.attributes.get(ATTR_FAN_MODE) == "off" assert state.attributes.get(ATTR_SWING_MODE) == "off" + assert state.attributes.get(ATTR_SWING_HORIZONTAL_MODE) == "off" await hass.services.async_call( DOMAIN, @@ -427,6 +453,25 @@ async def test_mode_validation( ) assert exc.value.translation_key == "not_valid_swing_mode" + with pytest.raises( + ServiceValidationError, + match="Horizontal swing mode invalid is not valid. Valid horizontal swing modes are: on, off", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_SWING_HORIZONTAL_MODE, + { + "entity_id": "climate.test", + "swing_horizontal_mode": "invalid", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Horizontal swing mode invalid is not valid. Valid horizontal swing modes are: on, off" + ) + assert exc.value.translation_key == "not_valid_horizontal_swing_mode" + with pytest.raises( ServiceValidationError, match="Fan mode invalid is not valid. Valid fan modes are: auto, off", diff --git a/tests/components/climate/test_reproduce_state.py b/tests/components/climate/test_reproduce_state.py index 0632ebcc9e4..3bc91467f14 100644 --- a/tests/components/climate/test_reproduce_state.py +++ b/tests/components/climate/test_reproduce_state.py @@ -6,6 +6,7 @@ from homeassistant.components.climate import ( ATTR_FAN_MODE, ATTR_HUMIDITY, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -14,6 +15,7 @@ from homeassistant.components.climate import ( SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, + SERVICE_SET_SWING_HORIZONTAL_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, HVACMode, @@ -96,6 +98,7 @@ async def test_state_with_context(hass: HomeAssistant) -> None: [ (SERVICE_SET_PRESET_MODE, ATTR_PRESET_MODE), (SERVICE_SET_SWING_MODE, ATTR_SWING_MODE), + (SERVICE_SET_SWING_HORIZONTAL_MODE, ATTR_SWING_HORIZONTAL_MODE), (SERVICE_SET_FAN_MODE, ATTR_FAN_MODE), (SERVICE_SET_HUMIDITY, ATTR_HUMIDITY), (SERVICE_SET_TEMPERATURE, ATTR_TEMPERATURE), @@ -122,6 +125,7 @@ async def test_attribute(hass: HomeAssistant, service, attribute) -> None: [ (SERVICE_SET_PRESET_MODE, ATTR_PRESET_MODE), (SERVICE_SET_SWING_MODE, ATTR_SWING_MODE), + (SERVICE_SET_SWING_HORIZONTAL_MODE, ATTR_SWING_HORIZONTAL_MODE), (SERVICE_SET_FAN_MODE, ATTR_FAN_MODE), ], ) diff --git a/tests/components/climate/test_significant_change.py b/tests/components/climate/test_significant_change.py index f060344722a..7d709090357 100644 --- a/tests/components/climate/test_significant_change.py +++ b/tests/components/climate/test_significant_change.py @@ -10,6 +10,7 @@ from homeassistant.components.climate import ( ATTR_HUMIDITY, ATTR_HVAC_ACTION, ATTR_PRESET_MODE, + ATTR_SWING_HORIZONTAL_MODE, ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, @@ -66,6 +67,18 @@ async def test_significant_state_change(hass: HomeAssistant) -> None: ), (METRIC, {ATTR_SWING_MODE: "old_value"}, {ATTR_SWING_MODE: "old_value"}, False), (METRIC, {ATTR_SWING_MODE: "old_value"}, {ATTR_SWING_MODE: "new_value"}, True), + ( + METRIC, + 
{ATTR_SWING_HORIZONTAL_MODE: "old_value"}, + {ATTR_SWING_HORIZONTAL_MODE: "old_value"}, + False, + ), + ( + METRIC, + {ATTR_SWING_HORIZONTAL_MODE: "old_value"}, + {ATTR_SWING_HORIZONTAL_MODE: "new_value"}, + True, + ), # multiple attributes ( METRIC, diff --git a/tests/components/cloud/__init__.py b/tests/components/cloud/__init__.py index 18f8cd4d311..1fb9f2b0d40 100644 --- a/tests/components/cloud/__init__.py +++ b/tests/components/cloud/__init__.py @@ -35,6 +35,7 @@ PIPELINE_DATA = { "tts_voice": "Arnold Schwarzenegger", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, }, { "conversation_engine": "conversation_engine_2", @@ -49,6 +50,7 @@ PIPELINE_DATA = { "tts_voice": "The Voice", "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, }, { "conversation_engine": "conversation_engine_3", @@ -63,6 +65,7 @@ PIPELINE_DATA = { "tts_voice": None, "wake_word_entity": None, "wake_word_id": None, + "prefer_local_intents": False, }, ], "preferred_item": "01GX8ZWBAQYWNB1XV3EXEZ75DY", diff --git a/tests/components/cloud/test_tts.py b/tests/components/cloud/test_tts.py index 499981c643d..bf9fd7302ae 100644 --- a/tests/components/cloud/test_tts.py +++ b/tests/components/cloud/test_tts.py @@ -227,25 +227,21 @@ async def test_get_tts_audio( await on_start_callback() client = await hass_client() - url = "/api/tts_get_url" - data |= {"message": "There is someone at the door."} + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= {"message": "There is someone at the door."} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -280,25 +276,21 @@ async def test_get_tts_audio_logged_out( await hass.async_block_till_done() client = await hass_client() - url = "/api/tts_get_url" - data |= {"message": "There is someone at the door."} + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= {"message": "There is someone at the door."} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + 
await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -342,28 +334,24 @@ async def test_tts_entity( assert state assert state.state == STATE_UNKNOWN - url = "/api/tts_get_url" - data = { - "engine_id": entity_id, - "message": "There is someone at the door.", - } + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data = { + "engine_id": entity_id, + "message": "There is someone at the door.", + } - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{entity_id}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_6e8b81ac47_{entity_id}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -482,29 +470,25 @@ async def test_deprecated_voice( client = await hass_client() # Test with non deprecated voice. - url = "/api/tts_get_url" - data |= { - "message": "There is someone at the door.", - "language": language, - "options": {"voice": replacement_voice}, - } + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= { + "message": "There is someone at the door.", + "language": language, + "options": {"voice": replacement_voice}, + } - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_87567e3e29_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_87567e3e29_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -522,22 +506,18 @@ async def test_deprecated_voice( # Test with deprecated voice. 
data["options"] = {"voice": deprecated_voice} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_13646b7d32_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_13646b7d32_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() issue_id = f"deprecated_voice_{deprecated_voice}" @@ -631,28 +611,24 @@ async def test_deprecated_gender( client = await hass_client() # Test without deprecated gender option. - url = "/api/tts_get_url" - data |= { - "message": "There is someone at the door.", - "language": language, - } + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= { + "message": "There is someone at the door.", + "language": language, + } - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_6e8b81ac47_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_6e8b81ac47_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() assert mock_process_tts.call_count == 1 assert mock_process_tts.call_args is not None @@ -667,22 +643,18 @@ async def test_deprecated_gender( # Test with deprecated gender option. 
data["options"] = {"gender": gender_option} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() - assert response == { - "url": ( - "http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_dd0e95eb04_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_{language.lower()}_dd0e95eb04_{expected_url_suffix}.mp3" - ), - } - await hass.async_block_till_done() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } + await hass.async_block_till_done() issue_id = "deprecated_gender" diff --git a/tests/components/color_extractor/test_service.py b/tests/components/color_extractor/test_service.py index 7b603420bdf..23ba5e7808c 100644 --- a/tests/components/color_extractor/test_service.py +++ b/tests/components/color_extractor/test_service.py @@ -78,7 +78,7 @@ async def setup_light(hass: HomeAssistant): # Validate starting values assert state.state == STATE_ON assert state.attributes.get(ATTR_BRIGHTNESS) == 180 - assert state.attributes.get(ATTR_RGB_COLOR) == (255, 63, 111) + assert state.attributes.get(ATTR_RGB_COLOR) == (255, 64, 112) await hass.services.async_call( LIGHT_DOMAIN, diff --git a/tests/components/command_line/test_binary_sensor.py b/tests/components/command_line/test_binary_sensor.py index 5d1cd845e27..aa49410aacb 100644 --- a/tests/components/command_line/test_binary_sensor.py +++ b/tests/components/command_line/test_binary_sensor.py @@ -87,7 +87,7 @@ async def test_setup_platform_yaml(hass: HomeAssistant) -> None: "payload_off": "0", "value_template": "{{ value | multiply(0.1) }}", "icon": ( - '{% if this.state=="on" %} mdi:on {% else %} mdi:off {% endif %}' + '{% if this.attributes.icon=="mdi:icon2" %} mdi:icon1 {% else %} mdi:icon2 {% endif %}' ), } } @@ -101,7 +101,15 @@ async def test_template(hass: HomeAssistant, load_yaml_integration: None) -> Non entity_state = hass.states.get("binary_sensor.test") assert entity_state assert entity_state.state == STATE_ON - assert entity_state.attributes.get("icon") == "mdi:on" + assert entity_state.attributes.get("icon") == "mdi:icon2" + + async_fire_time_changed(hass, dt_util.now() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + entity_state = hass.states.get("binary_sensor.test") + assert entity_state + assert entity_state.state == STATE_ON + assert entity_state.attributes.get("icon") == "mdi:icon1" @pytest.mark.parametrize( diff --git a/tests/components/command_line/test_cover.py b/tests/components/command_line/test_cover.py index da9d86ba8a5..426968eccc5 100644 --- a/tests/components/command_line/test_cover.py +++ b/tests/components/command_line/test_cover.py @@ -422,13 +422,19 @@ async def test_icon_template(hass: HomeAssistant) -> None: "command_close": f"echo 0 > {path}", "command_stop": f"echo 0 > {path}", "name": "Test", - "icon": "{% if this.state=='open' %} mdi:open {% else %} mdi:closed {% endif %}", + "icon": '{% if this.attributes.icon=="mdi:icon2" %} mdi:icon1 {% else %} mdi:icon2 {% endif %}', } } ] }, ) await hass.async_block_till_done() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: 
"cover.test"}, + blocking=True, + ) await hass.services.async_call( COVER_DOMAIN, @@ -438,7 +444,7 @@ async def test_icon_template(hass: HomeAssistant) -> None: ) entity_state = hass.states.get("cover.test") assert entity_state - assert entity_state.attributes.get("icon") == "mdi:closed" + assert entity_state.attributes.get("icon") == "mdi:icon1" await hass.services.async_call( COVER_DOMAIN, @@ -448,4 +454,4 @@ async def test_icon_template(hass: HomeAssistant) -> None: ) entity_state = hass.states.get("cover.test") assert entity_state - assert entity_state.attributes.get("icon") == "mdi:open" + assert entity_state.attributes.get("icon") == "mdi:icon2" diff --git a/tests/components/command_line/test_switch.py b/tests/components/command_line/test_switch.py index 549e729892c..d62410fa792 100644 --- a/tests/components/command_line/test_switch.py +++ b/tests/components/command_line/test_switch.py @@ -552,7 +552,7 @@ async def test_templating(hass: HomeAssistant) -> None: "command_off": f"echo 0 > {path}", "value_template": '{{ value=="1" }}', "icon": ( - '{% if this.state=="on" %} mdi:on {% else %} mdi:off {% endif %}' + '{% if this.attributes.icon=="mdi:icon2" %} mdi:icon1 {% else %} mdi:icon2 {% endif %}' ), "name": "Test", } @@ -564,7 +564,7 @@ async def test_templating(hass: HomeAssistant) -> None: "command_off": f"echo 0 > {path}", "value_template": '{{ value=="1" }}', "icon": ( - '{% if states("switch.test2")=="on" %} mdi:on {% else %} mdi:off {% endif %}' + '{% if states("switch.test")=="off" %} mdi:off {% else %} mdi:on {% endif %}' ), "name": "Test2", }, @@ -595,7 +595,7 @@ async def test_templating(hass: HomeAssistant) -> None: entity_state = hass.states.get("switch.test") entity_state2 = hass.states.get("switch.test2") assert entity_state.state == STATE_ON - assert entity_state.attributes.get("icon") == "mdi:on" + assert entity_state.attributes.get("icon") == "mdi:icon2" assert entity_state2.state == STATE_ON assert entity_state2.attributes.get("icon") == "mdi:on" diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 5bf393a8405..5628a2b1aaf 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -2,15 +2,24 @@ from __future__ import annotations -from collections.abc import Callable, Generator +import asyncio +from collections.abc import AsyncGenerator, Callable, Generator from importlib.util import find_spec from pathlib import Path +import string from typing import TYPE_CHECKING, Any from unittest.mock import AsyncMock, MagicMock, patch -from aiohasupervisor.models import Discovery, Repository, StoreAddon, StoreInfo +from aiohasupervisor.models import ( + Discovery, + Repository, + ResolutionInfo, + StoreAddon, + StoreInfo, +) import pytest +from homeassistant.components import repairs from homeassistant.config_entries import ( DISCOVERY_SOURCES, ConfigEntriesFlowManager, @@ -18,8 +27,15 @@ from homeassistant.config_entries import ( OptionsFlowManager, ) from homeassistant.const import STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType +from homeassistant.core import Context, HomeAssistant, ServiceRegistry, ServiceResponse +from homeassistant.data_entry_flow import ( + FlowContext, + FlowHandler, + FlowManager, + FlowResultType, +) +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.translation import async_get_translations if TYPE_CHECKING: @@ 
-473,6 +489,26 @@ def supervisor_is_connected_fixture(supervisor_client: AsyncMock) -> AsyncMock: return supervisor_client.supervisor.ping +@pytest.fixture(name="resolution_info") +def resolution_info_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock resolution info from supervisor.""" + supervisor_client.resolution.info.return_value = ResolutionInfo( + suggestions=[], + unsupported=[], + unhealthy=[], + issues=[], + checks=[], + ) + return supervisor_client.resolution.info + + +@pytest.fixture(name="resolution_suggestions_for_issue") +def resolution_suggestions_for_issue_fixture(supervisor_client: AsyncMock) -> AsyncMock: + """Mock suggestions by issue from supervisor resolution.""" + supervisor_client.resolution.suggestions_for_issue.return_value = [] + return supervisor_client.resolution.suggestions_for_issue + + @pytest.fixture(name="supervisor_client") def supervisor_client() -> Generator[AsyncMock]: """Mock the supervisor client.""" @@ -481,6 +517,7 @@ def supervisor_client() -> Generator[AsyncMock]: supervisor_client.discovery = AsyncMock() supervisor_client.homeassistant = AsyncMock() supervisor_client.os = AsyncMock() + supervisor_client.resolution = AsyncMock() supervisor_client.supervisor = AsyncMock() with ( patch( @@ -504,46 +541,64 @@ def supervisor_client() -> Generator[AsyncMock]: return_value=supervisor_client, ), patch( - "homeassistant.components.hassio.get_supervisor_client", + "homeassistant.components.hassio.issues.get_supervisor_client", + return_value=supervisor_client, + ), + patch( + "homeassistant.components.hassio.repairs.get_supervisor_client", return_value=supervisor_client, ), ): yield supervisor_client -async def _ensure_translation_exists( +def _validate_translation_placeholders( + full_key: str, + translation: str, + description_placeholders: dict[str, str] | None, + translation_errors: dict[str, str], +) -> str | None: + """Raise if translation exists with missing placeholders.""" + tuples = list(string.Formatter().parse(translation)) + for _, placeholder, _, _ in tuples: + if placeholder is None: + continue + if ( + description_placeholders is None + or placeholder not in description_placeholders + ): + translation_errors[full_key] = ( + f"Description not found for placeholder `{placeholder}` in {full_key}" + ) + + +async def _validate_translation( hass: HomeAssistant, - ignore_translations: dict[str, StoreInfo], + translation_errors: dict[str, str], category: str, component: str, key: str, + description_placeholders: dict[str, str] | None, + *, + translation_required: bool = True, ) -> None: """Raise if translation doesn't exist.""" full_key = f"component.{component}.{category}.{key}" translations = await async_get_translations(hass, "en", category, [component]) - if full_key in translations: - return - - if full_key in ignore_translations: - ignore_translations[full_key] = "used" - return - - key_parts = key.split(".") - # Ignore step data translations if title or description exists - if ( - len(key_parts) >= 3 - and key_parts[0] == "step" - and key_parts[2] == "data" - and ( - f"component.{component}.{category}.{key_parts[0]}.{key_parts[1]}.description" - in translations - or f"component.{component}.{category}.{key_parts[0]}.{key_parts[1]}.title" - in translations + if (translation := translations.get(full_key)) is not None: + _validate_translation_placeholders( + full_key, translation, description_placeholders, translation_errors ) - ): return - pytest.fail( + if not translation_required: + return + + if full_key in translation_errors: + 
translation_errors[full_key] = "used" + return + + translation_errors[full_key] = ( f"Translation not found for {component}: `{category}.{key}`. " f"Please add to homeassistant/components/{component}/strings.json" ) @@ -559,68 +614,219 @@ def ignore_translations() -> str | list[str]: return [] +async def _check_config_flow_result_translations( + manager: FlowManager, + flow: FlowHandler, + result: FlowResult[FlowContext, str], + translation_errors: dict[str, str], +) -> None: + if result["type"] is FlowResultType.CREATE_ENTRY: + # No need to check translations for a completed flow + return + + key_prefix = "" + if isinstance(manager, ConfigEntriesFlowManager): + category = "config" + integration = flow.handler + elif isinstance(manager, OptionsFlowManager): + category = "options" + integration = flow.hass.config_entries.async_get_entry(flow.handler).domain + elif isinstance(manager, repairs.RepairsFlowManager): + category = "issues" + integration = flow.handler + issue_id = flow.issue_id + issue = ir.async_get(flow.hass).async_get_issue(integration, issue_id) + key_prefix = f"{issue.translation_key}.fix_flow." + else: + return + + # Check if this flow has been seen before + # Gets set to False on first run, and to True on subsequent runs + setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) + + if result["type"] is FlowResultType.FORM: + if step_id := result.get("step_id"): + # neither title nor description are required + # - title defaults to integration name + # - description is optional + for header in ("title", "description"): + await _validate_translation( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}step.{step_id}.{header}", + result["description_placeholders"], + translation_required=False, + ) + if errors := result.get("errors"): + for error in errors.values(): + await _validate_translation( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}error.{error}", + result["description_placeholders"], + ) + return + + if result["type"] is FlowResultType.ABORT: + # We don't need translations for a discovery flow which immediately + # aborts, since such flows won't be seen by users + if not flow.__flow_seen_before and flow.source in DISCOVERY_SOURCES: + return + await _validate_translation( + flow.hass, + translation_errors, + category, + integration, + f"{key_prefix}abort.{result["reason"]}", + result["description_placeholders"], + ) + + +async def _check_create_issue_translations( + issue_registry: ir.IssueRegistry, + issue: ir.IssueEntry, + translation_errors: dict[str, str], +) -> None: + if issue.translation_key is None: + # `translation_key` is only None on dismissed issues + return + await _validate_translation( + issue_registry.hass, + translation_errors, + "issues", + issue.domain, + f"{issue.translation_key}.title", + issue.translation_placeholders, + ) + if not issue.is_fixable: + # Description is required for non-fixable issues + await _validate_translation( + issue_registry.hass, + translation_errors, + "issues", + issue.domain, + f"{issue.translation_key}.description", + issue.translation_placeholders, + ) + + +async def _check_exception_translation( + hass: HomeAssistant, + exception: HomeAssistantError, + translation_errors: dict[str, str], +) -> None: + if exception.translation_key is None: + return + await _validate_translation( + hass, + translation_errors, + "exceptions", + exception.translation_domain, + f"{exception.translation_key}.message", + exception.translation_placeholders, + ) + + 
@pytest.fixture(autouse=True) -def check_config_translations(ignore_translations: str | list[str]) -> Generator[None]: - """Ensure config_flow translations are available.""" +async def check_translations( + ignore_translations: str | list[str], +) -> AsyncGenerator[None]: + """Check that translation requirements are met. + + Current checks: + - data entry flow results (ConfigFlow/OptionsFlow/RepairFlow) + - issue registry entries + """ if not isinstance(ignore_translations, list): ignore_translations = [ignore_translations] - _ignore_translations = {k: "unused" for k in ignore_translations} - _original = FlowManager._async_handle_step + translation_errors = {k: "unused" for k in ignore_translations} - async def _async_handle_step( + translation_coros = set() + + # Keep reference to original functions + _original_flow_manager_async_handle_step = FlowManager._async_handle_step + _original_issue_registry_async_create_issue = ir.IssueRegistry.async_get_or_create + _original_service_registry_async_call = ServiceRegistry.async_call + + # Prepare override functions + async def _flow_manager_async_handle_step( self: FlowManager, flow: FlowHandler, *args ) -> FlowResult: - result = await _original(self, flow, *args) - if isinstance(self, ConfigEntriesFlowManager): - category = "config" - component = flow.handler - elif isinstance(self, OptionsFlowManager): - category = "options" - component = flow.hass.config_entries.async_get_entry(flow.handler).domain - else: - return result - - # Check if this flow has been seen before - # Gets set to False on first run, and to True on subsequent runs - setattr(flow, "__flow_seen_before", hasattr(flow, "__flow_seen_before")) - - if result["type"] is FlowResultType.FORM: - if errors := result.get("errors"): - for error in errors.values(): - await _ensure_translation_exists( - flow.hass, - _ignore_translations, - category, - component, - f"error.{error}", - ) - return result - - if result["type"] is FlowResultType.ABORT: - # We don't need translations for a discovery flow which immediately - # aborts, since such flows won't be seen by users - if not flow.__flow_seen_before and flow.source in DISCOVERY_SOURCES: - return result - await _ensure_translation_exists( - flow.hass, - _ignore_translations, - category, - component, - f"abort.{result["reason"]}", - ) - + result = await _original_flow_manager_async_handle_step(self, flow, *args) + await _check_config_flow_result_translations( + self, flow, result, translation_errors + ) return result - with patch( - "homeassistant.data_entry_flow.FlowManager._async_handle_step", - _async_handle_step, + def _issue_registry_async_create_issue( + self: ir.IssueRegistry, domain: str, issue_id: str, *args, **kwargs + ) -> None: + result = _original_issue_registry_async_create_issue( + self, domain, issue_id, *args, **kwargs + ) + translation_coros.add( + _check_create_issue_translations(self, result, translation_errors) + ) + return result + + async def _service_registry_async_call( + self: ServiceRegistry, + domain: str, + service: str, + service_data: dict[str, Any] | None = None, + blocking: bool = False, + context: Context | None = None, + target: dict[str, Any] | None = None, + return_response: bool = False, + ) -> ServiceResponse: + try: + return await _original_service_registry_async_call( + self, + domain, + service, + service_data, + blocking, + context, + target, + return_response, + ) + except HomeAssistantError as err: + translation_coros.add( + _check_exception_translation(self._hass, err, translation_errors) + ) + 
raise + + # Use override functions + with ( + patch( + "homeassistant.data_entry_flow.FlowManager._async_handle_step", + _flow_manager_async_handle_step, + ), + patch( + "homeassistant.helpers.issue_registry.IssueRegistry.async_get_or_create", + _issue_registry_async_create_issue, + ), + patch( + "homeassistant.core.ServiceRegistry.async_call", + _service_registry_async_call, + ), ): yield - unused_ignore = [k for k, v in _ignore_translations.items() if v == "unused"] + await asyncio.gather(*translation_coros) + + # Run final checks + unused_ignore = [k for k, v in translation_errors.items() if v == "unused"] if unused_ignore: pytest.fail( f"Unused ignore translations: {', '.join(unused_ignore)}. " "Please remove them from the ignore_translations fixture." ) + for description in translation_errors.values(): + if description not in {"used", "unused"}: + pytest.fail(description) diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index b1f2ea0db75..f1e220b10b2 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -308,7 +308,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added light', + 'speech': 'Sorry, I am not aware of any area called late added', }), }), }), @@ -378,7 +378,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', + 'speech': 'Sorry, I am not aware of any area called kitchen', }), }), }), @@ -428,7 +428,7 @@ 'speech': dict({ 'plain': dict({ 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed light', + 'speech': 'Sorry, I am not aware of any area called renamed', }), }), }), diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr index 08aca43aba5..a3edd4fa51c 100644 --- a/tests/components/conversation/snapshots/test_http.ambr +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -6,7 +6,6 @@ 'id': 'conversation.home_assistant', 'name': 'Home Assistant', 'supported_languages': list([ - 'af', 'ar', 'bg', 'bn', @@ -24,22 +23,18 @@ 'fi', 'fr', 'gl', - 'gu', 'he', - 'hi', 'hr', 'hu', 'id', 'is', 'it', 'ka', - 'kn', 'ko', 'lb', 'lt', 'lv', 'ml', - 'mn', 'ms', 'nb', 'nl', @@ -52,7 +47,6 @@ 'sl', 'sr', 'sv', - 'sw', 'te', 'th', 'tr', @@ -541,7 +535,7 @@ 'name': 'HassTurnOn', }), 'match': True, - 'sentence_template': ' on [all] in ', + 'sentence_template': ' on [] ', 'slots': dict({ 'area': 'kitchen', 'domain': 'light', @@ -612,7 +606,7 @@ 'name': 'OrderBeer', }), 'match': True, - 'sentence_template': "I'd like to order a {beer_style} [please]", + 'sentence_template': "[I'd like to ]order a {beer_style} [please]", 'slots': dict({ 'beer_style': 'lager', }), @@ -639,7 +633,7 @@ 'details': dict({ 'brightness': dict({ 'name': 'brightness', - 'text': '100%', + 'text': '100', 'value': 100, }), 'name': dict({ @@ -654,7 +648,7 @@ 'match': True, 'sentence_template': '[] brightness [to] ', 'slots': dict({ - 'brightness': '100%', + 'brightness': '100', 'name': 'test light', }), 'source': 'builtin', diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 9f54671d8a1..39ecdb7f422 100644 --- a/tests/components/conversation/test_default_agent.py +++ 
b/tests/components/conversation/test_default_agent.py @@ -770,8 +770,8 @@ async def test_error_no_device_on_floor_exposed( ) with patch( - "homeassistant.components.conversation.default_agent.recognize_all", - return_value=[recognize_result], + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, ): result = await conversation.async_converse( hass, "turn on test light on the ground floor", None, Context(), None @@ -838,8 +838,8 @@ async def test_error_no_domain(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_all", - return_value=[recognize_result], + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, ): result = await conversation.async_converse( hass, "turn on the fans", None, Context(), None @@ -873,8 +873,8 @@ async def test_error_no_domain_exposed(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_all", - return_value=[recognize_result], + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, ): result = await conversation.async_converse( hass, "turn on the fans", None, Context(), None @@ -1047,8 +1047,8 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_all", - return_value=[recognize_result], + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, ): result = await conversation.async_converse( hass, "open the windows", None, Context(), None @@ -1096,8 +1096,8 @@ async def test_error_no_device_class_exposed(hass: HomeAssistant) -> None: ) with patch( - "homeassistant.components.conversation.default_agent.recognize_all", - return_value=[recognize_result], + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, ): result = await conversation.async_converse( hass, "open all the windows", None, Context(), None @@ -1207,8 +1207,8 @@ async def test_error_no_device_class_on_floor_exposed( ) with patch( - "homeassistant.components.conversation.default_agent.recognize_all", - return_value=[recognize_result], + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=recognize_result, ): result = await conversation.async_converse( hass, "open ground floor windows", None, Context(), None @@ -1229,8 +1229,8 @@ async def test_error_no_device_class_on_floor_exposed( async def test_error_no_intent(hass: HomeAssistant) -> None: """Test response with an intent match failure.""" with patch( - "homeassistant.components.conversation.default_agent.recognize_all", - return_value=[], + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=None, ): result = await conversation.async_converse( hass, "do something", None, Context(), None @@ -1735,7 +1735,7 @@ async def test_empty_aliases( return_value=None, ) as mock_recognize_all: await conversation.async_converse( - hass, "turn on lights in the kitchen", None, Context(), None + hass, "turn on kitchen light", None, Context(), None ) assert mock_recognize_all.call_count > 0 @@ -2833,3 +2833,219 @@ async def test_query_same_name_different_areas( assert result.response.response_type == intent.IntentResponseType.QUERY_ANSWER assert len(result.response.matched_states) == 1 assert result.response.matched_states[0].entity_id == kitchen_light.entity_id 
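The tests added below exercise result caching in the default agent: an identical sentence should reuse the previous recognition result (verified by tagging the first result object and observing the tag on the second call), and the cache should be dropped when an entity is exposed/unexposed or a new entity appears. A minimal, hypothetical sketch of that invalidation idea follows; it is not the actual DefaultAgent implementation.

class RecognitionCache:
    """Illustrative text-keyed cache, cleared whenever the entity set changes."""

    def __init__(self) -> None:
        self._results: dict[str, object] = {}

    def get(self, text: str) -> object | None:
        # Returning the identical object for repeated input is what lets the
        # tests below mark a result and see the mark again on the second call.
        return self._results.get(text)

    def put(self, text: str, result: object) -> None:
        self._results[text] = result

    def clear(self) -> None:
        # Invoked when exposure settings change or a new entity is added.
        self._results.clear()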
+ + +@pytest.mark.usefixtures("init_components") +async def test_intent_cache_exposed(hass: HomeAssistant) -> None: + """Test that intent recognition results are cached for exposed entities.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + entity_id = "light.test_light" + hass.states.async_set(entity_id, "off") + expose_entity(hass, entity_id, True) + await hass.async_block_till_done() + + user_input = ConversationInput( + text="turn on test light", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert result.entities["name"].text == "test light" + + # Mark this result so we know it is from cache next time + mark = "_from_cache" + setattr(result, mark, True) + + # Should be from cache this time + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is True + + # Unexposing clears the cache + expose_entity(hass, entity_id, False) + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is None + + +@pytest.mark.usefixtures("init_components") +async def test_intent_cache_all_entities(hass: HomeAssistant) -> None: + """Test that intent recognition results are cached for all entities.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + entity_id = "light.test_light" + hass.states.async_set(entity_id, "off") + expose_entity(hass, entity_id, False) # not exposed + await hass.async_block_till_done() + + user_input = ConversationInput( + text="turn on test light", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert result.entities["name"].text == "test light" + + # Mark this result so we know it is from cache next time + mark = "_from_cache" + setattr(result, mark, True) + + # Should be from cache this time + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is True + + # Adding a new entity clears the cache + hass.states.async_set("light.new_light", "off") + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is None + + +@pytest.mark.usefixtures("init_components") +async def test_intent_cache_fuzzy(hass: HomeAssistant) -> None: + """Test that intent recognition results are cached for fuzzy matches.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + # There is no entity named test light + user_input = ConversationInput( + text="turn on test light", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert result.unmatched_entities["area"].text == "test " + + # Mark this result so we know it is from cache next time + mark = "_from_cache" + setattr(result, mark, True) + + # Should be from cache this time + result = await agent.async_recognize_intent(user_input) + assert result is not None + assert getattr(result, mark, None) is True + + +@pytest.mark.usefixtures("init_components") +async def 
test_entities_filtered_by_input(hass: HomeAssistant) -> None: + """Test that entities are filtered by the input text before intent matching.""" + agent = hass.data[DATA_DEFAULT_ENTITY] + assert isinstance(agent, default_agent.DefaultAgent) + + # Only the switch is exposed + hass.states.async_set("light.test_light", "off") + hass.states.async_set( + "light.test_light_2", "off", attributes={ATTR_FRIENDLY_NAME: "test light"} + ) + hass.states.async_set("cover.garage_door", "closed") + hass.states.async_set("switch.test_switch", "off") + expose_entity(hass, "light.test_light", False) + expose_entity(hass, "light.test_light_2", False) + expose_entity(hass, "cover.garage_door", False) + expose_entity(hass, "switch.test_switch", True) + await hass.async_block_till_done() + + # test switch is exposed + user_input = ConversationInput( + text="turn on test switch", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + + with patch( + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=None, + ) as recognize_best: + await agent.async_recognize_intent(user_input) + + # (1) exposed, (2) all entities + assert len(recognize_best.call_args_list) == 2 + + # Only the test switch should have been considered because its name shows + # up in the input text. + slot_lists = recognize_best.call_args_list[0].kwargs["slot_lists"] + name_list = slot_lists["name"] + assert len(name_list.values) == 1 + assert name_list.values[0].text_in.text == "test switch" + + # test light is not exposed + user_input = ConversationInput( + text="turn on Test Light", # different casing for name + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + + with patch( + "homeassistant.components.conversation.default_agent.recognize_best", + return_value=None, + ) as recognize_best: + await agent.async_recognize_intent(user_input) + + # (1) exposed, (2) all entities + assert len(recognize_best.call_args_list) == 2 + + # Both test lights should have been considered because their name shows + # up in the input text.
+ slot_lists = recognize_best.call_args_list[1].kwargs["slot_lists"] + name_list = slot_lists["name"] + assert len(name_list.values) == 2 + assert name_list.values[0].text_in.text == "test light" + assert name_list.values[1].text_in.text == "test light" + + +@pytest.mark.usefixtures("init_components") +async def test_entities_names_are_not_templates(hass: HomeAssistant) -> None: + """Test that entities names are not treated as hassil templates.""" + # Contains hassil template characters + hass.states.async_set( + "light.test_light", "off", attributes={ATTR_FRIENDLY_NAME: " None: """Test the date and time intents.""" + await hass.config.async_set_time_zone("UTC") result = await conversation.async_converse( hass, "what is the date", None, Context(), None ) diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py index 5b6f7072a2d..e792d8c6913 100644 --- a/tests/components/conversation/test_http.py +++ b/tests/components/conversation/test_http.py @@ -355,15 +355,15 @@ async def test_ws_hass_agent_debug_null_result( """Test homeassistant agent debug websocket command with a null result.""" client = await hass_ws_client(hass) - async def async_recognize(self, user_input, *args, **kwargs): + async def async_recognize_intent(self, user_input, *args, **kwargs): if user_input.text == "bad sentence": return None return await self.async_recognize(user_input, *args, **kwargs) with patch( - "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize", - async_recognize, + "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize_intent", + async_recognize_intent, ): await client.send_json_auto_id( { diff --git a/tests/components/conversation/test_init.py b/tests/components/conversation/test_init.py index e92b1ab538f..6900ba2d419 100644 --- a/tests/components/conversation/test_init.py +++ b/tests/components/conversation/test_init.py @@ -8,10 +8,15 @@ from syrupy.assertion import SnapshotAssertion import voluptuous as vol from homeassistant.components import conversation -from homeassistant.components.conversation import default_agent +from homeassistant.components.conversation import ( + ConversationInput, + async_handle_intents, + async_handle_sentence_triggers, + default_agent, +) from homeassistant.components.conversation.const import DATA_DEFAULT_ENTITY from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.core import HomeAssistant +from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent from homeassistant.setup import async_setup_component @@ -229,3 +234,97 @@ async def test_prepare_agent( await conversation.async_prepare_agent(hass, agent_id, "en") assert len(mock_prepare.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("response_template", "expected_response"), + [("response {{ trigger.device_id }}", "response 1234"), ("", "")], +) +async def test_async_handle_sentence_triggers( + hass: HomeAssistant, response_template: str, expected_response: str +) -> None: + """Test handling sentence triggers with async_handle_sentence_triggers.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": { + "platform": "conversation", + "command": ["my trigger"], + }, + "action": { + "set_conversation_response": 
response_template, + }, + } + }, + ) + + # Device id will be available in response template + device_id = "1234" + actual_response = await async_handle_sentence_triggers( + hass, + ConversationInput( + text="my trigger", + context=Context(), + conversation_id=None, + device_id=device_id, + language=hass.config.language, + ), + ) + assert actual_response == expected_response + + +async def test_async_handle_intents(hass: HomeAssistant) -> None: + """Test handling registered intents with async_handle_intents.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + + # Reuse custom sentences in test config to trigger default agent. + class OrderBeerIntentHandler(intent.IntentHandler): + intent_type = "OrderBeer" + + def __init__(self) -> None: + super().__init__() + self.was_handled = False + + async def async_handle( + self, intent_obj: intent.Intent + ) -> intent.IntentResponse: + self.was_handled = True + return intent_obj.create_response() + + handler = OrderBeerIntentHandler() + intent.async_register(hass, handler) + + # Registered intent will be handled + result = await async_handle_intents( + hass, + ConversationInput( + text="I'd like to order a stout", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + ), + ) + assert result is not None + assert result.intent is not None + assert result.intent.intent_type == handler.intent_type + assert handler.was_handled + + # No error messages, just None as a result + result = await async_handle_intents( + hass, + ConversationInput( + text="this sentence does not exist", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + ), + ) + assert result is None diff --git a/tests/components/conversation/test_trace.py b/tests/components/conversation/test_trace.py index 59cd10d2510..7c00b9a80b2 100644 --- a/tests/components/conversation/test_trace.py +++ b/tests/components/conversation/test_trace.py @@ -56,7 +56,7 @@ async def test_converation_trace( "intent_name": "HassListAddItem", "slots": { "name": "Shopping List", - "item": "apples ", + "item": "apples", }, } diff --git a/tests/components/cpuspeed/test_config_flow.py b/tests/components/cpuspeed/test_config_flow.py index 0ebb8aede49..1a68d6f9396 100644 --- a/tests/components/cpuspeed/test_config_flow.py +++ b/tests/components/cpuspeed/test_config_flow.py @@ -50,7 +50,7 @@ async def test_already_configured( ) assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "already_configured" + assert result.get("reason") == "single_instance_allowed" assert len(mock_setup_entry.mock_calls) == 0 assert len(mock_cpuinfo_config_flow.mock_calls) == 0 diff --git a/tests/components/deconz/snapshots/test_light.ambr b/tests/components/deconz/snapshots/test_light.ambr index a3ec7caac60..b73bbcca216 100644 --- a/tests/components/deconz/snapshots/test_light.ambr +++ b/tests/components/deconz/snapshots/test_light.ambr @@ -125,7 +125,7 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -134,7 +134,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -283,7 +283,7 @@ 'min_mireds': 155, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -291,7 +291,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -429,7 +429,7 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 
'supported_color_modes': list([ @@ -438,7 +438,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -587,7 +587,7 @@ 'min_mireds': 155, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -595,7 +595,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -891,7 +891,7 @@ 'min_mireds': 155, 'rgb_color': tuple( 255, - 67, + 68, 0, ), 'supported_color_modes': list([ @@ -899,7 +899,7 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.674, + 0.673, 0.322, ), }), @@ -981,7 +981,7 @@ 'rgb_color': tuple( 255, 165, - 84, + 85, ), 'supported_color_modes': list([ , @@ -990,8 +990,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.53, - 0.388, + 0.529, + 0.387, ), }), 'context': , @@ -1180,7 +1180,7 @@ 'is_deconz_group': False, 'rgb_color': tuple( 243, - 113, + 114, 255, ), 'supported_color_modes': list([ @@ -1189,7 +1189,7 @@ 'supported_features': , 'xy_color': tuple( 0.357, - 0.188, + 0.189, ), }), 'context': , diff --git a/tests/components/deconz/test_light.py b/tests/components/deconz/test_light.py index 8ce83d87b69..15135a333ce 100644 --- a/tests/components/deconz/test_light.py +++ b/tests/components/deconz/test_light.py @@ -830,7 +830,7 @@ async def test_groups( }, { "on": True, - "xy": (0.235, 0.164), + "xy": (0.236, 0.166), }, ), ( # Turn on group with short color loop @@ -845,7 +845,7 @@ async def test_groups( }, { "on": True, - "xy": (0.235, 0.164), + "xy": (0.236, 0.166), }, ), ], diff --git a/tests/components/demo/test_light.py b/tests/components/demo/test_light.py index e3b1efc7eec..8fcdb8a9c2e 100644 --- a/tests/components/demo/test_light.py +++ b/tests/components/demo/test_light.py @@ -73,8 +73,8 @@ async def test_state_attributes(hass: HomeAssistant) -> None: ) state = hass.states.get(ENTITY_LIGHT) - assert state.attributes.get(ATTR_RGB_COLOR) == (250, 252, 255) - assert state.attributes.get(ATTR_XY_COLOR) == (0.319, 0.326) + assert state.attributes.get(ATTR_RGB_COLOR) == (251, 253, 255) + assert state.attributes.get(ATTR_XY_COLOR) == (0.319, 0.327) await hass.services.async_call( LIGHT_DOMAIN, diff --git a/tests/components/device_sun_light_trigger/test_init.py b/tests/components/device_sun_light_trigger/test_init.py index 1de0794b9ee..24996482916 100644 --- a/tests/components/device_sun_light_trigger/test_init.py +++ b/tests/components/device_sun_light_trigger/test_init.py @@ -177,6 +177,9 @@ async def test_lights_turn_on_when_coming_home_after_sun_set_person( hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights turn on when coming home after sun set.""" + # Ensure all setup tasks are done (avoid flaky tests) + await hass.async_block_till_done(wait_background_tasks=True) + device_1 = f"{DEVICE_TRACKER_DOMAIN}.device_1" device_2 = f"{DEVICE_TRACKER_DOMAIN}.device_2" diff --git a/tests/components/dexcom/__init__.py b/tests/components/dexcom/__init__.py index adc9c56049a..10a742070d6 100644 --- a/tests/components/dexcom/__init__.py +++ b/tests/components/dexcom/__init__.py @@ -1,6 +1,7 @@ """Tests for the Dexcom integration.""" import json +from typing import Any from unittest.mock import patch from pydexcom import GlucoseReading @@ -20,14 +21,16 @@ CONFIG = { GLUCOSE_READING = GlucoseReading(json.loads(load_fixture("data.json", "dexcom"))) -async def init_integration(hass: HomeAssistant) -> MockConfigEntry: +async def init_integration( + hass: HomeAssistant, options: dict[str, Any] | None = None +) -> MockConfigEntry: """Set up the Dexcom integration in Home 
Assistant.""" entry = MockConfigEntry( domain=DOMAIN, title="test_username", unique_id="test_username", data=CONFIG, - options=None, + options=options, ) with ( patch( diff --git a/tests/components/dexcom/test_config_flow.py b/tests/components/dexcom/test_config_flow.py index e8893e21d0e..0a7338c13da 100644 --- a/tests/components/dexcom/test_config_flow.py +++ b/tests/components/dexcom/test_config_flow.py @@ -5,15 +5,13 @@ from unittest.mock import patch from pydexcom import AccountError, SessionError from homeassistant import config_entries -from homeassistant.components.dexcom.const import DOMAIN, MG_DL, MMOL_L -from homeassistant.const import CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME +from homeassistant.components.dexcom.const import DOMAIN +from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . import CONFIG -from tests.common import MockConfigEntry - async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -101,51 +99,3 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} - - -async def test_option_flow_default(hass: HomeAssistant) -> None: - """Test config flow options.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=CONFIG, - options=None, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result2 = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={}, - ) - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["data"] == { - CONF_UNIT_OF_MEASUREMENT: MG_DL, - } - - -async def test_option_flow(hass: HomeAssistant) -> None: - """Test config flow options.""" - entry = MockConfigEntry( - domain=DOMAIN, - data=CONFIG, - options={CONF_UNIT_OF_MEASUREMENT: MG_DL}, - ) - entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(entry.entry_id) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={CONF_UNIT_OF_MEASUREMENT: MMOL_L}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_UNIT_OF_MEASUREMENT: MMOL_L, - } diff --git a/tests/components/dexcom/test_sensor.py b/tests/components/dexcom/test_sensor.py index 1b7f0b026ab..5c0a5280ad6 100644 --- a/tests/components/dexcom/test_sensor.py +++ b/tests/components/dexcom/test_sensor.py @@ -4,12 +4,7 @@ from unittest.mock import patch from pydexcom import SessionError -from homeassistant.components.dexcom.const import MMOL_L -from homeassistant.const import ( - CONF_UNIT_OF_MEASUREMENT, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -58,36 +53,3 @@ async def test_sensors_update_failed(hass: HomeAssistant) -> None: assert test_username_glucose_value.state == STATE_UNAVAILABLE test_username_glucose_trend = hass.states.get("sensor.test_username_glucose_trend") assert test_username_glucose_trend.state == STATE_UNAVAILABLE - - -async def test_sensors_options_changed(hass: HomeAssistant) -> None: - """Test we handle sensor unavailable.""" - entry = await 
init_integration(hass) - - test_username_glucose_value = hass.states.get("sensor.test_username_glucose_value") - assert test_username_glucose_value.state == str(GLUCOSE_READING.value) - test_username_glucose_trend = hass.states.get("sensor.test_username_glucose_trend") - assert test_username_glucose_trend.state == GLUCOSE_READING.trend_description - - with ( - patch( - "homeassistant.components.dexcom.Dexcom.get_current_glucose_reading", - return_value=GLUCOSE_READING, - ), - patch( - "homeassistant.components.dexcom.Dexcom.create_session", - return_value="test_session_id", - ), - ): - hass.config_entries.async_update_entry( - entry=entry, - options={CONF_UNIT_OF_MEASUREMENT: MMOL_L}, - ) - await hass.async_block_till_done() - - assert entry.options == {CONF_UNIT_OF_MEASUREMENT: MMOL_L} - - test_username_glucose_value = hass.states.get("sensor.test_username_glucose_value") - assert test_username_glucose_value.state == str(GLUCOSE_READING.mmol_l) - test_username_glucose_trend = hass.states.get("sensor.test_username_glucose_trend") - assert test_username_glucose_trend.state == GLUCOSE_READING.trend_description diff --git a/tests/components/dhcp/conftest.py b/tests/components/dhcp/conftest.py deleted file mode 100644 index b0fa3f573c5..00000000000 --- a/tests/components/dhcp/conftest.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Tests for the dhcp integration.""" - -import os -import pathlib - - -def pytest_sessionstart(session): - """Try to avoid flaky FileExistsError in CI. - - Called after the Session object has been created and - before performing collection and entering the run test loop. - - This is needed due to a race condition in scapy v2.6.0 - See https://github.com/secdev/scapy/pull/4558 - - Can be removed when scapy 2.6.1 is released. - """ - for sub_dir in (".cache", ".config"): - path = pathlib.Path(os.path.join(os.path.expanduser("~"), sub_dir)) - if not path.exists(): - path.mkdir(mode=0o700, exist_ok=True) diff --git a/tests/components/discovergy/test_config_flow.py b/tests/components/discovergy/test_config_flow.py index 470ef65fccd..23c4a0f7cee 100644 --- a/tests/components/discovergy/test_config_flow.py +++ b/tests/components/discovergy/test_config_flow.py @@ -20,7 +20,7 @@ async def test_form(hass: HomeAssistant, discovergy: AsyncMock) -> None: DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["errors"] is None + assert result["errors"] == {} with patch( "homeassistant.components.discovergy.async_setup_entry", @@ -51,7 +51,7 @@ async def test_reauth( config_entry.add_to_hass(hass) init_result = await config_entry.start_reauth_flow(hass) assert init_result["type"] is FlowResultType.FORM - assert init_result["step_id"] == "reauth_confirm" + assert init_result["step_id"] == "user" with patch( "homeassistant.components.discovergy.async_setup_entry", @@ -60,7 +60,7 @@ async def test_reauth( configure_result = await hass.config_entries.flow.async_configure( init_result["flow_id"], { - CONF_EMAIL: "test@example.com", + CONF_EMAIL: "user@example.org", CONF_PASSWORD: "test-password", }, ) @@ -111,3 +111,30 @@ async def test_form_fail( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test@example.com" assert "errors" not in result + + +async def test_reauth_unique_id_mismatch( + hass: HomeAssistant, config_entry: MockConfigEntry, discovergy: AsyncMock +) -> None: + """Test reauth flow with unique id mismatch.""" + config_entry.add_to_hass(hass) + + result = await config_entry.start_reauth_flow(hass) + assert 
result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.discovergy.async_setup_entry", + return_value=True, + ): + configure_result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "user2@example.org", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert configure_result["type"] is FlowResultType.ABORT + assert configure_result["reason"] == "account_mismatch" diff --git a/tests/components/dynalite/common.py b/tests/components/dynalite/common.py index 640b6b3e24f..2d48d7e7b4f 100644 --- a/tests/components/dynalite/common.py +++ b/tests/components/dynalite/common.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, Mock, call, patch from dynalite_devices_lib.dynalitebase import DynaliteBaseDevice from homeassistant.components import dynalite -from homeassistant.const import ATTR_SERVICE +from homeassistant.const import ATTR_SERVICE, CONF_HOST from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -34,7 +34,7 @@ async def get_entry_id_from_hass(hass: HomeAssistant) -> str: async def create_entity_from_device(hass: HomeAssistant, device: DynaliteBaseDevice): """Set up the component and platform and create a light based on the device provided.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" diff --git a/tests/components/dynalite/test_bridge.py b/tests/components/dynalite/test_bridge.py index b0517b89031..ed9296ae685 100644 --- a/tests/components/dynalite/test_bridge.py +++ b/tests/components/dynalite/test_bridge.py @@ -17,6 +17,7 @@ from homeassistant.components.dynalite.const import ( ATTR_PACKET, ATTR_PRESET, ) +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -26,7 +27,7 @@ from tests.common import MockConfigEntry async def test_update_device(hass: HomeAssistant) -> None: """Test that update works.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" @@ -56,7 +57,7 @@ async def test_update_device(hass: HomeAssistant) -> None: async def test_add_devices_then_register(hass: HomeAssistant) -> None: """Test that add_devices work.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" @@ -91,7 +92,7 @@ async def test_add_devices_then_register(hass: HomeAssistant) -> None: async def test_register_then_add_devices(hass: HomeAssistant) -> None: """Test that add_devices work after register_add_entities.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" @@ -120,7 +121,7 @@ async def test_register_then_add_devices(hass: HomeAssistant) -> None: async def 
test_notifications(hass: HomeAssistant) -> None: """Test that update works.""" host = "1.2.3.4" - entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) + entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}) entry.add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices" diff --git a/tests/components/dynalite/test_config_flow.py b/tests/components/dynalite/test_config_flow.py index 8bb47fd67e3..20ee42d33b5 100644 --- a/tests/components/dynalite/test_config_flow.py +++ b/tests/components/dynalite/test_config_flow.py @@ -7,11 +7,9 @@ import pytest from homeassistant import config_entries from homeassistant.components import dynalite from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import CONF_PORT -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -31,11 +29,8 @@ async def test_flow( exp_type, exp_result, exp_reason, - issue_registry: ir.IssueRegistry, ) -> None: """Run a flow with or without errors and return result.""" - issue = issue_registry.async_get_issue(dynalite.DOMAIN, "deprecated_yaml") - assert issue is None host = "1.2.3.4" with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", @@ -43,8 +38,8 @@ async def test_flow( ): result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={dynalite.CONF_HOST: host}, + context={"source": config_entries.SOURCE_USER}, + data={CONF_HOST: host}, ) await hass.async_block_till_done() assert result["type"] == exp_type @@ -52,51 +47,33 @@ async def test_flow( assert result["result"].state == exp_result if exp_reason: assert result["reason"] == exp_reason - issue = issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{dynalite.DOMAIN}" - ) - assert issue is not None - assert issue.issue_domain == dynalite.DOMAIN - assert issue.severity == ir.IssueSeverity.WARNING - - -async def test_deprecated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Check that deprecation warning appears in caplog.""" - await async_setup_component( - hass, dynalite.DOMAIN, {dynalite.DOMAIN: {dynalite.CONF_HOST: "aaa"}} - ) - assert "The 'dynalite' option is deprecated" in caplog.text async def test_existing(hass: HomeAssistant) -> None: """Test when the entry exists with the same config.""" host = "1.2.3.4" - MockConfigEntry( - domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host} - ).add_to_hass(hass) + MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}).add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", return_value=True, ): result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={dynalite.CONF_HOST: host}, + context={"source": config_entries.SOURCE_USER}, + data={CONF_HOST: host}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" -async def test_existing_update(hass: HomeAssistant) -> None: +async def test_existing_abort_update(hass: HomeAssistant) -> None: """Test when the entry exists with a different 
config.""" host = "1.2.3.4" port1 = 7777 port2 = 8888 entry = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host, CONF_PORT: port1}, + data={CONF_HOST: host, CONF_PORT: port1}, ) entry.add_to_hass(hass) with patch( @@ -109,12 +86,12 @@ async def test_existing_update(hass: HomeAssistant) -> None: assert mock_dyn_dev().configure.mock_calls[0][1][0]["port"] == port1 result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={dynalite.CONF_HOST: host, CONF_PORT: port2}, + context={"source": config_entries.SOURCE_USER}, + data={CONF_HOST: host, CONF_PORT: port2}, ) await hass.async_block_till_done() - assert mock_dyn_dev().configure.call_count == 2 - assert mock_dyn_dev().configure.mock_calls[1][1][0]["port"] == port2 + assert mock_dyn_dev().configure.call_count == 1 + assert mock_dyn_dev().configure.mock_calls[0][1][0]["port"] == port1 assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" @@ -123,17 +100,15 @@ async def test_two_entries(hass: HomeAssistant) -> None: """Test when two different entries exist with different hosts.""" host1 = "1.2.3.4" host2 = "5.6.7.8" - MockConfigEntry( - domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host1} - ).add_to_hass(hass) + MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host1}).add_to_hass(hass) with patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", return_value=True, ): result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={dynalite.CONF_HOST: host2}, + context={"source": config_entries.SOURCE_USER}, + data={CONF_HOST: host2}, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].state is ConfigEntryState.LOADED @@ -172,9 +147,7 @@ async def test_setup_user(hass: HomeAssistant) -> None: async def test_setup_user_existing_host(hass: HomeAssistant) -> None: """Test that when we setup a host that is defined, we get an error.""" host = "3.4.5.6" - MockConfigEntry( - domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host} - ).add_to_hass(hass) + MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host}).add_to_hass(hass) result = await hass.config_entries.flow.async_init( dynalite.DOMAIN, context={"source": config_entries.SOURCE_USER} ) diff --git a/tests/components/dynalite/test_init.py b/tests/components/dynalite/test_init.py index 2c15c41e40b..4bf4eb53ad6 100644 --- a/tests/components/dynalite/test_init.py +++ b/tests/components/dynalite/test_init.py @@ -6,7 +6,7 @@ import pytest from voluptuous import MultipleInvalid import homeassistant.components.dynalite.const as dynalite -from homeassistant.const import CONF_DEFAULT, CONF_HOST, CONF_NAME, CONF_PORT, CONF_ROOM +from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -20,71 +20,18 @@ async def test_empty_config(hass: HomeAssistant) -> None: assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 0 -async def test_async_setup(hass: HomeAssistant) -> None: - """Test a successful setup with all of the different options.""" - with patch( - "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", - return_value=True, - ): - assert await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - { - CONF_HOST: "1.2.3.4", - CONF_PORT: 1234, - dynalite.CONF_AUTO_DISCOVER: True, - 
dynalite.CONF_POLL_TIMER: 5.5, - dynalite.CONF_AREA: { - "1": { - CONF_NAME: "Name1", - dynalite.CONF_CHANNEL: {"4": {}}, - dynalite.CONF_PRESET: {"7": {}}, - dynalite.CONF_NO_DEFAULT: True, - }, - "2": {CONF_NAME: "Name2"}, - "3": { - CONF_NAME: "Name3", - dynalite.CONF_TEMPLATE: CONF_ROOM, - }, - "4": { - CONF_NAME: "Name4", - dynalite.CONF_TEMPLATE: dynalite.CONF_TIME_COVER, - }, - }, - CONF_DEFAULT: {dynalite.CONF_FADE: 2.3}, - dynalite.CONF_ACTIVE: dynalite.ACTIVE_INIT, - dynalite.CONF_PRESET: { - "5": {CONF_NAME: "pres5", dynalite.CONF_FADE: 4.5} - }, - dynalite.CONF_TEMPLATE: { - CONF_ROOM: { - dynalite.CONF_ROOM_ON: 6, - dynalite.CONF_ROOM_OFF: 7, - }, - dynalite.CONF_TIME_COVER: { - dynalite.CONF_OPEN_PRESET: 8, - dynalite.CONF_CLOSE_PRESET: 9, - dynalite.CONF_STOP_PRESET: 10, - dynalite.CONF_CHANNEL_COVER: 3, - dynalite.CONF_DURATION: 2.2, - dynalite.CONF_TILT_TIME: 3.3, - dynalite.CONF_DEVICE_CLASS: "awning", - }, - }, - } - ] - } - }, - ) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 1 - - async def test_service_request_area_preset(hass: HomeAssistant) -> None: """Test requesting and area preset via service call.""" + entry = MockConfigEntry( + domain=dynalite.DOMAIN, + data={CONF_HOST: "1.2.3.4"}, + ) + entry2 = MockConfigEntry( + domain=dynalite.DOMAIN, + data={CONF_HOST: "5.6.7.8"}, + ) + entry.add_to_hass(hass) + entry2.add_to_hass(hass) with ( patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", @@ -95,20 +42,8 @@ async def test_service_request_area_preset(hass: HomeAssistant) -> None: return_value=True, ) as mock_req_area_pres, ): - assert await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - {CONF_HOST: "1.2.3.4"}, - {CONF_HOST: "5.6.7.8"}, - ] - } - }, - ) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 2 await hass.services.async_call( dynalite.DOMAIN, "request_area_preset", @@ -160,6 +95,16 @@ async def test_service_request_area_preset(hass: HomeAssistant) -> None: async def test_service_request_channel_level(hass: HomeAssistant) -> None: """Test requesting the level of a channel via service call.""" + entry = MockConfigEntry( + domain=dynalite.DOMAIN, + data={CONF_HOST: "1.2.3.4"}, + ) + entry2 = MockConfigEntry( + domain=dynalite.DOMAIN, + data={CONF_HOST: "5.6.7.8"}, + ) + entry.add_to_hass(hass) + entry2.add_to_hass(hass) with ( patch( "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", @@ -170,21 +115,7 @@ async def test_service_request_channel_level(hass: HomeAssistant) -> None: return_value=True, ) as mock_req_chan_lvl, ): - assert await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - { - CONF_HOST: "1.2.3.4", - dynalite.CONF_AREA: {"7": {CONF_NAME: "test"}}, - }, - {CONF_HOST: "5.6.7.8"}, - ] - } - }, - ) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 2 await hass.services.async_call( @@ -212,60 +143,6 @@ async def test_service_request_channel_level(hass: HomeAssistant) -> None: assert mock_req_chan_lvl.mock_calls == [call(4, 5), call(4, 5)] -async def test_async_setup_bad_config1(hass: HomeAssistant) -> None: - """Test a successful with bad config on templates.""" - with patch( - 
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", - return_value=True, - ): - assert not await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - { - CONF_HOST: "1.2.3.4", - dynalite.CONF_AREA: { - "1": { - dynalite.CONF_TEMPLATE: dynalite.CONF_TIME_COVER, - CONF_NAME: "Name", - dynalite.CONF_ROOM_ON: 7, - } - }, - } - ] - } - }, - ) - await hass.async_block_till_done() - - -async def test_async_setup_bad_config2(hass: HomeAssistant) -> None: - """Test a successful with bad config on numbers.""" - host = "1.2.3.4" - with patch( - "homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup", - return_value=True, - ): - assert not await async_setup_component( - hass, - dynalite.DOMAIN, - { - dynalite.DOMAIN: { - dynalite.CONF_BRIDGES: [ - { - CONF_HOST: host, - dynalite.CONF_AREA: {"WRONG": {CONF_NAME: "Name"}}, - } - ] - } - }, - ) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 0 - - async def test_unload_entry(hass: HomeAssistant) -> None: """Test being able to unload an entry.""" host = "1.2.3.4" diff --git a/tests/components/dynalite/test_panel.py b/tests/components/dynalite/test_panel.py index 97752142f0c..a13b27e7567 100644 --- a/tests/components/dynalite/test_panel.py +++ b/tests/components/dynalite/test_panel.py @@ -4,7 +4,7 @@ from unittest.mock import patch from homeassistant.components import dynalite from homeassistant.components.cover import DEVICE_CLASSES -from homeassistant.const import CONF_PORT +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -20,7 +20,7 @@ async def test_get_config( entry = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host, CONF_PORT: port}, + data={CONF_HOST: host, CONF_PORT: port}, ) entry.add_to_hass(hass) with patch( @@ -44,7 +44,7 @@ async def test_get_config( result = msg["result"] entry_id = entry.entry_id assert result == { - "config": {entry_id: {dynalite.CONF_HOST: host, CONF_PORT: port}}, + "config": {entry_id: {CONF_HOST: host, CONF_PORT: port}}, "default": { "DEFAULT_NAME": dynalite.const.DEFAULT_NAME, "DEFAULT_PORT": dynalite.const.DEFAULT_PORT, @@ -66,7 +66,7 @@ async def test_save_config( entry1 = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host1, CONF_PORT: port1}, + data={CONF_HOST: host1, CONF_PORT: port1}, ) entry1.add_to_hass(hass) with patch( @@ -77,7 +77,7 @@ async def test_save_config( await hass.async_block_till_done() entry2 = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host2, CONF_PORT: port2}, + data={CONF_HOST: host2, CONF_PORT: port2}, ) entry2.add_to_hass(hass) with patch( @@ -94,7 +94,7 @@ async def test_save_config( "id": 24, "type": "dynalite/save-config", "entry_id": entry2.entry_id, - "config": {dynalite.CONF_HOST: host3, CONF_PORT: port3}, + "config": {CONF_HOST: host3, CONF_PORT: port3}, } ) @@ -103,9 +103,9 @@ async def test_save_config( assert msg["result"] == {} existing_entry = hass.config_entries.async_get_entry(entry1.entry_id) - assert existing_entry.data == {dynalite.CONF_HOST: host1, CONF_PORT: port1} + assert existing_entry.data == {CONF_HOST: host1, CONF_PORT: port1} modified_entry = hass.config_entries.async_get_entry(entry2.entry_id) - assert modified_entry.data[dynalite.CONF_HOST] == host3 + assert modified_entry.data[CONF_HOST] == host3 assert modified_entry.data[CONF_PORT] == port3 @@ -120,7 +120,7 
@@ async def test_save_config_invalid_entry( entry = MockConfigEntry( domain=dynalite.DOMAIN, - data={dynalite.CONF_HOST: host1, CONF_PORT: port1}, + data={CONF_HOST: host1, CONF_PORT: port1}, ) entry.add_to_hass(hass) with patch( @@ -136,7 +136,7 @@ async def test_save_config_invalid_entry( "id": 24, "type": "dynalite/save-config", "entry_id": "junk", - "config": {dynalite.CONF_HOST: host2, CONF_PORT: port2}, + "config": {CONF_HOST: host2, CONF_PORT: port2}, } ) @@ -145,4 +145,4 @@ async def test_save_config_invalid_entry( assert msg["result"] == {"error": True} existing_entry = hass.config_entries.async_get_entry(entry.entry_id) - assert existing_entry.data == {dynalite.CONF_HOST: host1, CONF_PORT: port1} + assert existing_entry.data == {CONF_HOST: host1, CONF_PORT: port1} diff --git a/tests/components/ecobee/fixtures/ecobee-data.json b/tests/components/ecobee/fixtures/ecobee-data.json index 1573484795f..e0e82d68863 100644 --- a/tests/components/ecobee/fixtures/ecobee-data.json +++ b/tests/components/ecobee/fixtures/ecobee-data.json @@ -160,6 +160,7 @@ "hasHumidifier": true, "humidifierMode": "manual", "hasHeatPump": true, + "compressorProtectionMinTemp": 100, "humidity": "30" }, "equipmentStatus": "fan", diff --git a/tests/components/ecobee/test_number.py b/tests/components/ecobee/test_number.py index 5b01fe8c5ba..be65b6dbb30 100644 --- a/tests/components/ecobee/test_number.py +++ b/tests/components/ecobee/test_number.py @@ -12,8 +12,8 @@ from homeassistant.core import HomeAssistant from .common import setup_platform -VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_min_time_home" -VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_min_time_away" +VENTILATOR_MIN_HOME_ID = "number.ecobee_ventilator_minimum_time_home" +VENTILATOR_MIN_AWAY_ID = "number.ecobee_ventilator_minimum_time_away" THERMOSTAT_ID = 0 @@ -26,7 +26,9 @@ async def test_ventilator_min_on_home_attributes(hass: HomeAssistant) -> None: assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert state.attributes.get("friendly_name") == "ecobee Ventilator min time home" + assert ( + state.attributes.get("friendly_name") == "ecobee Ventilator minimum time home" + ) assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES @@ -39,7 +41,9 @@ async def test_ventilator_min_on_away_attributes(hass: HomeAssistant) -> None: assert state.attributes.get("min") == 0 assert state.attributes.get("max") == 60 assert state.attributes.get("step") == 5 - assert state.attributes.get("friendly_name") == "ecobee Ventilator min time away" + assert ( + state.attributes.get("friendly_name") == "ecobee Ventilator minimum time away" + ) assert state.attributes.get("unit_of_measurement") == UnitOfTime.MINUTES @@ -77,3 +81,42 @@ async def test_set_min_time_away(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() mock_set_min_away_time.assert_called_once_with(THERMOSTAT_ID, target_value) + + +COMPRESSOR_MIN_TEMP_ID = "number.ecobee2_compressor_minimum_temperature" + + +async def test_compressor_protection_min_temp_attributes(hass: HomeAssistant) -> None: + """Test the compressor min temp value is correct. + + Ecobee runs in Fahrenheit; the test rig runs in Celsius. Conversions are necessary. 
+ """ + await setup_platform(hass, NUMBER_DOMAIN) + + state = hass.states.get(COMPRESSOR_MIN_TEMP_ID) + assert state.state == "-12.2" + assert ( + state.attributes.get("friendly_name") + == "ecobee2 Compressor minimum temperature" + ) + + +async def test_set_compressor_protection_min_temp(hass: HomeAssistant) -> None: + """Test the number can set minimum compressor operating temp. + + Ecobee runs in Fahrenheit; the test rig runs in Celsius. Conversions are necessary + """ + target_value = 0 + with patch( + "homeassistant.components.ecobee.Ecobee.set_aux_cutover_threshold" + ) as mock_set_compressor_min_temp: + await setup_platform(hass, NUMBER_DOMAIN) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: COMPRESSOR_MIN_TEMP_ID, ATTR_VALUE: target_value}, + blocking=True, + ) + await hass.async_block_till_done() + mock_set_compressor_min_temp.assert_called_once_with(1, 32) diff --git a/tests/components/ecobee/test_switch.py b/tests/components/ecobee/test_switch.py index 31c8ce8f72d..b3c4c4f8296 100644 --- a/tests/components/ecobee/test_switch.py +++ b/tests/components/ecobee/test_switch.py @@ -118,7 +118,7 @@ async def test_turn_off_20min_ventilator(hass: HomeAssistant) -> None: mock_set_20min_ventilator.assert_called_once_with(THERMOSTAT_ID, False) -DEVICE_ID = "switch.ecobee2_aux_heat_only" +DEVICE_ID = "switch.ecobee2_auxiliary_heat_only" async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: diff --git a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index 659edfde2cf..9c76c00b5b7 100644 --- a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -177,14 +177,14 @@ 'supported_features': 0, 'translation_key': 'stats_area', 'unique_id': '8516fbb1-17f1-4194-0000000_stats_area', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }) # --- # name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Goat G1 Area cleaned', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.goat_g1_area_cleaned', @@ -512,7 +512,7 @@ 'supported_features': 0, 'translation_key': 'total_stats_area', 'unique_id': '8516fbb1-17f1-4194-0000000_total_stats_area', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }) # --- # name: test_sensors[5xu9h3][sensor.goat_g1_total_area_cleaned:state] @@ -520,7 +520,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Goat G1 Total area cleaned', 'state_class': , - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.goat_g1_total_area_cleaned', @@ -755,14 +755,14 @@ 'supported_features': 0, 'translation_key': 'stats_area', 'unique_id': 'E1234567890000000001_stats_area', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }) # --- # name: test_sensors[yna5x1][sensor.ozmo_950_area_cleaned:state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 Area cleaned', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.ozmo_950_area_cleaned', @@ -1137,7 +1137,7 @@ 'supported_features': 0, 'translation_key': 'total_stats_area', 'unique_id': 'E1234567890000000001_total_stats_area', - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }) # --- # name: test_sensors[yna5x1][sensor.ozmo_950_total_area_cleaned:state] @@ -1145,7 +1145,7 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'Ozmo 950 
Total area cleaned', 'state_class': , - 'unit_of_measurement': 'm²', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.ozmo_950_total_area_cleaned', diff --git a/tests/components/elgato/snapshots/test_light.ambr b/tests/components/elgato/snapshots/test_light.ambr index c3ab076ded2..009feefc145 100644 --- a/tests/components/elgato/snapshots/test_light.ambr +++ b/tests/components/elgato/snapshots/test_light.ambr @@ -17,7 +17,7 @@ 'min_mireds': 143, 'rgb_color': tuple( 255, - 188, + 189, 133, ), 'supported_color_modes': list([ @@ -25,8 +25,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.465, - 0.376, + 0.464, + 0.377, ), }), 'context': , @@ -132,7 +132,7 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 188, + 189, 133, ), 'supported_color_modes': list([ @@ -141,8 +141,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.465, - 0.376, + 0.464, + 0.377, ), }), 'context': , @@ -249,7 +249,7 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 239, + 240, 240, ), 'supported_color_modes': list([ @@ -258,8 +258,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.34, - 0.327, + 0.339, + 0.328, ), }), 'context': , diff --git a/tests/components/emoncms/conftest.py b/tests/components/emoncms/conftest.py index 29e86f3c59d..4bd1d68217a 100644 --- a/tests/components/emoncms/conftest.py +++ b/tests/components/emoncms/conftest.py @@ -91,6 +91,21 @@ def config_entry() -> MockConfigEntry: ) +FLOW_RESULT_SECOND_URL = copy.deepcopy(FLOW_RESULT) +FLOW_RESULT_SECOND_URL[CONF_URL] = "http://1.1.1.2" + + +@pytest.fixture +def config_entry_unique_id() -> MockConfigEntry: + """Mock emoncms config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT_SECOND_URL, + unique_id="123-53535292", + ) + + FLOW_RESULT_NO_FEED = copy.deepcopy(FLOW_RESULT) FLOW_RESULT_NO_FEED[CONF_ONLY_INCLUDE_FEEDID] = None @@ -143,4 +158,5 @@ async def emoncms_client() -> AsyncGenerator[AsyncMock]: ): client = mock_client.return_value client.async_request.return_value = {"success": True, "message": FEEDS} + client.async_get_uuid.return_value = "123-53535292" yield client diff --git a/tests/components/emoncms/snapshots/test_sensor.ambr b/tests/components/emoncms/snapshots/test_sensor.ambr index 5e718c1d8e8..210196ce414 100644 --- a/tests/components/emoncms/snapshots/test_sensor.ambr +++ b/tests/components/emoncms/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_coordinator_update[sensor.emoncms_1_1_1_1_parameter_1-entry] +# name: test_coordinator_update[sensor.temperature_tag_parameter_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -13,8 +13,8 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.emoncms_1_1_1_1_parameter_1', - 'has_entity_name': False, + 'entity_id': 'sensor.temperature_tag_parameter_1', + 'has_entity_name': True, 'hidden_by': None, 'icon': None, 'id': , @@ -25,16 +25,16 @@ }), 'original_device_class': , 'original_icon': None, - 'original_name': 'emoncms@1.1.1.1 parameter 1', + 'original_name': 'Temperature tag parameter 1', 'platform': 'emoncms', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'XXXXXXXX-1', + 'translation_key': 'temperature', + 'unique_id': '123-53535292-1', 'unit_of_measurement': , }) # --- -# name: test_coordinator_update[sensor.emoncms_1_1_1_1_parameter_1-state] +# name: test_coordinator_update[sensor.temperature_tag_parameter_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'FeedId': '1', @@ -45,12 +45,12 
@@ 'Tag': 'tag', 'UserId': '1', 'device_class': 'temperature', - 'friendly_name': 'emoncms@1.1.1.1 parameter 1', + 'friendly_name': 'Temperature tag parameter 1', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.emoncms_1_1_1_1_parameter_1', + 'entity_id': 'sensor.temperature_tag_parameter_1', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/emoncms/test_config_flow.py b/tests/components/emoncms/test_config_flow.py index b554466639e..1914f23fb0b 100644 --- a/tests/components/emoncms/test_config_flow.py +++ b/tests/components/emoncms/test_config_flow.py @@ -2,8 +2,6 @@ from unittest.mock import AsyncMock -import pytest - from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_URL @@ -44,7 +42,7 @@ async def test_flow_import_failure( data=YAML, ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == EMONCMS_FAILURE["message"] + assert result["reason"] == "api_error" async def test_flow_import_already_configured( @@ -99,10 +97,6 @@ async def test_user_flow( assert len(mock_setup_entry.mock_calls) == 1 -USER_OPTIONS = { - CONF_ONLY_INCLUDE_FEEDID: ["1"], -} - CONFIG_ENTRY = { CONF_API_KEY: "my_api_key", CONF_ONLY_INCLUDE_FEEDID: ["1"], @@ -112,27 +106,26 @@ CONFIG_ENTRY = { async def test_options_flow( hass: HomeAssistant, - mock_setup_entry: AsyncMock, emoncms_client: AsyncMock, config_entry: MockConfigEntry, ) -> None: """Options flow - success test.""" await setup_integration(hass, config_entry) + assert config_entry.options == {} result = await hass.config_entries.options.async_init(config_entry.entry_id) await hass.async_block_till_done() result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input=USER_OPTIONS, + user_input={ + CONF_ONLY_INCLUDE_FEEDID: ["1"], + }, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == CONFIG_ENTRY - assert config_entry.options == CONFIG_ENTRY + assert config_entry.options == { + CONF_ONLY_INCLUDE_FEEDID: ["1"], + } -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.emoncms.options.error.failure"], -) async def test_options_flow_failure( hass: HomeAssistant, mock_setup_entry: AsyncMock, @@ -144,6 +137,25 @@ async def test_options_flow_failure( await setup_integration(hass, config_entry) result = await hass.config_entries.options.async_init(config_entry.entry_id) await hass.async_block_till_done() - assert result["errors"]["base"] == "failure" + assert result["errors"]["base"] == "api_error" + assert result["description_placeholders"]["details"] == "failure" assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" + + +async def test_unique_id_exists( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, + config_entry_unique_id: MockConfigEntry, +) -> None: + """Test when entry with same unique id already exists.""" + config_entry_unique_id.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], USER_INPUT + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/emoncms/test_init.py b/tests/components/emoncms/test_init.py index b89b6e65a66..abe1a020034 
100644 --- a/tests/components/emoncms/test_init.py +++ b/tests/components/emoncms/test_init.py @@ -4,11 +4,14 @@ from __future__ import annotations from unittest.mock import AsyncMock +from homeassistant.components.emoncms.const import DOMAIN, FEED_ID, FEED_NAME from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir from . import setup_integration -from .conftest import EMONCMS_FAILURE +from .conftest import EMONCMS_FAILURE, FEEDS from tests.common import MockConfigEntry @@ -38,3 +41,49 @@ async def test_failure( emoncms_client.async_request.return_value = EMONCMS_FAILURE config_entry.add_to_hass(hass) assert not await hass.config_entries.async_setup(config_entry.entry_id) + + +async def test_migrate_uuid( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test migration from the Home Assistant uuid to the emoncms uuid.""" + config_entry.add_to_hass(hass) + assert config_entry.unique_id is None + for feed in FEEDS: + entity_registry.async_get_or_create( + Platform.SENSOR, + DOMAIN, + f"{config_entry.entry_id}-{feed[FEED_ID]}", + config_entry=config_entry, + suggested_object_id=f"{DOMAIN}_{feed[FEED_NAME]}", + ) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + emoncms_uuid = emoncms_client.async_get_uuid.return_value + assert config_entry.unique_id == emoncms_uuid + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + + for nb, feed in enumerate(FEEDS): + assert entity_entries[nb].unique_id == f"{emoncms_uuid}-{feed[FEED_ID]}" + assert ( + entity_entries[nb].previous_unique_id + == f"{config_entry.entry_id}-{feed[FEED_ID]}" + ) + + +async def test_no_uuid( + hass: HomeAssistant, + config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test that an issue is created when the emoncms server does not provide a uuid.""" + emoncms_client.async_get_uuid.return_value = None + await setup_integration(hass, config_entry) + + assert issue_registry.async_get_issue(domain=DOMAIN, issue_id="migrate database") diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py index e8344e50161..5ca333df1e2 100644 --- a/tests/components/esphome/test_assist_satellite.py +++ b/tests/components/esphome/test_assist_satellite.py @@ -5,7 +5,7 @@ from collections.abc import Awaitable, Callable from dataclasses import replace import io import socket -from unittest.mock import ANY, Mock, patch +from unittest.mock import ANY, AsyncMock, Mock, patch import wave from aioesphomeapi import ( @@ -42,6 +42,10 @@ from homeassistant.components.esphome.assist_satellite import ( VoiceAssistantUDPServer, ) from homeassistant.components.media_source import PlayMedia +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er, intent as intent_helper @@ -1473,3 +1477,194 @@ async def test_get_set_configuration( # Device should have been updated assert satellite.async_get_configuration() == updated_config + + +async def test_wake_word_select( + hass: HomeAssistant, + 
mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test wake word select.""" + device_config = AssistSatelliteConfiguration( + available_wake_words=[ + AssistSatelliteWakeWord("okay_nabu", "Okay Nabu", ["en"]), + AssistSatelliteWakeWord("hey_jarvis", "Hey Jarvis", ["en"]), + AssistSatelliteWakeWord("hey_mycroft", "Hey Mycroft", ["en"]), + ], + active_wake_words=["hey_jarvis"], + max_active_wake_words=1, + ) + mock_client.get_voice_assistant_configuration.return_value = device_config + + # Wrap mock so we can tell when it's done + configuration_set = asyncio.Event() + + async def wrapper(*args, **kwargs): + # Update device config because entity will request it after update + device_config.active_wake_words = kwargs["active_wake_words"] + configuration_set.set() + + mock_client.set_voice_assistant_configuration = AsyncMock(side_effect=wrapper) + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + assert satellite.async_get_configuration().active_wake_words == ["hey_jarvis"] + + # Active wake word should be selected + state = hass.states.get("select.test_wake_word") + assert state is not None + assert state.state == "Hey Jarvis" + + # Changing the select should set the active wake word + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {"entity_id": "select.test_wake_word", "option": "Okay Nabu"}, + blocking=True, + ) + await hass.async_block_till_done() + + state = hass.states.get("select.test_wake_word") + assert state is not None + assert state.state == "Okay Nabu" + + # Wait for device config to be updated + async with asyncio.timeout(1): + await configuration_set.wait() + + # Satellite config should have been updated + assert satellite.async_get_configuration().active_wake_words == ["okay_nabu"] + + +async def test_wake_word_select_no_wake_words( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test wake word select is unavailable when there are no available wake word.""" + device_config = AssistSatelliteConfiguration( + available_wake_words=[], + active_wake_words=[], + max_active_wake_words=1, + ) + mock_client.get_voice_assistant_configuration.return_value = device_config + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + assert not satellite.async_get_configuration().available_wake_words + + # Select should be unavailable + state = hass.states.get("select.test_wake_word") + assert state is not None + assert state.state == STATE_UNAVAILABLE + + +async def test_wake_word_select_zero_max_wake_words( + hass: HomeAssistant, + mock_client: APIClient, 
+ mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test wake word select is unavailable when max wake words is zero.""" + device_config = AssistSatelliteConfiguration( + available_wake_words=[ + AssistSatelliteWakeWord("okay_nabu", "Okay Nabu", ["en"]), + ], + active_wake_words=[], + max_active_wake_words=0, + ) + mock_client.get_voice_assistant_configuration.return_value = device_config + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + assert satellite.async_get_configuration().max_active_wake_words == 0 + + # Select should be unavailable + state = hass.states.get("select.test_wake_word") + assert state is not None + assert state.state == STATE_UNAVAILABLE + + +async def test_wake_word_select_no_active_wake_words( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test wake word select uses first available wake word if none are active.""" + device_config = AssistSatelliteConfiguration( + available_wake_words=[ + AssistSatelliteWakeWord("okay_nabu", "Okay Nabu", ["en"]), + AssistSatelliteWakeWord("hey_jarvis", "Hey Jarvis", ["en"]), + ], + active_wake_words=[], + max_active_wake_words=1, + ) + mock_client.get_voice_assistant_configuration.return_value = device_config + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + assert not satellite.async_get_configuration().active_wake_words + + # First available wake word should be selected + state = hass.states.get("select.test_wake_word") + assert state is not None + assert state.state == "Okay Nabu" diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 3051547bd43..0a389969c78 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -1400,6 +1400,14 @@ async def test_discovery_mqtt_no_mac( await mqtt_discovery_test_abort(hass, "{}", "mqtt_missing_mac") +@pytest.mark.usefixtures("mock_zeroconf") +async def test_discovery_mqtt_empty_payload( + hass: HomeAssistant, mock_client, mock_setup_entry: None +) -> None: + """Test discovery aborted if MQTT payload is empty.""" + await mqtt_discovery_test_abort(hass, "", "mqtt_missing_payload") + + @pytest.mark.usefixtures("mock_zeroconf") async def test_discovery_mqtt_no_api( hass: HomeAssistant, mock_client, mock_setup_entry: None diff --git a/tests/components/esphome/test_light.py b/tests/components/esphome/test_light.py index 2324c73b16f..7f275fff4f2 100644 --- a/tests/components/esphome/test_light.py +++ b/tests/components/esphome/test_light.py @@ -676,7 +676,7 @@ async def test_light_rgb( 
color_mode=LightColorCapability.RGB | LightColorCapability.ON_OFF | LightColorCapability.BRIGHTNESS, - rgb=(pytest.approx(0.32941176470588235), 1.0, 0.0), + rgb=(pytest.approx(0.3333333333333333), 1.0, 0.0), brightness=pytest.approx(0.4980392156862745), ) ] @@ -814,7 +814,7 @@ async def test_light_rgbw( | LightColorCapability.ON_OFF | LightColorCapability.BRIGHTNESS, white=0, - rgb=(pytest.approx(0.32941176470588235), 1.0, 0.0), + rgb=(pytest.approx(0.3333333333333333), 1.0, 0.0), brightness=pytest.approx(0.4980392156862745), ) ] @@ -993,7 +993,7 @@ async def test_light_rgbww_with_cold_warm_white_support( | LightColorCapability.BRIGHTNESS, cold_white=0, warm_white=0, - rgb=(pytest.approx(0.32941176470588235), 1.0, 0.0), + rgb=(pytest.approx(0.3333333333333333), 1.0, 0.0), brightness=pytest.approx(0.4980392156862745), ) ] @@ -1226,7 +1226,7 @@ async def test_light_rgbww_without_cold_warm_white_support( | LightColorCapability.ON_OFF | LightColorCapability.BRIGHTNESS, white=0, - rgb=(pytest.approx(0.32941176470588235), 1.0, 0.0), + rgb=(pytest.approx(0.3333333333333333), 1.0, 0.0), brightness=pytest.approx(0.4980392156862745), ) ] diff --git a/tests/components/esphome/test_media_player.py b/tests/components/esphome/test_media_player.py index 799666fc66e..42b7e72a06e 100644 --- a/tests/components/esphome/test_media_player.py +++ b/tests/components/esphome/test_media_player.py @@ -22,6 +22,7 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_ANNOUNCE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_EXTRA, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, DOMAIN as MEDIA_PLAYER_DOMAIN, @@ -414,3 +415,22 @@ async def test_media_player_proxy( media_args = mock_client.media_player_command.call_args.kwargs assert media_args["announcement"] + + # test with bypass_proxy flag + mock_async_create_proxy_url.reset_mock() + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_CONTENT_ID: media_url, + ATTR_MEDIA_EXTRA: { + "bypass_proxy": True, + }, + }, + blocking=True, + ) + mock_async_create_proxy_url.assert_not_called() + media_args = mock_client.media_player_command.call_args.kwargs + assert media_args["media_url"] == media_url diff --git a/tests/components/esphome/test_select.py b/tests/components/esphome/test_select.py index fbe30afd042..6ae1260a89d 100644 --- a/tests/components/esphome/test_select.py +++ b/tests/components/esphome/test_select.py @@ -9,7 +9,7 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -38,6 +38,16 @@ async def test_vad_sensitivity_select( assert state.state == "default" +async def test_wake_word_select( + hass: HomeAssistant, + mock_voice_assistant_v1_entry, +) -> None: + """Test that wake word select is unavailable initially.""" + state = hass.states.get("select.test_wake_word") + assert state is not None + assert state.state == STATE_UNAVAILABLE + + async def test_select_generic_entity( hass: HomeAssistant, mock_client: APIClient, mock_generic_device_entry ) -> None: diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 7593ab21838..5060471f5d2 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -31,7 +31,6 
@@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -83,11 +82,6 @@ def stub_reconnect(): "supported_features": 0, }, ), - ( - [], - STATE_UNKNOWN, # dashboard is available but device is unknown - {"supported_features": 0}, - ), ], ) async def test_update_entity( @@ -408,11 +402,7 @@ async def test_update_becomes_available_at_runtime( ) await hass.async_block_till_done() state = hass.states.get("update.test_firmware") - assert state is not None - features = state.attributes[ATTR_SUPPORTED_FEATURES] - # There are no devices on the dashboard so no - # way to tell the version so install is disabled - assert features is UpdateEntityFeature(0) + assert state is None # A device gets added to the dashboard mock_dashboard["configured"] = [ @@ -433,6 +423,41 @@ async def test_update_becomes_available_at_runtime( assert features is UpdateEntityFeature.INSTALL +async def test_update_entity_not_present_with_dashboard_but_unknown_device( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_dashboard: dict[str, Any], +) -> None: + """Test ESPHome update entity does not get created if the device is unknown to the dashboard.""" + await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) + + mock_dashboard["configured"] = [ + { + "name": "other-test", + "current_version": "2023.2.0-dev", + "configuration": "other-test.yaml", + } + ] + + state = hass.states.get("update.test_firmware") + assert state is None + + await async_get_dashboard(hass).async_refresh() + await hass.async_block_till_done() + + state = hass.states.get("update.none_firmware") + assert state is None + + async def test_generic_device_update_entity( hass: HomeAssistant, mock_client: APIClient, diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index ac10d4fc79d..1976a8f310b 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -106,6 +106,29 @@ def mock_cover() -> Mock: return cover +@pytest.fixture +def mock_light() -> Mock: + """Fixture for a dimmable light.""" + light = Mock() + light.fibaro_id = 3 + light.parent_fibaro_id = 0 + light.name = "Test light" + light.room_id = 1 + light.dead = False + light.visible = True + light.enabled = True + light.type = "com.fibaro.FGD212" + light.base_type = "com.fibaro.device" + light.properties = {"manufacturer": ""} + light.actions = {"setValue": 1, "on": 0, "off": 0} + light.supported_features = {} + value_mock = Mock() + value_mock.has_value = True + value_mock.int_value.return_value = 20 + light.value = value_mock + return light + + @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return the default mocked config entry.""" diff --git a/tests/components/fibaro/test_light.py b/tests/components/fibaro/test_light.py new file mode 100644 index 00000000000..d0a24e009b7 --- /dev/null +++ b/tests/components/fibaro/test_light.py @@ -0,0 +1,57 @@ +"""Test the Fibaro light platform.""" + +from unittest.mock import Mock, patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import init_integration + +from tests.common import MockConfigEntry + + 
+async def test_light_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_light: Mock, + mock_room: Mock, +) -> None: + """Test that the light creates an entity.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_light] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.LIGHT]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + entry = entity_registry.async_get("light.room_1_test_light_3") + assert entry + assert entry.unique_id == "hc2_111111.3" + assert entry.original_name == "Room 1 Test light" + + +async def test_light_brightness( + hass: HomeAssistant, + mock_fibaro_client: Mock, + mock_config_entry: MockConfigEntry, + mock_light: Mock, + mock_room: Mock, +) -> None: + """Test that the light brightness value is translated.""" + + # Arrange + mock_fibaro_client.read_rooms.return_value = [mock_room] + mock_fibaro_client.read_devices.return_value = [mock_light] + + with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.LIGHT]): + # Act + await init_integration(hass, mock_config_entry) + # Assert + state = hass.states.get("light.room_1_test_light_3") + assert state.attributes["brightness"] == 51 + assert state.state == "on" diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index 33e4739a488..e7cb85a9cfc 100644 --- a/tests/components/file/test_notify.py +++ b/tests/components/file/test_notify.py @@ -12,222 +12,46 @@ from homeassistant.components.file import DOMAIN from homeassistant.components.notify import ATTR_TITLE_DEFAULT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, assert_setup_component - - -async def test_bad_config(hass: HomeAssistant) -> None: - """Test set up the platform with bad/missing config.""" - config = {notify.DOMAIN: {"name": "test", "platform": "file"}} - with assert_setup_component(0, domain="notify") as handle_config: - assert await async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert not handle_config[notify.DOMAIN] +from tests.common import MockConfigEntry @pytest.mark.parametrize( ("domain", "service", "params"), [ - (notify.DOMAIN, "test", {"message": "one, two, testing, testing"}), ( notify.DOMAIN, "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, ), ], - ids=["legacy", "entity"], -) -@pytest.mark.parametrize( - ("timestamp", "config"), - [ - ( - False, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - "timestamp": False, - } - ] - }, - ), - ( - True, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - "timestamp": True, - } - ] - }, - ), - ], - ids=["no_timestamp", "timestamp"], ) +@pytest.mark.parametrize("timestamp", [False, True], ids=["no_timestamp", "timestamp"]) async def test_notify_file( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - timestamp: bool, mock_is_allowed_path: MagicMock, - config: ConfigType, + timestamp: bool, domain: str, service: str, params: dict[str, str], ) -> None: """Test the notify file output.""" filename = "mock_file" - message = params["message"] - assert await 
async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) + full_filename = os.path.join(hass.config.path(), filename) - freezer.move_to(dt_util.utcnow()) - - m_open = mock_open() - with ( - patch("homeassistant.components.file.notify.open", m_open, create=True), - patch("homeassistant.components.file.notify.os.stat") as mock_st, - ): - mock_st.return_value.st_size = 0 - title = ( - f"{ATTR_TITLE_DEFAULT} notifications " - f"(Log started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n" - ) - - await hass.services.async_call(domain, service, params, blocking=True) - - full_filename = os.path.join(hass.config.path(), filename) - assert m_open.call_count == 1 - assert m_open.call_args == call(full_filename, "a", encoding="utf8") - - assert m_open.return_value.write.call_count == 2 - if not timestamp: - assert m_open.return_value.write.call_args_list == [ - call(title), - call(f"{message}\n"), - ] - else: - assert m_open.return_value.write.call_args_list == [ - call(title), - call(f"{dt_util.utcnow().isoformat()} {message}\n"), - ] - - -@pytest.mark.parametrize( - ("domain", "service", "params"), - [(notify.DOMAIN, "test", {"message": "one, two, testing, testing"})], - ids=["legacy"], -) -@pytest.mark.parametrize( - ("is_allowed", "config"), - [ - ( - True, - { - "notify": [ - { - "name": "test", - "platform": "file", - "filename": "mock_file", - } - ] - }, - ), - ], - ids=["allowed_but_access_failed"], -) -async def test_legacy_notify_file_exception( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_is_allowed_path: MagicMock, - config: ConfigType, - domain: str, - service: str, - params: dict[str, str], -) -> None: - """Test legacy notify file output has exception.""" - assert await async_setup_component(hass, notify.DOMAIN, config) - await hass.async_block_till_done() - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done(wait_background_tasks=True) - - freezer.move_to(dt_util.utcnow()) - - m_open = mock_open() - with ( - patch("homeassistant.components.file.notify.open", m_open, create=True), - patch("homeassistant.components.file.notify.os.stat") as mock_st, - ): - mock_st.side_effect = OSError("Access Failed") - with pytest.raises(ServiceValidationError) as exc: - await hass.services.async_call(domain, service, params, blocking=True) - assert f"{exc.value!r}" == "ServiceValidationError('write_access_failed')" - - -@pytest.mark.parametrize( - ("timestamp", "data", "options"), - [ - ( - False, - { - "name": "test", - "platform": "notify", - "file_path": "mock_file", - }, - { - "timestamp": False, - }, - ), - ( - True, - { - "name": "test", - "platform": "notify", - "file_path": "mock_file", - }, - { - "timestamp": True, - }, - ), - ], - ids=["no_timestamp", "timestamp"], -) -async def test_legacy_notify_file_entry_only_setup( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - timestamp: bool, - mock_is_allowed_path: MagicMock, - data: dict[str, Any], - options: dict[str, Any], -) -> None: - """Test the legacy notify file output in entry only setup.""" - filename = "mock_file" - - domain = notify.DOMAIN - service = "test" - params = {"message": "one, two, testing, testing"} message = params["message"] entry = MockConfigEntry( domain=DOMAIN, - data=data, + data={"name": "test", "platform": "notify", "file_path": full_filename}, + options={"timestamp": timestamp}, version=2, - 
options=options, - title=f"test [{data['file_path']}]", + title=f"test [{filename}]", ) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) + assert await hass.config_entries.async_setup(entry.entry_id) freezer.move_to(dt_util.utcnow()) @@ -245,7 +69,7 @@ async def test_legacy_notify_file_entry_only_setup( await hass.services.async_call(domain, service, params, blocking=True) assert m_open.call_count == 1 - assert m_open.call_args == call(filename, "a", encoding="utf8") + assert m_open.call_args == call(full_filename, "a", encoding="utf8") assert m_open.return_value.write.call_count == 2 if not timestamp: @@ -277,14 +101,14 @@ async def test_legacy_notify_file_entry_only_setup( ], ids=["not_allowed"], ) -async def test_legacy_notify_file_not_allowed( +async def test_notify_file_not_allowed( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_is_allowed_path: MagicMock, config: dict[str, Any], options: dict[str, Any], ) -> None: - """Test legacy notify file output not allowed.""" + """Test notify file output not allowed.""" entry = MockConfigEntry( domain=DOMAIN, data=config, @@ -301,11 +125,10 @@ async def test_legacy_notify_file_not_allowed( @pytest.mark.parametrize( ("service", "params"), [ - ("test", {"message": "one, two, testing, testing"}), ( "send_message", {"entity_id": "notify.test", "message": "one, two, testing, testing"}, - ), + ) ], ) @pytest.mark.parametrize( diff --git a/tests/components/file/test_sensor.py b/tests/components/file/test_sensor.py index 634ae9d626c..9e6a16e3e27 100644 --- a/tests/components/file/test_sensor.py +++ b/tests/components/file/test_sensor.py @@ -7,33 +7,10 @@ import pytest from homeassistant.components.file import DOMAIN from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, get_fixture_path -@patch("os.path.isfile", Mock(return_value=True)) -@patch("os.access", Mock(return_value=True)) -async def test_file_value_yaml_setup( - hass: HomeAssistant, mock_is_allowed_path: MagicMock -) -> None: - """Test the File sensor from YAML setup.""" - config = { - "sensor": { - "platform": "file", - "scan_interval": 30, - "name": "file1", - "file_path": get_fixture_path("file_value.txt", "file"), - } - } - - assert await async_setup_component(hass, "sensor", config) - await hass.async_block_till_done() - - state = hass.states.get("sensor.file1") - assert state.state == "21" - - @patch("os.path.isfile", Mock(return_value=True)) @patch("os.access", Mock(return_value=True)) async def test_file_value_entry_setup( diff --git a/tests/components/filesize/conftest.py b/tests/components/filesize/conftest.py index ac66af0d22f..09acf7a58cc 100644 --- a/tests/components/filesize/conftest.py +++ b/tests/components/filesize/conftest.py @@ -8,21 +8,30 @@ from unittest.mock import patch import pytest -from homeassistant.components.filesize.const import DOMAIN -from homeassistant.const import CONF_FILE_PATH +from homeassistant.components.filesize.const import DOMAIN, PLATFORMS +from homeassistant.const import CONF_FILE_PATH, Platform from . 
import TEST_FILE_NAME from tests.common import MockConfigEntry +@pytest.fixture(name="load_platforms") +async def patch_platform_constant() -> list[Platform]: + """Return list of platforms to load.""" + return PLATFORMS + + @pytest.fixture -def mock_config_entry(tmp_path: Path) -> MockConfigEntry: +def mock_config_entry( + tmp_path: Path, load_platforms: list[Platform] +) -> MockConfigEntry: """Return the default mocked config entry.""" test_file = str(tmp_path.joinpath(TEST_FILE_NAME)) return MockConfigEntry( title=TEST_FILE_NAME, domain=DOMAIN, + entry_id="01JD5CTQMH9FKEFQKZJ8MMBQ3X", data={CONF_FILE_PATH: test_file}, unique_id=test_file, ) diff --git a/tests/components/filesize/snapshots/test_sensor.ambr b/tests/components/filesize/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..339d64acf91 --- /dev/null +++ b/tests/components/filesize/snapshots/test_sensor.ambr @@ -0,0 +1,197 @@ +# serializer version: 1 +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_created-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_created', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Created', + 'platform': 'filesize', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'created', + 'unique_id': '01JD5CTQMH9FKEFQKZJ8MMBQ3X-created', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_created-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock_file_test_filesize.txt Created', + }), + 'context': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_created', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-20T18:19:04+00:00', + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_last_updated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_last_updated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last updated', + 'platform': 'filesize', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_updated', + 'unique_id': '01JD5CTQMH9FKEFQKZJ8MMBQ3X-last_updated', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_last_updated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock_file_test_filesize.txt Last updated', + }), + 'context': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_last_updated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-20T18:19:24+00:00', + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_size-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_file_test_filesize_txt_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Size', + 'platform': 'filesize', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'size', + 'unique_id': '01JD5CTQMH9FKEFQKZJ8MMBQ3X', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'mock_file_test_filesize.txt Size', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_size_in_bytes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_size_in_bytes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Size in bytes', + 'platform': 'filesize', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'size_bytes', + 'unique_id': '01JD5CTQMH9FKEFQKZJ8MMBQ3X-bytes', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[load_platforms0][sensor.mock_file_test_filesize_txt_size_in_bytes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'mock_file_test_filesize.txt Size in bytes', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_file_test_filesize_txt_size_in_bytes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- diff --git a/tests/components/filesize/test_config_flow.py b/tests/components/filesize/test_config_flow.py index 4b275e66d02..383b1f596f8 100644 --- a/tests/components/filesize/test_config_flow.py +++ b/tests/components/filesize/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.const import CONF_FILE_PATH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import TEST_FILE_NAME, async_create_file +from . 
import TEST_FILE_NAME, TEST_FILE_NAME2, async_create_file from tests.common import MockConfigEntry @@ -108,3 +108,119 @@ async def test_flow_fails_on_validation(hass: HomeAssistant, tmp_path: Path) -> assert result2["data"] == { CONF_FILE_PATH: test_file, } + + +async def test_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, tmp_path: Path +) -> None: + """Test a reconfigure flow.""" + test_file = str(tmp_path.joinpath(TEST_FILE_NAME2)) + await async_create_file(hass, test_file) + hass.config.allowlist_external_dirs = {tmp_path} + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FILE_PATH: test_file}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert mock_config_entry.data == {CONF_FILE_PATH: str(test_file)} + + +async def test_unique_id_already_exist_in_reconfigure_flow( + hass: HomeAssistant, tmp_path: Path +) -> None: + """Test a reconfigure flow fails when unique id already exist.""" + test_file = str(tmp_path.joinpath(TEST_FILE_NAME)) + test_file2 = str(tmp_path.joinpath(TEST_FILE_NAME2)) + await async_create_file(hass, test_file) + await async_create_file(hass, test_file2) + hass.config.allowlist_external_dirs = {tmp_path} + test_file = str(tmp_path.joinpath(TEST_FILE_NAME)) + mock_config_entry = MockConfigEntry( + title=TEST_FILE_NAME, + domain=DOMAIN, + data={CONF_FILE_PATH: test_file}, + unique_id=test_file, + ) + mock_config_entry2 = MockConfigEntry( + title=TEST_FILE_NAME2, + domain=DOMAIN, + data={CONF_FILE_PATH: test_file2}, + unique_id=test_file2, + ) + mock_config_entry.add_to_hass(hass) + mock_config_entry2.add_to_hass(hass) + + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_FILE_PATH: test_file2}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +async def test_reconfigure_flow_fails_on_validation( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, tmp_path: Path +) -> None: + """Test config flow errors in reconfigure.""" + test_file2 = str(tmp_path.joinpath(TEST_FILE_NAME2)) + hass.config.allowlist_external_dirs = {} + + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_FILE_PATH: test_file2, + }, + ) + + assert result["errors"] == {"base": "not_valid"} + + await async_create_file(hass, test_file2) + + with patch( + "homeassistant.components.filesize.config_flow.pathlib.Path", + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_FILE_PATH: test_file2, + }, + ) + + assert result2["errors"] == {"base": "not_allowed"} + + hass.config.allowlist_external_dirs = {tmp_path} + with patch( + "homeassistant.components.filesize.config_flow.pathlib.Path", + ): + 
result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_FILE_PATH: test_file2, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" diff --git a/tests/components/filesize/test_sensor.py b/tests/components/filesize/test_sensor.py index 880563f0ad8..8292800a861 100644 --- a/tests/components/filesize/test_sensor.py +++ b/tests/components/filesize/test_sensor.py @@ -2,14 +2,56 @@ import os from pathlib import Path +from unittest.mock import patch -from homeassistant.const import CONF_FILE_PATH, STATE_UNAVAILABLE +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.filesize.const import DOMAIN +from homeassistant.const import CONF_FILE_PATH, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from . import TEST_FILE_NAME, async_create_file -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + "load_platforms", + [[Platform.SENSOR]], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + tmp_path: Path, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the filesize sensors.""" + testfile = str(tmp_path.joinpath("file.txt")) + await async_create_file(hass, testfile) + hass.config.allowlist_external_dirs = {tmp_path} + mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( + mock_config_entry, data={CONF_FILE_PATH: testfile} + ) + with ( + patch( + "os.stat_result.st_mtime", + 1732126764.780758, + ), + patch( + "os.stat_result.st_ctime", + 1732126744.780758, + ), + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) async def test_invalid_path( @@ -27,7 +69,10 @@ async def test_valid_path( - hass: HomeAssistant, tmp_path: Path, mock_config_entry: MockConfigEntry + hass: HomeAssistant, + tmp_path: Path, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, ) -> None: """Test for a valid path.""" testfile = str(tmp_path.joinpath("file.txt")) @@ -41,10 +86,15 @@ async def test_valid_path( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - state = hass.states.get("sensor.file_txt_size") + state = hass.states.get("sensor.mock_file_test_filesize_txt_size") assert state assert state.state == "0.0" + device = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.entry_id)} + ) + assert device.name == mock_config_entry.title + await hass.async_add_executor_job(os.remove, testfile) @@ -63,12 +113,12 @@ async def test_state_unavailable( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - state = hass.states.get("sensor.file_txt_size") + state = hass.states.get("sensor.mock_file_test_filesize_txt_size") assert state assert state.state == "0.0" await hass.async_add_executor_job(os.remove, testfile) - await async_update_entity(hass, "sensor.file_txt_size") + await async_update_entity(hass, 
"sensor.mock_file_test_filesize_txt_size") - state = hass.states.get("sensor.file_txt_size") + state = hass.states.get("sensor.mock_file_test_filesize_txt_size") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/fitbit/conftest.py b/tests/components/fitbit/conftest.py index 57511739993..8a408748f16 100644 --- a/tests/components/fitbit/conftest.py +++ b/tests/components/fitbit/conftest.py @@ -1,6 +1,6 @@ """Test fixtures for fitbit.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable import datetime from http import HTTPStatus import time @@ -14,12 +14,7 @@ from homeassistant.components.application_credentials import ( ClientCredential, async_import_client_credential, ) -from homeassistant.components.fitbit.const import ( - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - DOMAIN, - OAUTH_SCOPES, -) +from homeassistant.components.fitbit.const import DOMAIN, OAUTH_SCOPES from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -83,15 +78,19 @@ def mock_token_entry(token_expiration_time: float, scopes: list[str]) -> dict[st @pytest.fixture(name="config_entry") -def mock_config_entry(token_entry: dict[str, Any]) -> MockConfigEntry: +def mock_config_entry( + token_entry: dict[str, Any], imported_config_data: dict[str, Any] +) -> MockConfigEntry: """Fixture for a config entry.""" return MockConfigEntry( domain=DOMAIN, data={ "auth_implementation": FAKE_AUTH_IMPL, "token": token_entry, + **imported_config_data, }, unique_id=PROFILE_USER_ID, + title=DISPLAY_NAME, ) @@ -107,37 +106,6 @@ async def setup_credentials(hass: HomeAssistant) -> None: ) -@pytest.fixture(name="fitbit_config_yaml") -def mock_fitbit_config_yaml(token_expiration_time: float) -> dict[str, Any] | None: - """Fixture for the yaml fitbit.conf file contents.""" - return { - CONF_CLIENT_ID: CLIENT_ID, - CONF_CLIENT_SECRET: CLIENT_SECRET, - "access_token": FAKE_ACCESS_TOKEN, - "refresh_token": FAKE_REFRESH_TOKEN, - "last_saved_at": token_expiration_time, - } - - -@pytest.fixture(name="fitbit_config_setup") -def mock_fitbit_config_setup( - fitbit_config_yaml: dict[str, Any] | None, -) -> Generator[None]: - """Fixture to mock out fitbit.conf file data loading and persistence.""" - has_config = fitbit_config_yaml is not None - with ( - patch( - "homeassistant.components.fitbit.sensor.os.path.isfile", - return_value=has_config, - ), - patch( - "homeassistant.components.fitbit.sensor.load_json_object", - return_value=fitbit_config_yaml, - ), - ): - yield - - @pytest.fixture(name="monitored_resources") def mock_monitored_resources() -> list[str] | None: """Fixture for the fitbit yaml config monitored_resources field.""" @@ -150,8 +118,8 @@ def mock_configured_unit_syststem() -> str | None: return None -@pytest.fixture(name="sensor_platform_config") -def mock_sensor_platform_config( +@pytest.fixture(name="imported_config_data") +def mock_imported_config_data( monitored_resources: list[str] | None, configured_unit_system: str | None, ) -> dict[str, Any]: @@ -164,32 +132,6 @@ def mock_sensor_platform_config( return config -@pytest.fixture(name="sensor_platform_setup") -async def mock_sensor_platform_setup( - hass: HomeAssistant, - sensor_platform_config: dict[str, Any], -) -> Callable[[], Awaitable[bool]]: - """Fixture to set up the integration.""" - - async def run() -> bool: - result = await async_setup_component( - hass, - "sensor", - { - "sensor": [ - { - "platform": DOMAIN, - 
**sensor_platform_config, - } - ] - }, - ) - await hass.async_block_till_done() - return result - - return run - - @pytest.fixture def platforms() -> list[Platform]: """Fixture to specify platforms to test.""" diff --git a/tests/components/fitbit/snapshots/test_sensor.ambr b/tests/components/fitbit/snapshots/test_sensor.ambr index 55b2639a56d..068df25454d 100644 --- a/tests/components/fitbit/snapshots/test_sensor.ambr +++ b/tests/components/fitbit/snapshots/test_sensor.ambr @@ -4,7 +4,7 @@ '99', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Water', + 'friendly_name': 'First L. Water', 'icon': 'mdi:cup-water', 'state_class': , 'unit_of_measurement': , @@ -16,7 +16,7 @@ '1600', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Calories In', + 'friendly_name': 'First L. Calories in', 'icon': 'mdi:food-apple', 'state_class': , 'unit_of_measurement': 'cal', @@ -28,7 +28,7 @@ '99', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Water', + 'friendly_name': 'First L. Water', 'icon': 'mdi:cup-water', 'state_class': , 'unit_of_measurement': , @@ -40,19 +40,19 @@ '1600', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Calories In', + 'friendly_name': 'First L. Calories in', 'icon': 'mdi:food-apple', 'state_class': , 'unit_of_measurement': 'cal', }), ) # --- -# name: test_sensors[monitored_resources0-sensor.activity_calories-activities/activityCalories-135] +# name: test_sensors[monitored_resources0-sensor.first_l_activity_calories-activities/activityCalories-135] tuple( '135', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Activity Calories', + 'friendly_name': 'First L. Activity calories', 'icon': 'mdi:fire', 'state_class': , 'unit_of_measurement': 'cal', @@ -60,254 +60,26 @@ 'fitbit-api-user-id-1_activities/activityCalories', ) # --- -# name: test_sensors[monitored_resources1-sensor.calories-activities/calories-139] +# name: test_sensors[monitored_resources1-sensor.first_l_tracker_activity_calories-activities/tracker/activityCalories-135] tuple( - '139', + '135', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Calories', + 'friendly_name': 'First L. 
tracker Activity calories', 'icon': 'mdi:fire', 'state_class': , 'unit_of_measurement': 'cal', }), - 'fitbit-api-user-id-1_activities/calories', + 'fitbit-api-user-id-1_activities/tracker/activityCalories', ) # --- -# name: test_sensors[monitored_resources10-sensor.steps-activities/steps-5600] - tuple( - '5600', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Steps', - 'icon': 'mdi:walk', - 'state_class': , - 'unit_of_measurement': 'steps', - }), - 'fitbit-api-user-id-1_activities/steps', - ) -# --- -# name: test_sensors[monitored_resources11-sensor.weight-body/weight-175] - tuple( - '175.0', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'weight', - 'friendly_name': 'Weight', - 'icon': 'mdi:human', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_body/weight', - ) -# --- -# name: test_sensors[monitored_resources12-sensor.body_fat-body/fat-18] - tuple( - '18.0', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Body Fat', - 'icon': 'mdi:human', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'fitbit-api-user-id-1_body/fat', - ) -# --- -# name: test_sensors[monitored_resources13-sensor.bmi-body/bmi-23.7] - tuple( - '23.7', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'BMI', - 'icon': 'mdi:human', - 'state_class': , - 'unit_of_measurement': 'BMI', - }), - 'fitbit-api-user-id-1_body/bmi', - ) -# --- -# name: test_sensors[monitored_resources14-sensor.awakenings_count-sleep/awakeningsCount-7] - tuple( - '7', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Awakenings Count', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': 'times awaken', - }), - 'fitbit-api-user-id-1_sleep/awakeningsCount', - ) -# --- -# name: test_sensors[monitored_resources15-sensor.sleep_efficiency-sleep/efficiency-80] - tuple( - '80', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Sleep Efficiency', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'fitbit-api-user-id-1_sleep/efficiency', - ) -# --- -# name: test_sensors[monitored_resources16-sensor.minutes_after_wakeup-sleep/minutesAfterWakeup-17] - tuple( - '17', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Minutes After Wakeup', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/minutesAfterWakeup', - ) -# --- -# name: test_sensors[monitored_resources17-sensor.sleep_minutes_asleep-sleep/minutesAsleep-360] - tuple( - '360', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Sleep Minutes Asleep', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/minutesAsleep', - ) -# --- -# name: test_sensors[monitored_resources18-sensor.sleep_minutes_awake-sleep/minutesAwake-35] - tuple( - '35', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Sleep Minutes Awake', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/minutesAwake', - ) -# --- -# name: test_sensors[monitored_resources19-sensor.sleep_minutes_to_fall_asleep-sleep/minutesToFallAsleep-35] - tuple( - '35', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 
'device_class': 'duration', - 'friendly_name': 'Sleep Minutes to Fall Asleep', - 'icon': 'mdi:sleep', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/minutesToFallAsleep', - ) -# --- -# name: test_sensors[monitored_resources2-sensor.distance-activities/distance-12.7] - tuple( - '12.70', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'distance', - 'friendly_name': 'Distance', - 'icon': 'mdi:map-marker', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_activities/distance', - ) -# --- -# name: test_sensors[monitored_resources20-sensor.sleep_start_time-sleep/startTime-2020-01-27T00:17:30.000] - tuple( - '2020-01-27T00:17:30.000', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Sleep Start Time', - 'icon': 'mdi:clock', - }), - 'fitbit-api-user-id-1_sleep/startTime', - ) -# --- -# name: test_sensors[monitored_resources21-sensor.sleep_time_in_bed-sleep/timeInBed-462] - tuple( - '462', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Sleep Time in Bed', - 'icon': 'mdi:hotel', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_sleep/timeInBed', - ) -# --- -# name: test_sensors[monitored_resources3-sensor.elevation-activities/elevation-7600.24] - tuple( - '7600.24', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'distance', - 'friendly_name': 'Elevation', - 'icon': 'mdi:walk', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_activities/elevation', - ) -# --- -# name: test_sensors[monitored_resources4-sensor.floors-activities/floors-8] - tuple( - '8', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Floors', - 'icon': 'mdi:walk', - 'state_class': , - 'unit_of_measurement': 'floors', - }), - 'fitbit-api-user-id-1_activities/floors', - ) -# --- -# name: test_sensors[monitored_resources5-sensor.resting_heart_rate-activities/heart-api_value5] - tuple( - '76', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'friendly_name': 'Resting Heart Rate', - 'icon': 'mdi:heart-pulse', - 'state_class': , - 'unit_of_measurement': 'bpm', - }), - 'fitbit-api-user-id-1_activities/heart', - ) -# --- -# name: test_sensors[monitored_resources6-sensor.minutes_fairly_active-activities/minutesFairlyActive-35] - tuple( - '35', - ReadOnlyDict({ - 'attribution': 'Data provided by Fitbit.com', - 'device_class': 'duration', - 'friendly_name': 'Minutes Fairly Active', - 'icon': 'mdi:walk', - 'state_class': , - 'unit_of_measurement': , - }), - 'fitbit-api-user-id-1_activities/minutesFairlyActive', - ) -# --- -# name: test_sensors[monitored_resources7-sensor.minutes_lightly_active-activities/minutesLightlyActive-95] +# name: test_sensors[monitored_resources10-sensor.first_l_minutes_lightly_active-activities/minutesLightlyActive-95] tuple( '95', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', 'device_class': 'duration', - 'friendly_name': 'Minutes Lightly Active', + 'friendly_name': 'First L. 
Minutes lightly active', 'icon': 'mdi:walk', 'state_class': , 'unit_of_measurement': , @@ -315,13 +87,13 @@ 'fitbit-api-user-id-1_activities/minutesLightlyActive', ) # --- -# name: test_sensors[monitored_resources8-sensor.minutes_sedentary-activities/minutesSedentary-18] +# name: test_sensors[monitored_resources11-sensor.first_l_minutes_sedentary-activities/minutesSedentary-18] tuple( '18', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', 'device_class': 'duration', - 'friendly_name': 'Minutes Sedentary', + 'friendly_name': 'First L. Minutes sedentary', 'icon': 'mdi:seat-recline-normal', 'state_class': , 'unit_of_measurement': , @@ -329,13 +101,13 @@ 'fitbit-api-user-id-1_activities/minutesSedentary', ) # --- -# name: test_sensors[monitored_resources9-sensor.minutes_very_active-activities/minutesVeryActive-20] +# name: test_sensors[monitored_resources12-sensor.first_l_minutes_very_active-activities/minutesVeryActive-20] tuple( '20', ReadOnlyDict({ 'attribution': 'Data provided by Fitbit.com', 'device_class': 'duration', - 'friendly_name': 'Minutes Very Active', + 'friendly_name': 'First L. Minutes very active', 'icon': 'mdi:run', 'state_class': , 'unit_of_measurement': , @@ -343,3 +115,271 @@ 'fitbit-api-user-id-1_activities/minutesVeryActive', ) # --- +# name: test_sensors[monitored_resources13-sensor.first_l_steps-activities/steps-5600] + tuple( + '5600', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Steps', + 'icon': 'mdi:walk', + 'state_class': , + 'unit_of_measurement': 'steps', + }), + 'fitbit-api-user-id-1_activities/steps', + ) +# --- +# name: test_sensors[monitored_resources14-sensor.first_l_weight-body/weight-175] + tuple( + '175.0', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'weight', + 'friendly_name': 'First L. Weight', + 'icon': 'mdi:human', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_body/weight', + ) +# --- +# name: test_sensors[monitored_resources15-sensor.first_l_body_fat-body/fat-18] + tuple( + '18.0', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Body fat', + 'icon': 'mdi:human', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'fitbit-api-user-id-1_body/fat', + ) +# --- +# name: test_sensors[monitored_resources16-sensor.first_l_bmi-body/bmi-23.7] + tuple( + '23.7', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. BMI', + 'icon': 'mdi:human', + 'state_class': , + 'unit_of_measurement': 'BMI', + }), + 'fitbit-api-user-id-1_body/bmi', + ) +# --- +# name: test_sensors[monitored_resources17-sensor.first_l_awakenings_count-sleep/awakeningsCount-7] + tuple( + '7', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Awakenings count', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': 'times awaken', + }), + 'fitbit-api-user-id-1_sleep/awakeningsCount', + ) +# --- +# name: test_sensors[monitored_resources18-sensor.first_l_sleep_efficiency-sleep/efficiency-80] + tuple( + '80', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. 
Sleep efficiency', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'fitbit-api-user-id-1_sleep/efficiency', + ) +# --- +# name: test_sensors[monitored_resources19-sensor.first_l_minutes_after_wakeup-sleep/minutesAfterWakeup-17] + tuple( + '17', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Minutes after wakeup', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/minutesAfterWakeup', + ) +# --- +# name: test_sensors[monitored_resources2-sensor.first_l_calories-activities/calories-139] + tuple( + '139', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Calories', + 'icon': 'mdi:fire', + 'state_class': , + 'unit_of_measurement': 'cal', + }), + 'fitbit-api-user-id-1_activities/calories', + ) +# --- +# name: test_sensors[monitored_resources20-sensor.first_l_sleep_minutes_asleep-sleep/minutesAsleep-360] + tuple( + '360', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Sleep minutes asleep', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/minutesAsleep', + ) +# --- +# name: test_sensors[monitored_resources21-sensor.first_l_sleep_minutes_awake-sleep/minutesAwake-35] + tuple( + '35', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Sleep minutes awake', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/minutesAwake', + ) +# --- +# name: test_sensors[monitored_resources22-sensor.first_l_sleep_minutes_to_fall_asleep-sleep/minutesToFallAsleep-35] + tuple( + '35', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Sleep minutes to fall asleep', + 'icon': 'mdi:sleep', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/minutesToFallAsleep', + ) +# --- +# name: test_sensors[monitored_resources23-sensor.first_l_sleep_start_time-sleep/startTime-2020-01-27T00:17:30.000] + tuple( + '2020-01-27T00:17:30.000', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Sleep start time', + 'icon': 'mdi:clock', + }), + 'fitbit-api-user-id-1_sleep/startTime', + ) +# --- +# name: test_sensors[monitored_resources24-sensor.first_l_sleep_time_in_bed-sleep/timeInBed-462] + tuple( + '462', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. Sleep time in bed', + 'icon': 'mdi:hotel', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_sleep/timeInBed', + ) +# --- +# name: test_sensors[monitored_resources3-sensor.first_l_tracker_calories-activities/tracker/calories-139] + tuple( + '139', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. tracker Calories', + 'icon': 'mdi:fire', + 'state_class': , + 'unit_of_measurement': 'cal', + }), + 'fitbit-api-user-id-1_activities/tracker/calories', + ) +# --- +# name: test_sensors[monitored_resources4-sensor.first_l_distance-activities/distance-12.7] + tuple( + '12.70', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'distance', + 'friendly_name': 'First L. 
Distance', + 'icon': 'mdi:map-marker', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_activities/distance', + ) +# --- +# name: test_sensors[monitored_resources5-sensor.first_l_tracker_distance-activities/distance-12.7] + tuple( + 'unknown', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'distance', + 'friendly_name': 'First L. tracker Distance', + 'icon': 'mdi:map-marker', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_activities/tracker/distance', + ) +# --- +# name: test_sensors[monitored_resources6-sensor.first_l_elevation-activities/elevation-7600.24] + tuple( + '7600.24', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'distance', + 'friendly_name': 'First L. Elevation', + 'icon': 'mdi:walk', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_activities/elevation', + ) +# --- +# name: test_sensors[monitored_resources7-sensor.first_l_floors-activities/floors-8] + tuple( + '8', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Floors', + 'icon': 'mdi:walk', + 'state_class': , + 'unit_of_measurement': 'floors', + }), + 'fitbit-api-user-id-1_activities/floors', + ) +# --- +# name: test_sensors[monitored_resources8-sensor.first_l_resting_heart_rate-activities/heart-api_value8] + tuple( + '76', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'friendly_name': 'First L. Resting heart rate', + 'icon': 'mdi:heart-pulse', + 'state_class': , + 'unit_of_measurement': 'bpm', + }), + 'fitbit-api-user-id-1_activities/heart', + ) +# --- +# name: test_sensors[monitored_resources9-sensor.first_l_minutes_fairly_active-activities/minutesFairlyActive-35] + tuple( + '35', + ReadOnlyDict({ + 'attribution': 'Data provided by Fitbit.com', + 'device_class': 'duration', + 'friendly_name': 'First L. 
Minutes fairly active', + 'icon': 'mdi:walk', + 'state_class': , + 'unit_of_measurement': , + }), + 'fitbit-api-user-id-1_activities/minutesFairlyActive', + ) +# --- diff --git a/tests/components/fitbit/test_config_flow.py b/tests/components/fitbit/test_config_flow.py index 6f717459486..70c54cd2657 100644 --- a/tests/components/fitbit/test_config_flow.py +++ b/tests/components/fitbit/test_config_flow.py @@ -2,7 +2,6 @@ from collections.abc import Awaitable, Callable from http import HTTPStatus -import time from typing import Any from unittest.mock import patch @@ -13,7 +12,7 @@ from homeassistant import config_entries from homeassistant.components.fitbit.const import DOMAIN, OAUTH2_AUTHORIZE, OAUTH2_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_entry_oauth2_flow, issue_registry as ir +from homeassistant.helpers import config_entry_oauth2_flow from .conftest import ( CLIENT_ID, @@ -255,207 +254,6 @@ async def test_config_entry_already_exists( assert result.get("reason") == "already_configured" -@pytest.mark.parametrize( - "token_expiration_time", - [time.time() + 86400, time.time() - 86400], - ids=("token_active", "token_expired"), -) -async def test_import_fitbit_config( - hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, - requests_mock: Mocker, -) -> None: - """Test that platform configuration is imported successfully.""" - - requests_mock.register_uri( - "POST", - OAUTH2_TOKEN, - status_code=HTTPStatus.OK, - json=SERVER_ACCESS_TOKEN, - ) - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_setup: - await sensor_platform_setup() - - assert len(mock_setup.mock_calls) == 1 - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - - # Verify valid profile can be fetched from the API - config_entry = entries[0] - assert config_entry.title == DISPLAY_NAME - assert config_entry.unique_id == PROFILE_USER_ID - - data = dict(config_entry.data) - # Verify imported values from fitbit.conf and configuration.yaml and - # that the token is updated. 
- assert "token" in data - expires_at = data["token"]["expires_at"] - assert expires_at > time.time() - del data["token"]["expires_at"] - assert dict(config_entry.data) == { - "auth_implementation": DOMAIN, - "clock_format": "24H", - "monitored_resources": ["activities/steps"], - "token": { - "access_token": "server-access-token", - "refresh_token": "server-refresh-token", - "scope": "activity heartrate nutrition profile settings sleep weight", - }, - "unit_system": "default", - } - - # Verify an issue is raised for deprecated configuration.yaml - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_import" - - -async def test_import_fitbit_config_failure_cannot_connect( - hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, - requests_mock: Mocker, -) -> None: - """Test platform configuration fails to import successfully.""" - - requests_mock.register_uri( - "POST", - OAUTH2_TOKEN, - status_code=HTTPStatus.OK, - json=SERVER_ACCESS_TOKEN, - ) - requests_mock.register_uri( - "GET", PROFILE_API_URL, status_code=HTTPStatus.INTERNAL_SERVER_ERROR - ) - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_setup: - await sensor_platform_setup() - - assert len(mock_setup.mock_calls) == 0 - - # Verify an issue is raised that we were unable to import configuration - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_import_issue_cannot_connect" - - -@pytest.mark.parametrize( - "status_code", - [ - (HTTPStatus.UNAUTHORIZED), - (HTTPStatus.INTERNAL_SERVER_ERROR), - ], -) -async def test_import_fitbit_config_cannot_refresh( - hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, - requests_mock: Mocker, - status_code: HTTPStatus, -) -> None: - """Test platform configuration import fails when refreshing the token.""" - - requests_mock.register_uri( - "POST", - OAUTH2_TOKEN, - status_code=status_code, - json="", - ) - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_setup: - await sensor_platform_setup() - - assert len(mock_setup.mock_calls) == 0 - - # Verify an issue is raised that we were unable to import configuration - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_import_issue_cannot_connect" - - -async def test_import_fitbit_config_already_exists( - hass: HomeAssistant, - config_entry: MockConfigEntry, - setup_credentials: None, - integration_setup: Callable[[], Awaitable[bool]], - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, - requests_mock: Mocker, -) -> None: - """Test that platform configuration is not imported if it already exists.""" - - requests_mock.register_uri( - "POST", - OAUTH2_TOKEN, - status_code=HTTPStatus.OK, - json=SERVER_ACCESS_TOKEN, - ) - - # Verify existing config entry - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_config_entry_setup: - await integration_setup() - - assert len(mock_config_entry_setup.mock_calls) == 1 - - with patch( - 
"homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_import_setup: - await sensor_platform_setup() - - assert len(mock_import_setup.mock_calls) == 0 - - # Still one config entry - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - - # Verify an issue is raised for deprecated configuration.yaml - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_import" - - -async def test_platform_setup_without_import( - hass: HomeAssistant, - sensor_platform_setup: Callable[[], Awaitable[bool]], - issue_registry: ir.IssueRegistry, -) -> None: - """Test platform configuration.yaml but no existing fitbit.conf credentials.""" - - with patch( - "homeassistant.components.fitbit.async_setup_entry", return_value=True - ) as mock_setup: - await sensor_platform_setup() - - # Verify no configuration entry is imported since the integration is not - # fully setup properly - assert len(mock_setup.mock_calls) == 0 - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 0 - - # Verify an issue is raised for deprecated configuration.yaml - assert len(issue_registry.issues) == 1 - issue = issue_registry.issues.get((DOMAIN, "deprecated_yaml")) - assert issue - assert issue.translation_key == "deprecated_yaml_no_import" - - @pytest.mark.usefixtures("current_request_with_host") async def test_reauth_flow( hass: HomeAssistant, diff --git a/tests/components/fitbit/test_sensor.py b/tests/components/fitbit/test_sensor.py index 9443d0500eb..cee9835f89f 100644 --- a/tests/components/fitbit/test_sensor.py +++ b/tests/components/fitbit/test_sensor.py @@ -78,133 +78,151 @@ def mock_token_refresh(requests_mock: Mocker) -> None: [ ( ["activities/activityCalories"], - "sensor.activity_calories", + "sensor.first_l_activity_calories", "activities/activityCalories", "135", ), + ( + ["activities/tracker/activityCalories"], + "sensor.first_l_tracker_activity_calories", + "activities/tracker/activityCalories", + "135", + ), ( ["activities/calories"], - "sensor.calories", + "sensor.first_l_calories", "activities/calories", "139", ), + ( + ["activities/tracker/calories"], + "sensor.first_l_tracker_calories", + "activities/tracker/calories", + "139", + ), ( ["activities/distance"], - "sensor.distance", + "sensor.first_l_distance", + "activities/distance", + "12.7", + ), + ( + ["activities/tracker/distance"], + "sensor.first_l_tracker_distance", "activities/distance", "12.7", ), ( ["activities/elevation"], - "sensor.elevation", + "sensor.first_l_elevation", "activities/elevation", "7600.24", ), ( ["activities/floors"], - "sensor.floors", + "sensor.first_l_floors", "activities/floors", "8", ), ( ["activities/heart"], - "sensor.resting_heart_rate", + "sensor.first_l_resting_heart_rate", "activities/heart", {"restingHeartRate": 76}, ), ( ["activities/minutesFairlyActive"], - "sensor.minutes_fairly_active", + "sensor.first_l_minutes_fairly_active", "activities/minutesFairlyActive", 35, ), ( ["activities/minutesLightlyActive"], - "sensor.minutes_lightly_active", + "sensor.first_l_minutes_lightly_active", "activities/minutesLightlyActive", 95, ), ( ["activities/minutesSedentary"], - "sensor.minutes_sedentary", + "sensor.first_l_minutes_sedentary", "activities/minutesSedentary", 18, ), ( ["activities/minutesVeryActive"], - "sensor.minutes_very_active", + "sensor.first_l_minutes_very_active", "activities/minutesVeryActive", 20, ), ( ["activities/steps"], - "sensor.steps", + 
"sensor.first_l_steps", "activities/steps", "5600", ), ( ["body/weight"], - "sensor.weight", + "sensor.first_l_weight", "body/weight", "175", ), ( ["body/fat"], - "sensor.body_fat", + "sensor.first_l_body_fat", "body/fat", "18", ), ( ["body/bmi"], - "sensor.bmi", + "sensor.first_l_bmi", "body/bmi", "23.7", ), ( ["sleep/awakeningsCount"], - "sensor.awakenings_count", + "sensor.first_l_awakenings_count", "sleep/awakeningsCount", "7", ), ( ["sleep/efficiency"], - "sensor.sleep_efficiency", + "sensor.first_l_sleep_efficiency", "sleep/efficiency", "80", ), ( ["sleep/minutesAfterWakeup"], - "sensor.minutes_after_wakeup", + "sensor.first_l_minutes_after_wakeup", "sleep/minutesAfterWakeup", "17", ), ( ["sleep/minutesAsleep"], - "sensor.sleep_minutes_asleep", + "sensor.first_l_sleep_minutes_asleep", "sleep/minutesAsleep", "360", ), ( ["sleep/minutesAwake"], - "sensor.sleep_minutes_awake", + "sensor.first_l_sleep_minutes_awake", "sleep/minutesAwake", "35", ), ( ["sleep/minutesToFallAsleep"], - "sensor.sleep_minutes_to_fall_asleep", + "sensor.first_l_sleep_minutes_to_fall_asleep", "sleep/minutesToFallAsleep", "35", ), ( ["sleep/startTime"], - "sensor.sleep_start_time", + "sensor.first_l_sleep_start_time", "sleep/startTime", "2020-01-27T00:17:30.000", ), ( ["sleep/timeInBed"], - "sensor.sleep_time_in_bed", + "sensor.first_l_sleep_time_in_bed", "sleep/timeInBed", "462", ), @@ -212,8 +230,8 @@ def mock_token_refresh(requests_mock: Mocker) -> None: ) async def test_sensors( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], register_timeseries: Callable[[str, dict[str, Any]], None], entity_registry: er.EntityRegistry, entity_id: str, @@ -226,7 +244,7 @@ async def test_sensors( register_timeseries( api_resource, timeseries_response(api_resource.replace("/", "-"), api_value) ) - await sensor_platform_setup() + await integration_setup() entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 @@ -243,13 +261,13 @@ async def test_sensors( ) async def test_device_battery( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], entity_registry: er.EntityRegistry, ) -> None: """Test battery level sensor for devices.""" - assert await sensor_platform_setup() + assert await integration_setup() entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 @@ -290,13 +308,13 @@ async def test_device_battery( ) async def test_device_battery_level( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], entity_registry: er.EntityRegistry, ) -> None: """Test battery level sensor for devices.""" - assert await sensor_platform_setup() + assert await integration_setup() entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 @@ -347,25 +365,25 @@ async def test_device_battery_level( ) async def test_profile_local( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], register_timeseries: Callable[[str, dict[str, Any]], None], expected_unit: str, ) -> None: """Test the fitbit profile locale impact on unit of measure.""" register_timeseries("body/weight", 
timeseries_response("body-weight", "175")) - await sensor_platform_setup() + await integration_setup() entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - state = hass.states.get("sensor.weight") + state = hass.states.get("sensor.first_l_weight") assert state assert state.attributes.get("unit_of_measurement") == expected_unit @pytest.mark.parametrize( - ("sensor_platform_config", "api_response", "expected_state"), + ("imported_config_data", "api_response", "expected_state"), [ ( {"clock_format": "12H", "monitored_resources": ["sleep/startTime"]}, @@ -396,8 +414,8 @@ async def test_profile_local( ) async def test_sleep_time_clock_format( hass: HomeAssistant, - fitbit_config_setup: None, - sensor_platform_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + integration_setup: Callable[[], Awaitable[bool]], register_timeseries: Callable[[str, dict[str, Any]], None], api_response: str, expected_state: str, @@ -407,9 +425,9 @@ async def test_sleep_time_clock_format( register_timeseries( "sleep/startTime", timeseries_response("sleep-startTime", api_response) ) - await sensor_platform_setup() + assert await integration_setup() - state = hass.states.get("sensor.sleep_start_time") + state = hass.states.get("sensor.first_l_sleep_start_time") assert state assert state.state == expected_state @@ -445,16 +463,16 @@ async def test_activity_scope_config_entry( states = hass.states.async_all() assert {s.entity_id for s in states} == { - "sensor.activity_calories", - "sensor.calories", - "sensor.distance", - "sensor.elevation", - "sensor.floors", - "sensor.minutes_fairly_active", - "sensor.minutes_lightly_active", - "sensor.minutes_sedentary", - "sensor.minutes_very_active", - "sensor.steps", + "sensor.first_l_activity_calories", + "sensor.first_l_calories", + "sensor.first_l_distance", + "sensor.first_l_elevation", + "sensor.first_l_floors", + "sensor.first_l_minutes_fairly_active", + "sensor.first_l_minutes_lightly_active", + "sensor.first_l_minutes_sedentary", + "sensor.first_l_minutes_very_active", + "sensor.first_l_steps", } @@ -478,7 +496,7 @@ async def test_heartrate_scope_config_entry( states = hass.states.async_all() assert {s.entity_id for s in states} == { - "sensor.resting_heart_rate", + "sensor.first_l_resting_heart_rate", } @@ -506,11 +524,11 @@ async def test_nutrition_scope_config_entry( ) assert await integration_setup() - state = hass.states.get("sensor.water") + state = hass.states.get("sensor.first_l_water") assert state assert (state.state, state.attributes) == snapshot - state = hass.states.get("sensor.calories_in") + state = hass.states.get("sensor.first_l_calories_in") assert state assert (state.state, state.attributes) == snapshot @@ -545,14 +563,14 @@ async def test_sleep_scope_config_entry( states = hass.states.async_all() assert {s.entity_id for s in states} == { - "sensor.awakenings_count", - "sensor.sleep_efficiency", - "sensor.minutes_after_wakeup", - "sensor.sleep_minutes_asleep", - "sensor.sleep_minutes_awake", - "sensor.sleep_minutes_to_fall_asleep", - "sensor.sleep_time_in_bed", - "sensor.sleep_start_time", + "sensor.first_l_awakenings_count", + "sensor.first_l_sleep_efficiency", + "sensor.first_l_minutes_after_wakeup", + "sensor.first_l_sleep_minutes_asleep", + "sensor.first_l_sleep_minutes_awake", + "sensor.first_l_sleep_minutes_to_fall_asleep", + "sensor.first_l_sleep_time_in_bed", + "sensor.first_l_sleep_start_time", } @@ -573,7 +591,7 @@ async def test_weight_scope_config_entry( states = hass.states.async_all() assert 
[s.entity_id for s in states] == [ - "sensor.weight", + "sensor.first_l_weight", ] @@ -623,7 +641,7 @@ async def test_sensor_update_failed( assert await integration_setup() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == "unavailable" @@ -655,7 +673,7 @@ async def test_sensor_update_failed_requires_reauth( assert await integration_setup() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == "unavailable" @@ -698,14 +716,14 @@ async def test_sensor_update_success( assert await integration_setup() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == "60" - await async_update_entity(hass, "sensor.resting_heart_rate") + await async_update_entity(hass, "sensor.first_l_resting_heart_rate") await hass.async_block_till_done() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == "70" @@ -867,6 +885,6 @@ async def test_resting_heart_rate_responses( ) assert await integration_setup() - state = hass.states.get("sensor.resting_heart_rate") + state = hass.states.get("sensor.first_l_resting_heart_rate") assert state assert state.state == expected_state diff --git a/tests/components/flume/conftest.py b/tests/components/flume/conftest.py index fb0d0157bbc..6173db1e2b9 100644 --- a/tests/components/flume/conftest.py +++ b/tests/components/flume/conftest.py @@ -3,8 +3,7 @@ from collections.abc import Generator import datetime from http import HTTPStatus -import json -from unittest.mock import mock_open, patch +from unittest.mock import patch import jwt import pytest @@ -116,7 +115,7 @@ def access_token_fixture(requests_mock: Mocker) -> Generator[None]: status_code=HTTPStatus.OK, json={"data": [token_response]}, ) - with patch("builtins.open", mock_open(read_data=json.dumps(token_response))): + with patch("homeassistant.components.flume.coordinator.FlumeAuth.write_token_file"): yield diff --git a/tests/components/flume/test_config_flow.py b/tests/components/flume/test_config_flow.py index c323defc791..87fe3a2bbf0 100644 --- a/tests/components/flume/test_config_flow.py +++ b/tests/components/flume/test_config_flow.py @@ -61,10 +61,6 @@ async def test_form(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.flume.config.error.invalid_auth"], -) @pytest.mark.usefixtures("access_token") async def test_form_invalid_auth(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test we handle invalid auth.""" @@ -93,10 +89,6 @@ async def test_form_invalid_auth(hass: HomeAssistant, requests_mock: Mocker) -> assert result2["errors"] == {"password": "invalid_auth"} -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.flume.config.error.cannot_connect"], -) @pytest.mark.usefixtures("access_token", "device_list_timeout") async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" @@ -118,16 +110,6 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None: assert result2["errors"] == {"base": "cannot_connect"} -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - [ - [ - 
"component.flume.config.abort.reauth_successful", - "component.flume.config.error.cannot_connect", - "component.flume.config.error.invalid_auth", - ] - ], -) @pytest.mark.usefixtures("access_token") async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test we can reauth.""" @@ -208,10 +190,6 @@ async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: assert result4["reason"] == "reauth_successful" -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.flume.config.error.cannot_connect"], -) @pytest.mark.usefixtures("access_token") async def test_form_no_devices(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test a device list response that contains no values will raise an error.""" diff --git a/tests/components/flux_led/test_light.py b/tests/components/flux_led/test_light.py index f5a7b310202..c12776eb552 100644 --- a/tests/components/flux_led/test_light.py +++ b/tests/components/flux_led/test_light.py @@ -517,7 +517,7 @@ async def test_rgbw_light_auto_on(hass: HomeAssistant) -> None: # enough resolution to determine which color to display bulb.async_turn_on.assert_not_called() bulb.async_set_brightness.assert_not_called() - bulb.async_set_levels.assert_called_with(2, 0, 0, 0) + bulb.async_set_levels.assert_called_with(3, 0, 0, 0) bulb.async_set_levels.reset_mock() await hass.services.async_call( @@ -534,7 +534,7 @@ async def test_rgbw_light_auto_on(hass: HomeAssistant) -> None: # enough resolution to determine which color to display bulb.async_turn_on.assert_not_called() bulb.async_set_brightness.assert_not_called() - bulb.async_set_levels.assert_called_with(2, 0, 0, 56) + bulb.async_set_levels.assert_called_with(3, 0, 0, 56) bulb.async_set_levels.reset_mock() bulb.brightness = 128 @@ -652,7 +652,7 @@ async def test_rgbww_light_auto_on(hass: HomeAssistant) -> None: # which color to display bulb.async_turn_on.assert_not_called() bulb.async_set_brightness.assert_not_called() - bulb.async_set_levels.assert_called_with(2, 0, 0, 0, 0) + bulb.async_set_levels.assert_called_with(3, 0, 0, 0, 0) bulb.async_set_levels.reset_mock() bulb.brightness = 128 diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index e3fae8c083e..84f1b240b88 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -10,6 +10,7 @@ from fritzconnection.core.exceptions import ( ) import pytest +from homeassistant.components import ssdp from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME, @@ -22,7 +23,6 @@ from homeassistant.components.fritz.const import ( ERROR_UNKNOWN, FRITZ_AUTH_EXCEPTIONS, ) -from homeassistant.components.ssdp import ATTR_UPNP_UDN from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( CONF_HOST, @@ -644,7 +644,7 @@ async def test_ssdp_already_in_progress_host( MOCK_NO_UNIQUE_ID = dataclasses.replace(MOCK_SSDP_DATA) MOCK_NO_UNIQUE_ID.upnp = MOCK_NO_UNIQUE_ID.upnp.copy() - del MOCK_NO_UNIQUE_ID.upnp[ATTR_UPNP_UDN] + del MOCK_NO_UNIQUE_ID.upnp[ssdp.ATTR_UPNP_UDN] result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_SSDP}, data=MOCK_NO_UNIQUE_ID ) @@ -737,3 +737,23 @@ async def test_options_flow(hass: HomeAssistant) -> None: CONF_OLD_DISCOVERY: False, CONF_CONSIDER_HOME: 37, } + + +async def test_ssdp_ipv6_link_local(hass: HomeAssistant) -> None: + """Test ignoring ipv6-link-local while ssdp 
discovery.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_SSDP}, + data=ssdp.SsdpServiceInfo( + ssdp_usn="mock_usn", + ssdp_st="mock_st", + ssdp_location="https://[fe80::1ff:fe23:4567:890a]:12345/test", + upnp={ + ssdp.ATTR_UPNP_FRIENDLY_NAME: "fake_name", + ssdp.ATTR_UPNP_UDN: "uuid:only-a-test", + }, + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "ignore_ip6_link_local" diff --git a/tests/components/fritz/test_sensor.py b/tests/components/fritz/test_sensor.py index 77deb665f5e..7dec640b898 100644 --- a/tests/components/fritz/test_sensor.py +++ b/tests/components/fritz/test_sensor.py @@ -43,7 +43,7 @@ async def test_sensor_setup( async def test_sensor_update_fail( - hass: HomeAssistant, fc_class_mock, fh_class_mock + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, fc_class_mock, fh_class_mock ) -> None: """Test failed update of Fritz!Tools sensors.""" @@ -53,10 +53,12 @@ async def test_sensor_update_fail( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - fc_class_mock().call_action_side_effect(FritzConnectionException) + fc_class_mock().call_action_side_effect(FritzConnectionException("Boom")) async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=300)) await hass.async_block_till_done(wait_background_tasks=True) + assert "Error while uptaing the data: Boom" in caplog.text + sensors = hass.states.async_all(SENSOR_DOMAIN) for sensor in sensors: assert sensor.state == STATE_UNAVAILABLE diff --git a/tests/components/frontend/test_init.py b/tests/components/frontend/test_init.py index 5006adedd77..5a682277176 100644 --- a/tests/components/frontend/test_init.py +++ b/tests/components/frontend/test_init.py @@ -166,7 +166,7 @@ async def test_frontend_and_static(mock_http_client: TestClient) -> None: text = await resp.text() # Test we can retrieve frontend.js - frontendjs = re.search(r"(?P\/frontend_es5\/app.[A-Za-z0-9_-]{11}.js)", text) + frontendjs = re.search(r"(?P\/frontend_es5\/app.[A-Za-z0-9_-]{16}.js)", text) assert frontendjs is not None, text resp = await mock_http_client.get(frontendjs.groups(0)[0]) @@ -689,7 +689,7 @@ async def test_auth_authorize(mock_http_client: TestClient) -> None: # Test we can retrieve authorize.js authorizejs = re.search( - r"(?P\/frontend_latest\/authorize.[A-Za-z0-9_-]{11}.js)", text + r"(?P\/frontend_latest\/authorize.[A-Za-z0-9_-]{16}.js)", text ) assert authorizejs is not None, text diff --git a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json index 72d129492bb..600fc46608c 100644 --- a/tests/components/fyta/fixtures/plant_status1.json +++ b/tests/components/fyta/fixtures/plant_status1.json @@ -1,13 +1,16 @@ { "battery_level": 80, - "battery_status": true, + "low_battery": true, "last_updated": "2023-01-10 10:10:00", "light": 2, "light_status": 3, "nickname": "Gummibaum", + "nutrients_status": 3, "moisture": 61, "moisture_status": 3, "sensor_available": true, + "sensor_id": "FD:1D:B7:E3:D0:E2", + "sensor_update_available": false, "sw_version": "1.0", "status": 1, "online": true, @@ -15,6 +18,7 @@ "plant_id": 0, "plant_origin_path": "", "plant_thumb_path": "", + "is_productive_plant": false, "salinity": 1, "salinity_status": 4, "scientific_name": "Ficus elastica", diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json index 8ed09532567..c39e2ac8685 100644 --- 
a/tests/components/fyta/fixtures/plant_status2.json +++ b/tests/components/fyta/fixtures/plant_status2.json @@ -1,13 +1,16 @@ { "battery_level": 80, - "battery_status": true, + "low_battery": true, "last_updated": "2023-01-02 10:10:00", "light": 2, "light_status": 3, "nickname": "Kakaobaum", + "nutrients_status": 3, "moisture": 61, "moisture_status": 3, "sensor_available": true, + "sensor_id": "FD:1D:B7:E3:D0:E3", + "sensor_update_available": false, "sw_version": "1.0", "status": 1, "online": true, @@ -15,6 +18,7 @@ "plant_id": 0, "plant_origin_path": "", "plant_thumb_path": "", + "is_productive_plant": false, "salinity": 1, "salinity_status": 4, "scientific_name": "Theobroma cacao", diff --git a/tests/components/fyta/fixtures/plant_status3.json b/tests/components/fyta/fixtures/plant_status3.json index 6e32ba601ed..58e3e1b86a0 100644 --- a/tests/components/fyta/fixtures/plant_status3.json +++ b/tests/components/fyta/fixtures/plant_status3.json @@ -1,13 +1,16 @@ { "battery_level": 80, - "battery_status": true, + "low_battery": true, "last_updated": "2023-01-02 10:10:00", "light": 2, "light_status": 3, "nickname": "Tomatenpflanze", + "nutrients_status": 0, "moisture": 61, "moisture_status": 3, "sensor_available": true, + "sensor_id": "FD:1D:B7:E3:D0:E3", + "sensor_update_available": false, "sw_version": "1.0", "status": 1, "online": true, @@ -15,6 +18,7 @@ "plant_id": 0, "plant_origin_path": "", "plant_thumb_path": "", + "is_productive_plant": true, "salinity": 1, "salinity_status": 4, "scientific_name": "Solanum lycopersicum", diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index 2af616c6412..eb19797e5b1 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -26,22 +26,34 @@ 'plant_data': dict({ '0': dict({ 'battery_level': 80.0, - 'battery_status': True, + 'fertilise_last': None, + 'fertilise_next': None, 'last_updated': '2023-01-10T10:10:00', 'light': 2.0, 'light_status': 3, + 'low_battery': True, 'moisture': 61.0, 'moisture_status': 3, 'name': 'Gummibaum', + 'notification_light': False, + 'notification_nutrition': False, + 'notification_temperature': False, + 'notification_water': False, + 'nutrients_status': 3, 'online': True, 'ph': None, 'plant_id': 0, 'plant_origin_path': '', 'plant_thumb_path': '', + 'productive_plant': False, + 'repotted': False, 'salinity': 1.0, 'salinity_status': 4, 'scientific_name': 'Ficus elastica', 'sensor_available': True, + 'sensor_id': 'FD:1D:B7:E3:D0:E2', + 'sensor_status': 0, + 'sensor_update_available': False, 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, @@ -49,22 +61,34 @@ }), '1': dict({ 'battery_level': 80.0, - 'battery_status': True, + 'fertilise_last': None, + 'fertilise_next': None, 'last_updated': '2023-01-02T10:10:00', 'light': 2.0, 'light_status': 3, + 'low_battery': True, 'moisture': 61.0, 'moisture_status': 3, 'name': 'Kakaobaum', + 'notification_light': False, + 'notification_nutrition': False, + 'notification_temperature': False, + 'notification_water': False, + 'nutrients_status': 3, 'online': True, 'ph': 7.0, 'plant_id': 0, 'plant_origin_path': '', 'plant_thumb_path': '', + 'productive_plant': False, + 'repotted': False, 'salinity': 1.0, 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', 'sensor_available': True, + 'sensor_id': 'FD:1D:B7:E3:D0:E3', + 'sensor_status': 0, + 'sensor_update_available': False, 'status': 1, 'sw_version': '1.0', 'temperature': 25.2, diff --git 
a/tests/components/garages_amsterdam/__init__.py b/tests/components/garages_amsterdam/__init__.py index ff430c0e7b2..f721506b9b0 100644 --- a/tests/components/garages_amsterdam/__init__.py +++ b/tests/components/garages_amsterdam/__init__.py @@ -1 +1,12 @@ """Tests for the Garages Amsterdam integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the integration.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/garages_amsterdam/conftest.py b/tests/components/garages_amsterdam/conftest.py index fb59ba26569..93190d1d1ee 100644 --- a/tests/components/garages_amsterdam/conftest.py +++ b/tests/components/garages_amsterdam/conftest.py @@ -1,32 +1,85 @@ -"""Test helpers.""" +"""Fixtures for Garages Amsterdam integration tests.""" -from unittest.mock import Mock, patch +from collections.abc import Generator +from datetime import UTC, datetime +from unittest.mock import AsyncMock, patch +from odp_amsterdam import Garage, GarageCategory, VehicleType import pytest +from homeassistant.components.garages_amsterdam.const import DOMAIN -@pytest.fixture(autouse=True) -def mock_cases(): - """Mock garages_amsterdam garages.""" +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override setup entry.""" with patch( - "odp_amsterdam.ODPAmsterdam.all_garages", - return_value=[ - Mock( + "homeassistant.components.garages_amsterdam.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_garages_amsterdam() -> Generator[AsyncMock]: + """Mock garages_amsterdam garages.""" + with ( + patch( + "homeassistant.components.garages_amsterdam.ODPAmsterdam", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.garages_amsterdam.config_flow.ODPAmsterdam", + new=mock_client, + ), + ): + client = mock_client.return_value + client.all_garages.return_value = [ + Garage( + garage_id="test-id-1", garage_name="IJDok", + vehicle=VehicleType.CAR, + category=GarageCategory.GARAGE, + state="ok", free_space_short=100, free_space_long=10, short_capacity=120, long_capacity=60, - state="ok", + availability_pct=50.5, + longitude=1.111111, + latitude=2.222222, + updated_at=datetime(2023, 2, 23, 13, 44, 48, tzinfo=UTC), ), - Mock( + Garage( + garage_id="test-id-2", garage_name="Arena", - free_space_short=200, - free_space_long=20, - short_capacity=240, - long_capacity=80, + vehicle=VehicleType.CAR, + category=GarageCategory.GARAGE, state="error", + free_space_short=200, + free_space_long=None, + short_capacity=240, + long_capacity=None, + availability_pct=83.3, + longitude=3.333333, + latitude=4.444444, + updated_at=datetime(2023, 2, 23, 13, 44, 48, tzinfo=UTC), ), - ], - ) as mock_get_garages: - yield mock_get_garages + ] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="monitor", + domain=DOMAIN, + data={ + "garage_name": "IJDok", + }, + unique_id="unique_thingy", + version=1, + ) diff --git a/tests/components/garages_amsterdam/snapshots/test_binary_sensor.ambr b/tests/components/garages_amsterdam/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..5f6511090ee --- /dev/null +++ 
b/tests/components/garages_amsterdam/snapshots/test_binary_sensor.ambr @@ -0,0 +1,49 @@ +# serializer version: 1 +# name: test_all_binary_sensors[binary_sensor.ijdok_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.ijdok_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state', + 'unique_id': 'IJDok-state', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensors[binary_sensor.ijdok_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'device_class': 'problem', + 'friendly_name': 'IJDok State', + }), + 'context': , + 'entity_id': 'binary_sensor.ijdok_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/garages_amsterdam/snapshots/test_sensor.ambr b/tests/components/garages_amsterdam/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..2c579631bae --- /dev/null +++ b/tests/components/garages_amsterdam/snapshots/test_sensor.ambr @@ -0,0 +1,199 @@ +# serializer version: 1 +# name: test_all_sensors[sensor.ijdok_long_parking_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ijdok_long_parking_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Long parking capacity', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'long_capacity', + 'unique_id': 'IJDok-long_capacity', + 'unit_of_measurement': 'cars', + }) +# --- +# name: test_all_sensors[sensor.ijdok_long_parking_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'friendly_name': 'IJDok Long parking capacity', + 'unit_of_measurement': 'cars', + }), + 'context': , + 'entity_id': 'sensor.ijdok_long_parking_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_all_sensors[sensor.ijdok_long_parking_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ijdok_long_parking_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Long parking free space', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 
0, + 'translation_key': 'free_space_long', + 'unique_id': 'IJDok-free_space_long', + 'unit_of_measurement': 'cars', + }) +# --- +# name: test_all_sensors[sensor.ijdok_long_parking_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'friendly_name': 'IJDok Long parking free space', + 'state_class': , + 'unit_of_measurement': 'cars', + }), + 'context': , + 'entity_id': 'sensor.ijdok_long_parking_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_all_sensors[sensor.ijdok_short_parking_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ijdok_short_parking_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Short parking capacity', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'short_capacity', + 'unique_id': 'IJDok-short_capacity', + 'unit_of_measurement': 'cars', + }) +# --- +# name: test_all_sensors[sensor.ijdok_short_parking_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'friendly_name': 'IJDok Short parking capacity', + 'unit_of_measurement': 'cars', + }), + 'context': , + 'entity_id': 'sensor.ijdok_short_parking_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '120', + }) +# --- +# name: test_all_sensors[sensor.ijdok_short_parking_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ijdok_short_parking_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Short parking free space', + 'platform': 'garages_amsterdam', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'free_space_short', + 'unique_id': 'IJDok-free_space_short', + 'unit_of_measurement': 'cars', + }) +# --- +# name: test_all_sensors[sensor.ijdok_short_parking_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by municipality of Amsterdam', + 'friendly_name': 'IJDok Short parking free space', + 'state_class': , + 'unit_of_measurement': 'cars', + }), + 'context': , + 'entity_id': 'sensor.ijdok_short_parking_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- diff --git a/tests/components/garages_amsterdam/test_binary_sensor.py b/tests/components/garages_amsterdam/test_binary_sensor.py new file mode 100644 index 00000000000..b7d0333f7e3 --- /dev/null +++ b/tests/components/garages_amsterdam/test_binary_sensor.py @@ -0,0 +1,31 @@ +"""Tests the binary sensors provided by the Garages Amsterdam integration.""" + +from __future__ import annotations + +from unittest.mock 
import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import snapshot_platform + + +async def test_all_binary_sensors( + hass: HomeAssistant, + mock_garages_amsterdam: AsyncMock, + mock_config_entry: AsyncMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test all binary sensors.""" + with patch( + "homeassistant.components.garages_amsterdam.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/garages_amsterdam/test_config_flow.py b/tests/components/garages_amsterdam/test_config_flow.py index 729d31e413c..68950c96cf0 100644 --- a/tests/components/garages_amsterdam/test_config_flow.py +++ b/tests/components/garages_amsterdam/test_config_flow.py @@ -1,39 +1,40 @@ """Test the Garages Amsterdam config flow.""" from http import HTTPStatus -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from aiohttp import ClientResponseError import pytest -from homeassistant import config_entries from homeassistant.components.garages_amsterdam.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -async def test_full_flow(hass: HomeAssistant) -> None: - """Test we get the form.""" +async def test_full_user_flow( + hass: HomeAssistant, + mock_garages_amsterdam: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full user configuration flow.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") - with patch( - "homeassistant.components.garages_amsterdam.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {"garage_name": "IJDok"}, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"garage_name": "IJDok"}, + ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("title") == "IJDok" - assert "result" in result2 - assert result2["result"].unique_id == "IJDok" + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "IJDok" + assert result.get("data") == {"garage_name": "IJDok"} + assert len(mock_garages_amsterdam.all_garages.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -50,14 +51,14 @@ async def test_full_flow(hass: HomeAssistant) -> None: async def test_error_handling( side_effect: Exception, reason: str, hass: HomeAssistant ) -> None: - """Test we get the form.""" + """Test error handling in the config flow.""" with patch( "homeassistant.components.garages_amsterdam.config_flow.ODPAmsterdam.all_garages", side_effect=side_effect, ): result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == 
reason diff --git a/tests/components/garages_amsterdam/test_init.py b/tests/components/garages_amsterdam/test_init.py new file mode 100644 index 00000000000..ed5469e5ff9 --- /dev/null +++ b/tests/components/garages_amsterdam/test_init.py @@ -0,0 +1,26 @@ +"""Tests for the Garages Amsterdam integration.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_garages_amsterdam: AsyncMock, +) -> None: + """Test the Garages Amsterdam integration loads and unloads correctly.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/garages_amsterdam/test_sensor.py b/tests/components/garages_amsterdam/test_sensor.py new file mode 100644 index 00000000000..bc36401ea47 --- /dev/null +++ b/tests/components/garages_amsterdam/test_sensor.py @@ -0,0 +1,31 @@ +"""Tests the sensors provided by the Garages Amsterdam integration.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import snapshot_platform + + +async def test_all_sensors( + hass: HomeAssistant, + mock_garages_amsterdam: AsyncMock, + mock_config_entry: AsyncMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test all sensors.""" + with patch( + "homeassistant.components.garages_amsterdam.PLATFORMS", [Platform.SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/geniushub/test_config_flow.py b/tests/components/geniushub/test_config_flow.py index 9234e03e35a..7d1d33a2245 100644 --- a/tests/components/geniushub/test_config_flow.py +++ b/tests/components/geniushub/test_config_flow.py @@ -2,21 +2,14 @@ from http import HTTPStatus import socket -from typing import Any from unittest.mock import AsyncMock from aiohttp import ClientConnectionError, ClientResponseError import pytest from homeassistant.components.geniushub import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import ( - CONF_HOST, - CONF_MAC, - CONF_PASSWORD, - CONF_TOKEN, - CONF_USERNAME, -) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -309,174 +302,3 @@ async def test_cloud_duplicate( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - 
CONF_PASSWORD: "test-password", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -async def test_import_local_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], -) -> None: - """Test full local import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "10.0.0.130" - assert result["data"] == data - assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_TOKEN: "abcdef", - }, - { - CONF_TOKEN: "abcdef", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -async def test_import_cloud_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], -) -> None: - """Test full cloud import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Genius hub" - assert result["data"] == data - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - { - CONF_TOKEN: "abcdef", - }, - { - CONF_TOKEN: "abcdef", - CONF_MAC: "aa:bb:cc:dd:ee:ff", - }, - ], -) -@pytest.mark.parametrize( - ("exception", "reason"), - [ - (socket.gaierror, "invalid_host"), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), - "invalid_auth", - ), - ( - ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), - "invalid_host", - ), - (TimeoutError, "cannot_connect"), - (ClientConnectionError, "cannot_connect"), - (Exception, "unknown"), - ], -) -async def test_import_flow_exceptions( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - data: dict[str, Any], - exception: Exception, - reason: str, -) -> None: - """Test import flow exceptions.""" - mock_geniushub_client.request.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - - -@pytest.mark.parametrize( - ("data"), - [ - { - CONF_HOST: "10.0.0.130", - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - }, - { - CONF_HOST: "10.0.0.131", - CONF_USERNAME: "test-username1", - CONF_PASSWORD: "test-password", - }, - ], -) -async def test_import_flow_local_duplicate( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_local_config_entry: MockConfigEntry, - data: dict[str, Any], -) -> None: - """Test import flow aborts on local duplicate data.""" - mock_local_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=data, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - -async def test_import_flow_cloud_duplicate( - hass: HomeAssistant, - mock_geniushub_client: AsyncMock, - mock_cloud_config_entry: MockConfigEntry, -) -> None: - """Test import flow aborts on cloud duplicate data.""" - mock_cloud_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - 
context={"source": SOURCE_IMPORT}, - data={ - CONF_TOKEN: "abcdef", - }, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" diff --git a/tests/components/glances/test_config_flow.py b/tests/components/glances/test_config_flow.py index ae8c2e1d51e..b8d376d652f 100644 --- a/tests/components/glances/test_config_flow.py +++ b/tests/components/glances/test_config_flow.py @@ -1,6 +1,6 @@ """Tests for Glances config flow.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from glances_api.exceptions import ( GlancesApiAuthorizationError, @@ -10,14 +10,14 @@ from glances_api.exceptions import ( import pytest from homeassistant import config_entries -from homeassistant.components import glances +from homeassistant.components.glances.const import DOMAIN from homeassistant.const import CONF_NAME, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from . import HA_SENSOR_DATA, MOCK_USER_INPUT -from tests.common import MockConfigEntry, patch +from tests.common import MockConfigEntry @pytest.fixture(autouse=True) @@ -31,7 +31,7 @@ async def test_form(hass: HomeAssistant) -> None: """Test config entry configured successfully.""" result = await hass.config_entries.flow.async_init( - glances.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -60,7 +60,7 @@ async def test_form_fails( mock_api.return_value.get_ha_sensor_data.side_effect = error result = await hass.config_entries.flow.async_init( - glances.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_USER_INPUT @@ -72,11 +72,11 @@ async def test_form_fails( async def test_form_already_configured(hass: HomeAssistant) -> None: """Test host is already configured.""" - entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - glances.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=MOCK_USER_INPUT @@ -87,7 +87,7 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: async def test_reauth_success(hass: HomeAssistant) -> None: """Test we can reauth.""" - entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) result = await entry.start_reauth_flow(hass) @@ -120,7 +120,7 @@ async def test_reauth_fails( hass: HomeAssistant, error: Exception, message: str, mock_api: MagicMock ) -> None: """Test we can reauth.""" - entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) mock_api.return_value.get_ha_sensor_data.side_effect = [error, HA_SENSOR_DATA] diff --git a/tests/components/glances/test_init.py b/tests/components/glances/test_init.py index 553bd6f2089..16d4d9d371b 100644 --- a/tests/components/glances/test_init.py +++ b/tests/components/glances/test_init.py @@ -1,6 +1,6 @@ """Tests for Glances 
integration.""" -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import MagicMock from glances_api.exceptions import ( GlancesApiAuthorizationError, @@ -12,9 +12,8 @@ import pytest from homeassistant.components.glances.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from . import HA_SENSOR_DATA, MOCK_USER_INPUT +from . import MOCK_USER_INPUT from tests.common import MockConfigEntry @@ -30,29 +29,6 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.LOADED -async def test_entry_deprecated_version( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, mock_api: AsyncMock -) -> None: - """Test creating an issue if glances server is version 2.""" - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT) - entry.add_to_hass(hass) - - mock_api.return_value.get_ha_sensor_data.side_effect = [ - GlancesApiNoDataAvailable("endpoint: 'all' is not valid"), # fail v4 - GlancesApiNoDataAvailable("endpoint: 'all' is not valid"), # fail v3 - HA_SENSOR_DATA, # success v2 - HA_SENSOR_DATA, - ] - - await hass.config_entries.async_setup(entry.entry_id) - - assert entry.state is ConfigEntryState.LOADED - - issue = issue_registry.async_get_issue(DOMAIN, "deprecated_version") - assert issue is not None - assert issue.severity == ir.IssueSeverity.WARNING - - @pytest.mark.parametrize( ("error", "entry_state"), [ diff --git a/tests/components/go2rtc/test_init.py b/tests/components/go2rtc/test_init.py index 0f1cac6942d..38ff82fc9c8 100644 --- a/tests/components/go2rtc/test_init.py +++ b/tests/components/go2rtc/test_init.py @@ -18,7 +18,7 @@ from go2rtc_client.ws import ( WsError, ) import pytest -from webrtc_models import RTCIceCandidate +from webrtc_models import RTCIceCandidateInit from homeassistant.components.camera import ( DOMAIN as CAMERA_DOMAIN, @@ -211,7 +211,7 @@ async def _test_setup_and_signaling( ) -> None: """Test the go2rtc config entry.""" entity_id = camera.entity_id - assert camera.frontend_stream_type == StreamType.HLS + assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS} assert await async_setup_component(hass, DOMAIN, config) await hass.async_block_till_done(wait_background_tasks=True) @@ -423,7 +423,7 @@ async def message_callbacks( [ ( WebRTCCandidate("candidate"), - HAWebRTCCandidate(RTCIceCandidate("candidate")), + HAWebRTCCandidate(RTCIceCandidateInit("candidate")), ), ( WebRTCAnswer(ANSWER_SDP), @@ -459,7 +459,7 @@ async def test_on_candidate( session_id = "session_id" # Session doesn't exist - await camera.async_on_webrtc_candidate(session_id, RTCIceCandidate("candidate")) + await camera.async_on_webrtc_candidate(session_id, RTCIceCandidateInit("candidate")) assert ( "homeassistant.components.go2rtc", logging.DEBUG, @@ -479,7 +479,7 @@ async def test_on_candidate( ) ws_client.reset_mock() - await camera.async_on_webrtc_candidate(session_id, RTCIceCandidate("candidate")) + await camera.async_on_webrtc_candidate(session_id, RTCIceCandidateInit("candidate")) ws_client.send.assert_called_once_with(WebRTCCandidate("candidate")) assert caplog.record_tuples == [] diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index f5dedc357c1..1e42edf8e7b 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -4069,3 +4069,90 @@ async def 
test_sensorstate( ) is False ) + + +@pytest.mark.parametrize( + ("state", "identifier"), + [ + (STATE_ON, 0), + (STATE_OFF, 1), + (STATE_UNKNOWN, 2), + ], +) +@pytest.mark.parametrize( + ("device_class", "name", "states"), + [ + ( + binary_sensor.BinarySensorDeviceClass.CO, + "CarbonMonoxideLevel", + ["carbon monoxide detected", "no carbon monoxide detected", "unknown"], + ), + ( + binary_sensor.BinarySensorDeviceClass.SMOKE, + "SmokeLevel", + ["smoke detected", "no smoke detected", "unknown"], + ), + ( + binary_sensor.BinarySensorDeviceClass.MOISTURE, + "WaterLeak", + ["leak", "no leak", "unknown"], + ), + ], +) +async def test_binary_sensorstate( + hass: HomeAssistant, + state: str, + identifier: int, + device_class: binary_sensor.BinarySensorDeviceClass, + name: str, + states: list[str], +) -> None: + """Test SensorState trait support for binary sensor domain.""" + + assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None + assert trait.SensorStateTrait.supported( + binary_sensor.DOMAIN, None, device_class, None + ) + + trt = trait.SensorStateTrait( + hass, + State( + "binary_sensor.test", + state, + { + "device_class": device_class, + }, + ), + BASIC_CONFIG, + ) + + assert trt.sync_attributes() == { + "sensorStatesSupported": [ + { + "name": name, + "descriptiveCapabilities": { + "availableStates": states, + }, + } + ] + } + assert trt.query_attributes() == { + "currentSensorStateData": [ + { + "name": name, + "currentSensorState": states[identifier], + "rawValue": None, + }, + ] + } + + assert helpers.get_google_type(binary_sensor.DOMAIN, None) is not None + assert ( + trait.SensorStateTrait.supported( + binary_sensor.DOMAIN, + None, + binary_sensor.BinarySensorDeviceClass.TAMPER, + None, + ) + is False + ) diff --git a/tests/components/google_assistant_sdk/test_config_flow.py b/tests/components/google_assistant_sdk/test_config_flow.py index b6ee701b228..332610e74e8 100644 --- a/tests/components/google_assistant_sdk/test_config_flow.py +++ b/tests/components/google_assistant_sdk/test_config_flow.py @@ -157,10 +157,6 @@ async def test_reauth( assert config_entry.data["token"].get("refresh_token") == "mock-refresh-token" -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.google_assistant_sdk.config.abort.single_instance_allowed"], -) @pytest.mark.usefixtures("current_request_with_host") async def test_single_instance_allowed( hass: HomeAssistant, @@ -182,37 +178,7 @@ async def test_single_instance_allowed( result = await hass.config_entries.flow.async_init( "google_assistant_sdk", context={"source": config_entries.SOURCE_USER} ) - state = config_entry_oauth2_flow._encode_jwt( - hass, - { - "flow_id": result["flow_id"], - "redirect_uri": "https://example.com/auth/external/callback", - }, - ) - assert result["url"] == ( - f"{GOOGLE_AUTH_URI}?response_type=code&client_id={CLIENT_ID}" - "&redirect_uri=https://example.com/auth/external/callback" - f"&state={state}&scope=https://www.googleapis.com/auth/assistant-sdk-prototype" - "&access_type=offline&prompt=consent" - ) - - client = await hass_client_no_auth() - resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") - assert resp.status == 200 - assert resp.headers["content-type"] == "text/html; charset=utf-8" - - aioclient_mock.post( - GOOGLE_TOKEN_URI, - json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - }, - ) - - result = await 
hass.config_entries.flow.async_configure(result["flow_id"]) assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == "single_instance_allowed" diff --git a/tests/components/google_travel_time/test_sensor.py b/tests/components/google_travel_time/test_sensor.py index 5ac9ecad482..9ee6ebbbc7b 100644 --- a/tests/components/google_travel_time/test_sensor.py +++ b/tests/components/google_travel_time/test_sensor.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import MagicMock, patch +from googlemaps.exceptions import ApiError, Timeout, TransportError import pytest from homeassistant.components.google_travel_time.config_flow import default_options @@ -13,7 +14,9 @@ from homeassistant.components.google_travel_time.const import ( UNITS_IMPERIAL, UNITS_METRIC, ) +from homeassistant.components.google_travel_time.sensor import SCAN_INTERVAL from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util from homeassistant.util.unit_system import ( METRIC_SYSTEM, US_CUSTOMARY_SYSTEM, @@ -22,7 +25,7 @@ from homeassistant.util.unit_system import ( from .const import MOCK_CONFIG -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture(name="mock_update") @@ -240,3 +243,25 @@ async def test_sensor_unit_system( distance_matrix_mock.assert_called_once() assert distance_matrix_mock.call_args.kwargs["units"] == expected_unit_option + + +@pytest.mark.parametrize( + ("exception"), + [(ApiError), (TransportError), (Timeout)], +) +@pytest.mark.parametrize( + ("data", "options"), + [(MOCK_CONFIG, {})], +) +async def test_sensor_exception( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_update: MagicMock, + mock_config: MagicMock, + exception: Exception, +) -> None: + """Test that exception gets caught.""" + mock_update.side_effect = exception("Errormessage") + async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) + await hass.async_block_till_done() + assert "Error getting travel time" in caplog.text diff --git a/tests/components/habitica/conftest.py b/tests/components/habitica/conftest.py index b5ceadd2762..f76987c5ce6 100644 --- a/tests/components/habitica/conftest.py +++ b/tests/components/habitica/conftest.py @@ -34,7 +34,7 @@ def mock_called_with( ( call for call in mock_client.mock_calls - if call[0] == method.upper() and call[1] == URL(url) + if call[0].upper() == method.upper() and call[1] == URL(url) ), None, ) @@ -56,6 +56,20 @@ def mock_habitica(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: f"{DEFAULT_URL}/api/v3/tasks/user", json=load_json_object_fixture("tasks.json", DOMAIN), ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user/anonymized", + json={ + "data": { + "user": load_json_object_fixture("user.json", DOMAIN)["data"], + "tasks": load_json_object_fixture("tasks.json", DOMAIN)["data"], + } + }, + ) return aioclient_mock diff --git a/tests/components/habitica/fixtures/common_buttons_unavailable.json b/tests/components/habitica/fixtures/common_buttons_unavailable.json index 08039ae1762..bcc65ee3f91 100644 --- a/tests/components/habitica/fixtures/common_buttons_unavailable.json +++ b/tests/components/habitica/fixtures/common_buttons_unavailable.json @@ -1,4 +1,5 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, @@ -29,11 
+30,26 @@ "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, - "needsCron": false + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/content.json b/tests/components/habitica/fixtures/content.json new file mode 100644 index 00000000000..e8e14dead73 --- /dev/null +++ b/tests/components/habitica/fixtures/content.json @@ -0,0 +1,287 @@ +{ + "success": true, + "data": { + "gear": { + "flat": { + "weapon_warrior_5": { + "text": "Ruby Sword", + "notes": "Weapon whose forge-glow never fades. Increases Strength by 15. ", + "str": 15, + "value": 90, + "type": "weapon", + "key": "weapon_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "armor_warrior_5": { + "text": "Golden Armor", + "notes": "Looks ceremonial, but no known blade can pierce it. Increases Constitution by 11.", + "con": 11, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "head_warrior_5": { + "text": "Golden Helm", + "notes": "Regal crown bound to shining armor. Increases Strength by 12.", + "str": 12, + "value": 80, + "last": true, + "type": "head", + "key": "head_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "shield_warrior_5": { + "text": "Golden Shield", + "notes": "Shining badge of the vanguard. Increases Constitution by 9.", + "con": 9, + "value": 90, + "last": true, + "type": "shield", + "key": "shield_warrior_5", + "set": "warrior-5", + "klass": "warrior", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "weapon_wizard_5": { + "twoHanded": true, + "text": "Archmage Staff", + "notes": "Assists in weaving the most complex of spells. Increases Intelligence by 15 and Perception by 7. Two-handed item.", + "int": 15, + "per": 7, + "value": 160, + "type": "weapon", + "key": "weapon_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "con": 0 + }, + "armor_wizard_5": { + "text": "Royal Magus Robe", + "notes": "Symbol of the power behind the throne. Increases Intelligence by 12.", + "int": 12, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "head_wizard_5": { + "text": "Royal Magus Hat", + "notes": "Shows authority over fortune, weather, and lesser mages. Increases Perception by 10.", + "per": 10, + "value": 80, + "last": true, + "type": "head", + "key": "head_wizard_5", + "set": "wizard-5", + "klass": "wizard", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "weapon_healer_5": { + "text": "Royal Scepter", + "notes": "Fit to grace the hand of a monarch, or of one who stands at a monarch's right hand. Increases Intelligence by 9. 
", + "int": 9, + "value": 90, + "type": "weapon", + "key": "weapon_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "armor_healer_5": { + "text": "Royal Mantle", + "notes": "Attire of those who have saved the lives of kings. Increases Constitution by 18.", + "con": 18, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "head_healer_5": { + "text": "Royal Diadem", + "notes": "For king, queen, or miracle-worker. Increases Intelligence by 9.", + "int": 9, + "value": 80, + "last": true, + "type": "head", + "key": "head_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "per": 0, + "con": 0 + }, + "shield_healer_5": { + "text": "Royal Shield", + "notes": "Bestowed upon those most dedicated to the kingdom's defense. Increases Constitution by 12.", + "con": 12, + "value": 90, + "last": true, + "type": "shield", + "key": "shield_healer_5", + "set": "healer-5", + "klass": "healer", + "index": "5", + "str": 0, + "int": 0, + "per": 0 + }, + "weapon_rogue_5": { + "text": "Ninja-to", + "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", + "str": 8, + "value": 90, + "type": "weapon", + "key": "weapon_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "armor_rogue_5": { + "text": "Umbral Armor", + "notes": "Allows stealth in the open in broad daylight. Increases Perception by 18.", + "per": 18, + "value": 120, + "last": true, + "type": "armor", + "key": "armor_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "head_rogue_5": { + "text": "Umbral Hood", + "notes": "Conceals even thoughts from those who would probe them. Increases Perception by 12.", + "per": 12, + "value": 80, + "last": true, + "type": "head", + "key": "head_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "str": 0, + "int": 0, + "con": 0 + }, + "shield_rogue_5": { + "text": "Ninja-to", + "notes": "Sleek and deadly as the ninja themselves. Increases Strength by 8. ", + "str": 8, + "value": 90, + "type": "shield", + "key": "shield_rogue_5", + "set": "rogue-5", + "klass": "rogue", + "index": "5", + "int": 0, + "per": 0, + "con": 0 + }, + "back_special_heroicAureole": { + "text": "Heroic Aureole", + "notes": "The gems on this aureole glimmer when you tell your tales of glory. Increases all stats by 7.", + "con": 7, + "str": 7, + "per": 7, + "int": 7, + "value": 175, + "type": "back", + "key": "back_special_heroicAureole", + "set": "special-heroicAureole", + "klass": "special", + "index": "heroicAureole" + }, + "headAccessory_armoire_gogglesOfBookbinding": { + "per": 8, + "set": "bookbinder", + "notes": "These goggles will help you zero in on any task, large or small! Increases Perception by 8. Enchanted Armoire: Bookbinder Set (Item 1 of 4).", + "text": "Goggles of Bookbinding", + "value": 100, + "type": "headAccessory", + "key": "headAccessory_armoire_gogglesOfBookbinding", + "klass": "armoire", + "index": "gogglesOfBookbinding", + "str": 0, + "int": 0, + "con": 0 + }, + "eyewear_armoire_plagueDoctorMask": { + "con": 5, + "int": 5, + "set": "plagueDoctor", + "notes": "An authentic mask worn by the doctors who battle the Plague of Procrastination. Increases Constitution and Intelligence by 5 each. 
Enchanted Armoire: Plague Doctor Set (Item 2 of 3).", + "text": "Plague Doctor Mask", + "value": 100, + "type": "eyewear", + "key": "eyewear_armoire_plagueDoctorMask", + "klass": "armoire", + "index": "plagueDoctorMask", + "str": 0, + "per": 0 + }, + "body_special_aetherAmulet": { + "text": "Aether Amulet", + "notes": "This amulet has a mysterious history. Increases Constitution and Strength by 10 each.", + "value": 175, + "str": 10, + "con": 10, + "type": "body", + "key": "body_special_aetherAmulet", + "set": "special-aetherAmulet", + "klass": "special", + "index": "aetherAmulet", + "int": 0, + "per": 0 + } + } + } + }, + "appVersion": "5.29.2" +} diff --git a/tests/components/habitica/fixtures/healer_fixture.json b/tests/components/habitica/fixtures/healer_fixture.json index 04cbabcfa2d..d76ae612662 100644 --- a/tests/components/habitica/fixtures/healer_fixture.json +++ b/tests/components/habitica/fixtures/healer_fixture.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,17 +25,36 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5 + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z" + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_healer_5", + "armor": "armor_healer_5", + "head": "head_healer_5", + "shield": "shield_healer_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/healer_skills_unavailable.json b/tests/components/habitica/fixtures/healer_skills_unavailable.json index 305a5f8cda1..e3cead40f7d 100644 --- a/tests/components/habitica/fixtures/healer_skills_unavailable.json +++ b/tests/components/habitica/fixtures/healer_skills_unavailable.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,16 +25,35 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0 + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, - "needsCron": false + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_healer_5", + "armor": "armor_healer_5", + "head": "head_healer_5", + "shield": "shield_healer_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/party_members.json b/tests/components/habitica/fixtures/party_members.json new file mode 100644 index 00000000000..e1bb31e6d81 
--- /dev/null +++ b/tests/components/habitica/fixtures/party_members.json @@ -0,0 +1,442 @@ +{ + "success": true, + "data": [ + { + "_id": "a380546a-94be-4b8e-8a0b-23e0d5c03303", + "auth": { + "local": { + "username": "test-username" + }, + "timestamps": { + "created": "2024-10-19T18:43:39.782Z", + "loggedin": "2024-10-31T16:13:35.048Z", + "updated": "2024-10-31T16:15:56.552Z" + } + }, + "achievements": { + "ultimateGearSets": { + "healer": false, + "wizard": false, + "rogue": false, + "warrior": false + }, + "streak": 0, + "challenges": [], + "perfect": 1, + "quests": {}, + "purchasedEquipment": true, + "completedTask": true, + "partyUp": true + }, + "backer": {}, + "contributor": {}, + "flags": { + "verifiedUsername": true, + "classSelected": true + }, + "items": { + "gear": { + "owned": { + "headAccessory_special_blackHeadband": true, + "headAccessory_special_blueHeadband": true, + "headAccessory_special_greenHeadband": true, + "headAccessory_special_pinkHeadband": true, + "headAccessory_special_redHeadband": true, + "headAccessory_special_whiteHeadband": true, + "headAccessory_special_yellowHeadband": true, + "eyewear_special_blackTopFrame": true, + "eyewear_special_blueTopFrame": true, + "eyewear_special_greenTopFrame": true, + "eyewear_special_pinkTopFrame": true, + "eyewear_special_redTopFrame": true, + "eyewear_special_whiteTopFrame": true, + "eyewear_special_yellowTopFrame": true, + "eyewear_special_blackHalfMoon": true, + "eyewear_special_blueHalfMoon": true, + "eyewear_special_greenHalfMoon": true, + "eyewear_special_pinkHalfMoon": true, + "eyewear_special_redHalfMoon": true, + "eyewear_special_whiteHalfMoon": true, + "eyewear_special_yellowHalfMoon": true, + "armor_special_bardRobes": true, + "weapon_special_fall2024Warrior": true, + "shield_special_fall2024Warrior": true, + "head_special_fall2024Warrior": true, + "armor_special_fall2024Warrior": true, + "back_mystery_201402": true, + "body_mystery_202003": true, + "head_special_bardHat": true, + "weapon_wizard_0": true + }, + "equipped": { + "weapon": "weapon_special_fall2024Warrior", + "armor": "armor_special_fall2024Warrior", + "head": "head_special_fall2024Warrior", + "shield": "shield_special_fall2024Warrior", + "back": "back_mystery_201402", + "headAccessory": "headAccessory_special_pinkHeadband", + "eyewear": "eyewear_special_pinkHalfMoon", + "body": "body_mystery_202003" + }, + "costume": { + "armor": "armor_base_0", + "head": "head_base_0", + "shield": "shield_base_0" + } + }, + "special": { + "snowball": 99, + "spookySparkles": 99, + "shinySeed": 99, + "seafoam": 99, + "valentine": 0, + "valentineReceived": [], + "nye": 0, + "nyeReceived": [], + "greeting": 0, + "greetingReceived": [], + "thankyou": 0, + "thankyouReceived": [], + "birthday": 0, + "birthdayReceived": [], + "congrats": 0, + "congratsReceived": [], + "getwell": 0, + "getwellReceived": [], + "goodluck": 0, + "goodluckReceived": [] + }, + "pets": { + "Rat-Shade": 1, + "Gryphatrice-Jubilant": 1 + }, + "currentPet": "Gryphatrice-Jubilant", + "eggs": { + "Cactus": 1, + "Fox": 2, + "Wolf": 1 + }, + "hatchingPotions": { + "CottonCandyBlue": 1, + "RoyalPurple": 1 + }, + "food": { + "Meat": 2, + "Chocolate": 1, + "CottonCandyPink": 1, + "Candy_Zombie": 1 + }, + "mounts": { + "Velociraptor-Base": true, + "Gryphon-Gryphatrice": true + }, + "currentMount": "Gryphon-Gryphatrice", + "quests": { + "dustbunnies": 1, + "vice1": 1, + "atom1": 1, + "moonstone1": 1, + "goldenknight1": 1, + "basilist": 1 + }, + "lastDrop": { + "date": "2024-10-31T16:13:34.952Z", + "count": 0 
+ } + }, + "party": { + "quest": { + "progress": { + "up": 0, + "down": 0, + "collectedItems": 0, + "collect": {} + }, + "RSVPNeeded": false, + "key": "dustbunnies" + }, + "order": "level", + "orderAscending": "ascending", + "_id": "94cd398c-2240-4320-956e-6d345cf2c0de" + }, + "preferences": { + "size": "slim", + "hair": { + "color": "red", + "base": 3, + "bangs": 1, + "beard": 0, + "mustache": 0, + "flower": 1 + }, + "skin": "915533", + "shirt": "blue", + "chair": "handleless_pink", + "costume": false, + "sleep": false, + "disableClasses": false, + "tasks": { + "groupByChallenge": false, + "confirmScoreNotes": false, + "mirrorGroupTasks": [], + "activeFilter": { + "habit": "all", + "daily": "all", + "todo": "remaining", + "reward": "all" + } + }, + "background": "violet" + }, + "profile": { + "name": "test-user" + }, + "stats": { + "hp": 50, + "mp": 150.8, + "exp": 127, + "gp": 19.08650199252128, + "lvl": 99, + "class": "wizard", + "points": 0, + "str": 0, + "con": 0, + "int": 0, + "per": 0, + "buffs": { + "str": 50, + "int": 50, + "per": 50, + "con": 50, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "training": { + "int": 0, + "per": 0, + "str": 0, + "con": 0 + }, + "toNextLevel": 3580, + "maxHealth": 50, + "maxMP": 228 + }, + "inbox": { + "optOut": false + }, + "loginIncentives": 6, + "id": "a380546a-94be-4b8e-8a0b-23e0d5c03303" + }, + { + "_id": "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + "auth": { + "local": { + "username": "test-partymember-username" + }, + "timestamps": { + "created": "2024-10-10T15:57:01.106Z", + "loggedin": "2024-10-30T19:37:01.970Z", + "updated": "2024-10-30T19:38:25.968Z" + } + }, + "achievements": { + "ultimateGearSets": { + "healer": false, + "wizard": false, + "rogue": false, + "warrior": false + }, + "streak": 0, + "challenges": [], + "perfect": 1, + "quests": {}, + "completedTask": true, + "partyUp": true, + "snowball": 1, + "spookySparkles": 1, + "seafoam": 1, + "shinySeed": 1 + }, + "backer": {}, + "contributor": {}, + "flags": { + "verifiedUsername": true, + "classSelected": false + }, + "items": { + "gear": { + "equipped": { + "armor": "armor_base_0", + "head": "head_base_0", + "shield": "shield_base_0" + }, + "costume": { + "armor": "armor_base_0", + "head": "head_base_0", + "shield": "shield_base_0" + }, + "owned": { + "headAccessory_special_blackHeadband": true, + "headAccessory_special_blueHeadband": true, + "headAccessory_special_greenHeadband": true, + "headAccessory_special_pinkHeadband": true, + "headAccessory_special_redHeadband": true, + "headAccessory_special_whiteHeadband": true, + "headAccessory_special_yellowHeadband": true, + "eyewear_special_blackTopFrame": true, + "eyewear_special_blueTopFrame": true, + "eyewear_special_greenTopFrame": true, + "eyewear_special_pinkTopFrame": true, + "eyewear_special_redTopFrame": true, + "eyewear_special_whiteTopFrame": true, + "eyewear_special_yellowTopFrame": true, + "eyewear_special_blackHalfMoon": true, + "eyewear_special_blueHalfMoon": true, + "eyewear_special_greenHalfMoon": true, + "eyewear_special_pinkHalfMoon": true, + "eyewear_special_redHalfMoon": true, + "eyewear_special_whiteHalfMoon": true, + "eyewear_special_yellowHalfMoon": true, + "armor_special_bardRobes": true + } + }, + "special": { + "snowball": 0, + "spookySparkles": 0, + "shinySeed": 0, + "seafoam": 0, + "valentine": 0, + "valentineReceived": [], + "nye": 0, + "nyeReceived": [], + "greeting": 0, + "greetingReceived": [], + "thankyou": 0, + 
"thankyouReceived": [], + "birthday": 0, + "birthdayReceived": [], + "congrats": 0, + "congratsReceived": [], + "getwell": 0, + "getwellReceived": [], + "goodluck": 0, + "goodluckReceived": [] + }, + "lastDrop": { + "count": 0, + "date": "2024-10-30T19:37:01.838Z" + }, + "currentPet": "", + "currentMount": "", + "pets": {}, + "eggs": { + "BearCub": 1, + "Cactus": 1 + }, + "hatchingPotions": { + "Skeleton": 1 + }, + "food": { + "Candy_Red": 1 + }, + "mounts": {}, + "quests": { + "dustbunnies": 1 + } + }, + "party": { + "quest": { + "progress": { + "up": 0, + "down": 0, + "collectedItems": 0, + "collect": {} + }, + "RSVPNeeded": true, + "key": "dustbunnies" + }, + "order": "level", + "orderAscending": "ascending", + "_id": "94cd398c-2240-4320-956e-6d345cf2c0de" + }, + "preferences": { + "size": "slim", + "hair": { + "color": "red", + "base": 3, + "bangs": 1, + "beard": 0, + "mustache": 0, + "flower": 1 + }, + "skin": "915533", + "shirt": "blue", + "chair": "none", + "costume": false, + "sleep": false, + "disableClasses": false, + "tasks": { + "groupByChallenge": false, + "confirmScoreNotes": false, + "mirrorGroupTasks": [], + "activeFilter": { + "habit": "all", + "daily": "all", + "todo": "remaining", + "reward": "all" + } + }, + "background": "violet" + }, + "profile": { + "name": "test-partymember-displayname" + }, + "stats": { + "buffs": { + "str": 1, + "int": 1, + "per": 1, + "con": 1, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": true, + "snowball": false, + "spookySparkles": false + }, + "training": { + "int": 0, + "per": 0, + "str": 0, + "con": 0 + }, + "hp": 50, + "mp": 24, + "exp": 24, + "gp": 4, + "lvl": 1, + "class": "warrior", + "points": 0, + "str": 0, + "con": 0, + "int": 0, + "per": 0, + "toNextLevel": 25, + "maxHealth": 50, + "maxMP": 32 + }, + "inbox": { + "optOut": false + }, + "loginIncentives": 1, + "id": "ffce870c-3ff3-4fa4-bad1-87612e52b8e7" + } + ], + "notifications": [], + "userV": 96, + "appVersion": "5.29.0" +} diff --git a/tests/components/habitica/fixtures/quest_invitation_off.json b/tests/components/habitica/fixtures/quest_invitation_off.json new file mode 100644 index 00000000000..0f191696476 --- /dev/null +++ b/tests/components/habitica/fixtures/quest_invitation_off.json @@ -0,0 +1,66 @@ +{ + "success": true, + "data": { + "api_user": "test-api-user", + "profile": { "name": "test-user" }, + "stats": { + "buffs": { + "str": 0, + "int": 0, + "per": 0, + "con": 0, + "stealth": 0, + "streaks": false, + "seafoam": false, + "shinySeed": false, + "snowball": false, + "spookySparkles": false + }, + "hp": 0, + "mp": 50.89999999999998, + "exp": 737, + "gp": 137.62587214609795, + "lvl": 38, + "class": "wizard", + "maxHealth": 50, + "maxMP": 166, + "toNextLevel": 880, + "points": 5 + }, + "preferences": { + "sleep": false, + "automaticAllocation": true, + "disableClasses": false, + "language": "en" + }, + "flags": { + "classSelected": true + }, + "tasksOrder": { + "rewards": ["5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b"], + "todos": [ + "88de7cd9-af2b-49ce-9afd-bf941d87336b", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "1aa3137e-ef72-4d1f-91ee-41933602f438", + "86ea2475-d1b5-4020-bdcc-c188c7996afa" + ], + "dailys": [ + "f21fa608-cfc6-4413-9fc7-0eb1b48ca43a", + "bc1d1855-b2b8-4663-98ff-62e7b763dfc4", + "e97659e0-2c42-4599-a7bb-00282adc410d", + "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", + "f2c85972-1a19-4426-bc6d-ce3337b9d99f", + "2c6d136c-a1c3-4bef-b7c4-fa980784b1e1" + ], + "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] + }, + "party": { + "quest": { + 
"RSVPNeeded": false, + "key": null + } + }, + "needsCron": true, + "lastCron": "2024-09-21T22:01:55.586Z" + } +} diff --git a/tests/components/habitica/fixtures/rogue_fixture.json b/tests/components/habitica/fixtures/rogue_fixture.json index f0ea42a7182..b6fcd9f1427 100644 --- a/tests/components/habitica/fixtures/rogue_fixture.json +++ b/tests/components/habitica/fixtures/rogue_fixture.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,17 +25,36 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5 + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z" + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/rogue_skills_unavailable.json b/tests/components/habitica/fixtures/rogue_skills_unavailable.json index 2709731ba55..b3bada649fa 100644 --- a/tests/components/habitica/fixtures/rogue_skills_unavailable.json +++ b/tests/components/habitica/fixtures/rogue_skills_unavailable.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": true, "seafoam": false, @@ -24,16 +25,35 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0 + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, - "needsCron": false + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/rogue_stealth_unavailable.json b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json index a4e86abbb91..9478feb91fa 100644 --- a/tests/components/habitica/fixtures/rogue_stealth_unavailable.json +++ b/tests/components/habitica/fixtures/rogue_stealth_unavailable.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 4, "streaks": false, "seafoam": false, @@ -24,16 +25,35 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0 + "points": 0, + "str": 15, + "con": 
15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, - "needsCron": false + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_rogue_5", + "armor": "armor_rogue_5", + "head": "head_rogue_5", + "shield": "shield_rogue_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/tasks.json b/tests/components/habitica/fixtures/tasks.json index 0d6ffba0732..7784b9c7f49 100644 --- a/tests/components/habitica/fixtures/tasks.json +++ b/tests/components/habitica/fixtures/tasks.json @@ -121,7 +121,8 @@ "createdAt": "2024-07-07T17:51:53.264Z", "updatedAt": "2024-07-12T09:58:45.438Z", "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "e97659e0-2c42-4599-a7bb-00282adc410d" + "id": "e97659e0-2c42-4599-a7bb-00282adc410d", + "alias": "create_a_task" }, { "_id": "564b9ac9-c53d-4638-9e7f-1cd96fe19baa", @@ -344,7 +345,12 @@ "daysOfMonth": [], "weeksOfMonth": [], "checklist": [], - "reminders": [], + "reminders": [ + { + "id": "1491d640-6b21-4d0c-8940-0b7aa61c8836", + "time": "2024-09-22T20:00:00.0000Z" + } + ], "createdAt": "2024-07-07T17:51:53.266Z", "updatedAt": "2024-09-21T22:51:41.756Z", "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", @@ -454,7 +460,8 @@ "createdAt": "2024-09-21T22:17:19.513Z", "updatedAt": "2024-09-21T22:19:35.576Z", "userId": "5f359083-ef78-4af0-985a-0b2c6d05797c", - "id": "2f6fcabc-f670-4ec3-ba65-817e8deea490" + "id": "2f6fcabc-f670-4ec3-ba65-817e8deea490", + "alias": "pay_bills" }, { "_id": "1aa3137e-ef72-4d1f-91ee-41933602f438", diff --git a/tests/components/habitica/fixtures/user.json b/tests/components/habitica/fixtures/user.json index a10ce354f44..a498de910ef 100644 --- a/tests/components/habitica/fixtures/user.json +++ b/tests/components/habitica/fixtures/user.json @@ -1,13 +1,15 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, + "auth": { "local": { "username": "test-username" } }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,12 +26,17 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5 + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true @@ -52,7 +59,29 @@ ], "habits": ["1d147de6-5c02-4740-8e2f-71d3015a37f4"] }, + "party": { + "quest": { + "RSVPNeeded": true, + "key": "dustbunnies" + }, + "_id": "94cd398c-2240-4320-956e-6d345cf2c0de" + }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z" + "lastCron": "2024-09-21T22:01:55.586Z", + "id": "a380546a-94be-4b8e-8a0b-23e0d5c03303", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git 
a/tests/components/habitica/fixtures/warrior_fixture.json b/tests/components/habitica/fixtures/warrior_fixture.json index 53d18206f9a..97ad9e5b060 100644 --- a/tests/components/habitica/fixtures/warrior_fixture.json +++ b/tests/components/habitica/fixtures/warrior_fixture.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,17 +25,36 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5 + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z" + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/warrior_skills_unavailable.json b/tests/components/habitica/fixtures/warrior_skills_unavailable.json index 53160646569..f25ca484cba 100644 --- a/tests/components/habitica/fixtures/warrior_skills_unavailable.json +++ b/tests/components/habitica/fixtures/warrior_skills_unavailable.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,16 +25,35 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0 + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, - "needsCron": false + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_warrior_5", + "armor": "armor_warrior_5", + "head": "head_warrior_5", + "shield": "shield_warrior_5", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/wizard_fixture.json b/tests/components/habitica/fixtures/wizard_fixture.json index 0f9f2a49639..655c0ad1f0d 100644 --- a/tests/components/habitica/fixtures/wizard_fixture.json +++ b/tests/components/habitica/fixtures/wizard_fixture.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,17 +25,36 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 5 + "points": 5, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": true, - "disableClasses": false + 
"disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, "needsCron": true, - "lastCron": "2024-09-21T22:01:55.586Z" + "lastCron": "2024-09-21T22:01:55.586Z", + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/wizard_frost_unavailable.json b/tests/components/habitica/fixtures/wizard_frost_unavailable.json index ba57568e99e..d5634633a0d 100644 --- a/tests/components/habitica/fixtures/wizard_frost_unavailable.json +++ b/tests/components/habitica/fixtures/wizard_frost_unavailable.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": true, "seafoam": false, @@ -24,16 +25,35 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0 + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, - "needsCron": false + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/fixtures/wizard_skills_unavailable.json b/tests/components/habitica/fixtures/wizard_skills_unavailable.json index 11bf0a19193..eaf5f6f55b8 100644 --- a/tests/components/habitica/fixtures/wizard_skills_unavailable.json +++ b/tests/components/habitica/fixtures/wizard_skills_unavailable.json @@ -1,13 +1,14 @@ { + "success": true, "data": { "api_user": "test-api-user", "profile": { "name": "test-user" }, "stats": { "buffs": { - "str": 0, - "int": 0, - "per": 0, - "con": 0, + "str": 26, + "int": 26, + "per": 26, + "con": 26, "stealth": 0, "streaks": false, "seafoam": false, @@ -24,16 +25,35 @@ "maxHealth": 50, "maxMP": 166, "toNextLevel": 880, - "points": 0 + "points": 0, + "str": 15, + "con": 15, + "int": 15, + "per": 15 }, "preferences": { "sleep": false, "automaticAllocation": false, - "disableClasses": false + "disableClasses": false, + "language": "en" }, "flags": { "classSelected": true }, - "needsCron": false + "needsCron": false, + "items": { + "gear": { + "equipped": { + "weapon": "weapon_wizard_5", + "armor": "armor_wizard_5", + "head": "head_wizard_5", + "shield": "shield_base_0", + "back": "back_special_heroicAureole", + "headAccessory": "headAccessory_armoire_gogglesOfBookbinding", + "eyewear": "eyewear_armoire_plagueDoctorMask", + "body": "body_special_aetherAmulet" + } + } + } } } diff --git a/tests/components/habitica/snapshots/test_binary_sensor.ambr b/tests/components/habitica/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..c18f8f551c9 --- /dev/null +++ b/tests/components/habitica/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 
1 +# name: test_binary_sensors[binary_sensor.test_user_pending_quest_invitation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_user_pending_quest_invitation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pending quest invitation', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_pending_quest', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.test_user_pending_quest_invitation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/inventory_quest_scroll_dustbunnies.png', + 'friendly_name': 'test-user Pending quest invitation', + }), + 'context': , + 'entity_id': 'binary_sensor.test_user_pending_quest_invitation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/habitica/snapshots/test_calendar.ambr b/tests/components/habitica/snapshots/test_calendar.ambr index 7325e125470..c2f9c8e83c9 100644 --- a/tests/components/habitica/snapshots/test_calendar.ambr +++ b/tests/components/habitica/snapshots/test_calendar.ambr @@ -577,6 +577,266 @@ }), ]) # --- +# name: test_api_events[calendar.test_user_daily_reminders] + list([ + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-21T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-21T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-22T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-22T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-23T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-23T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-24T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-24T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-25T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 
'dateTime': '2024-09-25T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-26T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-26T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-27T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-27T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-28T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-28T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-29T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-29T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-09-30T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-30T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-01T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-01T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-02T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-02T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-03T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-03T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-04T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, 
+ 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-04T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-05T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-05T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-06T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-06T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + dict({ + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end': dict({ + 'dateTime': '2024-10-07T21:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-10-07T20:00:00+02:00', + }), + 'summary': '5 Minuten ruhig durchatmen', + 'uid': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f_1491d640-6b21-4d0c-8940-0b7aa61c8836', + }), + ]) +# --- +# name: test_api_events[calendar.test_user_to_do_reminders] + list([ + dict({ + 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'end': dict({ + 'dateTime': '2024-09-22T03:00:00+02:00', + }), + 'location': None, + 'recurrence_id': None, + 'rrule': None, + 'start': dict({ + 'dateTime': '2024-09-22T02:00:00+02:00', + }), + 'summary': 'Rechnungen bezahlen', + 'uid': '2f6fcabc-f670-4ec3-ba65-817e8deea490_91c09432-10ac-4a49-bd20-823081ec29ed', + }), + ]) +# --- # name: test_api_events[calendar.test_user_to_do_s] list([ dict({ @@ -676,6 +936,110 @@ 'state': 'on', }) # --- +# name: test_calendar_platform[calendar.test_user_daily_reminders-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.test_user_daily_reminders', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily reminders', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_daily_reminders', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar_platform[calendar.test_user_daily_reminders-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': False, + 'description': 'Klicke um Deinen Terminplan festzulegen!', + 'end_time': '2024-09-21 21:00:00', + 'friendly_name': 'test-user Daily reminders', + 'location': '', + 'message': '5 Minuten ruhig durchatmen', + 'start_time': '2024-09-21 20:00:00', + }), + 'context': , + 'entity_id': 'calendar.test_user_daily_reminders', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_calendar_platform[calendar.test_user_to_do_reminders-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'calendar', + 'entity_category': None, + 'entity_id': 'calendar.test_user_to_do_reminders', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'To-do reminders', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_todo_reminders', + 'unit_of_measurement': None, + }) +# --- +# name: test_calendar_platform[calendar.test_user_to_do_reminders-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_day': False, + 'description': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'end_time': '2024-09-22 03:00:00', + 'friendly_name': 'test-user To-do reminders', + 'location': '', + 'message': 'Rechnungen bezahlen', + 'start_time': '2024-09-22 02:00:00', + }), + 'context': , + 'entity_id': 'calendar.test_user_to_do_reminders', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_calendar_platform[calendar.test_user_to_do_s-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/habitica/snapshots/test_diagnostics.ambr b/tests/components/habitica/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..bb9371a4c68 --- /dev/null +++ b/tests/components/habitica/snapshots/test_diagnostics.ambr @@ -0,0 +1,715 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'config_entry_data': dict({ + 'api_user': 'test-api-user', + 'url': 'https://habitica.com', + }), + 'habitica_data': dict({ + 'tasks': list([ + dict({ + '_id': 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'counterDown': 0, + 'counterUp': 0, + 'createdAt': '2024-07-07T17:51:53.268Z', + 'down': True, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a', + 'notes': '', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Gesundes Essen/Junkfood', + 'type': 'habit', + 'up': True, + 'updatedAt': '2024-07-07T17:51:53.268Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '1d147de6-5c02-4740-8e2f-71d3015a37f4', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'counterDown': 0, + 'counterUp': 0, + 'createdAt': '2024-07-07T17:51:53.266Z', + 'down': False, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + dict({ + 'date': 1720376763324, + 'scoredDown': 0, + 'scoredUp': 1, + 'value': 1, + }), + ]), + 'id': '1d147de6-5c02-4740-8e2f-71d3015a37f4', + 'notes': '', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Eine kurze Pause machen', + 'type': 'habit', + 'up': True, + 'updatedAt': '2024-07-12T09:58:45.438Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'counterDown': 0, + 'counterUp': 0, + 'createdAt': '2024-07-07T17:51:53.265Z', + 'down': True, + 'frequency': 'daily', + 'group': dict({ + 
'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4', + 'notes': 'Oder lösche es über die Bearbeitungs-Ansicht', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Klicke hier um dies als schlechte Gewohnheit zu markieren, die Du gerne loswerden möchtest', + 'type': 'habit', + 'up': False, + 'updatedAt': '2024-07-07T17:51:53.265Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': 'e97659e0-2c42-4599-a7bb-00282adc410d', + 'alias': 'create_a_task', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'counterDown': 0, + 'counterUp': 0, + 'createdAt': '2024-07-07T17:51:53.264Z', + 'down': False, + 'frequency': 'daily', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + dict({ + 'date': 1720376763140, + 'scoredDown': 0, + 'scoredUp': 1, + 'value': 1, + }), + ]), + 'id': 'e97659e0-2c42-4599-a7bb-00282adc410d', + 'notes': 'Eine Gewohnheit, eine Tagesaufgabe oder ein To-Do', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Füge eine Aufgabe zu Habitica hinzu', + 'type': 'habit', + 'up': True, + 'updatedAt': '2024-07-12T09:58:45.438Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': True, + 'createdAt': '2024-07-07T17:51:53.268Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + dict({ + 'completed': True, + 'date': 1720376766749, + 'isDue': True, + 'value': 1, + }), + dict({ + 'completed': False, + 'date': 1720545311292, + 'isDue': True, + 'value': 0.02529999999999999, + }), + dict({ + 'completed': False, + 'date': 1720564306719, + 'isDue': True, + 'value': -0.9740518837628547, + }), + dict({ + 'completed': True, + 'date': 1720691096907, + 'isDue': True, + 'value': 0.051222853419153, + }), + dict({ + 'completed': True, + 'date': 1720778325243, + 'isDue': True, + 'value': 1.0499115128458676, + }), + dict({ + 'completed': False, + 'date': 1724185196447, + 'isDue': True, + 'value': 0.07645736684721605, + }), + dict({ + 'completed': False, + 'date': 1724255707692, + 'isDue': True, + 'value': -0.921585289356988, + }), + dict({ + 'completed': False, + 'date': 1726846163640, + 'isDue': True, + 'value': -1.9454824860630637, + }), + dict({ + 'completed': False, + 'date': 1726953787542, + 'isDue': True, + 'value': -2.9966001649571803, + }), + dict({ + 'completed': False, + 'date': 1726956115608, + 'isDue': True, + 'value': -4.07641493832036, + }), + dict({ + 'completed': True, + 'date': 1726957460150, + 'isDue': True, + 'value': -2.9663035443712333, + }), + ]), + 'id': '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + 'isDue': True, + 'nextDue': list([ + 'Mon Sep 23 2024 00:00:00 GMT+0200', + 'Tue Sep 24 2024 00:00:00 GMT+0200', + 'Wed Sep 25 2024 00:00:00 GMT+0200', + 'Thu Sep 26 2024 00:00:00 GMT+0200', + 'Fri Sep 27 2024 00:00:00 GMT+0200', + 'Sat Sep 28 2024 00:00:00 GMT+0200', + ]), + 'notes': 'Klicke um Änderungen zu machen!', + 'priority': 1, + 'reminders': list([ + ]), + 'repeat': dict({ + 'f': True, + 'm': True, + 's': True, + 'su': True, + 't': True, + 'th': True, + 'w': True, + }), + 
'startDate': '2024-07-06T22:00:00.000Z', + 'streak': 1, + 'tags': list([ + ]), + 'text': 'Zahnseide benutzen', + 'type': 'daily', + 'updatedAt': '2024-09-21T22:24:20.154Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': -2.9663035443712333, + 'weeksOfMonth': list([ + ]), + 'yesterDaily': True, + }), + dict({ + '_id': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-07-07T17:51:53.266Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + dict({ + 'completed': True, + 'date': 1720374903074, + 'isDue': True, + 'value': 1, + }), + dict({ + 'completed': False, + 'date': 1720545311291, + 'isDue': True, + 'value': 0.02529999999999999, + }), + dict({ + 'completed': False, + 'date': 1720564306717, + 'isDue': True, + 'value': -0.9740518837628547, + }), + dict({ + 'completed': True, + 'date': 1720682459722, + 'isDue': True, + 'value': 0.051222853419153, + }), + dict({ + 'completed': True, + 'date': 1720778325246, + 'isDue': True, + 'value': 1.0499115128458676, + }), + dict({ + 'completed': True, + 'date': 1720778492219, + 'isDue': True, + 'value': 2.023365658844519, + }), + dict({ + 'completed': False, + 'date': 1724255707691, + 'isDue': True, + 'value': 1.0738942424964806, + }), + dict({ + 'completed': False, + 'date': 1726846163638, + 'isDue': True, + 'value': 0.10103816898038132, + }), + dict({ + 'completed': False, + 'date': 1726953787540, + 'isDue': True, + 'value': -0.8963760215867302, + }), + dict({ + 'completed': False, + 'date': 1726956115607, + 'isDue': True, + 'value': -1.919611992979862, + }), + ]), + 'id': 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + 'isDue': True, + 'nextDue': list([ + '2024-09-22T22:00:00.000Z', + '2024-09-23T22:00:00.000Z', + '2024-09-24T22:00:00.000Z', + '2024-09-25T22:00:00.000Z', + '2024-09-26T22:00:00.000Z', + '2024-09-27T22:00:00.000Z', + ]), + 'notes': 'Klicke um Deinen Terminplan festzulegen!', + 'priority': 1, + 'reminders': list([ + dict({ + 'id': '1491d640-6b21-4d0c-8940-0b7aa61c8836', + 'time': '2024-09-22T20:00:00.0000Z', + }), + ]), + 'repeat': dict({ + 'f': True, + 'm': True, + 's': True, + 'su': True, + 't': True, + 'th': True, + 'w': True, + }), + 'startDate': '2024-07-06T22:00:00.000Z', + 'streak': 0, + 'tags': list([ + ]), + 'text': '5 Minuten ruhig durchatmen', + 'type': 'daily', + 'updatedAt': '2024-09-21T22:51:41.756Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': -1.919611992979862, + 'weeksOfMonth': list([ + ]), + 'yesterDaily': True, + }), + dict({ + '_id': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-22T11:44:43.774Z', + 'daysOfMonth': list([ + ]), + 'everyX': 1, + 'frequency': 'weekly', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'history': list([ + ]), + 'id': '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + 'isDue': True, + 'nextDue': list([ + '2024-09-24T22:00:00.000Z', + '2024-09-27T22:00:00.000Z', + '2024-09-28T22:00:00.000Z', + '2024-10-01T22:00:00.000Z', + '2024-10-04T22:00:00.000Z', + '2024-10-08T22:00:00.000Z', + ]), + 'notes': 'Ein einstündiges Workout im Fitnessstudio absolvieren.', + 'priority': 2, + 
'reminders': list([ + ]), + 'repeat': dict({ + 'f': False, + 'm': False, + 's': True, + 'su': True, + 't': False, + 'th': False, + 'w': True, + }), + 'startDate': '2024-09-21T22:00:00.000Z', + 'streak': 0, + 'tags': list([ + '51076966-2970-4b40-b6ba-d58c6a756dd7', + ]), + 'text': 'Fitnessstudio besuchen', + 'type': 'daily', + 'updatedAt': '2024-09-22T11:44:43.774Z', + 'userId': '1343a9af-d891-4027-841a-956d105ca408', + 'value': 0, + 'weeksOfMonth': list([ + ]), + 'yesterDaily': True, + }), + dict({ + '_id': '88de7cd9-af2b-49ce-9afd-bf941d87336b', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-21T22:17:57.816Z', + 'date': '2024-09-27T22:17:00.000Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '88de7cd9-af2b-49ce-9afd-bf941d87336b', + 'notes': 'Das Buch, das du angefangen hast, bis zum Wochenende fertig lesen.', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Buch zu Ende lesen', + 'type': 'todo', + 'updatedAt': '2024-09-21T22:17:57.816Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '2f6fcabc-f670-4ec3-ba65-817e8deea490', + 'alias': 'pay_bills', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-21T22:17:19.513Z', + 'date': '2024-08-31T22:16:00.000Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '2f6fcabc-f670-4ec3-ba65-817e8deea490', + 'notes': 'Strom- und Internetrechnungen rechtzeitig überweisen.', + 'priority': 1, + 'reminders': list([ + dict({ + 'id': '91c09432-10ac-4a49-bd20-823081ec29ed', + 'time': '2024-09-22T02:00:00.0000Z', + }), + ]), + 'tags': list([ + ]), + 'text': 'Rechnungen bezahlen', + 'type': 'todo', + 'updatedAt': '2024-09-21T22:19:35.576Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '1aa3137e-ef72-4d1f-91ee-41933602f438', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-21T22:16:38.153Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '1aa3137e-ef72-4d1f-91ee-41933602f438', + 'notes': 'Rasen mähen und die Pflanzen gießen.', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Garten pflegen', + 'type': 'todo', + 'updatedAt': '2024-09-21T22:16:38.153Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': '86ea2475-d1b5-4020-bdcc-c188c7996afa', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'checklist': list([ + ]), + 'collapseChecklist': False, + 'completed': False, + 'createdAt': '2024-09-21T22:16:16.756Z', + 'date': '2024-09-21T22:00:00.000Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '86ea2475-d1b5-4020-bdcc-c188c7996afa', + 'notes': 'Den Ausflug für das kommende Wochenende organisieren.', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + '51076966-2970-4b40-b6ba-d58c6a756dd7', + ]), + 'text': 'Wochenendausflug planen', + 'type': 'todo', + 'updatedAt': '2024-09-21T22:16:16.756Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 0, + }), + dict({ + '_id': 
'5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b', + 'attribute': 'str', + 'byHabitica': False, + 'challenge': dict({ + }), + 'createdAt': '2024-07-07T17:51:53.266Z', + 'group': dict({ + 'assignedUsers': list([ + ]), + 'completedBy': dict({ + }), + }), + 'id': '5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b', + 'notes': 'Schaue fern, spiele ein Spiel, gönne Dir einen Leckerbissen, es liegt ganz bei Dir!', + 'priority': 1, + 'reminders': list([ + ]), + 'tags': list([ + ]), + 'text': 'Belohne Dich selbst', + 'type': 'reward', + 'updatedAt': '2024-07-07T17:51:53.266Z', + 'userId': '5f359083-ef78-4af0-985a-0b2c6d05797c', + 'value': 10, + }), + ]), + 'user': dict({ + 'api_user': 'test-api-user', + 'auth': dict({ + 'local': dict({ + 'username': 'test-username', + }), + }), + 'flags': dict({ + 'classSelected': True, + }), + 'id': 'a380546a-94be-4b8e-8a0b-23e0d5c03303', + 'items': dict({ + 'gear': dict({ + 'equipped': dict({ + 'armor': 'armor_warrior_5', + 'back': 'back_special_heroicAureole', + 'body': 'body_special_aetherAmulet', + 'eyewear': 'eyewear_armoire_plagueDoctorMask', + 'head': 'head_warrior_5', + 'headAccessory': 'headAccessory_armoire_gogglesOfBookbinding', + 'shield': 'shield_warrior_5', + 'weapon': 'weapon_warrior_5', + }), + }), + }), + 'lastCron': '2024-09-21T22:01:55.586Z', + 'needsCron': True, + 'party': dict({ + '_id': '94cd398c-2240-4320-956e-6d345cf2c0de', + 'quest': dict({ + 'RSVPNeeded': True, + 'key': 'dustbunnies', + }), + }), + 'preferences': dict({ + 'automaticAllocation': True, + 'disableClasses': False, + 'language': 'en', + 'sleep': False, + }), + 'profile': dict({ + 'name': 'test-user', + }), + 'stats': dict({ + 'buffs': dict({ + 'con': 26, + 'int': 26, + 'per': 26, + 'seafoam': False, + 'shinySeed': False, + 'snowball': False, + 'spookySparkles': False, + 'stealth': 0, + 'str': 26, + 'streaks': False, + }), + 'class': 'wizard', + 'con': 15, + 'exp': 737, + 'gp': 137.62587214609795, + 'hp': 0, + 'int': 15, + 'lvl': 38, + 'maxHealth': 50, + 'maxMP': 166, + 'mp': 50.89999999999998, + 'per': 15, + 'points': 5, + 'str': 15, + 'toNextLevel': 880, + }), + 'tasksOrder': dict({ + 'dailys': list([ + 'f21fa608-cfc6-4413-9fc7-0eb1b48ca43a', + 'bc1d1855-b2b8-4663-98ff-62e7b763dfc4', + 'e97659e0-2c42-4599-a7bb-00282adc410d', + '564b9ac9-c53d-4638-9e7f-1cd96fe19baa', + 'f2c85972-1a19-4426-bc6d-ce3337b9d99f', + '2c6d136c-a1c3-4bef-b7c4-fa980784b1e1', + ]), + 'habits': list([ + '1d147de6-5c02-4740-8e2f-71d3015a37f4', + ]), + 'rewards': list([ + '5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b', + ]), + 'todos': list([ + '88de7cd9-af2b-49ce-9afd-bf941d87336b', + '2f6fcabc-f670-4ec3-ba65-817e8deea490', + '1aa3137e-ef72-4d1f-91ee-41933602f438', + '86ea2475-d1b5-4020-bdcc-c188c7996afa', + ]), + }), + }), + }), + }) +# --- diff --git a/tests/components/habitica/snapshots/test_sensor.ambr b/tests/components/habitica/snapshots/test_sensor.ambr index ee75b424a93..28dd7eb8c43 100644 --- a/tests/components/habitica/snapshots/test_sensor.ambr +++ b/tests/components/habitica/snapshots/test_sensor.ambr @@ -59,6 +59,61 @@ 'state': 'wizard', }) # --- +# name: test_sensors[sensor.test_user_constitution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_constitution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Constitution', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_constitution', + 'unit_of_measurement': 'CON', + }) +# --- +# name: test_sensors[sensor.test_user_constitution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 42, + 'friendly_name': 'test-user Constitution', + 'level': 19, + 'unit_of_measurement': 'CON', + }), + 'context': , + 'entity_id': 'sensor.test_user_constitution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '102', + }) +# --- # name: test_sensors[sensor.test_user_dailies-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -350,6 +405,7 @@ # name: test_sensors[sensor.test_user_gems-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/shop_gem.png', 'friendly_name': 'test-user Gems', 'unit_of_measurement': 'gems', }), @@ -567,6 +623,61 @@ 'state': '0', }) # --- +# name: test_sensors[sensor.test_user_intelligence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_intelligence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Intelligence', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_intelligence', + 'unit_of_measurement': 'INT', + }) +# --- +# name: test_sensors[sensor.test_user_intelligence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 12, + 'friendly_name': 'test-user Intelligence', + 'level': 19, + 'unit_of_measurement': 'INT', + }), + 'context': , + 'entity_id': 'sensor.test_user_intelligence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '72', + }) +# --- # name: test_sensors[sensor.test_user_level-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -796,6 +907,7 @@ # name: test_sensors[sensor.test_user_mystic_hourglasses-state] StateSnapshot({ 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://habitica-assets.s3.amazonaws.com/mobileApp/images/notif_subscriber_reward.png', 'friendly_name': 'test-user Mystic hourglasses', 'unit_of_measurement': '⧖', }), @@ -854,6 +966,61 @@ 'state': '880', }) # --- +# name: test_sensors[sensor.test_user_perception-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_perception', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Perception', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_perception', + 'unit_of_measurement': 'PER', + }) +# --- +# name: test_sensors[sensor.test_user_perception-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 15, + 'friendly_name': 'test-user Perception', + 'level': 19, + 'unit_of_measurement': 'PER', + }), + 'context': , + 'entity_id': 'sensor.test_user_perception', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '75', + }) +# --- # name: test_sensors[sensor.test_user_rewards-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -915,6 +1082,61 @@ 'state': '1', }) # --- +# name: test_sensors[sensor.test_user_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_user_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Strength', + 'platform': 'habitica', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': '00000000-0000-0000-0000-000000000000_strength', + 'unit_of_measurement': 'STR', + }) +# --- +# name: test_sensors[sensor.test_user_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'allocated': 15, + 'buffs': 26, + 'class': 0, + 'equipment': 44, + 'friendly_name': 'test-user Strength', + 'level': 19, + 'unit_of_measurement': 'STR', + }), + 'context': , + 'entity_id': 'sensor.test_user_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '104', + }) +# --- # name: test_sensors[sensor.test_user_to_do_s-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/habitica/test_binary_sensor.py b/tests/components/habitica/test_binary_sensor.py new file mode 100644 index 00000000000..1710f8f217e --- /dev/null +++ b/tests/components/habitica/test_binary_sensor.py @@ -0,0 +1,84 @@ +"""Tests for the Habitica binary sensor platform.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.habitica.const import ASSETS_URL, DEFAULT_URL, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture(autouse=True) +def binary_sensor_only() -> Generator[None]: + """Enable only the binarty sensor platform.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [Platform.BINARY_SENSOR], + ): + yield + + +@pytest.mark.usefixtures("mock_habitica") +async def test_binary_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test 
setup of the Habitica binary sensor platform.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("fixture", "entity_state", "entity_picture"), + [ + ("user", STATE_ON, f"{ASSETS_URL}inventory_quest_scroll_dustbunnies.png"), + ("quest_invitation_off", STATE_OFF, None), + ], +) +async def test_pending_quest_states( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + fixture: str, + entity_state: str, + entity_picture: str | None, +) -> None: + """Test states of pending quest sensor.""" + + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/user", + json=load_json_object_fixture(f"{fixture}.json", DOMAIN), + ) + aioclient_mock.get(f"{DEFAULT_URL}/api/v3/tasks/user", json={"data": []}) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + assert ( + state := hass.states.get("binary_sensor.test_user_pending_quest_invitation") + ) + assert state.state == entity_state + assert state.attributes.get("entity_picture") == entity_picture diff --git a/tests/components/habitica/test_button.py b/tests/components/habitica/test_button.py index 6bd62f3a58e..979cefef923 100644 --- a/tests/components/habitica/test_button.py +++ b/tests/components/habitica/test_button.py @@ -63,6 +63,11 @@ async def test_buttons( f"{DEFAULT_URL}/api/v3/tasks/user", json=load_json_object_fixture("tasks.json", DOMAIN), ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -163,6 +168,11 @@ async def test_button_press( f"{DEFAULT_URL}/api/v3/tasks/user", json=load_json_object_fixture("tasks.json", DOMAIN), ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/habitica/test_calendar.py b/tests/components/habitica/test_calendar.py index 7c0a2686038..a6cdb1a9306 100644 --- a/tests/components/habitica/test_calendar.py +++ b/tests/components/habitica/test_calendar.py @@ -55,6 +55,8 @@ async def test_calendar_platform( [ "calendar.test_user_to_do_s", "calendar.test_user_dailies", + "calendar.test_user_daily_reminders", + "calendar.test_user_to_do_reminders", ], ) @pytest.mark.freeze_time("2024-09-20T22:00:00.000Z") diff --git a/tests/components/habitica/test_diagnostics.py b/tests/components/habitica/test_diagnostics.py new file mode 100644 index 00000000000..68b40fe254a --- /dev/null +++ b/tests/components/habitica/test_diagnostics.py @@ -0,0 +1,27 @@ +"""Tests for Habitica diagnostics.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import 
get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("mock_habitica") +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + == snapshot + ) diff --git a/tests/components/habitica/test_services.py b/tests/components/habitica/test_services.py new file mode 100644 index 00000000000..cd363eba3b5 --- /dev/null +++ b/tests/components/habitica/test_services.py @@ -0,0 +1,791 @@ +"""Test Habitica actions.""" + +from collections.abc import Generator +from http import HTTPStatus +from typing import Any +from unittest.mock import patch + +import pytest + +from homeassistant.components.habitica.const import ( + ATTR_CONFIG_ENTRY, + ATTR_DIRECTION, + ATTR_ITEM, + ATTR_SKILL, + ATTR_TARGET, + ATTR_TASK, + DEFAULT_URL, + DOMAIN, + SERVICE_ABORT_QUEST, + SERVICE_ACCEPT_QUEST, + SERVICE_CANCEL_QUEST, + SERVICE_CAST_SKILL, + SERVICE_LEAVE_QUEST, + SERVICE_REJECT_QUEST, + SERVICE_SCORE_HABIT, + SERVICE_SCORE_REWARD, + SERVICE_START_QUEST, + SERVICE_TRANSFORMATION, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from .conftest import load_json_object_fixture, mock_called_with + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + +REQUEST_EXCEPTION_MSG = "Unable to connect to Habitica, try again later" +RATE_LIMIT_EXCEPTION_MSG = "Rate limit exceeded, try again later" + + +@pytest.fixture(autouse=True) +def services_only() -> Generator[None]: + """Enable only services.""" + with patch( + "homeassistant.components.habitica.PLATFORMS", + [], + ): + yield + + +@pytest.fixture(autouse=True) +async def load_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + services_only: Generator, +) -> None: + """Load config entry.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.fixture(autouse=True) +def uuid_mock() -> Generator[None]: + """Mock the UUID.""" + with patch( + "uuid.uuid4", return_value="5d1935ff-80c8-443c-b2e9-733c66b44745" + ) as uuid_mock: + yield uuid_mock.return_value + + +@pytest.mark.parametrize( + ("service_data", "item", "target_id"), + [ + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "pickpocket", + }, + "pickPocket", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "backstab", + }, + "backStab", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "fireball", + }, + "fireball", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ( + { + ATTR_TASK: "pay_bills", + 
ATTR_SKILL: "smash", + }, + "smash", + "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ), + ], + ids=[ + "cast pickpocket", + "cast backstab", + "cast fireball", + "cast smash", + "select task by name", + "select task_by_alias", + ], +) +async def test_cast_skill( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + item: str, + target_id: str, +) -> None: + """Test Habitica cast skill action.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + ) + + +@pytest.mark.parametrize( + ( + "service_data", + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + { + ATTR_TASK: "task-not-found", + ATTR_SKILL: "smash", + }, + HTTPStatus.OK, + ServiceValidationError, + "Unable to complete action, could not find the task 'task-not-found'", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.NOT_FOUND, + ServiceValidationError, + "Unable to cast skill, your character does not have the skill or spell smash", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.UNAUTHORIZED, + ServiceValidationError, + "Unable to cast skill, not enough mana. 
Your character has 50 MP, but the skill costs 10 MP", + ), + ( + { + ATTR_TASK: "Rechnungen bezahlen", + ATTR_SKILL: "smash", + }, + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + REQUEST_EXCEPTION_MSG, + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_cast_skill_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica cast skill action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/smash?targetId=2f6fcabc-f670-4ec3-ba65-817e8deea490", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_habitica") +async def test_get_config_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, +) -> None: + """Test Habitica config entry exceptions.""" + + with pytest.raises( + ServiceValidationError, + match="The selected character is not configured in Home Assistant", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: "0000000000000000", + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + return_response=True, + blocking=True, + ) + + assert await hass.config_entries.async_unload(config_entry.entry_id) + + with pytest.raises( + ServiceValidationError, + match="The selected character is currently not loaded or disabled in Home Assistant", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_CAST_SKILL, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + ATTR_TASK: "2f6fcabc-f670-4ec3-ba65-817e8deea490", + ATTR_SKILL: "smash", + }, + return_response=True, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "command"), + [ + (SERVICE_ABORT_QUEST, "abort"), + (SERVICE_ACCEPT_QUEST, "accept"), + (SERVICE_CANCEL_QUEST, "cancel"), + (SERVICE_LEAVE_QUEST, "leave"), + (SERVICE_REJECT_QUEST, "reject"), + (SERVICE_START_QUEST, "force-start"), + ], + ids=[], +) +async def test_handle_quests( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service: str, + command: str, +) -> None: + """Test Habitica actions for quest handling.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + service, + service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/groups/party/quests/{command}", + ) + + +@pytest.mark.parametrize( + ( + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + HTTPStatus.NOT_FOUND, + ServiceValidationError, + "Unable to complete action, quest or group not found", + ), + ( + HTTPStatus.UNAUTHORIZED, + ServiceValidationError, + "Action not allowed, only quest leader or group leader can perform this action", + ), + ( + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + 
REQUEST_EXCEPTION_MSG, + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_handle_quests_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica handle quests action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/groups/party/quests/accept", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_ACCEPT_QUEST, + service_data={ATTR_CONFIG_ENTRY: config_entry.entry_id}, + return_response=True, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "service_data", "task_id"), + [ + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "down", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_REWARD, + { + ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + }, + "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "Füge eine Aufgabe zu Habitica hinzu", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ( + SERVICE_SCORE_HABIT, + { + ATTR_TASK: "create_a_task", + ATTR_DIRECTION: "up", + }, + "e97659e0-2c42-4599-a7bb-00282adc410d", + ), + ], + ids=[ + "habit score up", + "habit score down", + "buy reward", + "match task by name", + "match task by alias", + ], +) +async def test_score_task( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service: str, + service_data: dict[str, Any], + task_id: str, +) -> None: + """Test Habitica score task action.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + service, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/tasks/{task_id}/score/{service_data.get(ATTR_DIRECTION, "up")}", + ) + + +@pytest.mark.parametrize( + ( + "service_data", + "http_status", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + { + ATTR_TASK: "task does not exist", + ATTR_DIRECTION: "up", + }, + HTTPStatus.OK, + ServiceValidationError, + "Unable to complete action, could not find the task 'task does not exist'", + ), + ( + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "e97659e0-2c42-4599-a7bb-00282adc410d", + ATTR_DIRECTION: "up", + }, + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + REQUEST_EXCEPTION_MSG, + ), + ( + { + ATTR_TASK: "5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b", + ATTR_DIRECTION: "up", + }, + HTTPStatus.UNAUTHORIZED, + HomeAssistantError, + "Unable to buy reward, not enough gold. 
Your character has 137.63 GP, but the reward costs 10 GP", + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_score_task_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + http_status: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica score task action exceptions.""" + + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/e97659e0-2c42-4599-a7bb-00282adc410d/score/up", + json={"success": True, "data": {}}, + status=http_status, + ) + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/tasks/5e2ea1df-f6e6-4ba3-bccb-97c5ec63e99b/score/up", + json={"success": True, "data": {}}, + status=http_status, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_SCORE_HABIT, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service_data", "item", "target_id"), + [ + ( + { + ATTR_TARGET: "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ATTR_ITEM: "shiny_seed", + }, + "shinySeed", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ATTR_ITEM: "seafoam", + }, + "seafoam", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ATTR_ITEM: "snowball", + }, + "snowball", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "test-user", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "test-username", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "a380546a-94be-4b8e-8a0b-23e0d5c03303", + ), + ( + { + ATTR_TARGET: "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + ), + ( + { + ATTR_TARGET: "test-partymember-displayname", + ATTR_ITEM: "spooky_sparkles", + }, + "spookySparkles", + "ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + ), + ], + ids=[], +) +async def test_transformation( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + item: str, + target_id: str, +) -> None: + """Test Habitica user transformation item action.""" + mock_habitica.get( + f"{DEFAULT_URL}/api/v3/groups/party/members", + json=load_json_object_fixture("party_members.json", DOMAIN), + ) + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + json={"success": True, "data": {}}, + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_TRANSFORMATION, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) + + assert mock_called_with( + mock_habitica, + "post", + f"{DEFAULT_URL}/api/v3/user/class/cast/{item}?targetId={target_id}", + ) + + +@pytest.mark.parametrize( + ( + "service_data", + "http_status_members", + "http_status_cast", + "expected_exception", + "expected_exception_msg", + ), + [ + ( + { 
+ ATTR_TARGET: "user-not-found", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.OK, + HTTPStatus.OK, + ServiceValidationError, + "Unable to find target 'user-not-found' in your party", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.TOO_MANY_REQUESTS, + HTTPStatus.OK, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.NOT_FOUND, + HTTPStatus.OK, + ServiceValidationError, + "Unable to find target, you are currently not in a party. You can only target yourself", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.BAD_REQUEST, + HTTPStatus.OK, + HomeAssistantError, + "Unable to connect to Habitica, try again later", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.OK, + HTTPStatus.TOO_MANY_REQUESTS, + ServiceValidationError, + RATE_LIMIT_EXCEPTION_MSG, + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.OK, + HTTPStatus.UNAUTHORIZED, + ServiceValidationError, + "Unable to use spooky_sparkles, you don't own this item", + ), + ( + { + ATTR_TARGET: "test-partymember-username", + ATTR_ITEM: "spooky_sparkles", + }, + HTTPStatus.OK, + HTTPStatus.BAD_REQUEST, + HomeAssistantError, + "Unable to connect to Habitica, try again later", + ), + ], +) +@pytest.mark.usefixtures("mock_habitica") +async def test_transformation_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_habitica: AiohttpClientMocker, + service_data: dict[str, Any], + http_status_members: HTTPStatus, + http_status_cast: HTTPStatus, + expected_exception: Exception, + expected_exception_msg: str, +) -> None: + """Test Habitica transformation action exceptions.""" + mock_habitica.get( + f"{DEFAULT_URL}/api/v3/groups/party/members", + json=load_json_object_fixture("party_members.json", DOMAIN), + status=http_status_members, + ) + mock_habitica.post( + f"{DEFAULT_URL}/api/v3/user/class/cast/spookySparkles?targetId=ffce870c-3ff3-4fa4-bad1-87612e52b8e7", + json={"success": True, "data": {}}, + status=http_status_cast, + ) + + with pytest.raises(expected_exception, match=expected_exception_msg): + await hass.services.async_call( + DOMAIN, + SERVICE_TRANSFORMATION, + service_data={ + ATTR_CONFIG_ENTRY: config_entry.entry_id, + **service_data, + }, + return_response=True, + blocking=True, + ) diff --git a/tests/components/habitica/test_todo.py b/tests/components/habitica/test_todo.py index 88947caba2d..66f741eb39a 100644 --- a/tests/components/habitica/test_todo.py +++ b/tests/components/habitica/test_todo.py @@ -1,7 +1,6 @@ """Tests for Habitica todo platform.""" from collections.abc import Generator -from datetime import datetime from http import HTTPStatus import json import re @@ -39,7 +38,7 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -def switch_only() -> Generator[None]: +def todo_only() -> Generator[None]: """Enable only the todo platform.""" with patch( "homeassistant.components.habitica.PLATFORMS", @@ -628,12 +627,12 @@ async def test_move_todo_item_exception( @pytest.mark.parametrize( ("fixture", "calculated_due_date"), [ - ("duedate_fixture_1.json", (2024, 9, 23)), - ("duedate_fixture_2.json", (2024, 9, 24)), - ("duedate_fixture_3.json", (2024, 10, 23)), - ("duedate_fixture_4.json", (2024, 10, 23)), - ("duedate_fixture_5.json", (2024, 9, 28)), - 
("duedate_fixture_6.json", (2024, 10, 21)), + ("duedate_fixture_1.json", "2024-09-22"), + ("duedate_fixture_2.json", "2024-09-24"), + ("duedate_fixture_3.json", "2024-10-23"), + ("duedate_fixture_4.json", "2024-10-23"), + ("duedate_fixture_5.json", "2024-09-28"), + ("duedate_fixture_6.json", "2024-10-21"), ("duedate_fixture_7.json", None), ("duedate_fixture_8.json", None), ], @@ -672,6 +671,11 @@ async def test_next_due_date( f"{DEFAULT_URL}/api/v3/tasks/user", json=load_json_object_fixture(fixture, DOMAIN), ) + aioclient_mock.get( + f"{DEFAULT_URL}/api/v3/content", + params={"language": "en"}, + json=load_json_object_fixture("content.json", DOMAIN), + ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) @@ -688,8 +692,4 @@ async def test_next_due_date( return_response=True, ) - assert ( - result[dailies_entity]["items"][0].get("due") is None - if not calculated_due_date - else datetime(*calculated_due_date).date() - ) + assert result[dailies_entity]["items"][0].get("due") == calculated_due_date diff --git a/tests/components/hassio/test_binary_sensor.py b/tests/components/hassio/test_binary_sensor.py index c97be736248..9878dd67a21 100644 --- a/tests/components/hassio/test_binary_sensor.py +++ b/tests/components/hassio/test_binary_sensor.py @@ -25,6 +25,7 @@ def mock_all( store_info: AsyncMock, addon_changelog: AsyncMock, addon_stats: AsyncMock, + resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -140,19 +141,6 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_diagnostics.py b/tests/components/hassio/test_diagnostics.py index c238d9d2a15..c95cde67b8a 100644 --- a/tests/components/hassio/test_diagnostics.py +++ b/tests/components/hassio/test_diagnostics.py @@ -24,6 +24,7 @@ def mock_all( store_info: AsyncMock, addon_stats: AsyncMock, addon_changelog: AsyncMock, + resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -143,19 +144,6 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index e125e09ae7e..56f0dcb706c 100644 --- a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -208,7 +208,7 @@ async def test_api_ingress_panels( @pytest.mark.parametrize( ("api_call", "method", "payload"), [ - ("get_resolution_info", "GET", None), + ("get_network_info", "GET", None), ("update_diagnostics", "POST", True), ], ) diff --git a/tests/components/hassio/test_init.py b/tests/components/hassio/test_init.py index 23259543478..5c11370ae74 100644 --- a/tests/components/hassio/test_init.py +++ b/tests/components/hassio/test_init.py 
@@ -67,6 +67,7 @@ def mock_all( addon_info: AsyncMock, addon_stats: AsyncMock, addon_changelog: AsyncMock, + resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -204,19 +205,6 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_issues.py b/tests/components/hassio/test_issues.py index 1a3d3d83f95..b0d3920be09 100644 --- a/tests/components/hassio/test_issues.py +++ b/tests/components/hassio/test_issues.py @@ -4,11 +4,28 @@ from __future__ import annotations from collections.abc import Generator from datetime import timedelta -from http import HTTPStatus import os from typing import Any -from unittest.mock import ANY, patch +from unittest.mock import ANY, AsyncMock, patch +from uuid import UUID, uuid4 +from aiohasupervisor import ( + SupervisorBadRequestError, + SupervisorError, + SupervisorTimeoutError, +) +from aiohasupervisor.models import ( + Check, + CheckType, + ContextType, + Issue, + IssueType, + ResolutionInfo, + Suggestion, + SuggestionType, + UnhealthyReason, + UnsupportedReason, +) from freezegun.api import FrozenDateTimeFactory import pytest @@ -18,7 +35,6 @@ from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON -from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse from tests.typing import WebSocketGenerator @@ -36,49 +52,41 @@ def fixture_supervisor_environ() -> Generator[None]: def mock_resolution_info( - aioclient_mock: AiohttpClientMocker, - unsupported: list[str] | None = None, - unhealthy: list[str] | None = None, - issues: list[dict[str, str]] | None = None, - suggestion_result: str = "ok", + supervisor_client: AsyncMock, + unsupported: list[UnsupportedReason] | None = None, + unhealthy: list[UnhealthyReason] | None = None, + issues: list[Issue] | None = None, + suggestions_by_issue: dict[UUID, list[Suggestion]] | None = None, + suggestion_result: SupervisorError | None = None, ) -> None: """Mock resolution/info endpoint with unsupported/unhealthy reasons and/or issues.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": unsupported or [], - "unhealthy": unhealthy or [], - "suggestions": [], - "issues": [ - {k: v for k, v in issue.items() if k != "suggestions"} - for issue in issues - ] - if issues - else [], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, + supervisor_client.resolution.info.return_value = ResolutionInfo( + unsupported=unsupported or [], + unhealthy=unhealthy or [], + issues=issues or [], + suggestions=[ + suggestion + for issue_list in suggestions_by_issue.values() + for suggestion in issue_list + ] + if suggestions_by_issue + else [], + checks=[ + Check(enabled=True, slug=CheckType.SUPERVISOR_TRUST), + Check(enabled=True, slug=CheckType.FREE_SPACE), + ], ) - if issues: - suggestions_by_issue = { - issue["uuid"]: issue.get("suggestions", []) for issue in issues - } - for issue_uuid, suggestions in suggestions_by_issue.items(): - aioclient_mock.get( - 
f"http://127.0.0.1/resolution/issue/{issue_uuid}/suggestions", - json={"result": "ok", "data": {"suggestions": suggestions}}, - ) - for suggestion in suggestions: - aioclient_mock.post( - f"http://127.0.0.1/resolution/suggestion/{suggestion['uuid']}", - json={"result": suggestion_result}, - ) + if suggestions_by_issue: + + async def mock_suggestions_for_issue(uuid: UUID) -> list[Suggestion]: + """Mock of suggestions for issue api.""" + return suggestions_by_issue.get(uuid, []) + + supervisor_client.resolution.suggestions_for_issue.side_effect = ( + mock_suggestions_for_issue + ) + supervisor_client.resolution.apply_suggestion.side_effect = suggestion_result def assert_repair_in_list( @@ -134,11 +142,13 @@ def assert_issue_repair_in_list( @pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test issues added for unhealthy systems.""" - mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"]) + mock_resolution_info( + supervisor_client, unhealthy=[UnhealthyReason.DOCKER, UnhealthyReason.SETUP] + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -156,11 +166,14 @@ async def test_unhealthy_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test issues added for unsupported systems.""" - mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"]) + mock_resolution_info( + supervisor_client, + unsupported=[UnsupportedReason.CONTENT_TRUST, UnsupportedReason.OS], + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -180,11 +193,11 @@ async def test_unsupported_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test unhealthy issues added and removed from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -237,11 +250,11 @@ async def test_unhealthy_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test unsupported issues added and removed from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -294,22 +307,33 @@ async def test_unsupported_issues_add_remove( @pytest.mark.usefixtures("all_setup_requests") async def test_reset_issues_supervisor_restart( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """All issues reset on supervisor restart.""" mock_resolution_info( - aioclient_mock, - unsupported=["os"], - unhealthy=["docker"], + supervisor_client, + unsupported=[UnsupportedReason.OS], + unhealthy=[UnhealthyReason.DOCKER], issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - } + Issue( + type=IssueType.REBOOT_REQUIRED, + 
context=ContextType.SYSTEM, + reference=None, + uuid=(uuid := uuid4()), + ) ], + suggestions_by_issue={ + uuid: [ + Suggestion( + SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + auto=False, + ) + ] + }, ) result = await async_setup_component(hass, "hassio", {}) @@ -325,15 +349,14 @@ async def test_reset_issues_supervisor_restart( assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=uuid.hex, context="system", type_="reboot_required", - fixable=False, + fixable=True, reference=None, ) - aioclient_mock.clear_requests() - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) await client.send_json( { "id": 2, @@ -358,11 +381,15 @@ async def test_reset_issues_supervisor_restart( @pytest.mark.usefixtures("all_setup_requests") async def test_reasons_added_and_removed( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test an unsupported/unhealthy reasons being added and removed at same time.""" - mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"]) + mock_resolution_info( + supervisor_client, + unsupported=[UnsupportedReason.OS], + unhealthy=[UnhealthyReason.DOCKER], + ) result = await async_setup_component(hass, "hassio", {}) assert result @@ -376,9 +403,10 @@ async def test_reasons_added_and_removed( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="docker") assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") - aioclient_mock.clear_requests() mock_resolution_info( - aioclient_mock, unsupported=["content_trust"], unhealthy=["setup"] + supervisor_client, + unsupported=[UnsupportedReason.CONTENT_TRUST], + unhealthy=[UnhealthyReason.SETUP], ) await client.send_json( { @@ -408,12 +436,14 @@ async def test_reasons_added_and_removed( @pytest.mark.usefixtures("all_setup_requests") async def test_ignored_unsupported_skipped( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Unsupported reasons which have an identical unhealthy reason are ignored.""" mock_resolution_info( - aioclient_mock, unsupported=["privileged"], unhealthy=["privileged"] + supervisor_client, + unsupported=[UnsupportedReason.PRIVILEGED], + unhealthy=[UnhealthyReason.PRIVILEGED], ) result = await async_setup_component(hass, "hassio", {}) @@ -431,12 +461,14 @@ async def test_ignored_unsupported_skipped( @pytest.mark.usefixtures("all_setup_requests") async def test_new_unsupported_unhealthy_reason( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """New unsupported/unhealthy reasons result in a generic repair until next core update.""" mock_resolution_info( - aioclient_mock, unsupported=["fake_unsupported"], unhealthy=["fake_unhealthy"] + supervisor_client, + unsupported=["fake_unsupported"], + unhealthy=["fake_unhealthy"], ) result = await async_setup_component(hass, "hassio", {}) @@ -481,40 +513,43 @@ async def test_new_unsupported_unhealthy_reason( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test repairs added for supervisor issue.""" 
mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - }, - { - "uuid": "1235", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1236", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - } - ], - }, - { - "uuid": "1237", - "type": "should_not_be_repair", - "context": "os", - "reference": None, - }, + Issue( + type=IssueType.DETACHED_ADDON_MISSING, + context=ContextType.ADDON, + reference="test", + uuid=(uuid_issue1 := uuid4()), + ), + Issue( + type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(uuid_issue2 := uuid4()), + ), + Issue( + type="should_not_be_repair", + context=ContextType.OS, + reference=None, + uuid=uuid4(), + ), ], + suggestions_by_issue={ + uuid_issue2: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=uuid4(), + auto=False, + ) + ] + }, ) result = await async_setup_component(hass, "hassio", {}) @@ -528,15 +563,16 @@ async def test_supervisor_issues( assert len(msg["result"]["issues"]) == 2 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", - context="system", - type_="reboot_required", + uuid=uuid_issue1.hex, + context="addon", + type_="detached_addon_missing", fixable=False, - reference=None, + reference="test", + placeholders={"addon_url": "/hassio/addon/test", "addon": "test"}, ) assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1235", + uuid=uuid_issue2.hex, context="system", type_="multiple_data_disks", fixable=True, @@ -547,61 +583,41 @@ async def test_supervisor_issues( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_initial_failure( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, + resolution_info: AsyncMock, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, ) -> None: """Test issues manager retries after initial update failure.""" - responses = [ - AiohttpClientMockResponse( - method="get", - url="http://127.0.0.1/resolution/info", - status=HTTPStatus.BAD_REQUEST, - json={ - "result": "error", - "message": "System is not ready with state: setup", - }, - ), - AiohttpClientMockResponse( - method="get", - url="http://127.0.0.1/resolution/info", - status=HTTPStatus.OK, - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - }, - ], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, - ), + mock_resolution_info( + supervisor_client, + unsupported=[], + unhealthy=[], + issues=[ + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(uuid := uuid4()), + ) + ], + suggestions_by_issue={ + uuid: [ + Suggestion( + SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + auto=False, + ) + ] + }, + ) + resolution_info.side_effect = [ + SupervisorBadRequestError("System is not ready with state: setup"), + resolution_info.return_value, ] - async def mock_responses(*args): - nonlocal responses - return responses.pop(0) - - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - side_effect=mock_responses, - ) - aioclient_mock.get( 
- "http://127.0.0.1/resolution/issue/1234/suggestions", - json={"result": "ok", "data": {"suggestions": []}}, - ) - with patch("homeassistant.components.hassio.issues.REQUEST_REFRESH_DELAY", new=0.1): result = await async_setup_component(hass, "hassio", {}) await hass.async_block_till_done() @@ -625,11 +641,11 @@ async def test_supervisor_issues_initial_failure( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_add_remove( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test supervisor issues added and removed from dispatches.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -643,10 +659,18 @@ async def test_supervisor_issues_add_remove( "data": { "event": "issue_changed", "data": { - "uuid": "1234", + "uuid": (issue_uuid := uuid4().hex), "type": "reboot_required", "context": "system", "reference": None, + "suggestions": [ + { + "uuid": uuid4().hex, + "type": "execute_reboot", + "context": "system", + "reference": None, + } + ], }, }, } @@ -661,10 +685,10 @@ async def test_supervisor_issues_add_remove( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=issue_uuid, context="system", type_="reboot_required", - fixable=False, + fixable=True, reference=None, ) @@ -673,20 +697,12 @@ async def test_supervisor_issues_add_remove( "id": 3, "type": "supervisor/event", "data": { - "event": "issue_changed", + "event": "issue_removed", "data": { - "uuid": "1234", + "uuid": issue_uuid, "type": "reboot_required", "context": "system", "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": None, - } - ], }, }, } @@ -698,75 +714,29 @@ async def test_supervisor_issues_add_remove( await client.send_json({"id": 4, "type": "repairs/list_issues"}) msg = await client.receive_json() assert msg["success"] - assert len(msg["result"]["issues"]) == 1 - assert_issue_repair_in_list( - msg["result"]["issues"], - uuid="1234", - context="system", - type_="reboot_required", - fixable=True, - reference=None, - ) - - await client.send_json( - { - "id": 5, - "type": "supervisor/event", - "data": { - "event": "issue_removed", - "data": { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - }, - }, - } - ) - msg = await client.receive_json() - assert msg["success"] - await hass.async_block_till_done() - - await client.send_json({"id": 6, "type": "repairs/list_issues"}) - msg = await client.receive_json() - assert msg["success"] assert msg["result"] == {"issues": []} @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_suggestions_fail( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, + resolution_suggestions_for_issue: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test failing to get suggestions for issue skips it.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - } - ], - "checks": [ - {"enabled": True, "slug": "supervisor_trust"}, - {"enabled": True, "slug": "free_space"}, - ], - }, - }, - ) - aioclient_mock.get( - 
"http://127.0.0.1/resolution/issue/1234/suggestions", - exc=TimeoutError(), + mock_resolution_info( + supervisor_client, + issues=[ + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + ) + ], ) + resolution_suggestions_for_issue.side_effect = SupervisorTimeoutError result = await async_setup_component(hass, "hassio", {}) assert result @@ -782,11 +752,11 @@ async def test_supervisor_issues_suggestions_fail( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_remove_missing_issue_without_error( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test HA skips message to remove issue that it didn't know about (sync issue).""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -816,16 +786,12 @@ async def test_supervisor_remove_missing_issue_without_error( @pytest.mark.usefixtures("all_setup_requests") async def test_system_is_not_ready( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + resolution_info: AsyncMock, caplog: pytest.LogCaptureFixture, ) -> None: """Ensure hassio starts despite error.""" - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "", - "message": "System is not ready with state: setup", - }, + resolution_info.side_effect = SupervisorBadRequestError( + "System is not ready with state: setup" ) assert await async_setup_component(hass, "hassio", {}) @@ -838,11 +804,11 @@ async def test_system_is_not_ready( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_detached_addon_missing( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_ws_client: WebSocketGenerator, ) -> None: """Test supervisor issue for detached addon due to missing repository.""" - mock_resolution_info(aioclient_mock) + mock_resolution_info(supervisor_client) result = await async_setup_component(hass, "hassio", {}) assert result @@ -856,7 +822,7 @@ async def test_supervisor_issues_detached_addon_missing( "data": { "event": "issue_changed", "data": { - "uuid": "1234", + "uuid": (issue_uuid := uuid4().hex), "type": "detached_addon_missing", "context": "addon", "reference": "test", @@ -874,7 +840,7 @@ async def test_supervisor_issues_detached_addon_missing( assert len(msg["result"]["issues"]) == 1 assert_issue_repair_in_list( msg["result"]["issues"], - uuid="1234", + uuid=issue_uuid, context="addon", type_="detached_addon_missing", fixable=False, diff --git a/tests/components/hassio/test_repairs.py b/tests/components/hassio/test_repairs.py index f3ccb5948f1..f8cac4e1a97 100644 --- a/tests/components/hassio/test_repairs.py +++ b/tests/components/hassio/test_repairs.py @@ -3,8 +3,17 @@ from collections.abc import Generator from http import HTTPStatus import os -from unittest.mock import patch +from unittest.mock import AsyncMock, patch +from uuid import uuid4 +from aiohasupervisor import SupervisorError +from aiohasupervisor.models import ( + ContextType, + Issue, + IssueType, + Suggestion, + SuggestionType, +) import pytest from homeassistant.core import HomeAssistant @@ -14,7 +23,6 @@ from homeassistant.setup import async_setup_component from .test_init import MOCK_ENVIRON from .test_issues import mock_resolution_info -from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -28,34 
+36,39 @@ def fixture_supervisor_environ() -> Generator[None]: @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1235", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - } - ], - }, + Issue( + type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(sugg_uuid := uuid4()), + auto=False, + ) + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -95,52 +108,53 @@ async def test_supervisor_issue_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue with multiple suggestions.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": "test", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": "test", - }, - { - "uuid": "1236", - "type": "test_type", - "context": "system", - "reference": "test", - }, - ], - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference="test", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference="test", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type="test_type", + context=ContextType.SYSTEM, + reference="test", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -189,52 +203,53 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert 
aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1236" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confirmation( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue with multiple suggestions and choice requires confirmation.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": None, - }, - { - "uuid": "1236", - "type": "test_type", - "context": "system", - "reference": None, - }, - ], - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + Suggestion( + type="test_type", + context=ContextType.SYSTEM, + reference=None, + uuid=uuid4(), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -302,46 +317,46 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confir "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_skip_confirmation( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test confirmation skipped for fix flow for supervisor issue with one suggestion.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "reboot_required", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reboot", - "context": "system", - "reference": None, - } - ], - }, + Issue( + type=IssueType.REBOOT_REQUIRED, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBOOT, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = 
issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -381,53 +396,54 @@ async def test_supervisor_issue_repair_flow_skip_confirmation( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow_error( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test repair flow fails when repair fails to apply.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "mount_failed", - "context": "mount", - "reference": "backup_share", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reload", - "context": "mount", - "reference": "backup_share", - }, - { - "uuid": "1236", - "type": "execute_remove", - "context": "mount", - "reference": "backup_share", - }, - ], - }, + Issue( + type=IssueType.MOUNT_FAILED, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(issue_uuid := uuid4()), + ), ], - suggestion_result=False, + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_RELOAD, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + ] + }, + suggestion_result=SupervisorError("boom"), ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -459,46 +475,52 @@ async def test_mount_failed_repair_flow_error( "description_placeholders": None, } - assert issue_registry.async_get_issue(domain="hassio", issue_id="1234") + assert issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) @pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test repair flow for mount_failed issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "mount_failed", - "context": "mount", - "reference": "backup_share", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_reload", - "context": "mount", - "reference": "backup_share", - }, - { - "uuid": "1236", - "type": "execute_remove", - "context": "mount", - "reference": "backup_share", - }, - ], - }, + Issue( + type=IssueType.MOUNT_FAILED, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_RELOAD, + context=ContextType.MOUNT, + reference="backup_share", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + 
Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.MOUNT, + reference="backup_share", + uuid=uuid4(), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -551,13 +573,8 @@ async def test_mount_failed_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.parametrize( @@ -566,62 +583,69 @@ async def test_mount_failed_repair_flow( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_docker_config_repair_flow( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "docker_config", - "context": "system", - "reference": None, - "suggestions": [ - { - "uuid": "1235", - "type": "execute_rebuild", - "context": "system", - "reference": None, - } - ], - }, - { - "uuid": "1236", - "type": "docker_config", - "context": "core", - "reference": None, - "suggestions": [ - { - "uuid": "1237", - "type": "execute_rebuild", - "context": "core", - "reference": None, - } - ], - }, - { - "uuid": "1238", - "type": "docker_config", - "context": "addon", - "reference": "test", - "suggestions": [ - { - "uuid": "1239", - "type": "execute_rebuild", - "context": "addon", - "reference": "test", - } - ], - }, + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.SYSTEM, + reference=None, + uuid=(issue1_uuid := uuid4()), + ), + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.CORE, + reference=None, + uuid=(issue2_uuid := uuid4()), + ), + Issue( + type=IssueType.DOCKER_CONFIG, + context=ContextType.ADDON, + reference="test", + uuid=(issue3_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue1_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.SYSTEM, + reference=None, + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ], + issue2_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.CORE, + reference=None, + uuid=uuid4(), + auto=False, + ), + ], + issue3_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REBUILD, + context=ContextType.ADDON, + reference="test", + uuid=uuid4(), + auto=False, + ), + ], + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue1_uuid.hex + ) assert repair_issue client = await hass_client() @@ -661,52 +685,53 @@ async def test_supervisor_issue_docker_config_repair_flow( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - 
str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue1_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_multiple_data_disks( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for multiple data disks supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "multiple_data_disks", - "context": "system", - "reference": "/dev/sda1", - "suggestions": [ - { - "uuid": "1235", - "type": "rename_data_disk", - "context": "system", - "reference": "/dev/sda1", - }, - { - "uuid": "1236", - "type": "adopt_data_disk", - "context": "system", - "reference": "/dev/sda1", - }, - ], - }, + Issue( + type=IssueType.MULTIPLE_DATA_DISKS, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.RENAME_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=uuid4(), + auto=False, + ), + Suggestion( + type=SuggestionType.ADOPT_DATA_DISK, + context=ContextType.SYSTEM, + reference="/dev/sda1", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -774,13 +799,8 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1236" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.parametrize( @@ -789,34 +809,39 @@ async def test_supervisor_issue_repair_flow_multiple_data_disks( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_detached_addon_removed( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "detached_addon_removed", - "context": "addon", - "reference": "test", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_remove", - "context": "addon", - "reference": "test", - } - ], - }, + Issue( + type=IssueType.DETACHED_ADDON_REMOVED, + context=ContextType.ADDON, + reference="test", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type=SuggestionType.EXECUTE_REMOVE, + context=ContextType.ADDON, + reference="test", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + 
repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -861,13 +886,8 @@ async def test_supervisor_issue_detached_addon_removed( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) @pytest.mark.parametrize( @@ -876,40 +896,46 @@ async def test_supervisor_issue_detached_addon_removed( @pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_addon_boot_fail( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, + supervisor_client: AsyncMock, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( - aioclient_mock, + supervisor_client, issues=[ - { - "uuid": "1234", - "type": "boot_fail", - "context": "addon", - "reference": "test", - "suggestions": [ - { - "uuid": "1235", - "type": "execute_start", - "context": "addon", - "reference": "test", - }, - { - "uuid": "1236", - "type": "disable_boot", - "context": "addon", - "reference": "test", - }, - ], - }, + Issue( + type="boot_fail", + context=ContextType.ADDON, + reference="test", + uuid=(issue_uuid := uuid4()), + ), ], + suggestions_by_issue={ + issue_uuid: [ + Suggestion( + type="execute_start", + context=ContextType.ADDON, + reference="test", + uuid=(sugg_uuid := uuid4()), + auto=False, + ), + Suggestion( + type="disable_boot", + context=ContextType.ADDON, + reference="test", + uuid=uuid4(), + auto=False, + ), + ] + }, ) assert await async_setup_component(hass, "hassio", {}) - repair_issue = issue_registry.async_get_issue(domain="hassio", issue_id="1234") + repair_issue = issue_registry.async_get_issue( + domain="hassio", issue_id=issue_uuid.hex + ) assert repair_issue client = await hass_client() @@ -962,10 +988,5 @@ async def test_supervisor_issue_addon_boot_fail( "description_placeholders": None, } - assert not issue_registry.async_get_issue(domain="hassio", issue_id="1234") - - assert aioclient_mock.mock_calls[-1][0] == "post" - assert ( - str(aioclient_mock.mock_calls[-1][1]) - == "http://127.0.0.1/resolution/suggestion/1235" - ) + assert not issue_registry.async_get_issue(domain="hassio", issue_id=issue_uuid.hex) + supervisor_client.resolution.apply_suggestion.assert_called_once_with(sugg_uuid) diff --git a/tests/components/hassio/test_sensor.py b/tests/components/hassio/test_sensor.py index 1b58534d52f..7160a2cbf16 100644 --- a/tests/components/hassio/test_sensor.py +++ b/tests/components/hassio/test_sensor.py @@ -33,6 +33,7 @@ def mock_all( store_info: AsyncMock, addon_stats: AsyncMock, addon_changelog: AsyncMock, + resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" _install_default_mocks(aioclient_mock) @@ -146,19 +147,6 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker): aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( 
"http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_update.py b/tests/components/hassio/test_update.py index 0d15eac48c5..c1775d6e0b4 100644 --- a/tests/components/hassio/test_update.py +++ b/tests/components/hassio/test_update.py @@ -29,6 +29,7 @@ def mock_all( store_info: AsyncMock, addon_stats: AsyncMock, addon_changelog: AsyncMock, + resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -149,19 +150,6 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/hassio/test_websocket_api.py b/tests/components/hassio/test_websocket_api.py index 1023baa89df..21e6b03678b 100644 --- a/tests/components/hassio/test_websocket_api.py +++ b/tests/components/hassio/test_websocket_api.py @@ -26,7 +26,9 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) def mock_all( - aioclient_mock: AiohttpClientMocker, supervisor_is_connected: AsyncMock + aioclient_mock: AiohttpClientMocker, + supervisor_is_connected: AsyncMock, + resolution_info: AsyncMock, ) -> None: """Mock all setup requests.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) @@ -67,19 +69,6 @@ def mock_all( aioclient_mock.get( "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}} ) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) @pytest.mark.usefixtures("hassio_env") diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index f86c04b3e5b..d60203676e6 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -437,10 +437,10 @@ async def test_measure(recorder_mock: Recorder, hass: HomeAssistant) -> None: await async_update_entity(hass, f"sensor.sensor{i}") await hass.async_block_till_done() - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" + assert hass.states.get("sensor.sensor1").state == "0.5" + assert 0.499 < float(hass.states.get("sensor.sensor2").state) < 0.501 assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "83.3" + assert hass.states.get("sensor.sensor4").state == "50.0" async def test_async_on_entire_period( @@ -459,7 +459,11 @@ async def test_async_on_entire_period( def _fake_states(*args, **kwargs): return { "binary_sensor.test_on_id": [ - ha.State("binary_sensor.test_on_id", "on", last_changed=start_time), + ha.State( + "binary_sensor.test_on_id", + "on", + last_changed=(start_time - timedelta(seconds=10)), + ), ha.State("binary_sensor.test_on_id", "on", last_changed=t0), ha.State("binary_sensor.test_on_id", "on", last_changed=t1), ha.State("binary_sensor.test_on_id", "on", last_changed=t2), @@ -1254,10 +1258,10 @@ async def test_measure_sliding_window( await async_update_entity(hass, f"sensor.sensor{i}") await hass.async_block_till_done() - assert 
hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" - assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "41.7" + assert hass.states.get("sensor.sensor1").state == "0.0" + assert float(hass.states.get("sensor.sensor2").state) == 0 + assert hass.states.get("sensor.sensor3").state == "0" + assert hass.states.get("sensor.sensor4").state == "0.0" past_next_update = start_time + timedelta(minutes=30) with ( @@ -1268,12 +1272,12 @@ async def test_measure_sliding_window( freeze_time(past_next_update), ): async_fire_time_changed(hass, past_next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" - assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "41.7" + assert hass.states.get("sensor.sensor1").state == "0.17" + assert 0.166 < float(hass.states.get("sensor.sensor2").state) < 0.167 + assert hass.states.get("sensor.sensor3").state == "1" + assert hass.states.get("sensor.sensor4").state == "8.3" async def test_measure_from_end_going_backwards( @@ -1355,10 +1359,10 @@ async def test_measure_from_end_going_backwards( await async_update_entity(hass, f"sensor.sensor{i}") await hass.async_block_till_done() - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" - assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "83.3" + assert hass.states.get("sensor.sensor1").state == "0.0" + assert float(hass.states.get("sensor.sensor2").state) == 0 + assert hass.states.get("sensor.sensor3").state == "0" + assert hass.states.get("sensor.sensor4").state == "0.0" past_next_update = start_time + timedelta(minutes=30) with ( @@ -1369,12 +1373,12 @@ async def test_measure_from_end_going_backwards( freeze_time(past_next_update), ): async_fire_time_changed(hass, past_next_update) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" - assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "83.3" + assert hass.states.get("sensor.sensor1").state == "0.17" + assert 0.166 < float(hass.states.get("sensor.sensor2").state) < 0.167 + assert hass.states.get("sensor.sensor3").state == "1" + assert 16.6 <= float(hass.states.get("sensor.sensor4").state) <= 16.7 async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None: @@ -1403,7 +1407,7 @@ async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None "homeassistant.components.recorder.history.state_changes_during_period", _fake_states, ), - freeze_time(start_time), + freeze_time(start_time + timedelta(minutes=60)), ): await async_setup_component( hass, @@ -1455,10 +1459,10 @@ async def test_measure_cet(recorder_mock: Recorder, hass: HomeAssistant) -> None await async_update_entity(hass, f"sensor.sensor{i}") await hass.async_block_till_done() - assert hass.states.get("sensor.sensor1").state == "0.83" - assert hass.states.get("sensor.sensor2").state == "0.833333333333333" + assert hass.states.get("sensor.sensor1").state == "0.5" + 
assert 0.499 < float(hass.states.get("sensor.sensor2").state) < 0.501 assert hass.states.get("sensor.sensor3").state == "2" - assert hass.states.get("sensor.sensor4").state == "83.3" + assert hass.states.get("sensor.sensor4").state == "50.0" @pytest.mark.parametrize("time_zone", ["Europe/Berlin", "America/Chicago", "US/Hawaii"]) @@ -1537,18 +1541,19 @@ async def test_end_time_with_microseconds_zeroed( await hass.async_block_till_done() await async_update_entity(hass, "sensor.heatpump_compressor_today") await hass.async_block_till_done() - assert hass.states.get("sensor.heatpump_compressor_today").state == "1.83" + assert hass.states.get("sensor.heatpump_compressor_today").state == "0.5" assert ( - hass.states.get("sensor.heatpump_compressor_today2").state - == "1.83333333333333" + 0.499 + < float(hass.states.get("sensor.heatpump_compressor_today2").state) + < 0.501 ) async_fire_time_changed(hass, time_200) - await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.heatpump_compressor_today").state == "1.83" + assert hass.states.get("sensor.heatpump_compressor_today").state == "0.5" assert ( - hass.states.get("sensor.heatpump_compressor_today2").state - == "1.83333333333333" + 0.499 + < float(hass.states.get("sensor.heatpump_compressor_today2").state) + < 0.501 ) hass.states.async_set("binary_sensor.heatpump_compressor_state", "off") await hass.async_block_till_done() @@ -1557,10 +1562,11 @@ async def test_end_time_with_microseconds_zeroed( with freeze_time(time_400): async_fire_time_changed(hass, time_400) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.heatpump_compressor_today").state == "1.83" + assert hass.states.get("sensor.heatpump_compressor_today").state == "0.5" assert ( - hass.states.get("sensor.heatpump_compressor_today2").state - == "1.83333333333333" + 0.499 + < float(hass.states.get("sensor.heatpump_compressor_today2").state) + < 0.501 ) hass.states.async_set("binary_sensor.heatpump_compressor_state", "on") await async_wait_recording_done(hass) @@ -1568,10 +1574,11 @@ async def test_end_time_with_microseconds_zeroed( with freeze_time(time_600): async_fire_time_changed(hass, time_600) await hass.async_block_till_done(wait_background_tasks=True) - assert hass.states.get("sensor.heatpump_compressor_today").state == "3.83" + assert hass.states.get("sensor.heatpump_compressor_today").state == "2.5" assert ( - hass.states.get("sensor.heatpump_compressor_today2").state - == "3.83333333333333" + 2.499 + < float(hass.states.get("sensor.heatpump_compressor_today2").state) + < 2.501 ) rolled_to_next_day = start_of_today + timedelta(days=1) diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index 4e790074700..d2eff43e071 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -182,6 +182,7 @@ def mock_problematic_appliance(request: pytest.FixtureRequest) -> Mock: mock.get_programs_active.side_effect = HomeConnectError mock.get_programs_available.side_effect = HomeConnectError mock.start_program.side_effect = HomeConnectError + mock.select_program.side_effect = HomeConnectError mock.stop_program.side_effect = HomeConnectError mock.get_status.side_effect = HomeConnectError mock.get_settings.side_effect = HomeConnectError diff --git a/tests/components/home_connect/snapshots/test_diagnostics.ambr b/tests/components/home_connect/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..99f10fe2847 --- 
/dev/null +++ b/tests/components/home_connect/snapshots/test_diagnostics.ambr @@ -0,0 +1,468 @@ +# serializer version: 1 +# name: test_async_get_config_entry_diagnostics + dict({ + 'BOSCH-000000000-000000000000': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000001': dict({ + 'programs': list([ + 'LaundryCare.WasherDryer.Program.Mix', + 'LaundryCare.Washer.Option.Temperature', + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000002': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000003': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000004': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Setting.AmbientLightBrightness': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'BSH.Common.Setting.AmbientLightColor': dict({ + 'type': 'BSH.Common.EnumType.AmbientLightColor', + 'value': 'BSH.Common.EnumType.AmbientLightColor.Color43', + }), + 'BSH.Common.Setting.AmbientLightCustomColor': dict({ + 'type': 'String', + 'value': '#4a88f8', + }), + 'BSH.Common.Setting.AmbientLightEnabled': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'BSH.Common.Setting.ColorTemperature': dict({ + 'type': 'BSH.Common.EnumType.ColorTemperature', + 'value': 'Cooking.Hood.EnumType.ColorTemperature.warmToNeutral', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 
'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Cooking.Common.Setting.Lighting': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'Cooking.Common.Setting.LightingBrightness': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'Cooking.Hood.Setting.ColorTemperaturePercent': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000005': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS000000-D00000000006': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS01OVN1-43E0065FE245': dict({ + 'programs': list([ + 'Cooking.Oven.Program.HeatingMode.HotAir', + 'Cooking.Oven.Program.HeatingMode.TopBottomHeating', + 'Cooking.Oven.Program.HeatingMode.PizzaSetting', + ]), + 'status': dict({ + 'BSH.Common.Root.ActiveProgram': dict({ + 'value': 'Cooking.Oven.Program.HeatingMode.HotAir', + }), + 'BSH.Common.Setting.PowerState': dict({ + 'type': 'BSH.Common.EnumType.PowerState', + 'value': 'BSH.Common.EnumType.PowerState.On', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS04DYR1-831694AE3C5A': dict({ + 'programs': list([ + 'LaundryCare.Dryer.Program.Cotton', + 'LaundryCare.Dryer.Program.Synthetic', + 'LaundryCare.Dryer.Program.Mix', + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'BOSCH-HCS06COM1-D70390681C2C': dict({ + 'programs': list([ + 'ConsumerProducts.CoffeeMaker.Program.Beverage.Espresso', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.EspressoMacchiato', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.Coffee', + 
'ConsumerProducts.CoffeeMaker.Program.Beverage.Cappuccino', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.LatteMacchiato', + 'ConsumerProducts.CoffeeMaker.Program.Beverage.CaffeLatte', + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'SIEMENS-HCS02DWH1-6BE58C26DCC1': dict({ + 'programs': list([ + 'Dishcare.Dishwasher.Program.Auto1', + 'Dishcare.Dishwasher.Program.Auto2', + 'Dishcare.Dishwasher.Program.Auto3', + 'Dishcare.Dishwasher.Program.Eco50', + 'Dishcare.Dishwasher.Program.Quick45', + ]), + 'status': dict({ + 'BSH.Common.Setting.AmbientLightBrightness': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'BSH.Common.Setting.AmbientLightColor': dict({ + 'type': 'BSH.Common.EnumType.AmbientLightColor', + 'value': 'BSH.Common.EnumType.AmbientLightColor.Color43', + }), + 'BSH.Common.Setting.AmbientLightCustomColor': dict({ + 'type': 'String', + 'value': '#4a88f8', + }), + 'BSH.Common.Setting.AmbientLightEnabled': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'BSH.Common.Setting.ChildLock': dict({ + 'type': 'Boolean', + 'value': False, + }), + 'BSH.Common.Setting.PowerState': dict({ + 'type': 'BSH.Common.EnumType.PowerState', + 'value': 'BSH.Common.EnumType.PowerState.On', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'SIEMENS-HCS03WCH1-7BC6383CF794': dict({ + 'programs': list([ + 'LaundryCare.Washer.Program.Cotton', + 'LaundryCare.Washer.Program.EasyCare', + 'LaundryCare.Washer.Program.Mix', + 'LaundryCare.Washer.Program.DelicatesSilk', + 'LaundryCare.Washer.Program.Wool', + ]), + 'status': dict({ + 'BSH.Common.Root.ActiveProgram': dict({ + 'value': 'BSH.Common.Root.ActiveProgram', + }), + 'BSH.Common.Setting.ChildLock': dict({ + 'type': 'Boolean', + 'value': False, + }), + 'BSH.Common.Setting.PowerState': dict({ + 'type': 'BSH.Common.EnumType.PowerState', + 'value': 'BSH.Common.EnumType.PowerState.On', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }), + 'SIEMENS-HCS05FRF1-304F4F9E541D': dict({ + 'programs': list([ + ]), + 'status': dict({ + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 
'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Setting.Dispenser.Enabled': dict({ + 'constraints': dict({ + 'access': 'readWrite', + }), + 'type': 'Boolean', + 'value': False, + }), + 'Refrigeration.Common.Setting.Light.External.Brightness': dict({ + 'constraints': dict({ + 'access': 'readWrite', + 'max': 100, + 'min': 0, + }), + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'Refrigeration.Common.Setting.Light.External.Power': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + 'Refrigeration.FridgeFreezer.Setting.SuperModeFreezer': dict({ + 'constraints': dict({ + 'access': 'readWrite', + }), + 'type': 'Boolean', + 'value': False, + }), + 'Refrigeration.FridgeFreezer.Setting.SuperModeRefrigerator': dict({ + 'constraints': dict({ + 'access': 'readWrite', + }), + 'type': 'Boolean', + 'value': False, + }), + }), + }), + }) +# --- +# name: test_async_get_device_diagnostics + dict({ + 'programs': list([ + 'Dishcare.Dishwasher.Program.Auto1', + 'Dishcare.Dishwasher.Program.Auto2', + 'Dishcare.Dishwasher.Program.Auto3', + 'Dishcare.Dishwasher.Program.Eco50', + 'Dishcare.Dishwasher.Program.Quick45', + ]), + 'status': dict({ + 'BSH.Common.Setting.AmbientLightBrightness': dict({ + 'type': 'Double', + 'unit': '%', + 'value': 70, + }), + 'BSH.Common.Setting.AmbientLightColor': dict({ + 'type': 'BSH.Common.EnumType.AmbientLightColor', + 'value': 'BSH.Common.EnumType.AmbientLightColor.Color43', + }), + 'BSH.Common.Setting.AmbientLightCustomColor': dict({ + 'type': 'String', + 'value': '#4a88f8', + }), + 'BSH.Common.Setting.AmbientLightEnabled': dict({ + 'type': 'Boolean', + 'value': True, + }), + 'BSH.Common.Setting.ChildLock': dict({ + 'type': 'Boolean', + 'value': False, + }), + 'BSH.Common.Setting.PowerState': dict({ + 'type': 'BSH.Common.EnumType.PowerState', + 'value': 'BSH.Common.EnumType.PowerState.On', + }), + 'BSH.Common.Status.DoorState': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Closed', + }), + 'BSH.Common.Status.OperationState': dict({ + 'value': 'BSH.Common.EnumType.OperationState.Ready', + }), + 'BSH.Common.Status.RemoteControlActive': dict({ + 'value': True, + }), + 'BSH.Common.Status.RemoteControlStartAllowed': dict({ + 'value': True, + }), + 'Refrigeration.Common.Status.Door.Refrigerator': dict({ + 'value': 'BSH.Common.EnumType.DoorState.Open', + }), + }), + }) +# --- diff --git a/tests/components/home_connect/test_diagnostics.py b/tests/components/home_connect/test_diagnostics.py new file mode 100644 index 00000000000..d0bc5e77735 --- /dev/null +++ b/tests/components/home_connect/test_diagnostics.py @@ -0,0 +1,87 @@ +"""Test diagnostics for Home Connect.""" + +from collections.abc import Awaitable, Callable +from unittest.mock import MagicMock + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.home_connect.const import DOMAIN +from homeassistant.components.home_connect.diagnostics import ( + async_get_config_entry_diagnostics, + async_get_device_diagnostics, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("bypass_throttle") +async def 
test_async_get_config_entry_diagnostics( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + assert await async_get_config_entry_diagnostics(hass, config_entry) == snapshot + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_async_get_device_diagnostics( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test device config entry diagnostics.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + device = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, "SIEMENS-HCS02DWH1-6BE58C26DCC1")}, + ) + + assert await async_get_device_diagnostics(hass, config_entry, device) == snapshot + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_async_device_diagnostics_exceptions( + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: MagicMock, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that device diagnostics raises an error for an unknown device.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + device = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, "Random-Device-ID")}, + ) + + with pytest.raises(ValueError): + await async_get_device_diagnostics(hass, config_entry, device) diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 52550d705a9..7c4f73b6f0a 100644 --- a/tests/components/home_connect/test_init.py +++ b/tests/components/home_connect/test_init.py @@ -10,7 +10,10 @@ from requests import HTTPError import requests_mock from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.components.home_connect import SCAN_INTERVAL +from homeassistant.components.home_connect import ( + SCAN_INTERVAL, + bsh_key_to_translation_key, +) from homeassistant.components.home_connect.const import ( BSH_CHILD_LOCK_STATE, BSH_OPERATION_STATE, @@ -27,6 +30,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from script.hassfest.translations import RE_TRANSLATION_KEY from .conftest import ( CLIENT_ID, @@ -305,7 +309,7 @@ async def test_services_exception( service_call["service_data"]["device_id"] = "DOES_NOT_EXISTS" - with pytest.raises(ValueError): + with pytest.raises(AssertionError): await hass.services.async_call(**service_call) @@ -372,3 +376,10 @@ async def test_entity_migration( domain, DOMAIN,
f"{appliance.haId}-{expected_unique_id_suffix}" ) assert config_entry_v1_1.minor_version == 2 + + +async def test_bsh_key_transformations() -> None: + """Test that the key transformations are compatible valid translations keys and can be reversed.""" + program = "Dishcare.Dishwasher.Program.Eco50" + translation_key = bsh_key_to_translation_key(program) + assert RE_TRANSLATION_KEY.match(translation_key) diff --git a/tests/components/home_connect/test_select.py b/tests/components/home_connect/test_select.py new file mode 100644 index 00000000000..5939d256e0a --- /dev/null +++ b/tests/components/home_connect/test_select.py @@ -0,0 +1,161 @@ +"""Tests for home_connect select entities.""" + +from collections.abc import Awaitable, Callable, Generator +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ( + BSH_ACTIVE_PROGRAM, + BSH_SELECTED_PROGRAM, +) +from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry, load_json_object_fixture + +SETTINGS_STATUS = { + setting.pop("key"): setting + for setting in load_json_object_fixture("home_connect/settings.json") + .get("Washer") + .get("data") + .get("settings") +} + +PROGRAM = "Dishcare.Dishwasher.Program.Eco50" + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.SELECT] + + +async def test_select( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test select entity.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + +@pytest.mark.parametrize( + ("entity_id", "status", "program_to_set"), + [ + ( + "select.washer_selected_program", + {BSH_SELECTED_PROGRAM: {"value": PROGRAM}}, + "dishcare_dishwasher_program_eco_50", + ), + ( + "select.washer_active_program", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + "dishcare_dishwasher_program_eco_50", + ), + ], +) +async def test_select_functionality( + entity_id: str, + status: dict, + program_to_set: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test select functionality.""" + appliance.status.update(SETTINGS_STATUS) + appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [appliance] + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + appliance.status.update(status) + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: program_to_set}, + blocking=True, + ) + assert hass.states.is_state(entity_id, program_to_set) + + +@pytest.mark.parametrize( + ( + "entity_id", + "status", + 
"program_to_set", + "mock_attr", + "exception_match", + ), + [ + ( + "select.washer_selected_program", + {BSH_SELECTED_PROGRAM: {"value": PROGRAM}}, + "dishcare_dishwasher_program_eco_50", + "select_program", + r"Error.*select.*program.*", + ), + ( + "select.washer_active_program", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + "dishcare_dishwasher_program_eco_50", + "start_program", + r"Error.*start.*program.*", + ), + ], +) +async def test_select_exception_handling( + entity_id: str, + status: dict, + program_to_set: str, + mock_attr: str, + exception_match: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + config_entry: MockConfigEntry, + setup_credentials: None, + problematic_appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test exception handling.""" + problematic_appliance.get_programs_available.side_effect = None + problematic_appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state is ConfigEntryState.LOADED + + # Assert that an exception is called. + with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + problematic_appliance.status.update(status) + with pytest.raises(ServiceValidationError, match=exception_match): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {"entity_id": entity_id, "option": program_to_set}, + blocking=True, + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index 06201ffd58c..e4f45fbcdf9 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -162,7 +162,7 @@ async def test_switch_functionality( SERVICE_TURN_OFF, "set_setting", "Dishwasher", - r"Error.*turn.*off.*appliance.*value", + r"Error.*turn.*off.*", ), ( "switch.dishwasher_power", @@ -170,7 +170,7 @@ async def test_switch_functionality( SERVICE_TURN_ON, "set_setting", "Dishwasher", - r"Error.*turn.*on.*appliance.*", + r"Error.*turn.*on.*", ), ( "switch.dishwasher_child_lock", @@ -178,7 +178,7 @@ async def test_switch_functionality( SERVICE_TURN_ON, "set_setting", "Dishwasher", - r"Error.*turn.*on.*key.*", + r"Error.*turn.*on.*", ), ( "switch.dishwasher_child_lock", @@ -186,7 +186,7 @@ async def test_switch_functionality( SERVICE_TURN_OFF, "set_setting", "Dishwasher", - r"Error.*turn.*off.*key.*", + r"Error.*turn.*off.*", ), ], indirect=["problematic_appliance"], @@ -297,7 +297,7 @@ async def test_ent_desc_switch_functionality( SERVICE_TURN_ON, "set_setting", "FridgeFreezer", - r"Error.*turn.*on.*key.*", + r"Error.*turn.*on.*", ), ( "switch.fridgefreezer_freezer_super_mode", @@ -305,7 +305,7 @@ async def test_ent_desc_switch_functionality( SERVICE_TURN_OFF, "set_setting", "FridgeFreezer", - r"Error.*turn.*off.*key.*", + r"Error.*turn.*off.*", ), ], indirect=["problematic_appliance"], diff --git a/tests/components/homekit/test_accessories.py b/tests/components/homekit/test_accessories.py index c37cac84b8a..00cf42bb916 100644 --- a/tests/components/homekit/test_accessories.py +++ b/tests/components/homekit/test_accessories.py @@ -121,7 +121,7 @@ async def test_home_accessory(hass: HomeAssistant, hk_driver) -> None: serv = acc3.services[0] # SERV_ACCESSORY_INFO assert ( 
serv.get_characteristic(CHAR_NAME).value - == "Home Accessory that exceeds the maximum maximum maximum maximum " + == "Home Accessory that exceeds the maximum maximum maximum maximum" ) assert ( serv.get_characteristic(CHAR_MANUFACTURER).value @@ -154,7 +154,7 @@ async def test_home_accessory(hass: HomeAssistant, hk_driver) -> None: serv = acc4.services[0] # SERV_ACCESSORY_INFO assert ( serv.get_characteristic(CHAR_NAME).value - == "Home Accessory that exceeds the maximum maximum maximum maximum " + == "Home Accessory that exceeds the maximum maximum maximum maximum" ) assert ( serv.get_characteristic(CHAR_MANUFACTURER).value diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index 8454610566b..e99db8f6234 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -921,8 +921,8 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[0] assert call_set_temperature[0].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[0].data[ATTR_TARGET_TEMP_HIGH] == 73.5 - assert call_set_temperature[0].data[ATTR_TARGET_TEMP_LOW] == 68 + assert call_set_temperature[0].data[ATTR_TARGET_TEMP_HIGH] == 73.4 + assert call_set_temperature[0].data[ATTR_TARGET_TEMP_LOW] == 68.18 assert len(events) == 1 assert events[-1].data[ATTR_VALUE] == "CoolingThresholdTemperature to 23°C" @@ -942,8 +942,8 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[1] assert call_set_temperature[1].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[1].data[ATTR_TARGET_TEMP_HIGH] == 73.5 - assert call_set_temperature[1].data[ATTR_TARGET_TEMP_LOW] == 71.5 + assert call_set_temperature[1].data[ATTR_TARGET_TEMP_HIGH] == 73.4 + assert call_set_temperature[1].data[ATTR_TARGET_TEMP_LOW] == 71.6 assert len(events) == 2 assert events[-1].data[ATTR_VALUE] == "HeatingThresholdTemperature to 22°C" @@ -962,7 +962,7 @@ async def test_thermostat_fahrenheit( await hass.async_block_till_done() assert call_set_temperature[2] assert call_set_temperature[2].data[ATTR_ENTITY_ID] == entity_id - assert call_set_temperature[2].data[ATTR_TEMPERATURE] == 75.0 + assert call_set_temperature[2].data[ATTR_TEMPERATURE] == 75.2 assert len(events) == 3 assert events[-1].data[ATTR_VALUE] == "TargetTemperature to 24.0°C" diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index 7f7e3ee0ce0..30efd7fcc5c 100644 --- a/tests/components/homekit/test_util.py +++ b/tests/components/homekit/test_util.py @@ -256,6 +256,7 @@ def test_cleanup_name_for_homekit() -> None: """Ensure name sanitize works as expected.""" assert cleanup_name_for_homekit("abc") == "abc" + assert cleanup_name_for_homekit("abc ") == "abc" assert cleanup_name_for_homekit("a b c") == "a b c" assert cleanup_name_for_homekit("ab_c") == "ab c" assert ( @@ -267,14 +268,16 @@ def test_cleanup_name_for_homekit() -> None: def test_temperature_to_homekit() -> None: """Test temperature conversion from HA to HomeKit.""" - assert temperature_to_homekit(20.46, UnitOfTemperature.CELSIUS) == 20.5 - assert temperature_to_homekit(92.1, UnitOfTemperature.FAHRENHEIT) == 33.4 + assert temperature_to_homekit(20.46, UnitOfTemperature.CELSIUS) == 20.46 + assert temperature_to_homekit(92.1, UnitOfTemperature.FAHRENHEIT) == pytest.approx( + 33.388888888888886 + ) def test_temperature_to_states() -> None: """Test temperature conversion from 
HomeKit to HA.""" assert temperature_to_states(20, UnitOfTemperature.CELSIUS) == 20.0 - assert temperature_to_states(20.2, UnitOfTemperature.FAHRENHEIT) == 68.5 + assert temperature_to_states(20.2, UnitOfTemperature.FAHRENHEIT) == 68.36 def test_density_to_air_quality() -> None: diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index 8304d567916..b96da507adf 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -11400,15 +11400,15 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 167, - 89, + 168, + 90, ), 'supported_color_modes': list([ , ]), 'supported_features': , 'xy_color': tuple( - 0.524, + 0.522, 0.387, ), }), @@ -11548,15 +11548,15 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 167, - 89, + 168, + 90, ), 'supported_color_modes': list([ , ]), 'supported_features': , 'xy_color': tuple( - 0.524, + 0.522, 0.387, ), }), @@ -14883,7 +14883,7 @@ 'min_mireds': 153, 'rgb_color': tuple( 255, - 141, + 142, 28, ), 'supported_color_modes': list([ @@ -14892,8 +14892,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.589, - 0.385, + 0.588, + 0.386, ), }), 'entity_id': 'light.nanoleaf_strip_3b32_nanoleaf_light_strip', diff --git a/tests/components/homematicip_cloud/test_sensor.py b/tests/components/homematicip_cloud/test_sensor.py index bdd0b6194ed..2dda3116032 100644 --- a/tests/components/homematicip_cloud/test_sensor.py +++ b/tests/components/homematicip_cloud/test_sensor.py @@ -23,7 +23,11 @@ from homeassistant.components.homematicip_cloud.sensor import ( ATTR_WIND_DIRECTION, ATTR_WIND_DIRECTION_VARIATION, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorStateClass, +) from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, LIGHT_LUX, @@ -362,6 +366,7 @@ async def test_hmip_windspeed_sensor( assert ( ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfSpeed.KILOMETERS_PER_HOUR ) + assert ha_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT await async_manipulate_test_data(hass, hmip_device, "windSpeed", 9.4) ha_state = hass.states.get(entity_id) assert ha_state.state == "9.4" @@ -411,6 +416,7 @@ async def test_hmip_today_rain_sensor( assert ha_state.state == "3.9" assert ha_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfLength.MILLIMETERS + assert ha_state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT await async_manipulate_test_data(hass, hmip_device, "todayRainCounter", 14.2) ha_state = hass.states.get(entity_id) assert ha_state.state == "14.2" diff --git a/tests/components/homewizard/conftest.py b/tests/components/homewizard/conftest.py index fcfe1e5c189..dfd92577a04 100644 --- a/tests/components/homewizard/conftest.py +++ b/tests/components/homewizard/conftest.py @@ -4,7 +4,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from homewizard_energy.errors import NotFoundError -from homewizard_energy.models import Data, Device, State, System +from homewizard_energy.v1.models import Data, Device, State, System import pytest from homeassistant.components.homewizard.const import DOMAIN @@ -27,11 +27,11 @@ def mock_homewizardenergy( """Return a mock bridge.""" with ( patch( - "homeassistant.components.homewizard.coordinator.HomeWizardEnergy", + 
"homeassistant.components.homewizard.coordinator.HomeWizardEnergyV1", autospec=True, ) as homewizard, patch( - "homeassistant.components.homewizard.config_flow.HomeWizardEnergy", + "homeassistant.components.homewizard.config_flow.HomeWizardEnergyV1", new=homewizard, ), ): @@ -77,12 +77,12 @@ def mock_config_entry() -> MockConfigEntry: title="Device", domain=DOMAIN, data={ - "product_name": "Product name", - "product_type": "product_type", - "serial": "aabbccddeeff", + "product_name": "P1 Meter", + "product_type": "HWE-P1", + "serial": "5c2fafabcdef", CONF_IP_ADDRESS: "127.0.0.1", }, - unique_id="aabbccddeeff", + unique_id="HWE-P1_5c2fafabcdef", ) diff --git a/tests/components/homewizard/fixtures/HWE-KWH1/device.json b/tests/components/homewizard/fixtures/HWE-KWH1/device.json index 67f9ddf42cb..2cb20bf1255 100644 --- a/tests/components/homewizard/fixtures/HWE-KWH1/device.json +++ b/tests/components/homewizard/fixtures/HWE-KWH1/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-KWH1", "product_name": "kWh meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-KWH3/device.json b/tests/components/homewizard/fixtures/HWE-KWH3/device.json index e3122c8ff89..a3ba3281a4f 100644 --- a/tests/components/homewizard/fixtures/HWE-KWH3/device.json +++ b/tests/components/homewizard/fixtures/HWE-KWH3/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-KWH3", "product_name": "KWh meter 3-phase", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-P1-invalid-EAN/device.json b/tests/components/homewizard/fixtures/HWE-P1-invalid-EAN/device.json index 4972c491859..a444aa81c30 100644 --- a/tests/components/homewizard/fixtures/HWE-P1-invalid-EAN/device.json +++ b/tests/components/homewizard/fixtures/HWE-P1-invalid-EAN/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-P1", "product_name": "P1 meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.19", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-P1-unused-exports/device.json b/tests/components/homewizard/fixtures/HWE-P1-unused-exports/device.json index 4972c491859..a444aa81c30 100644 --- a/tests/components/homewizard/fixtures/HWE-P1-unused-exports/device.json +++ b/tests/components/homewizard/fixtures/HWE-P1-unused-exports/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-P1", "product_name": "P1 meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.19", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json b/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json index 4972c491859..a444aa81c30 100644 --- a/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json +++ b/tests/components/homewizard/fixtures/HWE-P1-zero-values/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-P1", "product_name": "P1 meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.19", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-P1/device.json b/tests/components/homewizard/fixtures/HWE-P1/device.json index 4972c491859..a444aa81c30 100644 --- a/tests/components/homewizard/fixtures/HWE-P1/device.json +++ b/tests/components/homewizard/fixtures/HWE-P1/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-P1", "product_name": "P1 meter", - "serial": 
"3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.19", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-SKT-11/device.json b/tests/components/homewizard/fixtures/HWE-SKT-11/device.json index bab5a636368..8b768eccb98 100644 --- a/tests/components/homewizard/fixtures/HWE-SKT-11/device.json +++ b/tests/components/homewizard/fixtures/HWE-SKT-11/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-SKT", "product_name": "Energy Socket", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.03", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-SKT-21/device.json b/tests/components/homewizard/fixtures/HWE-SKT-21/device.json index 69b5947351f..a4ab182e7ec 100644 --- a/tests/components/homewizard/fixtures/HWE-SKT-21/device.json +++ b/tests/components/homewizard/fixtures/HWE-SKT-21/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-SKT", "product_name": "Energy Socket", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "4.07", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/HWE-WTR/device.json b/tests/components/homewizard/fixtures/HWE-WTR/device.json index d33e6045299..3f57d7174fc 100644 --- a/tests/components/homewizard/fixtures/HWE-WTR/device.json +++ b/tests/components/homewizard/fixtures/HWE-WTR/device.json @@ -1,7 +1,7 @@ { "product_type": "HWE-WTR", "product_name": "Watermeter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "2.03", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/SDM230/SDM630/device.json b/tests/components/homewizard/fixtures/SDM230/SDM630/device.json index b8ec1d18fe8..c7fefd081b5 100644 --- a/tests/components/homewizard/fixtures/SDM230/SDM630/device.json +++ b/tests/components/homewizard/fixtures/SDM230/SDM630/device.json @@ -1,7 +1,7 @@ { "product_type": "SDM630-wifi", "product_name": "KWh meter 3-phase", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/SDM230/device.json b/tests/components/homewizard/fixtures/SDM230/device.json index b6b5c18904e..2dcd391e119 100644 --- a/tests/components/homewizard/fixtures/SDM230/device.json +++ b/tests/components/homewizard/fixtures/SDM230/device.json @@ -1,7 +1,7 @@ { "product_type": "SDM230-wifi", "product_name": "kWh meter", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/fixtures/SDM630/device.json b/tests/components/homewizard/fixtures/SDM630/device.json index b8ec1d18fe8..c7fefd081b5 100644 --- a/tests/components/homewizard/fixtures/SDM630/device.json +++ b/tests/components/homewizard/fixtures/SDM630/device.json @@ -1,7 +1,7 @@ { "product_type": "SDM630-wifi", "product_name": "KWh meter 3-phase", - "serial": "3c39e7aabbcc", + "serial": "5c2fafabcdef", "firmware_version": "3.06", "api_version": "v1" } diff --git a/tests/components/homewizard/snapshots/test_button.ambr b/tests/components/homewizard/snapshots/test_button.ambr index d5ad9770478..6dd7fcc45d2 100644 --- a/tests/components/homewizard/snapshots/test_button.ambr +++ b/tests/components/homewizard/snapshots/test_button.ambr @@ -42,7 +42,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_identify', + 'unique_id': 'HWE-P1_5c2fafabcdef_identify', 'unit_of_measurement': None, }) # --- @@ -54,7 +54,7 @@ 'connections': set({ 
tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -64,7 +64,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, diff --git a/tests/components/homewizard/snapshots/test_config_flow.ambr b/tests/components/homewizard/snapshots/test_config_flow.ambr index c3852a8c3fa..0a301fc3941 100644 --- a/tests/components/homewizard/snapshots/test_config_flow.ambr +++ b/tests/components/homewizard/snapshots/test_config_flow.ambr @@ -3,7 +3,7 @@ FlowResultSnapshot({ 'context': dict({ 'source': 'zeroconf', - 'unique_id': 'HWE-P1_aabbccddeeff', + 'unique_id': 'HWE-P1_5c2fafabcdef', }), 'data': dict({ 'ip_address': '127.0.0.1', @@ -31,7 +31,7 @@ 'pref_disable_polling': False, 'source': 'zeroconf', 'title': 'P1 meter', - 'unique_id': 'HWE-P1_aabbccddeeff', + 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), 'title': 'P1 meter', @@ -47,7 +47,7 @@ 'title_placeholders': dict({ 'name': 'P1 meter', }), - 'unique_id': 'HWE-P1_aabbccddeeff', + 'unique_id': 'HWE-P1_5c2fafabcdef', }), 'data': dict({ 'ip_address': '127.0.0.1', @@ -75,7 +75,7 @@ 'pref_disable_polling': False, 'source': 'zeroconf', 'title': 'P1 meter', - 'unique_id': 'HWE-P1_aabbccddeeff', + 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), 'title': 'P1 meter', @@ -89,9 +89,9 @@ 'confirm_only': True, 'source': 'zeroconf', 'title_placeholders': dict({ - 'name': 'Energy Socket (aabbccddeeff)', + 'name': 'Energy Socket (5c2fafabcdef)', }), - 'unique_id': 'HWE-SKT_aabbccddeeff', + 'unique_id': 'HWE-SKT_5c2fafabcdef', }), 'data': dict({ 'ip_address': '127.0.0.1', @@ -119,7 +119,7 @@ 'pref_disable_polling': False, 'source': 'zeroconf', 'title': 'Energy Socket', - 'unique_id': 'HWE-SKT_aabbccddeeff', + 'unique_id': 'HWE-SKT_5c2fafabcdef', 'version': 1, }), 'title': 'Energy Socket', @@ -131,7 +131,7 @@ FlowResultSnapshot({ 'context': dict({ 'source': 'user', - 'unique_id': 'HWE-P1_3c39e7aabbcc', + 'unique_id': 'HWE-P1_5c2fafabcdef', }), 'data': dict({ 'ip_address': '2.2.2.2', @@ -159,7 +159,7 @@ 'pref_disable_polling': False, 'source': 'user', 'title': 'P1 meter', - 'unique_id': 'HWE-P1_3c39e7aabbcc', + 'unique_id': 'HWE-P1_5c2fafabcdef', 'version': 1, }), 'title': 'P1 meter', diff --git a/tests/components/homewizard/snapshots/test_diagnostics.ambr b/tests/components/homewizard/snapshots/test_diagnostics.ambr index f8ac80f2536..cb5e7ef1f43 100644 --- a/tests/components/homewizard/snapshots/test_diagnostics.ambr +++ b/tests/components/homewizard/snapshots/test_diagnostics.ambr @@ -82,8 +82,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -171,8 +171,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -311,8 +311,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -404,8 +404,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -497,8 +497,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 
'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -586,8 +586,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -675,8 +675,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) @@ -764,8 +764,8 @@ }), 'entry': dict({ 'ip_address': '**REDACTED**', - 'product_name': 'Product name', - 'product_type': 'product_type', + 'product_name': 'P1 Meter', + 'product_type': 'HWE-P1', 'serial': '**REDACTED**', }), }) diff --git a/tests/components/homewizard/snapshots/test_number.ambr b/tests/components/homewizard/snapshots/test_number.ambr index 768255c7508..b14028cd97c 100644 --- a/tests/components/homewizard/snapshots/test_number.ambr +++ b/tests/components/homewizard/snapshots/test_number.ambr @@ -14,7 +14,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: test_number_entities[HWE-SKT-11].1 @@ -51,7 +51,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'status_light_brightness', - 'unique_id': 'aabbccddeeff_status_light_brightness', + 'unique_id': 'HWE-P1_5c2fafabcdef_status_light_brightness', 'unit_of_measurement': '%', }) # --- @@ -63,7 +63,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -73,7 +73,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -106,7 +106,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100.0', + 'state': '100', }) # --- # name: test_number_entities[HWE-SKT-21].1 @@ -143,7 +143,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'status_light_brightness', - 'unique_id': 'aabbccddeeff_status_light_brightness', + 'unique_id': 'HWE-P1_5c2fafabcdef_status_light_brightness', 'unit_of_measurement': '%', }) # --- @@ -155,7 +155,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -165,7 +165,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index 5d5b458dccc..a91c87722d1 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -7,7 +7,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17,7 +17,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -66,7 +66,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -94,7 +94,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -104,7 +104,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -153,7 +153,7 @@ 'previous_unique_id': 
None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -181,7 +181,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -191,7 +191,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -240,7 +240,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -268,7 +268,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -278,7 +278,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -327,7 +327,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -355,7 +355,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -365,7 +365,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -414,7 +414,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -442,7 +442,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -452,7 +452,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -504,7 +504,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -532,7 +532,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -542,7 +542,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -591,7 +591,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_factor', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor', 'unit_of_measurement': '%', }) # --- @@ -619,7 +619,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -629,7 +629,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -678,7 +678,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -706,7 +706,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -716,7 +716,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -765,7 +765,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 
'unique_id': 'aabbccddeeff_active_voltage_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_v', 'unit_of_measurement': , }) # --- @@ -793,7 +793,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -803,7 +803,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -850,7 +850,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -875,7 +875,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -885,7 +885,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -934,7 +934,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -961,7 +961,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -971,7 +971,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1020,7 +1020,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -1048,7 +1048,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1058,7 +1058,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1107,7 +1107,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l1_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l1_va', 'unit_of_measurement': , }) # --- @@ -1135,7 +1135,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1145,7 +1145,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1194,7 +1194,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l2_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l2_va', 'unit_of_measurement': , }) # --- @@ -1222,7 +1222,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1232,7 +1232,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1281,7 +1281,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l3_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l3_va', 'unit_of_measurement': , }) # --- @@ -1309,7 +1309,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1319,7 +1319,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1368,7 +1368,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -1396,7 +1396,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1406,7 +1406,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1455,7 +1455,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -1483,7 +1483,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1493,7 +1493,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1542,7 +1542,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -1570,7 +1570,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1580,7 +1580,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1629,7 +1629,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -1657,7 +1657,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1667,7 +1667,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1716,7 +1716,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -1744,7 +1744,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1754,7 +1754,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1803,7 +1803,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -1831,7 +1831,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1841,7 +1841,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1890,7 +1890,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -1918,7 +1918,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -1928,7 +1928,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -1980,7 
+1980,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -2008,7 +2008,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2018,7 +2018,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2067,7 +2067,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l1', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l1', 'unit_of_measurement': '%', }) # --- @@ -2095,7 +2095,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2105,7 +2105,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2154,7 +2154,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l2', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l2', 'unit_of_measurement': '%', }) # --- @@ -2182,7 +2182,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2192,7 +2192,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2241,7 +2241,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l3', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l3', 'unit_of_measurement': '%', }) # --- @@ -2269,7 +2269,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2279,7 +2279,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2331,7 +2331,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -2359,7 +2359,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2369,7 +2369,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2421,7 +2421,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -2449,7 +2449,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2459,7 +2459,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2511,7 +2511,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -2539,7 +2539,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2549,7 +2549,7 @@ 'identifiers': set({ tuple( 'homewizard', - 
'3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2598,7 +2598,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -2626,7 +2626,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2636,7 +2636,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2685,7 +2685,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l1_var', 'unit_of_measurement': , }) # --- @@ -2713,7 +2713,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2723,7 +2723,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2772,7 +2772,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l2_var', 'unit_of_measurement': , }) # --- @@ -2800,7 +2800,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2810,7 +2810,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2859,7 +2859,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l3_var', 'unit_of_measurement': , }) # --- @@ -2887,7 +2887,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2897,7 +2897,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -2946,7 +2946,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -2974,7 +2974,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -2984,7 +2984,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3033,7 +3033,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -3061,7 +3061,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3071,7 +3071,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3120,7 +3120,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -3148,7 +3148,7 @@ 'connections': set({ tuple( 
'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3158,7 +3158,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3205,7 +3205,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -3230,7 +3230,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3240,7 +3240,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3289,7 +3289,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -3316,7 +3316,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3326,7 +3326,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3373,7 +3373,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_average_w', - 'unique_id': 'aabbccddeeff_active_power_average_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_average_w', 'unit_of_measurement': , }) # --- @@ -3400,7 +3400,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3410,7 +3410,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3459,7 +3459,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -3487,7 +3487,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3497,7 +3497,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3546,7 +3546,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -3574,7 +3574,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3584,7 +3584,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3633,7 +3633,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -3661,7 +3661,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3671,7 +3671,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3718,7 +3718,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dsmr_version', - 'unique_id': 'aabbccddeeff_smr_version', + 'unique_id': 'HWE-P1_5c2fafabcdef_smr_version', 'unit_of_measurement': None, }) # --- @@ -3743,7 +3743,7 @@ 'connections': 
set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3753,7 +3753,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3802,7 +3802,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -3830,7 +3830,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3840,7 +3840,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3889,7 +3889,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t1_kwh', 'unit_of_measurement': , }) # --- @@ -3917,7 +3917,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -3927,7 +3927,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -3976,7 +3976,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t2_kwh', 'unit_of_measurement': , }) # --- @@ -4004,7 +4004,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4014,7 +4014,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4063,7 +4063,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t3_kwh', 'unit_of_measurement': , }) # --- @@ -4091,7 +4091,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4101,7 +4101,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4150,7 +4150,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t4_kwh', 'unit_of_measurement': , }) # --- @@ -4178,7 +4178,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4188,7 +4188,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4237,7 +4237,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -4265,7 +4265,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4275,7 +4275,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4324,7 +4324,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 
'unique_id': 'aabbccddeeff_total_power_import_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t1_kwh', 'unit_of_measurement': , }) # --- @@ -4352,7 +4352,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4362,7 +4362,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4411,7 +4411,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t2_kwh', 'unit_of_measurement': , }) # --- @@ -4439,7 +4439,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4449,7 +4449,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4498,7 +4498,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t3_kwh', 'unit_of_measurement': , }) # --- @@ -4526,7 +4526,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4536,7 +4536,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4585,7 +4585,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t4_kwh', 'unit_of_measurement': , }) # --- @@ -4613,7 +4613,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4623,7 +4623,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4672,7 +4672,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -4700,7 +4700,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4710,7 +4710,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4757,7 +4757,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'long_power_fail_count', - 'unique_id': 'aabbccddeeff_long_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_long_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -4782,7 +4782,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4792,7 +4792,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -4839,7 +4839,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'monthly_power_peak_w', - 'unique_id': 'aabbccddeeff_monthly_power_peak_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_monthly_power_peak_w', 'unit_of_measurement': , }) # --- @@ -4866,7 +4866,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4876,7 +4876,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 
'is_new': False, @@ -4928,7 +4928,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -4956,7 +4956,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -4966,7 +4966,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5013,7 +5013,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'any_power_fail_count', - 'unique_id': 'aabbccddeeff_any_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_any_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -5038,7 +5038,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5048,7 +5048,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5100,7 +5100,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -5128,7 +5128,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5138,7 +5138,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5190,7 +5190,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -5218,7 +5218,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5228,7 +5228,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5280,7 +5280,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -5308,7 +5308,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5318,7 +5318,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5365,7 +5365,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'unique_meter_id', - 'unique_id': 'aabbccddeeff_unique_meter_id', + 'unique_id': 'HWE-P1_5c2fafabcdef_unique_meter_id', 'unit_of_measurement': None, }) # --- @@ -5390,7 +5390,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5400,7 +5400,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5447,7 +5447,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'meter_model', - 'unique_id': 'aabbccddeeff_meter_model', + 'unique_id': 'HWE-P1_5c2fafabcdef_meter_model', 'unit_of_measurement': None, }) # --- @@ -5472,7 +5472,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5482,7 +5482,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 
'is_new': False, @@ -5536,7 +5536,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_tariff', - 'unique_id': 'aabbccddeeff_active_tariff', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_tariff', 'unit_of_measurement': None, }) # --- @@ -5568,7 +5568,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5578,7 +5578,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5627,7 +5627,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_liter_m3', 'unit_of_measurement': , }) # --- @@ -5655,7 +5655,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5665,7 +5665,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5714,7 +5714,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -5742,7 +5742,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5752,7 +5752,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5801,7 +5801,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -5829,7 +5829,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5839,7 +5839,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5888,7 +5888,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -5916,7 +5916,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -5926,7 +5926,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -5973,7 +5973,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l1_count', 'unit_of_measurement': None, }) # --- @@ -5998,7 +5998,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6008,7 +6008,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6055,7 +6055,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l2_count', 'unit_of_measurement': None, }) # --- @@ -6080,7 +6080,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6090,7 +6090,7 @@ 'identifiers': set({ tuple( 
'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6137,7 +6137,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l3_count', 'unit_of_measurement': None, }) # --- @@ -6162,7 +6162,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6172,7 +6172,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6219,7 +6219,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l1_count', 'unit_of_measurement': None, }) # --- @@ -6244,7 +6244,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6254,7 +6254,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6301,7 +6301,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l2_count', 'unit_of_measurement': None, }) # --- @@ -6326,7 +6326,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6336,7 +6336,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6383,7 +6383,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l3_count', 'unit_of_measurement': None, }) # --- @@ -6408,7 +6408,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6418,7 +6418,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6467,7 +6467,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', 'unit_of_measurement': 'l/min', }) # --- @@ -6494,7 +6494,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6504,7 +6504,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6551,7 +6551,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -6576,7 +6576,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -6586,7 +6586,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -6635,7 +6635,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -7076,7 +7076,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 
'disabled_by': None, @@ -7086,7 +7086,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7133,7 +7133,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_average_w', - 'unique_id': 'aabbccddeeff_active_power_average_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_average_w', 'unit_of_measurement': , }) # --- @@ -7160,7 +7160,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7170,7 +7170,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7219,7 +7219,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -7247,7 +7247,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7257,7 +7257,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7306,7 +7306,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -7334,7 +7334,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7344,7 +7344,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7393,7 +7393,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -7421,7 +7421,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7431,7 +7431,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7478,7 +7478,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dsmr_version', - 'unique_id': 'aabbccddeeff_smr_version', + 'unique_id': 'HWE-P1_5c2fafabcdef_smr_version', 'unit_of_measurement': None, }) # --- @@ -7503,7 +7503,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7513,7 +7513,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7562,7 +7562,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -7590,7 +7590,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7600,7 +7600,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7649,7 +7649,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t1_kwh', 'unit_of_measurement': , }) # --- @@ -7677,7 +7677,7 @@ 
'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7687,7 +7687,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7736,7 +7736,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t2_kwh', 'unit_of_measurement': , }) # --- @@ -7764,7 +7764,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7774,7 +7774,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7823,7 +7823,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t3_kwh', 'unit_of_measurement': , }) # --- @@ -7851,7 +7851,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7861,7 +7861,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7910,7 +7910,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t4_kwh', 'unit_of_measurement': , }) # --- @@ -7938,7 +7938,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -7948,7 +7948,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -7997,7 +7997,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -8025,7 +8025,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8035,7 +8035,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8084,7 +8084,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t1_kwh', 'unit_of_measurement': , }) # --- @@ -8112,7 +8112,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8122,7 +8122,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8171,7 +8171,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t2_kwh', 'unit_of_measurement': , }) # --- @@ -8199,7 +8199,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8209,7 +8209,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8258,7 +8258,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t3_kwh', 'unit_of_measurement': , }) # --- @@ -8286,7 +8286,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8296,7 +8296,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8345,7 +8345,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t4_kwh', 'unit_of_measurement': , }) # --- @@ -8373,7 +8373,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8383,7 +8383,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8432,7 +8432,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -8460,7 +8460,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8470,7 +8470,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8517,7 +8517,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'long_power_fail_count', - 'unique_id': 'aabbccddeeff_long_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_long_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -8542,7 +8542,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8552,7 +8552,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8599,7 +8599,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'monthly_power_peak_w', - 'unique_id': 'aabbccddeeff_monthly_power_peak_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_monthly_power_peak_w', 'unit_of_measurement': , }) # --- @@ -8626,7 +8626,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8636,7 +8636,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8688,7 +8688,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -8716,7 +8716,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8726,7 +8726,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8773,7 +8773,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'any_power_fail_count', - 'unique_id': 'aabbccddeeff_any_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_any_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -8798,7 +8798,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8808,7 +8808,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8860,7 +8860,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -8888,7 +8888,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8898,7 +8898,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -8950,7 +8950,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -8978,7 +8978,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -8988,7 +8988,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9040,7 +9040,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -9068,7 +9068,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9078,7 +9078,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9125,7 +9125,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'unique_meter_id', - 'unique_id': 'aabbccddeeff_unique_meter_id', + 'unique_id': 'HWE-P1_5c2fafabcdef_unique_meter_id', 'unit_of_measurement': None, }) # --- @@ -9150,7 +9150,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9160,7 +9160,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9207,7 +9207,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'meter_model', - 'unique_id': 'aabbccddeeff_meter_model', + 'unique_id': 'HWE-P1_5c2fafabcdef_meter_model', 'unit_of_measurement': None, }) # --- @@ -9232,7 +9232,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9242,7 +9242,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9296,7 +9296,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_tariff', - 'unique_id': 'aabbccddeeff_active_tariff', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_tariff', 'unit_of_measurement': None, }) # --- @@ -9328,7 +9328,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9338,7 +9338,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9387,7 +9387,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_liter_m3', 'unit_of_measurement': , }) # --- @@ -9415,7 +9415,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9425,7 +9425,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9474,7 +9474,7 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -9502,7 +9502,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9512,7 +9512,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9561,7 +9561,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -9589,7 +9589,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9599,7 +9599,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9648,7 +9648,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -9676,7 +9676,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9686,7 +9686,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9733,7 +9733,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l1_count', 'unit_of_measurement': None, }) # --- @@ -9758,7 +9758,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9768,7 +9768,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9815,7 +9815,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l2_count', 'unit_of_measurement': None, }) # --- @@ -9840,7 +9840,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9850,7 +9850,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9897,7 +9897,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l3_count', 'unit_of_measurement': None, }) # --- @@ -9922,7 +9922,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -9932,7 +9932,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -9979,7 +9979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l1_count', 'unit_of_measurement': None, }) # --- @@ -10004,7 +10004,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10014,7 +10014,7 @@ 'identifiers': 
set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10061,7 +10061,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l2_count', 'unit_of_measurement': None, }) # --- @@ -10086,7 +10086,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10096,7 +10096,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10143,7 +10143,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l3_count', 'unit_of_measurement': None, }) # --- @@ -10168,7 +10168,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10178,7 +10178,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10227,7 +10227,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', 'unit_of_measurement': 'l/min', }) # --- @@ -10254,7 +10254,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10264,7 +10264,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10311,7 +10311,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -10336,7 +10336,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10346,7 +10346,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10395,7 +10395,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -10836,7 +10836,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10846,7 +10846,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10893,7 +10893,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_average_w', - 'unique_id': 'aabbccddeeff_active_power_average_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_average_w', 'unit_of_measurement': , }) # --- @@ -10920,7 +10920,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -10930,7 +10930,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -10979,7 +10979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -11007,7 +11007,7 @@ 'connections': set({ tuple( 'mac', - 
'3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11017,7 +11017,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11066,7 +11066,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -11094,7 +11094,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11104,7 +11104,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11153,7 +11153,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -11181,7 +11181,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11191,7 +11191,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11240,7 +11240,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -11268,7 +11268,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11278,7 +11278,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11327,7 +11327,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t1_kwh', 'unit_of_measurement': , }) # --- @@ -11355,7 +11355,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11365,7 +11365,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11414,7 +11414,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t2_kwh', 'unit_of_measurement': , }) # --- @@ -11442,7 +11442,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11452,7 +11452,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11501,7 +11501,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t3_kwh', 'unit_of_measurement': , }) # --- @@ -11529,7 +11529,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11539,7 +11539,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11588,7 +11588,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_tariff_kwh', - 'unique_id': 
'aabbccddeeff_total_power_export_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_t4_kwh', 'unit_of_measurement': , }) # --- @@ -11616,7 +11616,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11626,7 +11626,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11675,7 +11675,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -11703,7 +11703,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11713,7 +11713,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11762,7 +11762,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t1_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t1_kwh', 'unit_of_measurement': , }) # --- @@ -11790,7 +11790,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11800,7 +11800,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11849,7 +11849,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t2_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t2_kwh', 'unit_of_measurement': , }) # --- @@ -11877,7 +11877,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11887,7 +11887,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -11936,7 +11936,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t3_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t3_kwh', 'unit_of_measurement': , }) # --- @@ -11964,7 +11964,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -11974,7 +11974,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12023,7 +12023,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_tariff_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_t4_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_t4_kwh', 'unit_of_measurement': , }) # --- @@ -12051,7 +12051,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12061,7 +12061,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12110,7 +12110,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -12138,7 +12138,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12148,7 +12148,7 @@ 'identifiers': set({ tuple( 'homewizard', - 
'3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12195,7 +12195,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'long_power_fail_count', - 'unique_id': 'aabbccddeeff_long_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_long_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -12220,7 +12220,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12230,7 +12230,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12282,7 +12282,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -12310,7 +12310,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12320,7 +12320,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12367,7 +12367,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'any_power_fail_count', - 'unique_id': 'aabbccddeeff_any_power_fail_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_any_power_fail_count', 'unit_of_measurement': None, }) # --- @@ -12392,7 +12392,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12402,7 +12402,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12454,7 +12454,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -12482,7 +12482,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12492,7 +12492,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12544,7 +12544,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -12572,7 +12572,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12582,7 +12582,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12634,7 +12634,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -12662,7 +12662,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12672,7 +12672,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12721,7 +12721,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_liter_m3', 'unit_of_measurement': , }) # --- @@ -12749,7 +12749,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ 
-12759,7 +12759,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12808,7 +12808,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -12836,7 +12836,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12846,7 +12846,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12895,7 +12895,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -12923,7 +12923,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -12933,7 +12933,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -12982,7 +12982,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -13010,7 +13010,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13020,7 +13020,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13067,7 +13067,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l1_count', 'unit_of_measurement': None, }) # --- @@ -13092,7 +13092,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13102,7 +13102,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13149,7 +13149,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l2_count', 'unit_of_measurement': None, }) # --- @@ -13174,7 +13174,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13184,7 +13184,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13231,7 +13231,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_sag_phase_count', - 'unique_id': 'aabbccddeeff_voltage_sag_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_sag_l3_count', 'unit_of_measurement': None, }) # --- @@ -13256,7 +13256,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13266,7 +13266,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13313,7 +13313,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l1_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l1_count', 'unit_of_measurement': 
None, }) # --- @@ -13338,7 +13338,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13348,7 +13348,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13395,7 +13395,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l2_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l2_count', 'unit_of_measurement': None, }) # --- @@ -13420,7 +13420,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13430,7 +13430,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13477,7 +13477,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_swell_phase_count', - 'unique_id': 'aabbccddeeff_voltage_swell_l3_count', + 'unique_id': 'HWE-P1_5c2fafabcdef_voltage_swell_l3_count', 'unit_of_measurement': None, }) # --- @@ -13502,7 +13502,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13512,7 +13512,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13561,7 +13561,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', 'unit_of_measurement': 'l/min', }) # --- @@ -13588,7 +13588,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13598,7 +13598,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13647,7 +13647,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -13675,7 +13675,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13685,7 +13685,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13734,7 +13734,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -13762,7 +13762,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13772,7 +13772,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13824,7 +13824,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -13852,7 +13852,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13862,7 +13862,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13914,7 +13914,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 
'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -13942,7 +13942,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -13952,7 +13952,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -13999,7 +13999,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -14024,7 +14024,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14034,7 +14034,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14083,7 +14083,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -14110,7 +14110,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14120,7 +14120,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14169,7 +14169,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -14197,7 +14197,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14207,7 +14207,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14256,7 +14256,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -14284,7 +14284,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14294,7 +14294,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14343,7 +14343,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -14371,7 +14371,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14381,7 +14381,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14430,7 +14430,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -14458,7 +14458,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14468,7 +14468,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14517,7 +14517,7 @@ 'previous_unique_id': None, 'supported_features': 0, 
'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -14545,7 +14545,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14555,7 +14555,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14607,7 +14607,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -14635,7 +14635,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14645,7 +14645,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14694,7 +14694,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_factor', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor', 'unit_of_measurement': '%', }) # --- @@ -14722,7 +14722,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14732,7 +14732,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14784,7 +14784,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -14812,7 +14812,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14822,7 +14822,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14871,7 +14871,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -14899,7 +14899,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14909,7 +14909,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -14958,7 +14958,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_voltage_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_v', 'unit_of_measurement': , }) # --- @@ -14986,7 +14986,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -14996,7 +14996,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15043,7 +15043,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -15068,7 +15068,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15078,7 +15078,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15127,7 +15127,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -15154,7 +15154,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15164,7 +15164,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15213,7 +15213,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_liter_m3', - 'unique_id': 'aabbccddeeff_total_liter_m3', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_liter_m3', 'unit_of_measurement': , }) # --- @@ -15241,7 +15241,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15251,7 +15251,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15300,7 +15300,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_liter_lpm', - 'unique_id': 'aabbccddeeff_active_liter_lpm', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_liter_lpm', 'unit_of_measurement': 'l/min', }) # --- @@ -15327,7 +15327,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15337,7 +15337,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15384,7 +15384,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -15409,7 +15409,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15419,7 +15419,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15468,7 +15468,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -15495,7 +15495,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15505,7 +15505,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15554,7 +15554,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -15582,7 +15582,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15592,7 +15592,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15641,7 +15641,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -15669,7 +15669,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15679,7 +15679,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15728,7 +15728,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -15756,7 +15756,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15766,7 +15766,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15815,7 +15815,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -15843,7 +15843,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15853,7 +15853,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15902,7 +15902,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -15930,7 +15930,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -15940,7 +15940,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -15992,7 +15992,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -16020,7 +16020,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16030,7 +16030,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16079,7 +16079,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_factor', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor', 'unit_of_measurement': '%', }) # --- @@ -16107,7 +16107,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16117,7 +16117,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16166,7 +16166,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': , }) # --- @@ -16194,7 +16194,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16204,7 +16204,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16253,7 +16253,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_voltage_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_v', 'unit_of_measurement': , }) # --- @@ -16281,7 +16281,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16291,7 +16291,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16338,7 +16338,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -16363,7 +16363,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16373,7 +16373,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16422,7 +16422,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- @@ -16449,7 +16449,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16459,7 +16459,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16508,7 +16508,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_apparent_power_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_va', 'unit_of_measurement': , }) # --- @@ -16536,7 +16536,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16546,7 +16546,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16595,7 +16595,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l1_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l1_va', 'unit_of_measurement': , }) # --- @@ -16623,7 +16623,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16633,7 +16633,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16682,7 +16682,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l2_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l2_va', 'unit_of_measurement': , }) # --- @@ -16710,7 +16710,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16720,7 +16720,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16769,7 +16769,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_apparent_power_phase_va', - 'unique_id': 'aabbccddeeff_active_apparent_power_l3_va', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_apparent_power_l3_va', 'unit_of_measurement': , }) # --- @@ -16797,7 +16797,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16807,7 +16807,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16856,7 +16856,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_current_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_a', 'unit_of_measurement': , }) # --- @@ -16884,7 +16884,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16894,7 +16894,7 @@ 'identifiers': set({ tuple( 'homewizard', - 
'3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -16943,7 +16943,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l1_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l1_a', 'unit_of_measurement': , }) # --- @@ -16971,7 +16971,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -16981,7 +16981,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17030,7 +17030,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l2_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l2_a', 'unit_of_measurement': , }) # --- @@ -17058,7 +17058,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17068,7 +17068,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17117,7 +17117,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_current_phase_a', - 'unique_id': 'aabbccddeeff_active_current_l3_a', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_current_l3_a', 'unit_of_measurement': , }) # --- @@ -17145,7 +17145,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17155,7 +17155,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17204,7 +17204,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_export_kwh', - 'unique_id': 'aabbccddeeff_total_power_export_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_export_kwh', 'unit_of_measurement': , }) # --- @@ -17232,7 +17232,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17242,7 +17242,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17291,7 +17291,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_energy_import_kwh', - 'unique_id': 'aabbccddeeff_total_power_import_kwh', + 'unique_id': 'HWE-P1_5c2fafabcdef_total_power_import_kwh', 'unit_of_measurement': , }) # --- @@ -17319,7 +17319,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17329,7 +17329,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17378,7 +17378,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_frequency_hz', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_frequency_hz', 'unit_of_measurement': , }) # --- @@ -17406,7 +17406,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17416,7 +17416,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17468,7 +17468,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_power_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_w', 'unit_of_measurement': , }) # --- @@ -17496,7 +17496,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 
'disabled_by': None, @@ -17506,7 +17506,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17555,7 +17555,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l1', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l1', 'unit_of_measurement': '%', }) # --- @@ -17583,7 +17583,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17593,7 +17593,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17642,7 +17642,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l2', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l2', 'unit_of_measurement': '%', }) # --- @@ -17670,7 +17670,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17680,7 +17680,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17729,7 +17729,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_factor_phase', - 'unique_id': 'aabbccddeeff_active_power_factor_l3', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_factor_l3', 'unit_of_measurement': '%', }) # --- @@ -17757,7 +17757,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17767,7 +17767,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17819,7 +17819,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l1_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l1_w', 'unit_of_measurement': , }) # --- @@ -17847,7 +17847,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17857,7 +17857,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17909,7 +17909,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l2_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l2_w', 'unit_of_measurement': , }) # --- @@ -17937,7 +17937,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -17947,7 +17947,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -17999,7 +17999,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_power_phase_w', - 'unique_id': 'aabbccddeeff_active_power_l3_w', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_power_l3_w', 'unit_of_measurement': , }) # --- @@ -18027,7 +18027,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18037,7 +18037,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18086,7 +18086,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_active_reactive_power_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_var', 'unit_of_measurement': 
, }) # --- @@ -18114,7 +18114,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18124,7 +18124,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18173,7 +18173,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l1_var', 'unit_of_measurement': , }) # --- @@ -18201,7 +18201,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18211,7 +18211,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18260,7 +18260,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l2_var', 'unit_of_measurement': , }) # --- @@ -18288,7 +18288,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18298,7 +18298,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18347,7 +18347,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', - 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_reactive_power_l3_var', 'unit_of_measurement': , }) # --- @@ -18375,7 +18375,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18385,7 +18385,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18434,7 +18434,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l1_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l1_v', 'unit_of_measurement': , }) # --- @@ -18462,7 +18462,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18472,7 +18472,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18521,7 +18521,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l2_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l2_v', 'unit_of_measurement': , }) # --- @@ -18549,7 +18549,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18559,7 +18559,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18608,7 +18608,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'active_voltage_phase_v', - 'unique_id': 'aabbccddeeff_active_voltage_l3_v', + 'unique_id': 'HWE-P1_5c2fafabcdef_active_voltage_l3_v', 'unit_of_measurement': , }) # --- @@ -18636,7 +18636,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18646,7 +18646,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18693,7 +18693,7 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'wifi_ssid', - 'unique_id': 'aabbccddeeff_wifi_ssid', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_ssid', 'unit_of_measurement': None, }) # --- @@ -18718,7 +18718,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -18728,7 +18728,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -18777,7 +18777,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'wifi_strength', - 'unique_id': 'aabbccddeeff_wifi_strength', + 'unique_id': 'HWE-P1_5c2fafabcdef_wifi_strength', 'unit_of_measurement': '%', }) # --- diff --git a/tests/components/homewizard/snapshots/test_switch.ambr b/tests/components/homewizard/snapshots/test_switch.ambr index 68a351c1ebb..c2ef87970f3 100644 --- a/tests/components/homewizard/snapshots/test_switch.ambr +++ b/tests/components/homewizard/snapshots/test_switch.ambr @@ -41,7 +41,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -53,7 +53,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -63,7 +63,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -123,7 +123,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -135,7 +135,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -145,7 +145,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -206,7 +206,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 'unique_id': 'aabbccddeeff_power_on', + 'unique_id': 'HWE-P1_5c2fafabcdef_power_on', 'unit_of_measurement': None, }) # --- @@ -218,7 +218,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -228,7 +228,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -288,7 +288,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -300,7 +300,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -310,7 +310,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -370,7 +370,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'switch_lock', - 'unique_id': 'aabbccddeeff_switch_lock', + 'unique_id': 'HWE-P1_5c2fafabcdef_switch_lock', 'unit_of_measurement': None, }) # --- @@ -382,7 +382,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -392,7 +392,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -453,7 +453,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': None, - 
'unique_id': 'aabbccddeeff_power_on', + 'unique_id': 'HWE-P1_5c2fafabcdef_power_on', 'unit_of_measurement': None, }) # --- @@ -465,7 +465,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -475,7 +475,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -535,7 +535,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -547,7 +547,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -557,7 +557,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -617,7 +617,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'switch_lock', - 'unique_id': 'aabbccddeeff_switch_lock', + 'unique_id': 'HWE-P1_5c2fafabcdef_switch_lock', 'unit_of_measurement': None, }) # --- @@ -629,7 +629,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -639,7 +639,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -699,7 +699,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -711,7 +711,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -721,7 +721,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -781,7 +781,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -793,7 +793,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -803,7 +803,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, @@ -863,7 +863,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'cloud_connection', - 'unique_id': 'aabbccddeeff_cloud_connection', + 'unique_id': 'HWE-P1_5c2fafabcdef_cloud_connection', 'unit_of_measurement': None, }) # --- @@ -875,7 +875,7 @@ 'connections': set({ tuple( 'mac', - '3c:39:e7:aa:bb:cc', + '5c:2f:af:ab:cd:ef', ), }), 'disabled_by': None, @@ -885,7 +885,7 @@ 'identifiers': set({ tuple( 'homewizard', - '3c39e7aabbcc', + '5c2fafabcdef', ), }), 'is_new': False, diff --git a/tests/components/homewizard/test_button.py b/tests/components/homewizard/test_button.py index 928e6f21901..d0a6d92b36f 100644 --- a/tests/components/homewizard/test_button.py +++ b/tests/components/homewizard/test_button.py @@ -79,7 +79,7 @@ async def test_identify_button( with pytest.raises( HomeAssistantError, - match=r"^The local API of the HomeWizard device is disabled$", + match=r"^The local API is disabled$", ): await hass.services.async_call( button.DOMAIN, diff --git a/tests/components/homewizard/test_config_flow.py b/tests/components/homewizard/test_config_flow.py index 442659f2aad..984fda8e7a4 100644 --- 
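(Note on the snapshot churn above: the HomeWizard entity unique IDs move from "<serial>_<key>" to "<product_type>_<serial>_<key>", e.g. "HWE-P1_5c2fafabcdef_wifi_ssid". A registry rename of that shape is typically wired up roughly as sketched below; the hook name and the hard-coded fixture values are illustrative assumptions, not the integration's actual code.)

from homeassistant.helpers import entity_registry as er

async def async_migrate_unique_ids(hass, config_entry):
    """Illustrative sketch: prepend the product type to legacy unique IDs."""

    def _migrate(reg_entry: er.RegistryEntry) -> dict[str, str] | None:
        # Fixture values taken from the tests above; real code would derive
        # these from the device API response instead of hard-coding them.
        old_prefix = "5c2fafabcdef_"
        new_prefix = "HWE-P1_5c2fafabcdef_"
        if reg_entry.unique_id.startswith(old_prefix):
            return {
                "new_unique_id": new_prefix + reg_entry.unique_id[len(old_prefix):]
            }
        return None  # leave already-migrated entries untouched

    await er.async_migrate_entries(hass, config_entry.entry_id, _migrate)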
a/tests/components/homewizard/test_config_flow.py +++ b/tests/components/homewizard/test_config_flow.py @@ -8,7 +8,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant import config_entries -from homeassistant.components import zeroconf +from homeassistant.components import dhcp, zeroconf from homeassistant.components.homewizard.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant @@ -66,7 +66,7 @@ async def test_discovery_flow_works( "path": "/api/v1", "product_name": "Energy Socket", "product_type": "HWE-SKT", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -112,7 +112,7 @@ async def test_discovery_flow_during_onboarding( "path": "/api/v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -149,7 +149,7 @@ async def test_discovery_flow_during_onboarding_disabled_api( "path": "/api/v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -193,7 +193,7 @@ async def test_discovery_disabled_api( "path": "/api/v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -228,7 +228,7 @@ async def test_discovery_missing_data_in_service_info(hass: HomeAssistant) -> No "path": "/api/v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -254,7 +254,7 @@ async def test_discovery_invalid_api(hass: HomeAssistant) -> None: "path": "/api/not_v1", "product_name": "P1 meter", "product_type": "HWE-P1", - "serial": "aabbccddeeff", + "serial": "5c2fafabcdef", }, ), ) @@ -263,6 +263,116 @@ async def test_discovery_invalid_api(hass: HomeAssistant) -> None: assert result["reason"] == "unsupported_api_version" +async def test_dhcp_discovery_updates_entry( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test DHCP discovery updates config entries.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip="1.0.0.127", + hostname="HW-p1meter-aabbcc", + macaddress="5c2fafabcdef", + ), + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + assert mock_config_entry.data[CONF_IP_ADDRESS] == "1.0.0.127" + + +@pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + ("exception"), + [(DisabledError), (RequestError)], +) +async def test_dhcp_discovery_updates_entry_fails( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, + exception: Exception, +) -> None: + """Test DHCP discovery updates config entries, but fails to connect.""" + mock_homewizardenergy.device.side_effect = exception + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip="1.0.0.127", + hostname="HW-p1meter-aabbcc", + macaddress="5c2fafabcdef", + ), + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "unknown" + + +async def test_dhcp_discovery_ignores_unknown( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, +) -> None: + """Test DHCP discovery is only used for updates. 
+ + Anything else will just abort the flow. + """ + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_DHCP}, + data=dhcp.DhcpServiceInfo( + ip="127.0.0.1", + hostname="HW-p1meter-aabbcc", + macaddress="5c2fafabcdef", + ), + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "unknown" + + +async def test_discovery_flow_updates_new_ip( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test discovery setup updates new config data.""" + mock_config_entry.add_to_hass(hass) + + # preflight check, see if the ip address is already in use + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("1.0.0.127"), + ip_addresses=[ip_address("1.0.0.127")], + port=80, + hostname="p1meter-ddeeff.local.", + type="", + name="", + properties={ + "api_enabled": "1", + "path": "/api/v1", + "product_name": "P1 Meter", + "product_type": "HWE-P1", + "serial": "5c2fafabcdef", + }, + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_IP_ADDRESS] == "1.0.0.127" + + @pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( ("exception", "reason"), @@ -370,3 +480,131 @@ async def test_reauth_error( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "api_not_enabled"} + + +async def test_reconfigure( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguration.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + # original entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.0.0.127", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "1.0.0.127" + + +async def test_reconfigure_nochange( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfiguration without changing values.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + # original entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "127.0.0.1", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + + +async def test_reconfigure_wrongdevice( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test entering ip of other device and prevent changing it based on serial.""" + 
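(The new DHCP tests above imply a discovery step along the lines of the sketch below. The connect helper and the unique-id format are assumptions made for illustration; only the observable behaviour — refresh the IP of a known device, abort with "already_configured" or "unknown" otherwise — comes from the tests themselves.)

from homewizard_energy.errors import DisabledError, RequestError

from homeassistant.components import dhcp
from homeassistant.config_entries import ConfigFlowResult
from homeassistant.const import CONF_IP_ADDRESS

# Sketch of a method on the HomeWizard ConfigFlow class.
async def async_step_dhcp(self, discovery_info: dhcp.DhcpServiceInfo) -> ConfigFlowResult:
    """DHCP discovery only refreshes the IP of an already-known device."""
    try:
        # Hypothetical helper that queries the device API on the discovered IP.
        device = await self._async_try_connect(discovery_info.ip)
    except (DisabledError, RequestError):
        return self.async_abort(reason="unknown")

    # Assumed unique-id format, mirroring the snapshot ids above.
    await self.async_set_unique_id(f"{device.product_type}_{device.serial}")
    self._abort_if_unique_id_configured(
        updates={CONF_IP_ADDRESS: discovery_info.ip}
    )
    # Unknown devices are not set up from DHCP; the flow simply stops here.
    return self.async_abort(reason="unknown")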
mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + # simulate different serial number, as if user entered wrong IP + mock_homewizardenergy.device.return_value.serial = "not_5c2fafabcdef" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.0.0.127", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_device" + + # entry should still be original entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "127.0.0.1" + + +@pytest.mark.parametrize( + ("exception", "reason"), + [(DisabledError, "api_not_enabled"), (RequestError, "network_error")], +) +async def test_reconfigure_cannot_connect( + hass: HomeAssistant, + mock_homewizardenergy: MagicMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + reason: str, +) -> None: + """Test reconfiguration fails when not able to connect.""" + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + mock_homewizardenergy.device.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.0.0.127", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": reason} + assert result["data_schema"]({}) == {CONF_IP_ADDRESS: "127.0.0.1"} + + # attempt with valid IP should work + mock_homewizardenergy.device.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_IP_ADDRESS: "1.0.0.127", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # changed entry + assert mock_config_entry.data[CONF_IP_ADDRESS] == "1.0.0.127" diff --git a/tests/components/homewizard/test_number.py b/tests/components/homewizard/test_number.py index ff27fb1b257..623ba018dee 100644 --- a/tests/components/homewizard/test_number.py +++ b/tests/components/homewizard/test_number.py @@ -42,7 +42,7 @@ async def test_number_entities( assert snapshot == device_entry # Test unknown handling - assert state.state == "100.0" + assert state.state == "100" mock_homewizardenergy.state.return_value.brightness = None @@ -85,7 +85,7 @@ async def test_number_entities( mock_homewizardenergy.state_set.side_effect = DisabledError with pytest.raises( HomeAssistantError, - match=r"^The local API of the HomeWizard device is disabled$", + match=r"^The local API is disabled$", ): await hass.services.async_call( number.DOMAIN, diff --git a/tests/components/homewizard/test_sensor.py b/tests/components/homewizard/test_sensor.py index c180c2a4def..60077c2cdf9 100644 --- a/tests/components/homewizard/test_sensor.py +++ b/tests/components/homewizard/test_sensor.py @@ -3,7 +3,7 @@ from unittest.mock import MagicMock from homewizard_energy.errors import RequestError -from homewizard_energy.models import Data +from homewizard_energy.v1.models import Data import pytest from syrupy.assertion import SnapshotAssertion diff --git a/tests/components/homewizard/test_switch.py b/tests/components/homewizard/test_switch.py index b9e812620e8..d9f1ac26b4f 100644 --- a/tests/components/homewizard/test_switch.py +++ 
b/tests/components/homewizard/test_switch.py @@ -174,7 +174,7 @@ async def test_switch_entities( with pytest.raises( HomeAssistantError, - match=r"^The local API of the HomeWizard device is disabled$", + match=r"^The local API is disabled$", ): await hass.services.async_call( switch.DOMAIN, @@ -185,7 +185,7 @@ async def test_switch_entities( with pytest.raises( HomeAssistantError, - match=r"^The local API of the HomeWizard device is disabled$", + match=r"^The local API is disabled$", ): await hass.services.async_call( switch.DOMAIN, diff --git a/tests/components/honeywell/test_config_flow.py b/tests/components/honeywell/test_config_flow.py index b1c0b28f537..ed9c86f5e10 100644 --- a/tests/components/honeywell/test_config_flow.py +++ b/tests/components/honeywell/test_config_flow.py @@ -120,10 +120,6 @@ async def test_create_option_entry( } -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.honeywell.config.abort.reauth_successful"], -) async def test_reauth_flow(hass: HomeAssistant) -> None: """Test a successful reauth flow.""" diff --git a/tests/components/http/test_ban.py b/tests/components/http/test_ban.py index 7ffd0263157..59011de0cfd 100644 --- a/tests/components/http/test_ban.py +++ b/tests/components/http/test_ban.py @@ -197,6 +197,7 @@ async def test_access_from_supervisor_ip( hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, hassio_env, + resolution_info: AsyncMock, ) -> None: """Test accessing to server from supervisor IP.""" app = web.Application() @@ -218,17 +219,7 @@ async def test_access_from_supervisor_ip( manager = app[KEY_BAN_MANAGER] - with patch( - "homeassistant.components.hassio.HassIO.get_resolution_info", - return_value={ - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - ): - assert await async_setup_component(hass, "hassio", {"hassio": {}}) + assert await async_setup_component(hass, "hassio", {"hassio": {}}) m_open = mock_open() diff --git a/tests/components/husqvarna_automower/fixtures/empty.json b/tests/components/husqvarna_automower/fixtures/empty.json new file mode 100644 index 00000000000..22f4a272fc1 --- /dev/null +++ b/tests/components/husqvarna_automower/fixtures/empty.json @@ -0,0 +1 @@ +{ "data": [] } diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index ee9b7510770..a0bb8302fcc 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -123,7 +123,7 @@ 'system': dict({ 'model': 'HUSQVARNA AUTOMOWER® 450XH', 'name': 'Test Mower 1', - 'serial_number': 123, + 'serial_number': '123', }), 'work_area_dict': dict({ '0': 'my_lawn', diff --git a/tests/components/husqvarna_automower/snapshots/test_init.ambr b/tests/components/husqvarna_automower/snapshots/test_init.ambr index e79bd1f8145..036783dd6d0 100644 --- a/tests/components/husqvarna_automower/snapshots/test_init.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_init.ambr @@ -25,7 +25,7 @@ 'name': 'Test Mower 1', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': 123, + 'serial_number': '123', 'suggested_area': 'Garden', 'sw_version': None, 'via_device_id': None, diff --git a/tests/components/husqvarna_automower/test_binary_sensor.py b/tests/components/husqvarna_automower/test_binary_sensor.py index 858dc03b93f..30c9cc1bdd3 100644 --- 
a/tests/components/husqvarna_automower/test_binary_sensor.py +++ b/tests/components/husqvarna_automower/test_binary_sensor.py @@ -4,6 +4,7 @@ from unittest.mock import AsyncMock, patch from aioautomower.model import MowerActivities, MowerAttributes from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy import SnapshotAssertion from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL @@ -17,6 +18,7 @@ from .const import TEST_MOWER_ID from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_binary_sensor_states( hass: HomeAssistant, mock_automower_client: AsyncMock, @@ -50,6 +52,7 @@ async def test_binary_sensor_states( assert state.state == "on" +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_binary_sensor_snapshot( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/husqvarna_automower/test_config_flow.py b/tests/components/husqvarna_automower/test_config_flow.py index 31e8a9afcbd..d91078d80a2 100644 --- a/tests/components/husqvarna_automower/test_config_flow.py +++ b/tests/components/husqvarna_automower/test_config_flow.py @@ -2,6 +2,8 @@ from unittest.mock import AsyncMock, patch +from aioautomower.const import API_BASE_URL +from aioautomower.session import AutomowerEndpoint import pytest from homeassistant import config_entries @@ -18,16 +20,18 @@ from homeassistant.helpers import config_entry_oauth2_flow from . import setup_integration from .const import CLIENT_ID, USER_ID -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @pytest.mark.parametrize( - ("new_scope", "amount"), + ("new_scope", "fixture", "exception", "amount"), [ - ("iam:read amc:api", 1), - ("iam:read", 0), + ("iam:read amc:api", "mower.json", None, 1), + ("iam:read amc:api", "mower.json", Exception, 0), + ("iam:read", "mower.json", None, 0), + ("iam:read amc:api", "empty.json", None, 0), ], ) @pytest.mark.usefixtures("current_request_with_host") @@ -38,6 +42,8 @@ async def test_full_flow( jwt: str, new_scope: str, amount: int, + fixture: str, + exception: Exception | None, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -76,11 +82,17 @@ async def test_full_flow( "expires_at": 1697753347, }, ) - - with patch( - "homeassistant.components.husqvarna_automower.async_setup_entry", - return_value=True, - ) as mock_setup: + aioclient_mock.get( + f"{API_BASE_URL}/{AutomowerEndpoint.mowers}", + text=load_fixture(fixture, DOMAIN), + exc=exception, + ) + with ( + patch( + "homeassistant.components.husqvarna_automower.async_setup_entry", + return_value=True, + ) as mock_setup, + ): await hass.config_entries.flow.async_configure(result["flow_id"]) assert len(hass.config_entries.async_entries(DOMAIN)) == amount diff --git a/tests/components/husqvarna_automower/test_init.py b/tests/components/husqvarna_automower/test_init.py index acf10d33004..ae688571d2c 100644 --- a/tests/components/husqvarna_automower/test_init.py +++ b/tests/components/husqvarna_automower/test_init.py @@ -306,6 +306,7 @@ async def test_add_and_remove_work_area( del values[TEST_MOWER_ID].work_area_dict[123456] del values[TEST_MOWER_ID].work_areas[123456] del values[TEST_MOWER_ID].calendar.tasks[:2] + values[TEST_MOWER_ID].mower.work_area_id = 654321 
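(The extra (fixture, exception, amount) cases in the full-flow test above suggest the OAuth step now fetches the mower list before creating an entry. A minimal sketch of such a guard follows; the session wiring and the abort reasons are placeholders, not the integration's actual strings.)

from homeassistant.config_entries import ConfigFlowResult

# Sketch of a guard in the Husqvarna Automower config flow; assumes an
# aioautomower session is already available as self.session.
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
    """Create the entry only when the account actually has mowers."""
    try:
        status = await self.session.get_status()  # same call the tests mock
    except Exception:  # the test parametrizes a bare Exception here
        return self.async_abort(reason="unknown")  # placeholder reason
    if not status:
        return self.async_abort(reason="no_mower_connected")  # placeholder reason
    return await super().async_oauth_create_entry(data)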
mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 06fcc30e40c..08ed5251344 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -111,6 +111,7 @@ async def test_work_area_sensor( assert state.state == "my_lawn" +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize( ("sensor_to_test"), [ @@ -167,6 +168,7 @@ async def test_error_sensor( assert state.state == expected_state +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_snapshot( hass: HomeAssistant, entity_registry: er.EntityRegistry, diff --git a/tests/components/husqvarna_automower_ble/conftest.py b/tests/components/husqvarna_automower_ble/conftest.py index 5e27582b81c..3a8e881aba0 100644 --- a/tests/components/husqvarna_automower_ble/conftest.py +++ b/tests/components/husqvarna_automower_ble/conftest.py @@ -1,19 +1,16 @@ """Common fixtures for the Husqvarna Automower Bluetooth tests.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Generator from unittest.mock import AsyncMock, patch -from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.husqvarna_automower_ble.const import DOMAIN -from homeassistant.components.husqvarna_automower_ble.coordinator import SCAN_INTERVAL from homeassistant.const import CONF_ADDRESS, CONF_CLIENT_ID -from homeassistant.core import HomeAssistant from . import AUTOMOWER_SERVICE_INFO -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry @pytest.fixture @@ -26,25 +23,8 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture -async def scan_step( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> Generator[None, None, Callable[[], Awaitable[None]]]: - """Step system time forward.""" - - freezer.move_to("2023-01-01T01:00:00Z") - - async def delay() -> None: - """Trigger delay in system.""" - freezer.tick(delta=SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - return delay - - @pytest.fixture(autouse=True) -def mock_automower_client(enable_bluetooth: None, scan_step) -> Generator[AsyncMock]: +def mock_automower_client(enable_bluetooth: None) -> Generator[AsyncMock]: """Mock a BleakClient client.""" with ( patch( diff --git a/tests/components/imap/const.py b/tests/components/imap/const.py index 037960c9e5d..8f6761bd795 100644 --- a/tests/components/imap/const.py +++ b/tests/components/imap/const.py @@ -141,6 +141,8 @@ TEST_CONTENT_MULTIPART_BASE64_INVALID = ( ) EMPTY_SEARCH_RESPONSE = ("OK", [b"", b"Search completed (0.0001 + 0.000 secs)."]) +EMPTY_SEARCH_RESPONSE_ALT = ("OK", [b"Search completed (0.0001 + 0.000 secs)."]) + BAD_RESPONSE = ("BAD", [b"", b"Unexpected error"]) TEST_SEARCH_RESPONSE = ("OK", [b"1", b"Search completed (0.0001 + 0.000 secs)."]) diff --git a/tests/components/imap/test_diagnostics.py b/tests/components/imap/test_diagnostics.py index 23450104aed..43f837679c8 100644 --- a/tests/components/imap/test_diagnostics.py +++ b/tests/components/imap/test_diagnostics.py @@ -41,7 +41,7 @@ async def test_entry_diagnostics( # Make sure we have had one update (when polling) async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=5)) await 
hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" diff --git a/tests/components/imap/test_init.py b/tests/components/imap/test_init.py index 40c3ce013e4..d4281b9e513 100644 --- a/tests/components/imap/test_init.py +++ b/tests/components/imap/test_init.py @@ -20,6 +20,7 @@ from homeassistant.util.dt import utcnow from .const import ( BAD_RESPONSE, EMPTY_SEARCH_RESPONSE, + EMPTY_SEARCH_RESPONSE_ALT, TEST_BADLY_ENCODED_CONTENT, TEST_FETCH_RESPONSE_BINARY, TEST_FETCH_RESPONSE_HTML, @@ -153,7 +154,7 @@ async def test_receiving_message_successfully( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -202,7 +203,7 @@ async def test_receiving_message_with_invalid_encoding( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -237,7 +238,7 @@ async def test_receiving_message_no_subject_to_from( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -273,7 +274,7 @@ async def test_initial_authentication_error( assert await hass.config_entries.async_setup(config_entry.entry_id) == success await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert (state is not None) == success @@ -290,7 +291,7 @@ async def test_initial_invalid_folder_error( assert await hass.config_entries.async_setup(config_entry.entry_id) == success await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert (state is not None) == success @@ -330,7 +331,7 @@ async def test_late_authentication_retry( assert "Authentication failed, retrying" in caplog.text # we still should have an entity with an unavailable state - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -371,7 +372,7 @@ async def test_late_authentication_error( assert "Username or password incorrect, starting reauthentication" in caplog.text # we still should have an entity with an unavailable state - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -415,7 +416,7 @@ async def test_late_folder_error( assert "Selected mailbox folder is invalid" in caplog.text # we still should have an entity 
with an unavailable state - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -444,7 +445,7 @@ async def test_handle_cleanup_exception( async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have an entity assert state is not None assert state.state == "0" @@ -456,7 +457,7 @@ async def test_handle_cleanup_exception( await hass.async_block_till_done() assert "Error while cleaning up imap connection" in caplog.text - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have an entity with an unavailable state assert state is not None @@ -487,7 +488,7 @@ async def test_lost_connection_with_imap_push( await hass.async_block_till_done() assert "Lost imap.server.com (will attempt to reconnect after 10 s)" in caplog.text - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # Our entity should keep its current state as this assert state is not None assert state.state == "0" @@ -511,12 +512,17 @@ async def test_fetch_number_of_messages( await hass.async_block_till_done() assert "Invalid response for search" in caplog.text - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have an entity with an unavailable state assert state is not None assert state.state == STATE_UNAVAILABLE +@pytest.mark.parametrize( + "empty_search_reponse", + [EMPTY_SEARCH_RESPONSE, EMPTY_SEARCH_RESPONSE_ALT], + ids=["regular_empty_search_response", "alt_empty_search_response"], +) @pytest.mark.parametrize("imap_search", [TEST_SEARCH_RESPONSE]) @pytest.mark.parametrize( ("imap_fetch", "valid_date"), @@ -525,7 +531,10 @@ async def test_fetch_number_of_messages( ) @pytest.mark.parametrize("imap_has_capability", [True, False], ids=["push", "poll"]) async def test_reset_last_message( - hass: HomeAssistant, mock_imap_protocol: MagicMock, valid_date: bool + hass: HomeAssistant, + mock_imap_protocol: MagicMock, + valid_date: bool, + empty_search_reponse: tuple[str, list[bytes]], ) -> None: """Test receiving a message successfully.""" event = asyncio.Event() # needed for pushed coordinator to make a new loop @@ -556,7 +565,7 @@ async def test_reset_last_message( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -580,7 +589,7 @@ async def test_reset_last_message( ) # Simulate an update where no messages are found (needed for pushed coordinator) - mock_imap_protocol.search.return_value = Response(*EMPTY_SEARCH_RESPONSE) + mock_imap_protocol.search.return_value = Response(*empty_search_reponse) # Make sure we have an update async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) @@ -590,7 +599,7 @@ async def test_reset_last_message( await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = 
hass.states.get("sensor.imap_email_email_com_messages") # We should have message assert state is not None assert state.state == "0" @@ -607,7 +616,7 @@ async def test_reset_last_message( await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -637,7 +646,7 @@ async def test_event_skipped_message_too_large( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -667,7 +676,7 @@ async def test_message_is_truncated( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -702,7 +711,7 @@ async def test_message_data( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # We should have received one message assert state is not None assert state.state == "1" @@ -747,7 +756,7 @@ async def test_custom_template( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -798,7 +807,7 @@ async def test_enforce_polling( # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" @@ -838,7 +847,7 @@ async def test_services(hass: HomeAssistant, mock_imap_protocol: MagicMock) -> N # Make sure we have had one update (when polling) async_fire_time_changed(hass, utcnow() + timedelta(seconds=5)) await hass.async_block_till_done() - state = hass.states.get("sensor.imap_email_email_com") + state = hass.states.get("sensor.imap_email_email_com_messages") # we should have received one message assert state is not None assert state.state == "1" diff --git a/tests/components/intent/test_timers.py b/tests/components/intent/test_timers.py index d194d532513..1789e981e2d 100644 --- a/tests/components/intent/test_timers.py +++ b/tests/components/intent/test_timers.py @@ -1587,3 +1587,182 @@ async def test_async_device_supports_timers(hass: HomeAssistant) -> None: # After handler registration assert async_device_supports_timers(hass, device_id) + + +async def test_cancel_all_timers(hass: HomeAssistant, init_components) -> None: + """Test cancelling all timers.""" + device_id = "test_device" + + started_event = 
asyncio.Event() + num_started = 0 + + @callback + def handle_timer(event_type: TimerEventType, timer: TimerInfo) -> None: + nonlocal num_started + + if event_type == TimerEventType.STARTED: + num_started += 1 + if num_started == 3: + started_event.set() + + async_register_timer_handler(hass, device_id, handle_timer) + + # Start timers + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "pizza"}, "minutes": {"value": 10}}, + device_id=device_id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "tv"}, "minutes": {"value": 10}}, + device_id=device_id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + result2 = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "media"}, "minutes": {"value": 15}}, + device_id=device_id, + ) + assert result2.response_type == intent.IntentResponseType.ACTION_DONE + + # Wait for all timers to start + async with asyncio.timeout(1): + await started_event.wait() + + # Cancel all timers + result = await intent.async_handle( + hass, "test", intent.INTENT_CANCEL_ALL_TIMERS, {}, device_id=device_id + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + assert result.speech_slots.get("canceled", 0) == 3 + + # No timers should be running for test_device + result = await intent.async_handle( + hass, "test", intent.INTENT_TIMER_STATUS, {}, device_id=device_id + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + timers = result.speech_slots.get("timers", []) + assert len(timers) == 0 + + +async def test_cancel_all_timers_area( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test cancelling all timers in an area.""" + entry = MockConfigEntry() + entry.add_to_hass(hass) + + area_kitchen = area_registry.async_create("kitchen") + device_kitchen = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("test", "kitchen-device")}, + ) + device_registry.async_update_device(device_kitchen.id, area_id=area_kitchen.id) + + area_living_room = area_registry.async_create("living room") + device_living_room = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("test", "living_room-device")}, + ) + device_registry.async_update_device( + device_living_room.id, area_id=area_living_room.id + ) + + started_event = asyncio.Event() + num_timers = 3 + num_started = 0 + + @callback + def handle_timer(event_type: TimerEventType, timer: TimerInfo) -> None: + nonlocal num_started + + if event_type == TimerEventType.STARTED: + num_started += 1 + if num_started == num_timers: + started_event.set() + + async_register_timer_handler(hass, device_kitchen.id, handle_timer) + async_register_timer_handler(hass, device_living_room.id, handle_timer) + + # Start timers in different areas + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "pizza"}, "minutes": {"value": 10}}, + device_id=device_kitchen.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "tv"}, "minutes": {"value": 10}}, + device_id=device_living_room.id, + ) + assert 
result.response_type == intent.IntentResponseType.ACTION_DONE + + result = await intent.async_handle( + hass, + "test", + intent.INTENT_START_TIMER, + {"name": {"value": "media"}, "minutes": {"value": 15}}, + device_id=device_living_room.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + + # Wait for all timers to start + async with asyncio.timeout(1): + await started_event.wait() + + # Cancel all timers in kitchen + result = await intent.async_handle( + hass, + "test", + intent.INTENT_CANCEL_ALL_TIMERS, + {"area": {"value": "kitchen"}}, + device_id=device_kitchen.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + assert result.speech_slots.get("canceled", 0) == 1 + assert result.speech_slots.get("area") == "kitchen" + + # No timers should be running in kitchen + result = await intent.async_handle( + hass, + "test", + intent.INTENT_TIMER_STATUS, + {"area": {"value": "kitchen"}}, + device_id=device_kitchen.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + timers = result.speech_slots.get("timers", []) + assert len(timers) == 0 + + # timers should be running in living room + result = await intent.async_handle( + hass, + "test", + intent.INTENT_TIMER_STATUS, + {"area": {"value": "living room"}}, + device_id=device_living_room.id, + ) + assert result.response_type == intent.IntentResponseType.ACTION_DONE + timers = result.speech_slots.get("timers", []) + assert len(timers) == 2 diff --git a/tests/components/isy994/test_config_flow.py b/tests/components/isy994/test_config_flow.py index 34e267fe904..2bc1fff222f 100644 --- a/tests/components/isy994/test_config_flow.py +++ b/tests/components/isy994/test_config_flow.py @@ -698,3 +698,16 @@ async def test_reauth(hass: HomeAssistant) -> None: assert mock_setup_entry.called assert result4["type"] is FlowResultType.ABORT assert result4["reason"] == "reauth_successful" + + +async def test_options_flow(hass: HomeAssistant) -> None: + """Test option flow.""" + entry = MockConfigEntry(domain=DOMAIN) + entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # This should be improved at a later stage to increase test coverage + hass.config_entries.options.async_abort(result["flow_id"]) diff --git a/tests/components/jewish_calendar/test_config_flow.py b/tests/components/jewish_calendar/test_config_flow.py index 2a490270fdf..e00fe41749f 100644 --- a/tests/components/jewish_calendar/test_config_flow.py +++ b/tests/components/jewish_calendar/test_config_flow.py @@ -2,8 +2,6 @@ from unittest.mock import AsyncMock -import pytest - from homeassistant import config_entries, setup from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -20,12 +18,10 @@ from homeassistant.const import ( CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, - CONF_NAME, CONF_TIME_ZONE, ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -59,51 +55,6 @@ async def test_step_user(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> No assert entries[0].data[CONF_TIME_ZONE] == hass.config.time_zone -@pytest.mark.parametrize("diaspora", [True, False]) -@pytest.mark.parametrize("language", ["hebrew", "english"]) -async def test_import_no_options(hass: HomeAssistant, language, diaspora) -> None: - 
"""Test that the import step works.""" - conf = { - DOMAIN: {CONF_NAME: "test", CONF_LANGUAGE: language, CONF_DIASPORA: diaspora} - } - - assert await async_setup_component(hass, DOMAIN, conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - assert CONF_LANGUAGE in entries[0].data - assert CONF_DIASPORA in entries[0].data - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == conf[DOMAIN][entry_key] - - -async def test_import_with_options(hass: HomeAssistant) -> None: - """Test that the import step works.""" - conf = { - DOMAIN: { - CONF_NAME: "test", - CONF_DIASPORA: DEFAULT_DIASPORA, - CONF_LANGUAGE: DEFAULT_LANGUAGE, - CONF_CANDLE_LIGHT_MINUTES: 20, - CONF_HAVDALAH_OFFSET_MINUTES: 50, - CONF_LATITUDE: 31.76, - CONF_LONGITUDE: 35.235, - } - } - - # Simulate HomeAssistant setting up the component - assert await async_setup_component(hass, DOMAIN, conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == conf[DOMAIN][entry_key] - for entry_key, entry_val in entries[0].options.items(): - assert entry_val == conf[DOMAIN][entry_key] - - async def test_single_instance_allowed( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -168,10 +119,6 @@ async def test_options_reconfigure( ) -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.jewish_calendar.config.abort.reconfigure_successful"], -) async def test_reconfigure( hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: diff --git a/tests/components/jewish_calendar/test_init.py b/tests/components/jewish_calendar/test_init.py index b8454b41a60..cb982afec0f 100644 --- a/tests/components/jewish_calendar/test_init.py +++ b/tests/components/jewish_calendar/test_init.py @@ -1,76 +1 @@ """Tests for the Jewish Calendar component's init.""" - -from hdate import Location - -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSORS -from homeassistant.components.jewish_calendar import get_unique_prefix -from homeassistant.components.jewish_calendar.const import ( - CONF_CANDLE_LIGHT_MINUTES, - CONF_DIASPORA, - CONF_HAVDALAH_OFFSET_MINUTES, - DEFAULT_DIASPORA, - DEFAULT_LANGUAGE, - DOMAIN, -) -from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME -from homeassistant.core import HomeAssistant -import homeassistant.helpers.entity_registry as er -from homeassistant.setup import async_setup_component - - -async def test_import_unique_id_migration(hass: HomeAssistant) -> None: - """Test unique_id migration.""" - yaml_conf = { - DOMAIN: { - CONF_NAME: "test", - CONF_DIASPORA: DEFAULT_DIASPORA, - CONF_LANGUAGE: DEFAULT_LANGUAGE, - CONF_CANDLE_LIGHT_MINUTES: 20, - CONF_HAVDALAH_OFFSET_MINUTES: 50, - CONF_LATITUDE: 31.76, - CONF_LONGITUDE: 35.235, - } - } - - # Create an entry in the entity registry with the data from conf - ent_reg = er.async_get(hass) - location = Location( - latitude=yaml_conf[DOMAIN][CONF_LATITUDE], - longitude=yaml_conf[DOMAIN][CONF_LONGITUDE], - timezone=hass.config.time_zone, - diaspora=DEFAULT_DIASPORA, - ) - old_prefix = get_unique_prefix(location, DEFAULT_LANGUAGE, 20, 50) - sample_entity = ent_reg.async_get_or_create( - BINARY_SENSORS, - DOMAIN, - unique_id=f"{old_prefix}_erev_shabbat_hag", - suggested_object_id=f"{DOMAIN}_erev_shabbat_hag", - ) - # Save the existing 
unique_id, DEFAULT_LANGUAGE should be part of it - old_unique_id = sample_entity.unique_id - assert DEFAULT_LANGUAGE in old_unique_id - - # Simulate HomeAssistant setting up the component - assert await async_setup_component(hass, DOMAIN, yaml_conf.copy()) - await hass.async_block_till_done() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 1 - for entry_key, entry_val in entries[0].data.items(): - assert entry_val == yaml_conf[DOMAIN][entry_key] - for entry_key, entry_val in entries[0].options.items(): - assert entry_val == yaml_conf[DOMAIN][entry_key] - - # Assert that the unique_id was updated - new_unique_id = ent_reg.async_get(sample_entity.entity_id).unique_id - assert new_unique_id != old_unique_id - assert DEFAULT_LANGUAGE not in new_unique_id - - # Confirm that when the component is reloaded, the unique_id is not changed - assert ent_reg.async_get(sample_entity.entity_id).unique_id == new_unique_id - - # Confirm that all the unique_ids are prefixed correctly - await hass.config_entries.async_reload(entries[0].entry_id) - er_entries = er.async_entries_for_config_entry(ent_reg, entries[0].entry_id) - assert all(entry.unique_id.startswith(entries[0].entry_id) for entry in er_entries) diff --git a/tests/components/jewish_calendar/test_sensor.py b/tests/components/jewish_calendar/test_sensor.py index cb054751f67..4897ef7749b 100644 --- a/tests/components/jewish_calendar/test_sensor.py +++ b/tests/components/jewish_calendar/test_sensor.py @@ -93,7 +93,26 @@ TEST_PARAMS = [ "id": "rosh_hashana_i", "type": "YOM_TOV", "type_id": 1, - "options": [h.description.english for h in htables.HOLIDAYS], + "options": htables.get_all_holidays("english"), + }, + ), + ( + dt(2024, 12, 31), + "UTC", + 31.778, + 35.235, + "english", + "holiday", + False, + "Chanukah, Rosh Chodesh", + { + "device_class": "enum", + "friendly_name": "Jewish Calendar Holiday", + "icon": "mdi:calendar-star", + "id": "chanukah, rosh_chodesh", + "type": "MELACHA_PERMITTED_HOLIDAY, ROSH_CHODESH", + "type_id": "4, 10", + "options": htables.get_all_holidays("english"), }, ), ( @@ -180,6 +199,7 @@ TEST_IDS = [ "date_output_hebrew", "holiday", "holiday_english", + "holiday_multiple", "torah_reading", "first_stars_ny", "first_stars_jerusalem", diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index 78751c7e641..2187721a518 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -913,7 +913,7 @@ async def test_form_with_automatic_connection_handling( CONF_KNX_ROUTE_BACK: False, CONF_KNX_TUNNEL_ENDPOINT_IA: None, CONF_KNX_STATE_UPDATER: True, - CONF_KNX_TELEGRAM_LOG_SIZE: 200, + CONF_KNX_TELEGRAM_LOG_SIZE: 1000, } knx_setup.assert_called_once() @@ -1210,7 +1210,7 @@ async def test_options_flow_connection_type( CONF_KNX_SECURE_DEVICE_AUTHENTICATION: None, CONF_KNX_SECURE_USER_ID: None, CONF_KNX_SECURE_USER_PASSWORD: None, - CONF_KNX_TELEGRAM_LOG_SIZE: 200, + CONF_KNX_TELEGRAM_LOG_SIZE: 1000, } diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index 88f76a163d5..6ba6090d60d 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -41,7 +41,11 @@ async def test_light_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: } ) - knx.assert_state("light.test", STATE_OFF) + knx.assert_state( + "light.test", + STATE_OFF, + supported_color_modes=[ColorMode.ONOFF], + ) # turn on light await hass.services.async_call( "light", @@ -110,6 +114,7 @@ async 
def test_light_brightness(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=80, + supported_color_modes=[ColorMode.BRIGHTNESS], color_mode=ColorMode.BRIGHTNESS, ) # receive brightness changes from KNX @@ -165,6 +170,7 @@ async def test_light_color_temp_absolute(hass: HomeAssistant, knx: KNXTestKit) - "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.COLOR_TEMP], color_mode=ColorMode.COLOR_TEMP, color_temp=370, color_temp_kelvin=2700, @@ -227,6 +233,7 @@ async def test_light_color_temp_relative(hass: HomeAssistant, knx: KNXTestKit) - "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.COLOR_TEMP], color_mode=ColorMode.COLOR_TEMP, color_temp=250, color_temp_kelvin=4000, @@ -300,6 +307,7 @@ async def test_light_hs_color(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.HS], color_mode=ColorMode.HS, hs_color=(360, 100), ) @@ -375,6 +383,7 @@ async def test_light_xyy_color(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=204, + supported_color_modes=[ColorMode.XY], color_mode=ColorMode.XY, xy_color=(0.8, 0.8), ) @@ -457,6 +466,7 @@ async def test_light_xyy_color_with_brightness( "light.test", STATE_ON, brightness=255, # brightness form xyy_color ignored when extra brightness GA is used + supported_color_modes=[ColorMode.XY], color_mode=ColorMode.XY, xy_color=(0.8, 0.8), ) @@ -543,6 +553,7 @@ async def test_light_rgb_individual(hass: HomeAssistant, knx: KNXTestKit) -> Non "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGB], color_mode=ColorMode.RGB, rgb_color=(255, 255, 255), ) @@ -699,6 +710,7 @@ async def test_light_rgbw_individual( "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGBW], color_mode=ColorMode.RGBW, rgbw_color=(0, 0, 0, 255), ) @@ -853,6 +865,7 @@ async def test_light_rgb(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGB], color_mode=ColorMode.RGB, rgb_color=(255, 255, 255), ) @@ -961,6 +974,7 @@ async def test_light_rgbw(hass: HomeAssistant, knx: KNXTestKit) -> None: "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGBW], color_mode=ColorMode.RGBW, rgbw_color=(255, 101, 102, 103), ) @@ -1078,6 +1092,7 @@ async def test_light_rgbw_brightness(hass: HomeAssistant, knx: KNXTestKit) -> No "light.test", STATE_ON, brightness=255, + supported_color_modes=[ColorMode.RGBW], color_mode=ColorMode.RGBW, rgbw_color=(255, 101, 102, 103), ) @@ -1174,8 +1189,12 @@ async def test_light_ui_create( # created entity sends read-request to KNX bus await knx.assert_read("2/2/2") await knx.receive_response("2/2/2", True) - state = hass.states.get("light.test") - assert state.state is STATE_ON + knx.assert_state( + "light.test", + STATE_ON, + supported_color_modes=[ColorMode.ONOFF], + color_mode=ColorMode.ONOFF, + ) @pytest.mark.parametrize( @@ -1216,9 +1235,103 @@ async def test_light_ui_color_temp( blocking=True, ) await knx.assert_write("3/3/3", raw_ct) - state = hass.states.get("light.test") - assert state.state is STATE_ON - assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == pytest.approx(4200, abs=1) + knx.assert_state( + "light.test", + STATE_ON, + supported_color_modes=[ColorMode.COLOR_TEMP], + color_mode=ColorMode.COLOR_TEMP, + color_temp_kelvin=pytest.approx(4200, abs=1), + ) + + +async def test_light_ui_multi_mode( + hass: HomeAssistant, + knx: KNXTestKit, + 
create_ui_entity: KnxEntityGenerator, +) -> None: + """Test creating a light with multiple color modes.""" + await knx.setup_integration({}) + await create_ui_entity( + platform=Platform.LIGHT, + entity_data={"name": "test"}, + knx_data={ + "color_temp_min": 2700, + "color_temp_max": 6000, + "_light_color_mode_schema": "default", + "ga_switch": { + "write": "1/1/1", + "passive": [], + "state": "2/2/2", + }, + "sync_state": True, + "ga_brightness": { + "write": "0/6/0", + "state": "0/6/1", + "passive": [], + }, + "ga_color_temp": { + "write": "0/6/2", + "dpt": "7.600", + "state": "0/6/3", + "passive": [], + }, + "ga_color": { + "write": "0/6/4", + "dpt": "251.600", + "state": "0/6/5", + "passive": [], + }, + }, + ) + await knx.assert_read("2/2/2", True) + await knx.assert_read("0/6/1", (0xFF,)) + await knx.assert_read("0/6/5", (0xFF, 0x65, 0x66, 0x67, 0x00, 0x0F)) + await knx.assert_read("0/6/3", (0x12, 0x34)) + + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": "light.test", + ATTR_COLOR_NAME: "hotpink", + }, + blocking=True, + ) + await knx.assert_write("0/6/4", (255, 0, 128, 178, 0, 15)) + knx.assert_state( + "light.test", + STATE_ON, + brightness=255, + color_temp_kelvin=None, + rgbw_color=(255, 0, 128, 178), + supported_color_modes=[ + ColorMode.COLOR_TEMP, + ColorMode.RGBW, + ], + color_mode=ColorMode.RGBW, + ) + await hass.services.async_call( + "light", + "turn_on", + { + "entity_id": "light.test", + ATTR_COLOR_TEMP_KELVIN: 4200, + }, + blocking=True, + ) + await knx.assert_write("0/6/2", (0x10, 0x68)) + knx.assert_state( + "light.test", + STATE_ON, + brightness=255, + color_temp_kelvin=4200, + rgbw_color=None, + supported_color_modes=[ + ColorMode.COLOR_TEMP, + ColorMode.RGBW, + ], + color_mode=ColorMode.COLOR_TEMP, + ) async def test_light_ui_load( @@ -1234,8 +1347,12 @@ async def test_light_ui_load( # unrelated switch in config store await knx.assert_read("1/0/45", response=True, ignore_order=True) - state = hass.states.get("light.test") - assert state.state is STATE_ON + knx.assert_state( + "light.test", + STATE_ON, + supported_color_modes=[ColorMode.ONOFF], + color_mode=ColorMode.ONOFF, + ) entity = entity_registry.async_get("light.test") assert entity.entity_category is EntityCategory.CONFIG diff --git a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index b3e4b7aaa38..a34f126e4f4 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -180,6 +180,37 @@ async def test_knx_group_monitor_info_command( assert res["result"]["recent_telegrams"] == [] +async def test_knx_group_telegrams_command( + hass: HomeAssistant, knx: KNXTestKit, hass_ws_client: WebSocketGenerator +) -> None: + """Test knx/group_telegrams command.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": "knx/group_telegrams"}) + res = await client.receive_json() + assert res["success"], res + assert res["result"] == {} + + # # get some telegrams to populate the cache + await knx.receive_write("1/1/1", True) + await knx.receive_read("2/2/2") # read telegram shall be ignored + await knx.receive_write("3/3/3", 0x34) + + await client.send_json_auto_id({"type": "knx/group_telegrams"}) + res = await client.receive_json() + assert res["success"], res + assert len(res["result"]) == 2 + assert "1/1/1" in res["result"] + assert res["result"]["1/1/1"]["destination"] == "1/1/1" + assert "3/3/3" in res["result"] + assert res["result"]["3/3/3"]["payload"] == 52 + 
assert res["result"]["3/3/3"]["telegramtype"] == "GroupValueWrite" + assert res["result"]["3/3/3"]["source"] == "1.2.3" + assert res["result"]["3/3/3"]["direction"] == "Incoming" + assert res["result"]["3/3/3"]["timestamp"] is not None + + async def test_knx_subscribe_telegrams_command_recent_telegrams( hass: HomeAssistant, knx: KNXTestKit, hass_ws_client: WebSocketGenerator ) -> None: diff --git a/tests/components/lamarzocco/__init__.py b/tests/components/lamarzocco/__init__.py index 4d274d10baa..f6ca0fe40df 100644 --- a/tests/components/lamarzocco/__init__.py +++ b/tests/components/lamarzocco/__init__.py @@ -1,6 +1,6 @@ """Mock inputs for tests.""" -from lmcloud.const import MachineModel +from pylamarzocco.const import MachineModel from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -19,10 +19,10 @@ PASSWORD_SELECTION = { USER_INPUT = PASSWORD_SELECTION | {CONF_USERNAME: "username"} SERIAL_DICT = { - MachineModel.GS3_AV: "GS01234", - MachineModel.GS3_MP: "GS01234", - MachineModel.LINEA_MICRA: "MR01234", - MachineModel.LINEA_MINI: "LM01234", + MachineModel.GS3_AV: "GS012345", + MachineModel.GS3_MP: "GS012345", + MachineModel.LINEA_MICRA: "MR012345", + MachineModel.LINEA_MINI: "LM012345", } WAKE_UP_SLEEP_ENTRY_IDS = ["Os2OswX", "aXFz5bJ"] diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 2520433e86a..d6d59cf9ebc 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -2,16 +2,22 @@ from collections.abc import Generator import json -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from bleak.backends.device import BLEDevice -from lmcloud.const import FirmwareType, MachineModel, SteamLevel -from lmcloud.lm_machine import LaMarzoccoMachine -from lmcloud.models import LaMarzoccoDeviceInfo +from pylamarzocco.const import FirmwareType, MachineModel, SteamLevel +from pylamarzocco.lm_machine import LaMarzoccoMachine +from pylamarzocco.models import LaMarzoccoDeviceInfo import pytest from homeassistant.components.lamarzocco.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_NAME, CONF_TOKEN +from homeassistant.const import ( + CONF_ADDRESS, + CONF_HOST, + CONF_MODEL, + CONF_NAME, + CONF_TOKEN, +) from homeassistant.core import HomeAssistant from . import SERIAL_DICT, USER_INPUT, async_init_integration @@ -19,6 +25,15 @@ from . 
import SERIAL_DICT, USER_INPUT, async_init_integration from tests.common import MockConfigEntry, load_fixture, load_json_object_fixture +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.lamarzocco.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + @pytest.fixture def mock_config_entry( hass: HomeAssistant, mock_lamarzocco: MagicMock @@ -31,6 +46,7 @@ def mock_config_entry( data=USER_INPUT | { CONF_MODEL: mock_lamarzocco.model, + CONF_ADDRESS: "00:00:00:00:00:00", CONF_HOST: "host", CONF_TOKEN: "token", CONF_NAME: "GS3", @@ -75,11 +91,11 @@ def device_fixture() -> MachineModel: @pytest.fixture -def mock_device_info() -> LaMarzoccoDeviceInfo: +def mock_device_info(device_fixture: MachineModel) -> LaMarzoccoDeviceInfo: """Return a mocked La Marzocco device info.""" return LaMarzoccoDeviceInfo( - model=MachineModel.GS3_AV, - serial_number="GS01234", + model=device_fixture, + serial_number=SERIAL_DICT[device_fixture], name="GS3", communication_key="token", ) @@ -157,5 +173,5 @@ def mock_bluetooth(enable_bluetooth: None) -> None: def mock_ble_device() -> BLEDevice: """Return a mock BLE device.""" return BLEDevice( - "00:00:00:00:00:00", "GS_GS01234", details={"path": "path"}, rssi=50 + "00:00:00:00:00:00", "GS_GS012345", details={"path": "path"}, rssi=50 ) diff --git a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr index df47ac002e6..cda285a7106 100644 --- a/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_binary_sensor.ambr @@ -1,19 +1,19 @@ # serializer version: 1 -# name: test_binary_sensors[GS01234_backflush_active-binary_sensor] +# name: test_binary_sensors[GS012345_backflush_active-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'GS01234 Backflush active', + 'friendly_name': 'GS012345 Backflush active', }), 'context': , - 'entity_id': 'binary_sensor.gs01234_backflush_active', + 'entity_id': 'binary_sensor.gs012345_backflush_active', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_backflush_active-entry] +# name: test_binary_sensors[GS012345_backflush_active-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -25,7 +25,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_backflush_active', + 'entity_id': 'binary_sensor.gs012345_backflush_active', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -42,25 +42,25 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'backflush_enabled', - 'unique_id': 'GS01234_backflush_enabled', + 'unique_id': 'GS012345_backflush_enabled', 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[GS01234_brewing_active-binary_sensor] +# name: test_binary_sensors[GS012345_brewing_active-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'running', - 'friendly_name': 'GS01234 Brewing active', + 'friendly_name': 'GS012345 Brewing active', }), 'context': , - 'entity_id': 'binary_sensor.gs01234_brewing_active', + 'entity_id': 'binary_sensor.gs012345_brewing_active', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_brewing_active-entry] +# name: 
test_binary_sensors[GS012345_brewing_active-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -72,7 +72,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_brewing_active', + 'entity_id': 'binary_sensor.gs012345_brewing_active', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -89,25 +89,25 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'brew_active', - 'unique_id': 'GS01234_brew_active', + 'unique_id': 'GS012345_brew_active', 'unit_of_measurement': None, }) # --- -# name: test_binary_sensors[GS01234_water_tank_empty-binary_sensor] +# name: test_binary_sensors[GS012345_water_tank_empty-binary_sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'problem', - 'friendly_name': 'GS01234 Water tank empty', + 'friendly_name': 'GS012345 Water tank empty', }), 'context': , - 'entity_id': 'binary_sensor.gs01234_water_tank_empty', + 'entity_id': 'binary_sensor.gs012345_water_tank_empty', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_binary_sensors[GS01234_water_tank_empty-entry] +# name: test_binary_sensors[GS012345_water_tank_empty-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -119,7 +119,7 @@ 'disabled_by': None, 'domain': 'binary_sensor', 'entity_category': , - 'entity_id': 'binary_sensor.gs01234_water_tank_empty', + 'entity_id': 'binary_sensor.gs012345_water_tank_empty', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -136,7 +136,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'water_tank', - 'unique_id': 'GS01234_water_tank', + 'unique_id': 'GS012345_water_tank', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_button.ambr b/tests/components/lamarzocco/snapshots/test_button.ambr index 023039cc6f7..64d47a11072 100644 --- a/tests/components/lamarzocco/snapshots/test_button.ambr +++ b/tests/components/lamarzocco/snapshots/test_button.ambr @@ -2,10 +2,10 @@ # name: test_start_backflush StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Start backflush', + 'friendly_name': 'GS012345 Start backflush', }), 'context': , - 'entity_id': 'button.gs01234_start_backflush', + 'entity_id': 'button.gs012345_start_backflush', 'last_changed': , 'last_reported': , 'last_updated': , @@ -24,7 +24,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': None, - 'entity_id': 'button.gs01234_start_backflush', + 'entity_id': 'button.gs012345_start_backflush', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -41,7 +41,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'start_backflush', - 'unique_id': 'GS01234_start_backflush', + 'unique_id': 'GS012345_start_backflush', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_calendar.ambr b/tests/components/lamarzocco/snapshots/test_calendar.ambr index 2fd5dab846a..729eed5879a 100644 --- a/tests/components/lamarzocco/snapshots/test_calendar.ambr +++ b/tests/components/lamarzocco/snapshots/test_calendar.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_calendar_edge_cases[start_date0-end_date0] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -15,7 +15,7 @@ # --- # name: 
test_calendar_edge_cases[start_date1-end_date1] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -29,7 +29,7 @@ # --- # name: test_calendar_edge_cases[start_date2-end_date2] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -43,7 +43,7 @@ # --- # name: test_calendar_edge_cases[start_date3-end_date3] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -57,7 +57,7 @@ # --- # name: test_calendar_edge_cases[start_date4-end_date4] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ ]), }), @@ -65,7 +65,7 @@ # --- # name: test_calendar_edge_cases[start_date5-end_date5] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -83,7 +83,7 @@ }), }) # --- -# name: test_calendar_events[entry.GS01234_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[entry.GS012345_auto_on_off_schedule_axfz5bj] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -95,7 +95,7 @@ 'disabled_by': None, 'domain': 'calendar', 'entity_category': None, - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_axfz5bj', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_axfz5bj', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -112,11 +112,11 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off_schedule', - 'unique_id': 'GS01234_auto_on_off_schedule_aXFz5bJ', + 'unique_id': 'GS012345_auto_on_off_schedule_aXFz5bJ', 'unit_of_measurement': None, }) # --- -# name: test_calendar_events[entry.GS01234_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[entry.GS012345_auto_on_off_schedule_os2oswx] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -128,7 +128,7 @@ 'disabled_by': None, 'domain': 'calendar', 'entity_category': None, - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_os2oswx', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_os2oswx', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -145,13 +145,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off_schedule', - 'unique_id': 'GS01234_auto_on_off_schedule_Os2OswX', + 'unique_id': 'GS012345_auto_on_off_schedule_Os2OswX', 'unit_of_measurement': None, }) # --- -# name: test_calendar_events[events.GS01234_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[events.GS012345_auto_on_off_schedule_axfz5bj] dict({ - 'calendar.gs01234_auto_on_off_schedule_axfz5bj': dict({ + 'calendar.gs012345_auto_on_off_schedule_axfz5bj': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -181,9 +181,9 @@ }), }) # --- -# name: test_calendar_events[events.GS01234_auto_on_off_schedule_os2oswx] +# name: 
test_calendar_events[events.GS012345_auto_on_off_schedule_os2oswx] dict({ - 'calendar.gs01234_auto_on_off_schedule_os2oswx': dict({ + 'calendar.gs012345_auto_on_off_schedule_os2oswx': dict({ 'events': list([ dict({ 'description': 'Machine is scheduled to turn on at the start time and off at the end time', @@ -327,38 +327,38 @@ }), }) # --- -# name: test_calendar_events[state.GS01234_auto_on_off_schedule_axfz5bj] +# name: test_calendar_events[state.GS012345_auto_on_off_schedule_axfz5bj] StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': False, 'description': 'Machine is scheduled to turn on at the start time and off at the end time', 'end_time': '2024-01-14 07:30:00', - 'friendly_name': 'GS01234 Auto on/off schedule (aXFz5bJ)', + 'friendly_name': 'GS012345 Auto on/off schedule (aXFz5bJ)', 'location': '', 'message': 'Machine My LaMarzocco on', 'start_time': '2024-01-14 07:00:00', }), 'context': , - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_axfz5bj', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_axfz5bj', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'off', }) # --- -# name: test_calendar_events[state.GS01234_auto_on_off_schedule_os2oswx] +# name: test_calendar_events[state.GS012345_auto_on_off_schedule_os2oswx] StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': False, 'description': 'Machine is scheduled to turn on at the start time and off at the end time', 'end_time': '2024-01-13 00:00:00', - 'friendly_name': 'GS01234 Auto on/off schedule (Os2OswX)', + 'friendly_name': 'GS012345 Auto on/off schedule (Os2OswX)', 'location': '', 'message': 'Machine My LaMarzocco on', 'start_time': '2024-01-12 22:00:00', }), 'context': , - 'entity_id': 'calendar.gs01234_auto_on_off_schedule_os2oswx', + 'entity_id': 'calendar.gs012345_auto_on_off_schedule_os2oswx', 'last_changed': , 'last_reported': , 'last_updated': , @@ -367,7 +367,7 @@ # --- # name: test_no_calendar_events_global_disable dict({ - 'calendar.gs01234_auto_on_off_schedule_os2oswx': dict({ + 'calendar.gs012345_auto_on_off_schedule_os2oswx': dict({ 'events': list([ ]), }), diff --git a/tests/components/lamarzocco/snapshots/test_number.ambr b/tests/components/lamarzocco/snapshots/test_number.ambr index bd54ce2c0b4..b7e42bb425f 100644 --- a/tests/components/lamarzocco/snapshots/test_number.ambr +++ b/tests/components/lamarzocco/snapshots/test_number.ambr @@ -3,7 +3,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Coffee target temperature', + 'friendly_name': 'GS012345 Coffee target temperature', 'max': 104, 'min': 85, 'mode': , @@ -11,7 +11,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_coffee_target_temperature', + 'entity_id': 'number.gs012345_coffee_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -35,7 +35,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_coffee_target_temperature', + 'entity_id': 'number.gs012345_coffee_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,7 +52,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'coffee_temp', - 'unique_id': 'GS01234_coffee_temp', + 'unique_id': 'GS012345_coffee_temp', 'unit_of_measurement': , }) # --- @@ -60,7 +60,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Smart standby time', + 'friendly_name': 'GS012345 Smart standby time', 'max': 240, 'min': 10, 'mode': 
, @@ -68,7 +68,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_smart_standby_time', + 'entity_id': 'number.gs012345_smart_standby_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +92,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.gs01234_smart_standby_time', + 'entity_id': 'number.gs012345_smart_standby_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,7 +109,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'smart_standby_time', - 'unique_id': 'GS01234_smart_standby_time', + 'unique_id': 'GS012345_smart_standby_time', 'unit_of_measurement': , }) # --- @@ -117,7 +117,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Steam target temperature', + 'friendly_name': 'GS012345 Steam target temperature', 'max': 131, 'min': 126, 'mode': , @@ -125,7 +125,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -149,7 +149,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -166,7 +166,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp', - 'unique_id': 'GS01234_steam_temp', + 'unique_id': 'GS012345_steam_temp', 'unit_of_measurement': , }) # --- @@ -174,7 +174,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Steam target temperature', + 'friendly_name': 'GS012345 Steam target temperature', 'max': 131, 'min': 126, 'mode': , @@ -182,7 +182,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'last_changed': , 'last_reported': , 'last_updated': , @@ -206,7 +206,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_steam_target_temperature', + 'entity_id': 'number.gs012345_steam_target_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -223,7 +223,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp', - 'unique_id': 'GS01234_steam_temp', + 'unique_id': 'GS012345_steam_temp', 'unit_of_measurement': , }) # --- @@ -231,7 +231,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Tea water duration', + 'friendly_name': 'GS012345 Tea water duration', 'max': 30, 'min': 0, 'mode': , @@ -239,7 +239,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'last_changed': , 'last_reported': , 'last_updated': , @@ -263,7 +263,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -280,7 +280,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tea_water_duration', - 'unique_id': 'GS01234_tea_water_duration', + 'unique_id': 'GS012345_tea_water_duration', 'unit_of_measurement': , }) # --- @@ -288,7 
+288,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Tea water duration', + 'friendly_name': 'GS012345 Tea water duration', 'max': 30, 'min': 0, 'mode': , @@ -296,7 +296,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'last_changed': , 'last_reported': , 'last_updated': , @@ -320,7 +320,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': None, - 'entity_id': 'number.gs01234_tea_water_duration', + 'entity_id': 'number.gs012345_tea_water_duration', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -337,14 +337,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'tea_water_duration', - 'unique_id': 'GS01234_tea_water_duration', + 'unique_id': 'GS012345_tea_water_duration', 'unit_of_measurement': , }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_1-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 1', + 'friendly_name': 'GS012345 Dose Key 1', 'max': 999, 'min': 0, 'mode': , @@ -352,17 +352,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_1', + 'entity_id': 'number.gs012345_dose_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '135', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_2-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 2', + 'friendly_name': 'GS012345 Dose Key 2', 'max': 999, 'min': 0, 'mode': , @@ -370,17 +370,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_2', + 'entity_id': 'number.gs012345_dose_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '97', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_3-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 3', + 'friendly_name': 'GS012345 Dose Key 3', 'max': 999, 'min': 0, 'mode': , @@ -388,17 +388,17 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_3', + 'entity_id': 'number.gs012345_dose_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '108', }) # --- -# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS01234_dose_key_4-state] +# name: test_pre_brew_infusion_key_numbers[dose-6-Disabled-set_dose-kwargs3-GS3 AV][GS012345_dose_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Dose Key 4', + 'friendly_name': 'GS012345 Dose Key 4', 'max': 999, 'min': 0, 'mode': , @@ -406,18 +406,18 @@ 'unit_of_measurement': 'ticks', }), 'context': , - 'entity_id': 'number.gs01234_dose_key_4', + 'entity_id': 'number.gs012345_dose_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '121', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 
AV][GS01234_prebrew_off_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 1', + 'friendly_name': 'GS012345 Prebrew off time Key 1', 'max': 10, 'min': 1, 'mode': , @@ -425,18 +425,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_1', + 'entity_id': 'number.gs012345_prebrew_off_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 2', + 'friendly_name': 'GS012345 Prebrew off time Key 2', 'max': 10, 'min': 1, 'mode': , @@ -444,18 +444,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_2', + 'entity_id': 'number.gs012345_prebrew_off_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 3', + 'friendly_name': 'GS012345 Prebrew off time Key 3', 'max': 10, 'min': 1, 'mode': , @@ -463,18 +463,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_3', + 'entity_id': 'number.gs012345_prebrew_off_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS01234_prebrew_off_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_off_time-6-Enabled-set_prebrew_time-kwargs0-GS3 AV][GS012345_prebrew_off_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew off time Key 4', + 'friendly_name': 'GS012345 Prebrew off time Key 4', 'max': 10, 'min': 1, 'mode': , @@ -482,18 +482,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_off_time_key_4', + 'entity_id': 'number.gs012345_prebrew_off_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 1', + 'friendly_name': 'GS012345 Prebrew on time Key 1', 'max': 10, 'min': 2, 'mode': , @@ -501,18 +501,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_1', + 'entity_id': 
'number.gs012345_prebrew_on_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 2', + 'friendly_name': 'GS012345 Prebrew on time Key 2', 'max': 10, 'min': 2, 'mode': , @@ -520,18 +520,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_2', + 'entity_id': 'number.gs012345_prebrew_on_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 3', + 'friendly_name': 'GS012345 Prebrew on time Key 3', 'max': 10, 'min': 2, 'mode': , @@ -539,18 +539,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_3', + 'entity_id': 'number.gs012345_prebrew_on_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS01234_prebrew_on_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[prebrew_on_time-6-Enabled-set_prebrew_time-kwargs1-GS3 AV][GS012345_prebrew_on_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Prebrew on time Key 4', + 'friendly_name': 'GS012345 Prebrew on time Key 4', 'max': 10, 'min': 2, 'mode': , @@ -558,18 +558,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_prebrew_on_time_key_4', + 'entity_id': 'number.gs012345_prebrew_on_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '2', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_1-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 1', + 'friendly_name': 'GS012345 Preinfusion time Key 1', 'max': 29, 'min': 2, 'mode': , @@ -577,18 +577,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_1', + 'entity_id': 'number.gs012345_preinfusion_time_key_1', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_2-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_2-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 2', + 'friendly_name': 
'GS012345 Preinfusion time Key 2', 'max': 29, 'min': 2, 'mode': , @@ -596,18 +596,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_2', + 'entity_id': 'number.gs012345_preinfusion_time_key_2', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_3-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_3-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 3', + 'friendly_name': 'GS012345 Preinfusion time Key 3', 'max': 29, 'min': 2, 'mode': , @@ -615,18 +615,18 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_3', + 'entity_id': 'number.gs012345_preinfusion_time_key_3', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '3.29999995231628', }) # --- -# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS01234_preinfusion_time_key_4-state] +# name: test_pre_brew_infusion_key_numbers[preinfusion_time-7-TypeB-set_preinfusion_time-kwargs2-GS3 AV][GS012345_preinfusion_time_key_4-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Preinfusion time Key 4', + 'friendly_name': 'GS012345 Preinfusion time Key 4', 'max': 29, 'min': 2, 'mode': , @@ -634,7 +634,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.gs01234_preinfusion_time_key_4', + 'entity_id': 'number.gs012345_preinfusion_time_key_4', 'last_changed': , 'last_reported': , 'last_updated': , @@ -645,7 +645,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Prebrew off time', + 'friendly_name': 'LM012345 Prebrew off time', 'max': 10, 'min': 1, 'mode': , @@ -653,7 +653,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_prebrew_off_time', + 'entity_id': 'number.lm012345_prebrew_off_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -677,7 +677,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm01234_prebrew_off_time', + 'entity_id': 'number.lm012345_prebrew_off_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -694,7 +694,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_off', - 'unique_id': 'LM01234_prebrew_off', + 'unique_id': 'LM012345_prebrew_off', 'unit_of_measurement': , }) # --- @@ -702,7 +702,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Prebrew off time', + 'friendly_name': 'MR012345 Prebrew off time', 'max': 10, 'min': 1, 'mode': , @@ -710,7 +710,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_prebrew_off_time', + 'entity_id': 'number.mr012345_prebrew_off_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -734,7 +734,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr01234_prebrew_off_time', + 'entity_id': 'number.mr012345_prebrew_off_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -751,7 +751,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_off', - 'unique_id': 'MR01234_prebrew_off', + 'unique_id': 
'MR012345_prebrew_off', 'unit_of_measurement': , }) # --- @@ -759,7 +759,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Prebrew on time', + 'friendly_name': 'LM012345 Prebrew on time', 'max': 10, 'min': 2, 'mode': , @@ -767,7 +767,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_prebrew_on_time', + 'entity_id': 'number.lm012345_prebrew_on_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -791,7 +791,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm01234_prebrew_on_time', + 'entity_id': 'number.lm012345_prebrew_on_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -808,7 +808,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_on', - 'unique_id': 'LM01234_prebrew_on', + 'unique_id': 'LM012345_prebrew_on', 'unit_of_measurement': , }) # --- @@ -816,7 +816,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Prebrew on time', + 'friendly_name': 'MR012345 Prebrew on time', 'max': 10, 'min': 2, 'mode': , @@ -824,7 +824,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_prebrew_on_time', + 'entity_id': 'number.mr012345_prebrew_on_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -848,7 +848,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr01234_prebrew_on_time', + 'entity_id': 'number.mr012345_prebrew_on_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -865,7 +865,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_on', - 'unique_id': 'MR01234_prebrew_on', + 'unique_id': 'MR012345_prebrew_on', 'unit_of_measurement': , }) # --- @@ -873,7 +873,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'LM01234 Preinfusion time', + 'friendly_name': 'LM012345 Preinfusion time', 'max': 29, 'min': 2, 'mode': , @@ -881,7 +881,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.lm01234_preinfusion_time', + 'entity_id': 'number.lm012345_preinfusion_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -905,7 +905,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.lm01234_preinfusion_time', + 'entity_id': 'number.lm012345_preinfusion_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -922,7 +922,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preinfusion_off', - 'unique_id': 'LM01234_preinfusion_off', + 'unique_id': 'LM012345_preinfusion_off', 'unit_of_measurement': , }) # --- @@ -930,7 +930,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'MR01234 Preinfusion time', + 'friendly_name': 'MR012345 Preinfusion time', 'max': 29, 'min': 2, 'mode': , @@ -938,7 +938,7 @@ 'unit_of_measurement': , }), 'context': , - 'entity_id': 'number.mr01234_preinfusion_time', + 'entity_id': 'number.mr012345_preinfusion_time', 'last_changed': , 'last_reported': , 'last_updated': , @@ -962,7 +962,7 @@ 'disabled_by': None, 'domain': 'number', 'entity_category': , - 'entity_id': 'number.mr01234_preinfusion_time', + 'entity_id': 'number.mr012345_preinfusion_time', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -979,7 +979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preinfusion_off', - 'unique_id': 
'MR01234_preinfusion_off', + 'unique_id': 'MR012345_preinfusion_off', 'unit_of_measurement': , }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_select.ambr b/tests/components/lamarzocco/snapshots/test_select.ambr index 4f08b0898b1..46fa55eff13 100644 --- a/tests/components/lamarzocco/snapshots/test_select.ambr +++ b/tests/components/lamarzocco/snapshots/test_select.ambr @@ -2,7 +2,7 @@ # name: test_pre_brew_infusion_select[GS3 AV] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Prebrew/-infusion mode', + 'friendly_name': 'GS012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -10,7 +10,7 @@ ]), }), 'context': , - 'entity_id': 'select.gs01234_prebrew_infusion_mode', + 'entity_id': 'select.gs012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -35,7 +35,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.gs01234_prebrew_infusion_mode', + 'entity_id': 'select.gs012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -52,14 +52,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'GS01234_prebrew_infusion_select', + 'unique_id': 'GS012345_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_pre_brew_infusion_select[Linea Mini] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'LM01234 Prebrew/-infusion mode', + 'friendly_name': 'LM012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -67,7 +67,7 @@ ]), }), 'context': , - 'entity_id': 'select.lm01234_prebrew_infusion_mode', + 'entity_id': 'select.lm012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -92,7 +92,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.lm01234_prebrew_infusion_mode', + 'entity_id': 'select.lm012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -109,14 +109,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'LM01234_prebrew_infusion_select', + 'unique_id': 'LM012345_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_pre_brew_infusion_select[Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MR01234 Prebrew/-infusion mode', + 'friendly_name': 'MR012345 Prebrew/-infusion mode', 'options': list([ 'disabled', 'prebrew', @@ -124,7 +124,7 @@ ]), }), 'context': , - 'entity_id': 'select.mr01234_prebrew_infusion_mode', + 'entity_id': 'select.mr012345_prebrew_infusion_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -149,7 +149,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.mr01234_prebrew_infusion_mode', + 'entity_id': 'select.mr012345_prebrew_infusion_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -166,21 +166,21 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'prebrew_infusion_select', - 'unique_id': 'MR01234_prebrew_infusion_select', + 'unique_id': 'MR012345_prebrew_infusion_select', 'unit_of_measurement': None, }) # --- # name: test_smart_standby_mode StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Smart standby mode', + 'friendly_name': 'GS012345 Smart standby mode', 'options': list([ 'power_on', 'last_brewing', ]), }), 'context': , - 'entity_id': 
'select.gs01234_smart_standby_mode', + 'entity_id': 'select.gs012345_smart_standby_mode', 'last_changed': , 'last_reported': , 'last_updated': , @@ -204,7 +204,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': , - 'entity_id': 'select.gs01234_smart_standby_mode', + 'entity_id': 'select.gs012345_smart_standby_mode', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -221,14 +221,14 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'smart_standby_mode', - 'unique_id': 'GS01234_smart_standby_mode', + 'unique_id': 'GS012345_smart_standby_mode', 'unit_of_measurement': None, }) # --- # name: test_steam_boiler_level[Micra] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'MR01234 Steam level', + 'friendly_name': 'MR012345 Steam level', 'options': list([ '1', '2', @@ -236,7 +236,7 @@ ]), }), 'context': , - 'entity_id': 'select.mr01234_steam_level', + 'entity_id': 'select.mr012345_steam_level', 'last_changed': , 'last_reported': , 'last_updated': , @@ -261,7 +261,7 @@ 'disabled_by': None, 'domain': 'select', 'entity_category': None, - 'entity_id': 'select.mr01234_steam_level', + 'entity_id': 'select.mr012345_steam_level', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -278,7 +278,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_temp_select', - 'unique_id': 'MR01234_steam_temp_select', + 'unique_id': 'MR012345_steam_temp_select', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_sensor.ambr b/tests/components/lamarzocco/snapshots/test_sensor.ambr index 2237a8416e1..da1efbf1eaa 100644 --- a/tests/components/lamarzocco/snapshots/test_sensor.ambr +++ b/tests/components/lamarzocco/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensors[GS01234_current_coffee_temperature-entry] +# name: test_sensors[GS012345_current_coffee_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -13,7 +13,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.gs01234_current_coffee_temperature', + 'entity_id': 'sensor.gs012345_current_coffee_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -33,27 +33,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_temp_coffee', - 'unique_id': 'GS01234_current_temp_coffee', + 'unique_id': 'GS012345_current_temp_coffee', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_current_coffee_temperature-sensor] +# name: test_sensors[GS012345_current_coffee_temperature-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Current coffee temperature', + 'friendly_name': 'GS012345 Current coffee temperature', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_current_coffee_temperature', + 'entity_id': 'sensor.gs012345_current_coffee_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '96.5', }) # --- -# name: test_sensors[GS01234_current_steam_temperature-entry] +# name: test_sensors[GS012345_current_steam_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -67,7 +67,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': None, - 'entity_id': 'sensor.gs01234_current_steam_temperature', + 'entity_id': 'sensor.gs012345_current_steam_temperature', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -87,27 +87,27 @@ 
'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'current_temp_steam', - 'unique_id': 'GS01234_current_temp_steam', + 'unique_id': 'GS012345_current_temp_steam', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_current_steam_temperature-sensor] +# name: test_sensors[GS012345_current_steam_temperature-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'temperature', - 'friendly_name': 'GS01234 Current steam temperature', + 'friendly_name': 'GS012345 Current steam temperature', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_current_steam_temperature', + 'entity_id': 'sensor.gs012345_current_steam_temperature', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '123.800003051758', }) # --- -# name: test_sensors[GS01234_shot_timer-entry] +# name: test_sensors[GS012345_shot_timer-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -121,7 +121,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_shot_timer', + 'entity_id': 'sensor.gs012345_shot_timer', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -138,27 +138,27 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'shot_timer', - 'unique_id': 'GS01234_shot_timer', + 'unique_id': 'GS012345_shot_timer', 'unit_of_measurement': , }) # --- -# name: test_sensors[GS01234_shot_timer-sensor] +# name: test_sensors[GS012345_shot_timer-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'duration', - 'friendly_name': 'GS01234 Shot timer', + 'friendly_name': 'GS012345 Shot timer', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.gs01234_shot_timer', + 'entity_id': 'sensor.gs012345_shot_timer', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '0', }) # --- -# name: test_sensors[GS01234_total_coffees_made-entry] +# name: test_sensors[GS012345_total_coffees_made-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -172,7 +172,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_total_coffees_made', + 'entity_id': 'sensor.gs012345_total_coffees_made', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -189,26 +189,26 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'drink_stats_coffee', - 'unique_id': 'GS01234_drink_stats_coffee', + 'unique_id': 'GS012345_drink_stats_coffee', 'unit_of_measurement': 'drinks', }) # --- -# name: test_sensors[GS01234_total_coffees_made-sensor] +# name: test_sensors[GS012345_total_coffees_made-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Total coffees made', + 'friendly_name': 'GS012345 Total coffees made', 'state_class': , 'unit_of_measurement': 'drinks', }), 'context': , - 'entity_id': 'sensor.gs01234_total_coffees_made', + 'entity_id': 'sensor.gs012345_total_coffees_made', 'last_changed': , 'last_reported': , 'last_updated': , 'state': '1047', }) # --- -# name: test_sensors[GS01234_total_flushes_made-entry] +# name: test_sensors[GS012345_total_flushes_made-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -222,7 +222,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.gs01234_total_flushes_made', + 'entity_id': 'sensor.gs012345_total_flushes_made', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -239,19 +239,19 @@ 'previous_unique_id': None, 'supported_features': 0, 
'translation_key': 'drink_stats_flushing', - 'unique_id': 'GS01234_drink_stats_flushing', + 'unique_id': 'GS012345_drink_stats_flushing', 'unit_of_measurement': 'drinks', }) # --- -# name: test_sensors[GS01234_total_flushes_made-sensor] +# name: test_sensors[GS012345_total_flushes_made-sensor] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Total flushes made', + 'friendly_name': 'GS012345 Total flushes made', 'state_class': , 'unit_of_measurement': 'drinks', }), 'context': , - 'entity_id': 'sensor.gs01234_total_flushes_made', + 'entity_id': 'sensor.gs012345_total_flushes_made', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/lamarzocco/snapshots/test_switch.ambr b/tests/components/lamarzocco/snapshots/test_switch.ambr index 2a368a56467..084b54b3f3a 100644 --- a/tests/components/lamarzocco/snapshots/test_switch.ambr +++ b/tests/components/lamarzocco/snapshots/test_switch.ambr @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', + 'entity_id': 'switch.gs012345_auto_on_off_os2oswx', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off', - 'unique_id': 'GS01234_auto_on_off_Os2OswX', + 'unique_id': 'GS012345_auto_on_off_Os2OswX', 'unit_of_measurement': None, }) # --- @@ -44,7 +44,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', + 'entity_id': 'switch.gs012345_auto_on_off_axfz5bj', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -61,17 +61,17 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'auto_on_off', - 'unique_id': 'GS01234_auto_on_off_aXFz5bJ', + 'unique_id': 'GS012345_auto_on_off_aXFz5bJ', 'unit_of_measurement': None, }) # --- # name: test_auto_on_off_switches[state.auto_on_off_Os2OswX] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Auto on/off (Os2OswX)', + 'friendly_name': 'GS012345 Auto on/off (Os2OswX)', }), 'context': , - 'entity_id': 'switch.gs01234_auto_on_off_os2oswx', + 'entity_id': 'switch.gs012345_auto_on_off_os2oswx', 'last_changed': , 'last_reported': , 'last_updated': , @@ -81,10 +81,10 @@ # name: test_auto_on_off_switches[state.auto_on_off_aXFz5bJ] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Auto on/off (aXFz5bJ)', + 'friendly_name': 'GS012345 Auto on/off (aXFz5bJ)', }), 'context': , - 'entity_id': 'switch.gs01234_auto_on_off_axfz5bj', + 'entity_id': 'switch.gs012345_auto_on_off_axfz5bj', 'last_changed': , 'last_reported': , 'last_updated': , @@ -97,6 +97,10 @@ 'config_entries': , 'configuration_url': None, 'connections': set({ + tuple( + 'mac', + '00:00:00:00:00:00', + ), }), 'disabled_by': None, 'entry_type': None, @@ -105,7 +109,7 @@ 'identifiers': set({ tuple( 'lamarzocco', - 'GS01234', + 'GS012345', ), }), 'is_new': False, @@ -114,10 +118,10 @@ 'manufacturer': 'La Marzocco', 'model': , 'model_id': , - 'name': 'GS01234', + 'name': 'GS012345', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': 'GS01234', + 'serial_number': 'GS012345', 'suggested_area': None, 'sw_version': '1.40', 'via_device_id': None, @@ -126,10 +130,10 @@ # name: test_switches[-set_power-kwargs0] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234', + 'friendly_name': 'GS012345', }), 'context': , - 'entity_id': 'switch.gs01234', + 'entity_id': 
'switch.gs012345', 'last_changed': , 'last_reported': , 'last_updated': , @@ -148,7 +152,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.gs01234', + 'entity_id': 'switch.gs012345', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -165,17 +169,17 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'main', - 'unique_id': 'GS01234_main', + 'unique_id': 'GS012345_main', 'unit_of_measurement': None, }) # --- # name: test_switches[_smart_standby_enabled-set_smart_standby-kwargs2] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Smart standby enabled', + 'friendly_name': 'GS012345 Smart standby enabled', }), 'context': , - 'entity_id': 'switch.gs01234_smart_standby_enabled', + 'entity_id': 'switch.gs012345_smart_standby_enabled', 'last_changed': , 'last_reported': , 'last_updated': , @@ -194,7 +198,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': , - 'entity_id': 'switch.gs01234_smart_standby_enabled', + 'entity_id': 'switch.gs012345_smart_standby_enabled', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -211,17 +215,17 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'smart_standby_enabled', - 'unique_id': 'GS01234_smart_standby_enabled', + 'unique_id': 'GS012345_smart_standby_enabled', 'unit_of_measurement': None, }) # --- # name: test_switches[_steam_boiler-set_steam-kwargs1] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'GS01234 Steam boiler', + 'friendly_name': 'GS012345 Steam boiler', }), 'context': , - 'entity_id': 'switch.gs01234_steam_boiler', + 'entity_id': 'switch.gs012345_steam_boiler', 'last_changed': , 'last_reported': , 'last_updated': , @@ -240,7 +244,7 @@ 'disabled_by': None, 'domain': 'switch', 'entity_category': None, - 'entity_id': 'switch.gs01234_steam_boiler', + 'entity_id': 'switch.gs012345_steam_boiler', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -257,7 +261,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'steam_boiler', - 'unique_id': 'GS01234_steam_boiler_enable', + 'unique_id': 'GS012345_steam_boiler_enable', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/snapshots/test_update.ambr b/tests/components/lamarzocco/snapshots/test_update.ambr index 6e6b7285797..46fa4cff815 100644 --- a/tests/components/lamarzocco/snapshots/test_update.ambr +++ b/tests/components/lamarzocco/snapshots/test_update.ambr @@ -6,7 +6,7 @@ 'device_class': 'firmware', 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/lamarzocco/icon.png', - 'friendly_name': 'GS01234 Gateway firmware', + 'friendly_name': 'GS012345 Gateway firmware', 'in_progress': False, 'installed_version': 'v3.1-rc4', 'latest_version': 'v3.5-rc3', @@ -18,7 +18,7 @@ 'update_percentage': None, }), 'context': , - 'entity_id': 'update.gs01234_gateway_firmware', + 'entity_id': 'update.gs012345_gateway_firmware', 'last_changed': , 'last_reported': , 'last_updated': , @@ -37,7 +37,7 @@ 'disabled_by': None, 'domain': 'update', 'entity_category': , - 'entity_id': 'update.gs01234_gateway_firmware', + 'entity_id': 'update.gs012345_gateway_firmware', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -54,7 +54,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'gateway_firmware', - 'unique_id': 'GS01234_gateway_firmware', + 'unique_id': 'GS012345_gateway_firmware', 'unit_of_measurement': None, }) # --- @@ -65,7 +65,7 @@ 
'device_class': 'firmware', 'display_precision': 0, 'entity_picture': 'https://brands.home-assistant.io/_/lamarzocco/icon.png', - 'friendly_name': 'GS01234 Machine firmware', + 'friendly_name': 'GS012345 Machine firmware', 'in_progress': False, 'installed_version': '1.40', 'latest_version': '1.55', @@ -77,7 +77,7 @@ 'update_percentage': None, }), 'context': , - 'entity_id': 'update.gs01234_machine_firmware', + 'entity_id': 'update.gs012345_machine_firmware', 'last_changed': , 'last_reported': , 'last_updated': , @@ -96,7 +96,7 @@ 'disabled_by': None, 'domain': 'update', 'entity_category': , - 'entity_id': 'update.gs01234_machine_firmware', + 'entity_id': 'update.gs012345_machine_firmware', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -113,7 +113,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'machine_firmware', - 'unique_id': 'GS01234_machine_firmware', + 'unique_id': 'GS012345_machine_firmware', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/lamarzocco/test_binary_sensor.py b/tests/components/lamarzocco/test_binary_sensor.py index 120d825c804..956bfe90dd4 100644 --- a/tests/components/lamarzocco/test_binary_sensor.py +++ b/tests/components/lamarzocco/test_binary_sensor.py @@ -4,7 +4,7 @@ from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.exceptions import RequestNotSuccessful from syrupy import SnapshotAssertion from homeassistant.const import STATE_UNAVAILABLE diff --git a/tests/components/lamarzocco/test_button.py b/tests/components/lamarzocco/test_button.py index b754688f369..61b7ba77c22 100644 --- a/tests/components/lamarzocco/test_button.py +++ b/tests/components/lamarzocco/test_button.py @@ -1,8 +1,8 @@ """Tests for the La Marzocco Buttons.""" -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock, patch -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion @@ -33,14 +33,18 @@ async def test_start_backflush( assert entry assert entry == snapshot - await hass.services.async_call( - BUTTON_DOMAIN, - SERVICE_PRESS, - { - ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", - }, - blocking=True, - ) + with patch( + "homeassistant.components.lamarzocco.button.asyncio.sleep", + new_callable=AsyncMock, + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.{serial_number}_start_backflush", + }, + blocking=True, + ) assert len(mock_lamarzocco.start_backflush.mock_calls) == 1 mock_lamarzocco.start_backflush.assert_called_once() diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index e4e8d6ebafd..b206b7b68a3 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -1,14 +1,24 @@ """Test the La Marzocco config flow.""" -from unittest.mock import MagicMock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch -from lmcloud.exceptions import AuthFail, RequestNotSuccessful -from lmcloud.models import LaMarzoccoDeviceInfo +from pylamarzocco.const import MachineModel +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.models import LaMarzoccoDeviceInfo +import pytest +from homeassistant.components.dhcp import 
DhcpServiceInfo from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import CONF_USE_BLUETOOTH, DOMAIN -from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER, ConfigEntryState +from homeassistant.config_entries import ( + SOURCE_BLUETOOTH, + SOURCE_DHCP, + SOURCE_USER, + ConfigEntryState, +) from homeassistant.const import ( + CONF_ADDRESS, CONF_HOST, CONF_MAC, CONF_MODEL, @@ -73,6 +83,7 @@ async def test_form( hass: HomeAssistant, mock_cloud_client: MagicMock, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -127,6 +138,7 @@ async def test_form_invalid_auth( hass: HomeAssistant, mock_device_info: LaMarzoccoDeviceInfo, mock_cloud_client: MagicMock, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test invalid auth error.""" @@ -154,6 +166,7 @@ async def test_form_invalid_host( hass: HomeAssistant, mock_cloud_client: MagicMock, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test invalid auth error.""" result = await hass.config_entries.flow.async_init( @@ -196,6 +209,7 @@ async def test_form_cannot_connect( hass: HomeAssistant, mock_cloud_client: MagicMock, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test cannot connect error.""" @@ -264,6 +278,7 @@ async def test_reconfigure_flow( mock_cloud_client: MagicMock, mock_config_entry: MockConfigEntry, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Testing reconfgure flow.""" mock_config_entry.add_to_hass(hass) @@ -319,6 +334,7 @@ async def test_bluetooth_discovery( hass: HomeAssistant, mock_lamarzocco: MagicMock, mock_cloud_client: MagicMock, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test bluetooth discovery.""" service_info = get_bluetooth_service_info( @@ -370,6 +386,7 @@ async def test_bluetooth_discovery_errors( mock_lamarzocco: MagicMock, mock_cloud_client: MagicMock, mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test bluetooth discovery errors.""" service_info = get_bluetooth_service_info( @@ -430,10 +447,108 @@ async def test_bluetooth_discovery_errors( } +@pytest.mark.parametrize( + "device_fixture", + [MachineModel.LINEA_MICRA, MachineModel.LINEA_MINI, MachineModel.GS3_AV], +) +async def test_dhcp_discovery( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_device_info: LaMarzoccoDeviceInfo, + mock_setup_entry: Generator[AsyncMock], +) -> None: + """Test dhcp discovery.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.42", + hostname=mock_lamarzocco.serial_number, + macaddress="aa:bb:cc:dd:ee:ff", + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + with patch( + "homeassistant.components.lamarzocco.config_flow.LaMarzoccoLocalClient.validate_connection", + return_value=True, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + **USER_INPUT, + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_HOST: "192.168.1.42", + CONF_MACHINE: mock_lamarzocco.serial_number, + CONF_MODEL: mock_device_info.model, + 
CONF_NAME: mock_device_info.name, + CONF_TOKEN: mock_device_info.communication_key, + } + + +async def test_dhcp_discovery_abort_on_hostname_changed( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test dhcp discovery aborts when hostname was changed manually.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.42", + hostname="custom_name", + macaddress="00:00:00:00:00:00", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_dhcp_already_configured_and_update( + hass: HomeAssistant, + mock_lamarzocco: MagicMock, + mock_cloud_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test discovered IP address change.""" + old_ip = mock_config_entry.data[CONF_HOST] + old_address = mock_config_entry.data[CONF_ADDRESS] + + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_DHCP}, + data=DhcpServiceInfo( + ip="192.168.1.42", + hostname=mock_lamarzocco.serial_number, + macaddress="aa:bb:cc:dd:ee:ff", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_HOST] != old_ip + assert mock_config_entry.data[CONF_HOST] == "192.168.1.42" + + assert mock_config_entry.data[CONF_ADDRESS] != old_address + assert mock_config_entry.data[CONF_ADDRESS] == "aa:bb:cc:dd:ee:ff" + + async def test_options_flow( hass: HomeAssistant, mock_lamarzocco: MagicMock, mock_config_entry: MockConfigEntry, + mock_setup_entry: Generator[AsyncMock], ) -> None: """Test options flow.""" await async_init_integration(hass, mock_config_entry) diff --git a/tests/components/lamarzocco/test_init.py b/tests/components/lamarzocco/test_init.py index 2c812f79438..75c3019afb4 100644 --- a/tests/components/lamarzocco/test_init.py +++ b/tests/components/lamarzocco/test_init.py @@ -2,14 +2,22 @@ from unittest.mock import AsyncMock, MagicMock, patch -from lmcloud.const import FirmwareType -from lmcloud.exceptions import AuthFail, RequestNotSuccessful +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import AuthFail, RequestNotSuccessful import pytest +from websockets.protocol import State from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME, EVENT_HOMEASSISTANT_STOP +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_MODEL, + CONF_NAME, + CONF_TOKEN, + EVENT_HOMEASSISTANT_STOP, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -80,20 +88,22 @@ async def test_invalid_auth( async def test_v1_migration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, mock_cloud_client: MagicMock, mock_lamarzocco: MagicMock, ) -> None: """Test v1 -> v2 Migration.""" + common_data = { + **USER_INPUT, + CONF_HOST: "host", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + } entry_v1 = MockConfigEntry( domain=DOMAIN, version=1, unique_id=mock_lamarzocco.serial_number, data={ - **USER_INPUT, - CONF_HOST: "host", + **common_data, CONF_MACHINE: mock_lamarzocco.serial_number, - CONF_MAC: 
"aa:bb:cc:dd:ee:ff", }, ) @@ -102,8 +112,11 @@ async def test_v1_migration( await hass.async_block_till_done() assert entry_v1.version == 2 - assert dict(entry_v1.data) == dict(mock_config_entry.data) | { - CONF_MAC: "aa:bb:cc:dd:ee:ff" + assert dict(entry_v1.data) == { + **common_data, + CONF_NAME: "GS3", + CONF_MODEL: mock_lamarzocco.model, + CONF_TOKEN: "token", } @@ -182,7 +195,7 @@ async def test_websocket_closed_on_unload( ) as local_client: client = local_client.return_value client.websocket = AsyncMock() - client.websocket.connected = True + client.websocket.state = State.OPEN await async_init_integration(hass, mock_config_entry) hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() diff --git a/tests/components/lamarzocco/test_number.py b/tests/components/lamarzocco/test_number.py index 352271f26cf..710a0220e06 100644 --- a/tests/components/lamarzocco/test_number.py +++ b/tests/components/lamarzocco/test_number.py @@ -3,14 +3,14 @@ from typing import Any from unittest.mock import MagicMock -from lmcloud.const import ( +from pylamarzocco.const import ( KEYS_PER_MODEL, BoilerType, MachineModel, PhysicalKey, PrebrewMode, ) -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_select.py b/tests/components/lamarzocco/test_select.py index 415954d30be..24b96f84f37 100644 --- a/tests/components/lamarzocco/test_select.py +++ b/tests/components/lamarzocco/test_select.py @@ -2,8 +2,8 @@ from unittest.mock import MagicMock -from lmcloud.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.const import MachineModel, PrebrewMode, SmartStandbyMode, SteamLevel +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_sensor.py b/tests/components/lamarzocco/test_sensor.py index 760dcffd28f..6f14d52d1fc 100644 --- a/tests/components/lamarzocco/test_sensor.py +++ b/tests/components/lamarzocco/test_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from lmcloud.const import MachineModel +from pylamarzocco.const import MachineModel import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_switch.py b/tests/components/lamarzocco/test_switch.py index 802ab59148e..5c6d1cb1e42 100644 --- a/tests/components/lamarzocco/test_switch.py +++ b/tests/components/lamarzocco/test_switch.py @@ -3,7 +3,7 @@ from typing import Any from unittest.mock import MagicMock -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lamarzocco/test_update.py b/tests/components/lamarzocco/test_update.py index 3dc2a86b574..aef37d7c921 100644 --- a/tests/components/lamarzocco/test_update.py +++ b/tests/components/lamarzocco/test_update.py @@ -2,8 +2,8 @@ from unittest.mock import MagicMock -from lmcloud.const import FirmwareType -from lmcloud.exceptions import RequestNotSuccessful +from pylamarzocco.const import FirmwareType +from pylamarzocco.exceptions import RequestNotSuccessful import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index 4ef83aeaf8a..b7967c247ec 100644 --- 
a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -23,9 +23,7 @@ from homeassistant.const import ( CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -48,83 +46,6 @@ IMPORT_DATA = { } -async def test_step_import( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test for import step.""" - - with ( - patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), - patch("homeassistant.components.lcn.async_setup_entry", return_value=True), - ): - data = IMPORT_DATA.copy() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "pchk" - assert result["data"] == IMPORT_DATA - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - - -async def test_step_import_existing_host( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test for update of config_entry if imported host already exists.""" - - # Create config entry and add it to hass - mock_data = IMPORT_DATA.copy() - mock_data.update({CONF_SK_NUM_TRIES: 3, CONF_DIM_MODE: 50}) - mock_entry = MockConfigEntry(domain=DOMAIN, data=mock_data) - mock_entry.add_to_hass(hass) - # Initialize a config flow with different data but same host address - with patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"): - imported_data = IMPORT_DATA.copy() - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_data - ) - - # Check if config entry was updated - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "existing_configuration_updated" - assert mock_entry.source == config_entries.SOURCE_IMPORT - assert mock_entry.data == IMPORT_DATA - assert issue_registry.async_get_issue( - HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" - ) - - -@pytest.mark.parametrize( - ("error", "reason"), - [ - (PchkAuthenticationError, "authentication_error"), - (PchkLicenseError, "license_error"), - (TimeoutError, "connection_refused"), - ], -) -async def test_step_import_error( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, error, reason -) -> None: - """Test for error in import is handled correctly.""" - with patch( - "homeassistant.components.lcn.PchkConnectionManager.async_connect", - side_effect=error, - ): - data = IMPORT_DATA.copy() - data.update({CONF_HOST: "pchk"}) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - assert issue_registry.async_get_issue(DOMAIN, reason) - - async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" flow = LcnFlowHandler() @@ -140,7 +61,6 @@ async def test_step_user(hass: HomeAssistant) -> None: """Test for user step.""" with ( patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), 
patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): data = CONNECTION_DATA.copy() @@ -210,7 +130,6 @@ async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> with ( patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), - patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 1bd225c5d47..2327635e356 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -16,7 +16,6 @@ from .conftest import ( MockPchkConnectionManager, create_config_entry, init_integration, - setup_component, ) @@ -83,18 +82,6 @@ async def test_async_setup_entry_update( assert dummy_entity in entity_registry.entities.values() assert dummy_device in device_registry.devices.values() - # setup new entry with same data via import step (should cleanup dummy device) - with patch( - "homeassistant.components.lcn.config_flow.validate_connection", - return_value=None, - ): - await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry.data - ) - - assert dummy_device not in device_registry.devices.values() - assert dummy_entity not in entity_registry.entities.values() - @pytest.mark.parametrize( "exception", [PchkAuthenticationError, PchkLicenseError, TimeoutError] @@ -114,20 +101,6 @@ async def test_async_setup_entry_raises_authentication_error( assert entry.state is ConfigEntryState.SETUP_ERROR -async def test_async_setup_from_configuration_yaml(hass: HomeAssistant) -> None: - """Test a successful setup using data from configuration.yaml.""" - with ( - patch( - "homeassistant.components.lcn.config_flow.validate_connection", - return_value=None, - ), - patch("homeassistant.components.lcn.async_setup_entry") as async_setup_entry, - ): - await setup_component(hass) - - assert async_setup_entry.await_count == 2 - - @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_migrate_1_1(hass: HomeAssistant, entry) -> None: """Test migration config entry.""" diff --git a/tests/components/lektrico/fixtures/get_info.json b/tests/components/lektrico/fixtures/get_info.json index bcd84a9a9df..2b099a666e5 100644 --- a/tests/components/lektrico/fixtures/get_info.json +++ b/tests/components/lektrico/fixtures/get_info.json @@ -14,5 +14,15 @@ "dynamic_current": 32, "user_current": 32, "lb_mode": 0, - "require_auth": true + "require_auth": true, + "state_e_activated": false, + "undervoltage_error": true, + "rcd_error": false, + "meter_fault": false, + "overcurrent": false, + "overtemp": false, + "overvoltage_error": false, + "contactor_failure": false, + "cp_diode_failure": false, + "critical_temp": false } diff --git a/tests/components/lektrico/snapshots/test_binary_sensor.ambr b/tests/components/lektrico/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..6a28e7c60de --- /dev/null +++ b/tests/components/lektrico/snapshots/test_binary_sensor.ambr @@ -0,0 +1,471 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 
'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ev diode short', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cp_diode_failure', + 'unique_id': '500006_cp_diode_failure', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_diode_short-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Ev diode short', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_diode_short', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ev error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state_e_activated', + 'unique_id': '500006_state_e_activated', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_ev_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Ev error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_ev_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_metering_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Metering error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'meter_fault', + 'unique_id': '500006_meter_fault', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_metering_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Metering error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_metering_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 
'binary_sensor.1p7k_500006_overcurrent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overcurrent', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overcurrent', + 'unique_id': '500006_overcurrent', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overcurrent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Overcurrent', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overcurrent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overheating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_overheating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overheating', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'critical_temp', + 'unique_id': '500006_critical_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overheating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Overheating', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overheating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overvoltage', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overvoltage', + 'unique_id': '500006_overvoltage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_overvoltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Overvoltage', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_overvoltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rcd error', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rcd_error', + 'unique_id': '500006_rcd_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_rcd_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Rcd error', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_rcd_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay contacts welded', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'contactor_failure', + 'unique_id': '500006_contactor_failure', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_relay_contacts_welded-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Relay contacts welded', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_relay_contacts_welded', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Thermal throttling', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overtemp', + 'unique_id': '500006_overtemp', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_thermal_throttling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Thermal throttling', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_thermal_throttling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Undervoltage', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'undervoltage', + 'unique_id': '500006_undervoltage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.1p7k_500006_undervoltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': '1p7k_500006 Undervoltage', + }), + 'context': , + 'entity_id': 'binary_sensor.1p7k_500006_undervoltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/lektrico/test_binary_sensor.py b/tests/components/lektrico/test_binary_sensor.py new file mode 100644 index 00000000000..d49eac6cc23 --- /dev/null +++ b/tests/components/lektrico/test_binary_sensor.py @@ -0,0 +1,32 @@ +"""Tests for the Lektrico binary sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch.multiple( + "homeassistant.components.lektrico", + CHARGERS_PLATFORMS=[Platform.BINARY_SENSOR], + LB_DEVICES_PLATFORMS=[Platform.BINARY_SENSOR], + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/lg_thinq/test_config_flow.py b/tests/components/lg_thinq/test_config_flow.py index e7ee632810e..8c5afb4dac7 100644 --- a/tests/components/lg_thinq/test_config_flow.py +++ b/tests/components/lg_thinq/test_config_flow.py @@ -50,7 +50,7 @@ async def test_config_flow_invalid_pat( data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, ) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "token_unauthorized"} + assert result["errors"] mock_invalid_thinq_api.async_get_device_list.assert_called_once() diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 1ce7c69d7fa..084ea0c674b 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -1098,8 +1098,8 @@ async def test_color_light_with_temp( ColorMode.HS, ] assert attributes[ATTR_HS_COLOR] == (30.754, 7.122) - assert attributes[ATTR_RGB_COLOR] == (255, 246, 236) - assert attributes[ATTR_XY_COLOR] == (0.34, 0.339) + assert attributes[ATTR_RGB_COLOR] == (255, 246, 237) + assert attributes[ATTR_XY_COLOR] == (0.339, 0.338) bulb.color = [65535, 65535, 65535, 65535] await hass.services.async_call( diff --git a/tests/components/light/test_init.py b/tests/components/light/test_init.py index eeb32f1b17a..61e7f4e6c29 100644 --- a/tests/components/light/test_init.py +++ b/tests/components/light/test_init.py @@ -1287,9 +1287,9 @@ async def test_light_backwards_compatibility_color_mode(hass: HomeAssistant) -> state = hass.states.get(entity2.entity_id) assert 
state.attributes["supported_color_modes"] == [light.ColorMode.COLOR_TEMP] assert state.attributes["color_mode"] == light.ColorMode.COLOR_TEMP - assert state.attributes["rgb_color"] == (201, 218, 255) + assert state.attributes["rgb_color"] == (202, 218, 255) assert state.attributes["hs_color"] == (221.575, 20.9) - assert state.attributes["xy_color"] == (0.277, 0.287) + assert state.attributes["xy_color"] == (0.278, 0.287) state = hass.states.get(entity3.entity_id) assert state.attributes["supported_color_modes"] == [light.ColorMode.HS] diff --git a/tests/components/linkplay/__init__.py b/tests/components/linkplay/__init__.py index 5962f7fdaba..f825826f196 100644 --- a/tests/components/linkplay/__init__.py +++ b/tests/components/linkplay/__init__.py @@ -1 +1,16 @@ """Tests for the LinkPlay integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py index be83dd2412d..81ae993f6c3 100644 --- a/tests/components/linkplay/conftest.py +++ b/tests/components/linkplay/conftest.py @@ -1,12 +1,22 @@ """Test configuration and mocks for LinkPlay component.""" -from collections.abc import Generator +from collections.abc import Generator, Iterator +from contextlib import contextmanager +from typing import Any +from unittest import mock from unittest.mock import AsyncMock, patch from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge, LinkPlayDevice import pytest +from homeassistant.components.linkplay.const import DOMAIN +from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_fixture +from tests.conftest import AiohttpClientMocker + HOST = "10.0.0.150" HOST_REENTRY = "10.0.0.66" UUID = "FF31F09E-5001-FBDE-0546-2DBFFF31F09E" @@ -24,15 +34,15 @@ def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: ), patch( "homeassistant.components.linkplay.config_flow.linkplay_factory_httpapi_bridge", - ) as factory, + ) as conf_factory, ): bridge = AsyncMock(spec=LinkPlayBridge) bridge.endpoint = HOST bridge.device = AsyncMock(spec=LinkPlayDevice) bridge.device.uuid = UUID bridge.device.name = NAME - factory.return_value = bridge - yield factory + conf_factory.return_value = bridge + yield conf_factory @pytest.fixture @@ -43,3 +53,55 @@ def mock_setup_entry() -> Generator[AsyncMock]: return_value=True, ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=NAME, + data={CONF_HOST: HOST}, + unique_id=UUID, + ) + + +@pytest.fixture +def mock_player_ex( + mock_player_ex: AsyncMock, +) -> AsyncMock: + """Mock a update_status of the LinkPlayPlayer.""" + mock_player_ex.return_value = load_fixture("getPlayerEx.json", DOMAIN) + return mock_player_ex + + +@pytest.fixture +def mock_status_ex( + mock_status_ex: AsyncMock, +) -> AsyncMock: + """Mock a update_status of the LinkPlayDevice.""" + mock_status_ex.return_value = load_fixture("getStatusEx.json", DOMAIN) + return mock_status_ex + + +@contextmanager +def mock_lp_aiohttp_client() -> 
Iterator[AiohttpClientMocker]: + """Context manager to mock aiohttp client.""" + mocker = AiohttpClientMocker() + + def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: + session = mocker.create_session(hass.loop) + + async def close_session(event): + """Close session.""" + await session.close() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, close_session) + + return session + + with mock.patch( + "homeassistant.components.linkplay.async_get_client_session", + side_effect=create_session, + ): + yield mocker diff --git a/tests/components/linkplay/fixtures/getPlayerEx.json b/tests/components/linkplay/fixtures/getPlayerEx.json new file mode 100644 index 00000000000..79d09f942df --- /dev/null +++ b/tests/components/linkplay/fixtures/getPlayerEx.json @@ -0,0 +1,19 @@ +{ + "type": "0", + "ch": "0", + "mode": "0", + "loop": "0", + "eq": "0", + "status": "stop", + "curpos": "0", + "offset_pts": "0", + "totlen": "0", + "Title": "", + "Artist": "", + "Album": "", + "alarmflag": "0", + "plicount": "0", + "plicurr": "0", + "vol": "80", + "mute": "0" +} diff --git a/tests/components/linkplay/fixtures/getStatusEx.json b/tests/components/linkplay/fixtures/getStatusEx.json new file mode 100644 index 00000000000..17eda4aeee8 --- /dev/null +++ b/tests/components/linkplay/fixtures/getStatusEx.json @@ -0,0 +1,81 @@ +{ + "uuid": "FF31F09E5001FBDE05462DBFFF31F09E", + "DeviceName": "Smart Zone 1_54B9", + "GroupName": "Smart Zone 1_54B9", + "ssid": "Smart Zone 1_54B9", + "language": "en_us", + "firmware": "4.6.415145", + "hardware": "A31", + "build": "release", + "project": "SMART_ZONE4_AMP", + "priv_prj": "SMART_ZONE4_AMP", + "project_build_name": "a31rakoit", + "Release": "20220427", + "temp_uuid": "97296CE38DE8CC3D", + "hideSSID": "1", + "SSIDStrategy": "2", + "branch": "A31_stable_4.6", + "group": "0", + "wmrm_version": "4.2", + "internet": "1", + "MAC": "00:22:6C:21:7F:1D", + "STA_MAC": "00:00:00:00:00:00", + "CountryCode": "CN", + "CountryRegion": "1", + "netstat": "0", + "essid": "", + "apcli0": "", + "eth2": "192.168.168.197", + "ra0": "10.10.10.254", + "eth_dhcp": "1", + "VersionUpdate": "0", + "NewVer": "0", + "set_dns_enable": "1", + "mcu_ver": "37", + "mcu_ver_new": "0", + "dsp_ver": "0", + "dsp_ver_new": "0", + "date": "2024:10:29", + "time": "17:13:22", + "tz": "1.0000", + "dst_enable": "1", + "region": "unknown", + "prompt_status": "1", + "iot_ver": "1.0.0", + "upnp_version": "1005", + "cap1": "0x305200", + "capability": "0x28e90b80", + "languages": "0x6", + "streams_all": "0x7bff7ffe", + "streams": "0x7b9831fe", + "external": "0x0", + "plm_support": "0x40152", + "preset_key": "10", + "spotify_active": "0", + "lbc_support": "0", + "privacy_mode": "0", + "WifiChannel": "11", + "RSSI": "0", + "BSSID": "", + "battery": "0", + "battery_percent": "0", + "securemode": "1", + "auth": "WPAPSKWPA2PSK", + "encry": "AES", + "upnp_uuid": "uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E", + "uart_pass_port": "8899", + "communication_port": "8819", + "web_firmware_update_hide": "0", + "ignore_talkstart": "0", + "web_login_result": "-1", + "silenceOTATime": "", + "ignore_silenceOTATime": "1", + "new_tunein_preset_and_alarm": "1", + "iheartradio_new": "1", + "new_iheart_podcast": "1", + "tidal_version": "2.0", + "service_version": "1.0", + "ETH_MAC": "00:22:6C:21:7F:20", + "security": "https/2.0", + "security_version": "2.0" +} diff --git a/tests/components/linkplay/snapshots/test_diagnostics.ambr b/tests/components/linkplay/snapshots/test_diagnostics.ambr new file mode 100644 
index 00000000000..d8c52a25649 --- /dev/null +++ b/tests/components/linkplay/snapshots/test_diagnostics.ambr @@ -0,0 +1,115 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'device_info': dict({ + 'device': dict({ + 'properties': dict({ + 'BSSID': '', + 'CountryCode': 'CN', + 'CountryRegion': '1', + 'DeviceName': 'Smart Zone 1_54B9', + 'ETH_MAC': '00:22:6C:21:7F:20', + 'GroupName': 'Smart Zone 1_54B9', + 'MAC': '00:22:6C:21:7F:1D', + 'NewVer': '0', + 'RSSI': '0', + 'Release': '20220427', + 'SSIDStrategy': '2', + 'STA_MAC': '00:00:00:00:00:00', + 'VersionUpdate': '0', + 'WifiChannel': '11', + 'apcli0': '', + 'auth': 'WPAPSKWPA2PSK', + 'battery': '0', + 'battery_percent': '0', + 'branch': 'A31_stable_4.6', + 'build': 'release', + 'cap1': '0x305200', + 'capability': '0x28e90b80', + 'communication_port': '8819', + 'date': '2024:10:29', + 'dsp_ver': '0', + 'dsp_ver_new': '0', + 'dst_enable': '1', + 'encry': 'AES', + 'essid': '', + 'eth2': '192.168.168.197', + 'eth_dhcp': '1', + 'external': '0x0', + 'firmware': '4.6.415145', + 'group': '0', + 'hardware': 'A31', + 'hideSSID': '1', + 'ignore_silenceOTATime': '1', + 'ignore_talkstart': '0', + 'iheartradio_new': '1', + 'internet': '1', + 'iot_ver': '1.0.0', + 'language': 'en_us', + 'languages': '0x6', + 'lbc_support': '0', + 'mcu_ver': '37', + 'mcu_ver_new': '0', + 'netstat': '0', + 'new_iheart_podcast': '1', + 'new_tunein_preset_and_alarm': '1', + 'plm_support': '0x40152', + 'preset_key': '10', + 'priv_prj': 'SMART_ZONE4_AMP', + 'privacy_mode': '0', + 'project': 'SMART_ZONE4_AMP', + 'project_build_name': 'a31rakoit', + 'prompt_status': '1', + 'ra0': '10.10.10.254', + 'region': 'unknown', + 'securemode': '1', + 'security': 'https/2.0', + 'security_version': '2.0', + 'service_version': '1.0', + 'set_dns_enable': '1', + 'silenceOTATime': '', + 'spotify_active': '0', + 'ssid': 'Smart Zone 1_54B9', + 'streams': '0x7b9831fe', + 'streams_all': '0x7bff7ffe', + 'temp_uuid': '97296CE38DE8CC3D', + 'tidal_version': '2.0', + 'time': '17:13:22', + 'tz': '1.0000', + 'uart_pass_port': '8899', + 'upnp_uuid': 'uuid:FF31F09E-5001-FBDE-0546-2DBFFF31F09E', + 'upnp_version': '1005', + 'uuid': 'FF31F09E5001FBDE05462DBFFF31F09E', + 'web_firmware_update_hide': '0', + 'web_login_result': '-1', + 'wmrm_version': '4.2', + }), + }), + 'endpoint': dict({ + 'endpoint': 'https://10.0.0.150', + }), + 'multiroom': None, + 'player': dict({ + 'properties': dict({ + 'Album': '', + 'Artist': '', + 'Title': '', + 'alarmflag': '0', + 'ch': '0', + 'curpos': '0', + 'eq': '0', + 'loop': '0', + 'mode': '0', + 'mute': '0', + 'offset_pts': '0', + 'plicount': '0', + 'plicurr': '0', + 'status': 'stop', + 'totlen': '0', + 'type': '0', + 'vol': '80', + }), + }), + }), + }) +# --- diff --git a/tests/components/linkplay/test_diagnostics.py b/tests/components/linkplay/test_diagnostics.py new file mode 100644 index 00000000000..369142978a3 --- /dev/null +++ b/tests/components/linkplay/test_diagnostics.py @@ -0,0 +1,53 @@ +"""Tests for the LinkPlay diagnostics.""" + +from unittest.mock import patch + +from linkplay.bridge import LinkPlayMultiroom +from linkplay.consts import API_ENDPOINT +from linkplay.endpoint import LinkPlayApiEndpoint +from syrupy import SnapshotAssertion + +from homeassistant.components.linkplay.const import DOMAIN +from homeassistant.core import HomeAssistant + +from . 
import setup_integration +from .conftest import HOST, mock_lp_aiohttp_client + +from tests.common import MockConfigEntry, load_fixture +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + + with ( + mock_lp_aiohttp_client() as mock_session, + patch.object(LinkPlayMultiroom, "update_status", return_value=None), + ): + endpoints = [ + LinkPlayApiEndpoint(protocol="https", endpoint=HOST, session=None), + LinkPlayApiEndpoint(protocol="http", endpoint=HOST, session=None), + ] + for endpoint in endpoints: + mock_session.get( + API_ENDPOINT.format(str(endpoint), "getPlayerStatusEx"), + text=load_fixture("getPlayerEx.json", DOMAIN), + ) + + mock_session.get( + API_ENDPOINT.format(str(endpoint), "getStatusEx"), + text=load_fixture("getStatusEx.json", DOMAIN), + ) + + await setup_integration(hass, mock_config_entry) + + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/matter/fixtures/nodes/door_lock.json b/tests/components/matter/fixtures/nodes/door_lock.json index b6231e04af4..acd327ac56c 100644 --- a/tests/components/matter/fixtures/nodes/door_lock.json +++ b/tests/components/matter/fixtures/nodes/door_lock.json @@ -495,7 +495,7 @@ "1/257/48": 3, "1/257/49": 10, "1/257/51": false, - "1/257/65532": 3507, + "1/257/65532": 0, "1/257/65533": 6, "1/257/65528": [12, 15, 18, 28, 35, 37], "1/257/65529": [ diff --git a/tests/components/matter/snapshots/test_binary_sensor.ambr b/tests/components/matter/snapshots/test_binary_sensor.ambr index 2e3367121e9..82dcc166f13 100644 --- a/tests/components/matter/snapshots/test_binary_sensor.ambr +++ b/tests/components/matter/snapshots/test_binary_sensor.ambr @@ -46,53 +46,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.mock_door_lock_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Door', - 'platform': 'matter', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00000000000004D2-0000000000000001-MatterNodeDevice-1-LockDoorStateSensor-257-3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[door_lock][binary_sensor.mock_door_lock_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'door', - 'friendly_name': 'Mock Door Lock Door', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_door_lock_door', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensors[door_lock_with_unbolt][binary_sensor.mock_door_lock_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/matter/snapshots/test_light.ambr b/tests/components/matter/snapshots/test_light.ambr index 68c1b7dca74..eff5820d27d 100644 --- 
a/tests/components/matter/snapshots/test_light.ambr +++ b/tests/components/matter/snapshots/test_light.ambr @@ -59,15 +59,15 @@ 'rgb_color': tuple( 255, 193, - 141, + 142, ), 'supported_color_modes': list([ , ]), 'supported_features': , 'xy_color': tuple( - 0.453, - 0.374, + 0.452, + 0.373, ), }), 'context': , @@ -252,7 +252,7 @@ 'rgb_color': tuple( 255, 247, - 203, + 204, ), 'supported_color_modes': list([ , @@ -261,8 +261,8 @@ ]), 'supported_features': , 'xy_color': tuple( - 0.363, - 0.374, + 0.362, + 0.373, ), }), 'context': , diff --git a/tests/components/matter/test_binary_sensor.py b/tests/components/matter/test_binary_sensor.py index 7ae483162bf..cddee975ac8 100644 --- a/tests/components/matter/test_binary_sensor.py +++ b/tests/components/matter/test_binary_sensor.py @@ -4,6 +4,7 @@ from collections.abc import Generator from unittest.mock import MagicMock, patch from matter_server.client.models.node import MatterNode +from matter_server.common.models import EventType import pytest from syrupy import SnapshotAssertion @@ -115,3 +116,34 @@ async def test_battery_sensor( state = hass.states.get(entity_id) assert state assert state.state == "on" + + +@pytest.mark.parametrize("node_fixture", ["door_lock"]) +async def test_optional_sensor_from_featuremap( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + matter_client: MagicMock, + matter_node: MatterNode, +) -> None: + """Test discovery of optional doorsensor in doorlock featuremap.""" + entity_id = "binary_sensor.mock_door_lock_door" + state = hass.states.get(entity_id) + assert state is None + + # update the feature map to include the optional door sensor feature + # and fire a node updated event + set_node_attribute(matter_node, 1, 257, 65532, 32) + await trigger_subscription_callback( + hass, matter_client, event=EventType.NODE_UPDATED, data=matter_node + ) + # this should result in a new binary sensor entity being discovered + state = hass.states.get(entity_id) + assert state + assert state.state == "off" + # now test the reverse, by removing the feature from the feature map + set_node_attribute(matter_node, 1, 257, 65532, 0) + await trigger_subscription_callback( + hass, matter_client, data=(matter_node.node_id, "1/257/65532", 0) + ) + state = hass.states.get(entity_id) + assert state is None diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index 15c629ec3da..628f0290f43 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -85,6 +85,40 @@ async def test_flow_errors( assert result["type"] is FlowResultType.CREATE_ENTRY +async def test_ingress_host( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test disallow ingress host.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "http://homeassistant/hassio/ingress/db21ed7f_mealie", + CONF_API_TOKEN: "token", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "ingress_url"} + + mock_mealie_client.get_user_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://homeassistant:9001", CONF_API_TOKEN: "token"}, + ) + assert result["type"] is 
FlowResultType.CREATE_ENTRY + + @pytest.mark.parametrize( ("version"), [ diff --git a/tests/components/media_extractor/test_config_flow.py b/tests/components/media_extractor/test_config_flow.py index bfee5ec4879..786341fd553 100644 --- a/tests/components/media_extractor/test_config_flow.py +++ b/tests/components/media_extractor/test_config_flow.py @@ -1,7 +1,7 @@ """Tests for the Media extractor config flow.""" from homeassistant.components.media_extractor.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -41,16 +41,3 @@ async def test_single_instance_allowed(hass: HomeAssistant) -> None: assert result.get("type") is FlowResultType.ABORT assert result.get("reason") == "single_instance_allowed" - - -async def test_import_flow(hass: HomeAssistant, mock_setup_entry) -> None: - """Test import flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT} - ) - - assert result.get("type") is FlowResultType.CREATE_ENTRY - assert result.get("title") == "Media extractor" - assert result.get("data") == {} - assert result.get("options") == {} - assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/media_extractor/test_init.py b/tests/components/media_extractor/test_init.py index bc80e063697..21fab6f875c 100644 --- a/tests/components/media_extractor/test_init.py +++ b/tests/components/media_extractor/test_init.py @@ -22,12 +22,15 @@ from homeassistant.setup import async_setup_component from . import YOUTUBE_EMPTY_PLAYLIST, YOUTUBE_PLAYLIST, YOUTUBE_VIDEO, MockYoutubeDL from .const import NO_FORMATS_RESPONSE, SOUNDCLOUD_TRACK -from tests.common import load_json_object_fixture +from tests.common import MockConfigEntry, load_json_object_fixture async def test_play_media_service_is_registered(hass: HomeAssistant) -> None: """Test play media service is registered.""" - await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + mock_config_entry = MockConfigEntry(domain=DOMAIN) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert hass.services.has_service(DOMAIN, SERVICE_PLAY_MEDIA) diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index 5c612f9f8ad..cdea046ceea 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -57,7 +57,7 @@ def check_config_loaded_fixture(): @pytest.fixture(name="register_words") def register_words_fixture(): """Set default for register_words.""" - return [0x00, 0x00] + return [0x00] @pytest.fixture(name="config_addon") diff --git a/tests/components/modern_forms/__init__.py b/tests/components/modern_forms/__init__.py index ae4e5bd9862..5882eaf1ec9 100644 --- a/tests/components/modern_forms/__init__.py +++ b/tests/components/modern_forms/__init__.py @@ -62,7 +62,9 @@ async def init_integration( ) entry = MockConfigEntry( - domain=DOMAIN, data={CONF_HOST: "192.168.1.123", CONF_MAC: "AA:BB:CC:DD:EE:FF"} + domain=DOMAIN, + data={CONF_HOST: "192.168.1.123", CONF_MAC: "AA:BB:CC:DD:EE:FF"}, + unique_id="AA:BB:CC:DD:EE:FF", ) entry.add_to_hass(hass) diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr index 75794aaca12..f8897a4a47f 100644 --- 
a/tests/components/modern_forms/snapshots/test_diagnostics.ambr +++ b/tests/components/modern_forms/snapshots/test_diagnostics.ambr @@ -17,7 +17,7 @@ 'pref_disable_polling': False, 'source': 'user', 'title': 'Mock Title', - 'unique_id': None, + 'unique_id': 'AA:BB:CC:DD:EE:FF', 'version': 1, }), 'device': dict({ diff --git a/tests/components/modern_forms/test_config_flow.py b/tests/components/modern_forms/test_config_flow.py index 1484b5d5992..5b10d4d729e 100644 --- a/tests/components/modern_forms/test_config_flow.py +++ b/tests/components/modern_forms/test_config_flow.py @@ -113,7 +113,11 @@ async def test_connection_error( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={CONF_HOST: "example.com"}, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "example.com"}, ) assert result.get("type") is FlowResultType.FORM @@ -193,24 +197,14 @@ async def test_user_device_exists_abort( await init_integration(hass, aioclient_mock, skip_setup=True) - await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - "host": "192.168.1.123", - "hostname": "example.local.", - "properties": {CONF_MAC: "AA:BB:CC:DD:EE:FF"}, - }, - ) - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, - data={ - "host": "192.168.1.123", - "hostname": "example.local.", - "properties": {CONF_MAC: "AA:BB:CC:DD:EE:FF"}, - }, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: "192.168.1.123"}, ) assert result.get("type") is FlowResultType.ABORT @@ -223,16 +217,6 @@ async def test_zeroconf_with_mac_device_exists_abort( """Test we abort zeroconf flow if a Modern Forms device already configured.""" await init_integration(hass, aioclient_mock, skip_setup=True) - await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={ - "host": "192.168.1.123", - "hostname": "example.local.", - "properties": {CONF_MAC: "AA:BB:CC:DD:EE:FF"}, - }, - ) - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, diff --git a/tests/components/monzo/test_sensor.py b/tests/components/monzo/test_sensor.py index bf88ce14931..a57466fdbd4 100644 --- a/tests/components/monzo/test_sensor.py +++ b/tests/components/monzo/test_sensor.py @@ -5,6 +5,7 @@ from typing import Any from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory +from monzopy import InvalidMonzoAPIResponseError import pytest from syrupy import SnapshotAssertion @@ -123,15 +124,22 @@ async def test_update_failed( monzo: AsyncMock, polling_config_entry: MockConfigEntry, freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, ) -> None: """Test all entities.""" await setup_integration(hass, polling_config_entry) - monzo.user_account.accounts.side_effect = Exception + monzo.user_account.accounts.side_effect = InvalidMonzoAPIResponseError( + {"acc_id": None}, "account_id" + ) freezer.tick(timedelta(minutes=10)) async_fire_time_changed(hass) await hass.async_block_till_done() + assert "Invalid Monzo API response." 
in caplog.text + assert "account_id" in caplog.text + assert "acc_id" in caplog.text + + entity_id = await async_get_entity_id( + hass, TEST_ACCOUNTS[0]["id"], ACCOUNT_SENSORS[0] + ) diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index 164c164cdfc..4bfcde752ae 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -1045,10 +1045,17 @@ async def test_restore_subscriptions_on_reconnect( mqtt_client_mock.reset_mock() mqtt_client_mock.on_disconnect(None, None, 0) + # Test subscribing to another topic while the client is not connected + await mqtt.async_subscribe(hass, "test/other", record_calls) + async_fire_time_changed(hass, utcnow() + timedelta(seconds=3)) # cooldown + assert ("test/other", 0) not in help_all_subscribe_calls(mqtt_client_mock) + mock_debouncer.clear() mqtt_client_mock.on_connect(None, None, None, 0) await mock_debouncer.wait() + # Assert all subscriptions are performed at the broker assert ("test/state", 0) in help_all_subscribe_calls(mqtt_client_mock) + assert ("test/other", 0) in help_all_subscribe_calls(mqtt_client_mock) @pytest.mark.parametrize( diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index e49e7a27c8d..8a674a4e1cd 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -2987,3 +2987,139 @@ async def test_shared_state_topic( state = hass.states.get(entity_id) assert state is not None assert state.state == "New state3" + + +@pytest.mark.parametrize("single_configs", [copy.deepcopy(TEST_SINGLE_CONFIGS)]) +async def test_discovery_with_late_via_device_discovery( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, + single_configs: list[tuple[str, dict[str, Any]]], +) -> None: + """Test a via device is available and the discovery of the via device is late.""" + await mqtt_mock_entry() + + await hass.async_block_till_done() + await hass.async_block_till_done() + + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is None + # Discovery single config schema + for discovery_topic, config in single_configs: + config["device"]["via_device"] = "id_via_very_unique" + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is not None + assert via_device_entry.name is None + + await hass.async_block_till_done() + + # Now discover the via device (a switch) + via_device_config = { + "name": None, + "command_topic": "test-switch-topic", + "unique_id": "very_unique_switch", + "device": {"identifiers": ["id_via_very_unique"], "name": "My Switch"}, + } + payload = json.dumps(via_device_config) + via_device_discovery_topic = "homeassistant/switch/very_unique/config" + async_fire_mqtt_message( + hass, + via_device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is not None + assert via_device_entry.name == "My Switch" + + await help_check_discovered_items(hass, device_registry, tag_mock) + + +@pytest.mark.parametrize("single_configs", [copy.deepcopy(TEST_SINGLE_CONFIGS)]) +async def test_discovery_with_late_via_device_update( + hass: 
HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock_entry: MqttMockHAClientGenerator, + tag_mock: AsyncMock, + single_configs: list[tuple[str, dict[str, Any]]], +) -> None: + """Test a via device is available and the via device is set via a discovery update.""" + await mqtt_mock_entry() + + await hass.async_block_till_done() + await hass.async_block_till_done() + + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is None + # Discovery single config schema without via device + for discovery_topic, config in single_configs: + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + assert via_device_entry is None + + # Resend the discovery update to set the via device + for discovery_topic, config in single_configs: + config["device"]["via_device"] = "id_via_very_unique" + payload = json.dumps(config) + async_fire_mqtt_message( + hass, + discovery_topic, + payload, + ) + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is not None + assert via_device_entry.name is None + + await hass.async_block_till_done() + await hass.async_block_till_done() + + # Now discover the via device (a switch) + via_device_config = { + "name": None, + "command_topic": "test-switch-topic", + "unique_id": "very_unique_switch", + "device": {"identifiers": ["id_via_very_unique"], "name": "My Switch"}, + } + payload = json.dumps(via_device_config) + via_device_discovery_topic = "homeassistant/switch/very_unique/config" + async_fire_mqtt_message( + hass, + via_device_discovery_topic, + payload, + ) + await hass.async_block_till_done() + await hass.async_block_till_done() + via_device_entry = device_registry.async_get_device( + {("mqtt", "id_via_very_unique")} + ) + assert via_device_entry is not None + assert via_device_entry.name == "My Switch" + + await help_check_discovered_items(hass, device_registry, tag_mock) diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 145016751e7..2ab664f5041 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -255,6 +255,26 @@ async def test_service_call_without_topic_does_not_publish( assert not mqtt_mock.async_publish.called + + +async def test_service_call_mqtt_entry_does_not_publish( + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient +) -> None: + """Test the publish action call fails if no MQTT config entry is set up.""" + assert await async_setup_component(hass, mqtt.DOMAIN, {}) + with pytest.raises( + ServiceValidationError, + match='Cannot publish to topic "test_topic", make sure MQTT is set up correctly', + ): + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test_topic", + mqtt.ATTR_PAYLOAD: "payload", + }, + blocking=True, + ) + + # The use of a topic_template in an mqtt publish action call # has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 async def test_mqtt_publish_action_call_with_topic_and_topic_template_does_not_publish( @@ -1822,11 +1842,17 @@ async def test_subscribe_connection_status( async def test_unload_config_entry( hass: HomeAssistant, - setup_with_birth_msg_client_mock: MqttMockPahoClient, + mqtt_client_mock: MqttMockPahoClient, caplog: 
pytest.LogCaptureFixture, ) -> None: """Test unloading the MQTT entry.""" - mqtt_client_mock = setup_with_birth_msg_client_mock + entry = MockConfigEntry( + domain=mqtt.DOMAIN, + data={mqtt.CONF_BROKER: "test-broker"}, + ) + entry.add_to_hass(hass) + + assert await async_setup_component(hass, mqtt.DOMAIN, {}) assert hass.services.has_service(mqtt.DOMAIN, "dump") assert hass.services.has_service(mqtt.DOMAIN, "publish") @@ -1843,8 +1869,8 @@ async def test_unload_config_entry( mqtt_client_mock.publish.assert_any_call("just_in_time", "published", 0, False) assert new_mqtt_config_entry.state is ConfigEntryState.NOT_LOADED await hass.async_block_till_done(wait_background_tasks=True) - assert not hass.services.has_service(mqtt.DOMAIN, "dump") - assert not hass.services.has_service(mqtt.DOMAIN, "publish") + assert hass.services.has_service(mqtt.DOMAIN, "dump") + assert hass.services.has_service(mqtt.DOMAIN, "publish") assert "No ACK from MQTT server" not in caplog.text @@ -1852,6 +1878,9 @@ async def test_publish_or_subscribe_without_valid_config_entry( hass: HomeAssistant, record_calls: MessageCallbackType ) -> None: """Test internal publish function with bad use cases.""" + assert await async_setup_component(hass, mqtt.DOMAIN, {}) + assert hass.services.has_service(mqtt.DOMAIN, "dump") + assert hass.services.has_service(mqtt.DOMAIN, "publish") with pytest.raises(HomeAssistantError): await mqtt.async_publish( hass, "some-topic", "test-payload", qos=0, retain=False, encoding=None diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index 0ef7cda2a7d..b11484d55fb 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -721,12 +721,12 @@ async def test_invalid_state_via_topic( state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("rgb_color") == (255, 254, 250) + assert state.attributes.get("rgb_color") == (255, 255, 251) assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_temp") == 153 assert state.attributes.get("effect") == "none" assert state.attributes.get("hs_color") == (54.768, 1.6) - assert state.attributes.get("xy_color") == (0.326, 0.333) + assert state.attributes.get("xy_color") == (0.325, 0.333) async_fire_mqtt_message(hass, "test_light_rgb/color_temp/status", "") light_state = hass.states.get("light.test") diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 31573ad88c6..f0da483e706 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -674,12 +674,12 @@ async def test_controlling_state_via_topic( assert state.attributes.get("rgb_color") == ( 255, 253, - 248, + 249, ) # temp converted to color assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_temp") == 155 assert state.attributes.get("effect") == "colorloop" - assert state.attributes.get("xy_color") == (0.328, 0.334) # temp converted to color + assert state.attributes.get("xy_color") == (0.328, 0.333) # temp converted to color assert state.attributes.get("hs_color") == (44.098, 2.43) # temp converted to color # Turn the light off @@ -706,7 +706,7 @@ async def test_controlling_state_via_topic( ) light_state = hass.states.get("light.test") - assert light_state.attributes.get("xy_color") == (0.141, 0.14) + assert light_state.attributes.get("xy_color") == (0.141, 0.141) async_fire_mqtt_message( hass, "test_light_rgb", '{"state":"ON", "color":{"h":180,"s":50}}' 
@@ -1015,10 +1015,10 @@ async def test_controlling_the_state_with_legacy_color_handling( assert state.attributes.get("color_temp") == 353 assert state.attributes.get("effect") is None assert state.attributes.get("hs_color") == (28.125, 61.661) - assert state.attributes.get("rgb_color") == (255, 171, 97) + assert state.attributes.get("rgb_color") == (255, 171, 98) assert state.attributes.get("rgbw_color") is None assert state.attributes.get("rgbww_color") is None - assert state.attributes.get("xy_color") == (0.513, 0.386) + assert state.attributes.get("xy_color") == (0.512, 0.385) @pytest.mark.parametrize( @@ -1113,8 +1113,8 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.async_publish.assert_called_once_with( "test_light_rgb/set", JsonValidator( - '{"state": "ON", "color": {"r": 0, "g": 123, "b": 255,' - ' "x": 0.14, "y": 0.131, "h": 210.824, "s": 100.0},' + '{"state": "ON", "color": {"r": 0, "g": 124, "b": 255,' + ' "x": 0.14, "y": 0.133, "h": 210.824, "s": 100.0},' ' "brightness": 50}' ), 2, @@ -1125,8 +1125,8 @@ async def test_sending_mqtt_commands_and_optimistic( assert state.attributes.get("color_mode") == light.ColorMode.HS assert state.attributes["brightness"] == 50 assert state.attributes["hs_color"] == (210.824, 100.0) - assert state.attributes["rgb_color"] == (0, 123, 255) - assert state.attributes["xy_color"] == (0.14, 0.131) + assert state.attributes["rgb_color"] == (0, 124, 255) + assert state.attributes["xy_color"] == (0.14, 0.133) await common.async_turn_on(hass, "light.test", brightness=50, hs_color=(359, 78)) mqtt_mock.async_publish.assert_called_once_with( @@ -1514,7 +1514,7 @@ async def test_sending_rgb_color_no_brightness( ), call( "test_light_rgb/set", - JsonValidator('{"state": "ON", "color": {"r": 50, "g": 11, "b": 11}}'), + JsonValidator('{"state": "ON", "color": {"r": 50, "g": 11, "b": 12}}'), 0, False, ), @@ -1646,7 +1646,7 @@ async def test_sending_rgb_color_with_brightness( call( "test_light_rgb/set", JsonValidator( - '{"state": "ON", "color": {"r": 0, "g": 123, "b": 255},' + '{"state": "ON", "color": {"r": 0, "g": 124, "b": 255},' ' "brightness": 50}' ), 0, @@ -1716,7 +1716,7 @@ async def test_sending_rgb_color_with_scaled_brightness( call( "test_light_rgb/set", JsonValidator( - '{"state": "ON", "color": {"r": 0, "g": 123, "b": 255},' + '{"state": "ON", "color": {"r": 0, "g": 124, "b": 255},' ' "brightness": 20}' ), 0, @@ -1830,7 +1830,7 @@ async def test_sending_xy_color( call( "test_light_rgb/set", JsonValidator( - '{"state": "ON", "color": {"x": 0.14, "y": 0.131},' + '{"state": "ON", "color": {"x": 0.14, "y": 0.133},' ' "brightness": 50}' ), 0, diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index 63e110ba7c0..59fd3eb88ed 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -322,7 +322,7 @@ async def test_state_brightness_color_effect_temp_change_via_topic( state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("rgb_color") == (255, 128, 63) + assert state.attributes.get("rgb_color") == (255, 128, 64) assert state.attributes.get("brightness") == 255 assert state.attributes.get("color_temp") is None # rgb color has priority assert state.attributes.get("effect") is None @@ -494,12 +494,12 @@ async def test_sending_mqtt_commands_and_optimistic( # Full brightness - normalization of RGB values sent over MQTT await common.async_turn_on(hass, "light.test", rgb_color=(128, 64, 0)) 
mqtt_mock.async_publish.assert_called_once_with( - "test_light_rgb/set", "on,,,255-127-0,30.0-100.0", 2, False + "test_light_rgb/set", "on,,,255-128-0,30.0-100.0", 2, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("rgb_color") == (255, 127, 0) + assert state.attributes.get("rgb_color") == (255, 128, 0) # Set half brightness await common.async_turn_on(hass, "light.test", brightness=128) @@ -528,7 +528,7 @@ async def test_sending_mqtt_commands_and_optimistic( mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") assert state.state == STATE_ON - assert state.attributes.get("rgb_color") == (0, 255, 127) + assert state.attributes.get("rgb_color") == (0, 255, 128) @pytest.mark.parametrize( @@ -626,7 +626,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( # Full brightness - normalization of RGB values sent over MQTT await common.async_turn_on(hass, "light.test", rgb_color=(128, 64, 0)) mqtt_mock.async_publish.assert_called_once_with( - "test_light_rgb/set", "on,,,255-127-0,30.0-100.0", 0, False + "test_light_rgb/set", "on,,,255-128-0,30.0-100.0", 0, False ) mqtt_mock.async_publish.reset_mock() @@ -648,7 +648,7 @@ async def test_sending_mqtt_commands_non_optimistic_brightness_template( # Half brightness - normalization but no scaling of RGB values sent over MQTT await common.async_turn_on(hass, "light.test", rgb_color=(0, 32, 16)) mqtt_mock.async_publish.assert_called_once_with( - "test_light_rgb/set", "on,,,0-255-127,150.0-100.0", 0, False + "test_light_rgb/set", "on,,,0-255-128,150.0-100.0", 0, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get("light.test") diff --git a/tests/components/music_assistant/__init__.py b/tests/components/music_assistant/__init__.py new file mode 100644 index 00000000000..6893b862e2d --- /dev/null +++ b/tests/components/music_assistant/__init__.py @@ -0,0 +1 @@ +"""The tests for the Music Assistant component.""" diff --git a/tests/components/music_assistant/common.py b/tests/components/music_assistant/common.py new file mode 100644 index 00000000000..c8293b5622f --- /dev/null +++ b/tests/components/music_assistant/common.py @@ -0,0 +1,159 @@ +"""Provide common test tools.""" + +from __future__ import annotations + +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +from music_assistant_models.enums import EventType +from music_assistant_models.media_items import Album, Artist, Playlist, Radio, Track +from music_assistant_models.player import Player +from music_assistant_models.player_queue import PlayerQueue +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, load_json_object_fixture + +MASS_DOMAIN = "music_assistant" +MOCK_URL = "http://mock-music_assistant-server-url" + + +def load_and_parse_fixture(fixture: str) -> dict[str, Any]: + """Load and parse a fixture.""" + data = load_json_object_fixture(f"music_assistant/{fixture}.json") + return data[fixture] + + +async def setup_integration_from_fixtures( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Set up MusicAssistant integration with fixture data.""" + players = create_players_from_fixture() + music_assistant_client.players._players = {x.player_id: x for x in players} + player_queues = create_player_queues_from_fixture() + 
music_assistant_client.player_queues._queues = { + x.queue_id: x for x in player_queues + } + config_entry = MockConfigEntry( + domain=MASS_DOMAIN, + data={"url": MOCK_URL}, + unique_id=music_assistant_client.server_info.server_id, + ) + music = music_assistant_client.music + library_artists = create_library_artists_from_fixture() + music.get_library_artists = AsyncMock(return_value=library_artists) + library_artist_albums = create_library_artist_albums_from_fixture() + music.get_artist_albums = AsyncMock(return_value=library_artist_albums) + library_albums = create_library_albums_from_fixture() + music.get_library_albums = AsyncMock(return_value=library_albums) + library_album_tracks = create_library_album_tracks_from_fixture() + music.get_album_tracks = AsyncMock(return_value=library_album_tracks) + library_tracks = create_library_tracks_from_fixture() + music.get_library_tracks = AsyncMock(return_value=library_tracks) + library_playlists = create_library_playlists_from_fixture() + music.get_library_playlists = AsyncMock(return_value=library_playlists) + library_playlist_tracks = create_library_playlist_tracks_from_fixture() + music.get_playlist_tracks = AsyncMock(return_value=library_playlist_tracks) + library_radios = create_library_radios_from_fixture() + music.get_library_radios = AsyncMock(return_value=library_radios) + music.get_item_by_uri = AsyncMock() + + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +def create_players_from_fixture() -> list[Player]: + """Create MA Players from fixture.""" + fixture_data = load_and_parse_fixture("players") + return [Player.from_dict(player_data) for player_data in fixture_data] + + +def create_player_queues_from_fixture() -> list[PlayerQueue]: + """Create MA PlayerQueues from fixture.""" + fixture_data = load_and_parse_fixture("player_queues") + return [ + PlayerQueue.from_dict(player_queue_data) for player_queue_data in fixture_data + ] + + +def create_library_albums_from_fixture() -> list[Album]: + """Create MA Albums from fixture.""" + fixture_data = load_and_parse_fixture("library_albums") + return [Album.from_dict(album_data) for album_data in fixture_data] + + +def create_library_album_tracks_from_fixture() -> list[Track]: + """Create MA Tracks from fixture.""" + fixture_data = load_and_parse_fixture("library_album_tracks") + return [Track.from_dict(track_data) for track_data in fixture_data] + + +def create_library_tracks_from_fixture() -> list[Track]: + """Create MA Tracks from fixture.""" + fixture_data = load_and_parse_fixture("library_tracks") + return [Track.from_dict(track_data) for track_data in fixture_data] + + +def create_library_artists_from_fixture() -> list[Artist]: + """Create MA Artists from fixture.""" + fixture_data = load_and_parse_fixture("library_artists") + return [Artist.from_dict(artist_data) for artist_data in fixture_data] + + +def create_library_artist_albums_from_fixture() -> list[Album]: + """Create MA Albums from fixture.""" + fixture_data = load_and_parse_fixture("library_artist_albums") + return [Album.from_dict(album_data) for album_data in fixture_data] + + +def create_library_playlists_from_fixture() -> list[Playlist]: + """Create MA Playlists from fixture.""" + fixture_data = load_and_parse_fixture("library_playlists") + return [Playlist.from_dict(playlist_data) for playlist_data in fixture_data] + + +def create_library_playlist_tracks_from_fixture() -> list[Track]: + """Create MA Tracks from fixture.""" + 
fixture_data = load_and_parse_fixture("library_playlist_tracks") + return [Track.from_dict(track_data) for track_data in fixture_data] + + +def create_library_radios_from_fixture() -> list[Radio]: + """Create MA Radios from fixture.""" + fixture_data = load_and_parse_fixture("library_radios") + return [Radio.from_dict(radio_data) for radio_data in fixture_data] + + +async def trigger_subscription_callback( + hass: HomeAssistant, + client: MagicMock, + event: EventType = EventType.PLAYER_UPDATED, + data: Any = None, +) -> None: + """Trigger a subscription callback.""" + # trigger callback on all subscribers + for sub in client.subscribe_events.call_args_list: + callback = sub.kwargs["callback"] + event_filter = sub.kwargs.get("event_filter") + if event_filter in (None, event): + callback(event, data) + await hass.async_block_till_done() + + +def snapshot_music_assistant_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + platform: Platform, +) -> None: + """Snapshot MusicAssistant entities.""" + entities = hass.states.async_all(platform) + for entity_state in entities: + entity_entry = entity_registry.async_get(entity_state.entity_id) + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + assert entity_state == snapshot(name=f"{entity_entry.entity_id}-state") diff --git a/tests/components/music_assistant/conftest.py b/tests/components/music_assistant/conftest.py new file mode 100644 index 00000000000..2df43defe62 --- /dev/null +++ b/tests/components/music_assistant/conftest.py @@ -0,0 +1,83 @@ +"""Music Assistant test fixtures.""" + +import asyncio +from collections.abc import AsyncGenerator, Generator +from unittest.mock import MagicMock, patch + +from music_assistant_client.music import Music +from music_assistant_client.player_queues import PlayerQueues +from music_assistant_client.players import Players +from music_assistant_models.api import ServerInfoMessage +import pytest + +from homeassistant.components.music_assistant.config_flow import CONF_URL +from homeassistant.components.music_assistant.const import DOMAIN + +from tests.common import AsyncMock, MockConfigEntry, load_fixture + +MOCK_SERVER_ID = "1234" + + +@pytest.fixture +def mock_get_server_info() -> Generator[AsyncMock]: + """Mock the function to get server info.""" + with patch( + "homeassistant.components.music_assistant.config_flow.get_server_info" + ) as mock_get_server_info: + mock_get_server_info.return_value = ServerInfoMessage.from_json( + load_fixture("server_info_message.json", DOMAIN) + ) + yield mock_get_server_info + + +@pytest.fixture(name="music_assistant_client") +async def music_assistant_client_fixture() -> AsyncGenerator[MagicMock]: + """Fixture for a Music Assistant client.""" + with patch( + "homeassistant.components.music_assistant.MusicAssistantClient", autospec=True + ) as client_class: + client = client_class.return_value + + async def connect() -> None: + """Mock connect.""" + await asyncio.sleep(0) + + async def listen(init_ready: asyncio.Event | None) -> None: + """Mock listen.""" + if init_ready is not None: + init_ready.set() + listen_block = asyncio.Event() + await listen_block.wait() + pytest.fail("Listen was not cancelled!") + + client.connect = AsyncMock(side_effect=connect) + client.start_listening = AsyncMock(side_effect=listen) + client.server_info = ServerInfoMessage( + server_id=MOCK_SERVER_ID, + server_version="0.0.0", + schema_version=1, + min_supported_schema_version=1, + base_url="http://localhost:8095", + 
homeassistant_addon=False, + onboard_done=True, + ) + client.connection = MagicMock() + client.connection.connected = True + client.players = Players(client) + client.player_queues = PlayerQueues(client) + client.music = Music(client) + client.server_url = client.server_info.base_url + client.get_media_item_image_url = MagicMock(return_value=None) + + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Music Assistant", + data={CONF_URL: "http://localhost:8095"}, + unique_id="1234", + ) diff --git a/tests/components/music_assistant/fixtures/library_album_tracks.json b/tests/components/music_assistant/fixtures/library_album_tracks.json new file mode 100644 index 00000000000..562ee84fe35 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_album_tracks.json @@ -0,0 +1,364 @@ +{ + "library_album_tracks": [ + { + "item_id": "247", + "provider": "library", + "name": "Le Mirage", + "version": "", + "sort_name": "mirage, le", + "uri": "library://track/247", + "external_ids": [["isrc", "FR10S1794640"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "70953631", + "provider_domain": "tidal", + "provider_instance": "tidal--63Pkq9Aw", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/70953631", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Dana Murray", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 35, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 352, + "artists": [ + { + "item_id": 195, + "provider": "library", + "name": "Dana Jean Phoenix", + "version": "", + "sort_name": "dana jean phoenix", + "uri": "library://artist/195", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 95, + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 1 + }, + { + "item_id": "362", + "provider": "library", + "name": "Rabbit in the Headlights", + "version": "", + "sort_name": "rabbit in the headlights", + "uri": "library://track/362", + "external_ids": [["isrc", "GBLFP1645070"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "70953636", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/70953636", + "details": null + } + ], + "metadata": { + 
"description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Michael Oakley", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 34, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 253, + "artists": [ + { + "item_id": 90, + "provider": "library", + "name": "Michael Oakley", + "version": "", + "sort_name": "michael oakley", + "uri": "library://artist/90", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 95, + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 6 + }, + { + "item_id": "1", + "provider": "library", + "name": "1988 Girls", + "version": "", + "sort_name": "1988 girls", + "uri": "library://track/1", + "external_ids": [["isrc", "DEBL60768604"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "70953637", + "provider_domain": "tidal", + "provider_instance": "tidal--56X5qDS7", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/70953637", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Kiez Beats", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 14, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 258, + "artists": [ + { + "item_id": 110, + "provider": "library", + "name": "Futurecop!", + "version": "", + "sort_name": "futurecop!", + "uri": "library://artist/110", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 95, + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 7 + }, + { + "item_id": "495", + "provider": "library", + "name": "Timmy Goes to Space", + "version": "", + "sort_name": "timmy goes to space", + "uri": "library://track/495", + "external_ids": [["isrc", "NO2D81710001"]], + "media_type": "track", + "provider_mappings": [ + 
{ + "item_id": "70953643", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/70953643", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Jens Kristian Espevik", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 4, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 212, + "artists": [ + { + "item_id": 453, + "provider": "library", + "name": "Mr. Maen", + "version": "", + "sort_name": "mr. maen", + "uri": "library://artist/453", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 95, + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 13 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_albums.json b/tests/components/music_assistant/fixtures/library_albums.json new file mode 100644 index 00000000000..6936a96adc8 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_albums.json @@ -0,0 +1,148 @@ +{ + "library_albums": [ + { + "item_id": "396", + "provider": "library", + "name": "Synth Punk EP", + "version": "", + "sort_name": "synth punk ep", + "uri": "library://album/396", + "external_ids": [["barcode", "872133626743"]], + "media_type": "album", + "provider_mappings": [ + { + "item_id": "48563817", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/album/48563817", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/99c8bc2f/ed43/4fb2/adfb/e7e3157089d2/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "586446 Records DK", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 7, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null, + "year": 2015, + "artists": [ + { + "item_id": 289, + "provider": "library", + "name": "A Space Love Adventure", + "version": "", + "sort_name": "space love adventure, a", + "uri": "library://artist/289", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album_type": "ep" + }, + { + 
"item_id": "95", + "provider": "library", + "name": "Synthwave (The 80S Revival)", + "version": "The 80S Revival", + "sort_name": "synthwave (the 80s revival)", + "uri": "library://album/95", + "external_ids": [["barcode", "3614974086112"]], + "media_type": "album", + "provider_mappings": [ + { + "item_id": "70953630", + "provider_domain": "tidal", + "provider_instance": "tidal--56X5qDS7", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/album/70953630", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/b7b1897c/57ed/4a31/83d7/9ab3df83183a/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "Kiez Beats", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 43, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null, + "year": 2017, + "artists": [ + { + "item_id": 96, + "provider": "library", + "name": "Various Artists", + "version": "", + "sort_name": "various artists", + "uri": "library://artist/96", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album_type": "compilation" + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_artist_albums.json b/tests/components/music_assistant/fixtures/library_artist_albums.json new file mode 100644 index 00000000000..31885528734 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_artist_albums.json @@ -0,0 +1,88 @@ +{ + "library_artist_albums": [ + { + "item_id": "115", + "provider": "library", + "name": "A Sea of Stars", + "version": "", + "sort_name": "sea of stars, a", + "uri": "library://album/115", + "external_ids": [["barcode", "859741010126"]], + "media_type": "album", + "provider_mappings": [ + { + "item_id": "157401232", + "provider_domain": "tidal", + "provider_instance": "tidal--56X5qDS7", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/album/157401232", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/f55c749b/6642/40e3/a291/ff01fd2915cf/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "2021 NRW Records, under exclusive license to NewRetroWave, LLC", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 0, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null, + "year": 2021, + "artists": [ + { + "item_id": 127, + "provider": "library", + "name": "W O L F C L U B", + "version": "", + "sort_name": "w o l f c l u b", + "uri": "library://artist/127", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + }, + { + "item_id": 128, + "provider": "library", + "name": "Dora Pereli", + "version": "", + "sort_name": "dora pereli", + "uri": 
"library://artist/128", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album_type": "single" + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_artists.json b/tests/components/music_assistant/fixtures/library_artists.json new file mode 100644 index 00000000000..803ce003b6c --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_artists.json @@ -0,0 +1,60 @@ +{ + "library_artists": [ + { + "item_id": "127", + "provider": "library", + "name": "W O L F C L U B", + "version": "", + "sort_name": "w o l f c l u b", + "uri": "library://artist/127", + "external_ids": [], + "media_type": "artist", + "provider_mappings": [ + { + "item_id": "8741977", + "provider_domain": "tidal", + "provider_instance": "tidal--56X5qDS7", + "available": 1, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "?", + "bit_rate": 0 + }, + "url": "https://tidal.com/artist/8741977", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/1e01cdb6/f15d/4d8b/8440/a047976c1cac/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": null, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_playlist_tracks.json b/tests/components/music_assistant/fixtures/library_playlist_tracks.json new file mode 100644 index 00000000000..1fb1c330957 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_playlist_tracks.json @@ -0,0 +1,262 @@ +{ + "library_playlist_tracks": [ + { + "item_id": "77616130", + "provider": "tidal--Ah76MuMg", + "name": "Won't Get Fooled Again", + "version": "", + "sort_name": "won't get fooled again", + "uri": "tidal--Ah76MuMg://track/77616130", + "external_ids": [["isrc", "GBUM71405419"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "77616130", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": true, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 24, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/77616130", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/3496a8ad/ea69/4d7e/bbda/045417ab59e1/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 1971 Polydor Ltd. 
(UK)", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 30, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": 0, + "duration": 516, + "artists": [ + { + "item_id": "24915", + "provider": "tidal--Ah76MuMg", + "name": "The Who", + "version": "", + "sort_name": "who, the", + "uri": "tidal--Ah76MuMg://artist/24915", + "external_ids": [], + "media_type": "artist", + "provider_mappings": [ + { + "item_id": "24915", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": true, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "?", + "bit_rate": 0 + }, + "url": "https://tidal.com/artist/24915", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/0f782232/18c8/40b7/bb13/91c6039e40e6/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": null, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null + } + ], + "album": { + "item_id": "77616121", + "provider": "tidal--Ah76MuMg", + "name": "Who's Next", + "version": "", + "sort_name": "who's next", + "uri": "tidal--Ah76MuMg://album/77616121", + "external_ids": [], + "media_type": "album", + "available": true, + "image": null + }, + "disc_number": 1, + "track_number": 9 + }, + { + "item_id": "153795", + "provider": "tidal--Ah76MuMg", + "name": "We're An American Band", + "version": "Remastered 2002", + "sort_name": "we're an american band", + "uri": "tidal--Ah76MuMg://track/153795", + "external_ids": [["isrc", "USCA20200334"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "153795", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": true, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/153795", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/a6d86e02/84c1/41f7/84f5/41be8571fc40/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2002 Capitol Records, LLC", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 48, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": 1, + "duration": 207, + "artists": [ + { + "item_id": "9380", + "provider": "tidal--Ah76MuMg", + "name": "Grand Funk Railroad", + "version": "", + "sort_name": "grand funk railroad", + "uri": "tidal--Ah76MuMg://artist/9380", + "external_ids": [], + "media_type": "artist", + "provider_mappings": [ + { + "item_id": "9380", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": true, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "?", + 
"bit_rate": 0 + }, + "url": "https://tidal.com/artist/9380", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/6535bf95/a06d/4d23/8262/604fa41d8126/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": null, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": false, + "position": null + } + ], + "album": { + "item_id": "153794", + "provider": "tidal--Ah76MuMg", + "name": "We're An American Band (Expanded Edition / Remastered 2002)", + "version": "", + "sort_name": "we're an american band (expanded edition / remastered 2002)", + "uri": "tidal--Ah76MuMg://album/153794", + "external_ids": [], + "media_type": "album", + "available": true, + "image": null + }, + "disc_number": 1, + "track_number": 1 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_playlists.json b/tests/components/music_assistant/fixtures/library_playlists.json new file mode 100644 index 00000000000..7f88c5f3e24 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_playlists.json @@ -0,0 +1,63 @@ +{ + "library_playlists": [ + { + "item_id": "40", + "provider": "library", + "name": "1970s Rock Hits", + "version": "", + "sort_name": "1970s rock hits", + "uri": "library://playlist/40", + "external_ids": [], + "media_type": "playlist", + "provider_mappings": [ + { + "item_id": "30da0578-0ca0-4716-b66e-5f02bcd96702", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "?", + "bit_rate": 0 + }, + "url": "https://tidal.com/browse/playlist/30da0578-0ca0-4716-b66e-5f02bcd96702", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/95913801/41c1/4cc9/bf94/a0fba657bba5/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": null, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "owner": "TIDAL", + "is_editable": 0, + "cache_checksum": "2023-10-09 07: 09: 23.446000+00: 00" + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_radios.json b/tests/components/music_assistant/fixtures/library_radios.json new file mode 100644 index 00000000000..1a6a4666ce4 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_radios.json @@ -0,0 +1,66 @@ +{ + "library_radios": [ + { + "item_id": "1", + "provider": "library", + "name": "fm4 | ORF | HQ", + "version": "", + "sort_name": "fm4 | orf | hq", + "uri": "library://radio/1", + "external_ids": [], + "media_type": "radio", + "provider_mappings": [ + { + "item_id": "1e13ed4e-daa9-4728-8550-e08d89c1c8e7", + "provider_domain": "radiobrowser", + "provider_instance": "radiobrowser--FRc3pD3t", + "available": 1, + "audio_format": { + "content_type": "?", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + 
"output_format_str": "?", + "bit_rate": 0 + }, + "url": null, + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://tubestatic.orf.at/mojo/1_3/storyserver//tube/fm4/images/touch-icon-iphone-retina.png", + "provider": "radiobrowser", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": [ + { + "type": "website", + "url": "https://fm4.orf.at/" + } + ], + "performers": null, + "preview": null, + "popularity": 166, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 172800 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/library_tracks.json b/tests/components/music_assistant/fixtures/library_tracks.json new file mode 100644 index 00000000000..c4ed83e9342 --- /dev/null +++ b/tests/components/music_assistant/fixtures/library_tracks.json @@ -0,0 +1,556 @@ +{ + "library_tracks": [ + { + "item_id": "456", + "provider": "library", + "name": "Tennessee Whiskey", + "version": "", + "sort_name": "tennessee whiskey", + "uri": "library://track/456", + "external_ids": [["isrc", "USUM71418088"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "44832786", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/44832786", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/4894ff62/9de2/4ed8/a7b9/69e217bbbdda/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2015 Mercury Records, a Division of UMG Recordings, Inc.", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 33, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 293, + "artists": [ + { + "item_id": 433, + "provider": "library", + "name": "Chris Stapleton", + "version": "", + "sort_name": "chris stapleton", + "uri": "library://artist/433", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 463, + "provider": "library", + "name": "Traveller", + "version": "", + "sort_name": "traveller", + "uri": "library://album/463", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/4894ff62/9de2/4ed8/a7b9/69e217bbbdda/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 3 + }, + { + "item_id": "467", + "provider": "library", + "name": "Thelma + Louise", + "version": "", + "sort_name": "thelma + louise", + "uri": "library://track/467", + "external_ids": [["isrc", "GBUM72104380"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "194027388", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 24, + "channels": 2, 
+ "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/194027388", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/04fc7c3c/b814/4855/874c/a2e456205b65/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2021 Virgin Records Limited", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 20, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 137, + "artists": [ + { + "item_id": 81, + "provider": "library", + "name": "Bastille", + "version": "", + "sort_name": "bastille", + "uri": "library://artist/81", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 471, + "provider": "library", + "name": "Thelma + Louise", + "version": "", + "sort_name": "thelma + louise", + "uri": "library://album/471", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/04fc7c3c/b814/4855/874c/a2e456205b65/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 1 + }, + { + "item_id": "485", + "provider": "library", + "name": "They Don't Care About Us", + "version": "", + "sort_name": "they don't care about us", + "uri": "library://track/485", + "external_ids": [["isrc", "USSM19500629"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "5279069", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 24, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/5279069", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/a2fa5815/851d/4d2d/b6a7/17a365c838f9/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "(P) 1995 MJJ Productions Inc.", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 27, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 284, + "artists": [ + { + "item_id": 30, + "provider": "library", + "name": "Michael Jackson", + "version": "", + "sort_name": "michael jackson", + "uri": "library://artist/30", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 486, + "provider": "library", + "name": "HIStory - PAST, PRESENT AND FUTURE - BOOK I", + "version": "", + "sort_name": "history - past, present and future - book i", + "uri": "library://album/486", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/a2fa5815/851d/4d2d/b6a7/17a365c838f9/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 2, + "track_number": 2 + }, + { + "item_id": "486", + "provider": "library", + 
"name": "They Don't Give A F**** About Us", + "version": "", + "sort_name": "they don't give a f**** about us", + "uri": "library://track/486", + "external_ids": [["isrc", "USIR10211795"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "44066854", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/44066854", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": true, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/6b7b2b58/5dc2/4d0c/8979/7b30bb779d6f/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2002 Amaru Entertainment, Inc., Under exclusive license to Interscope Records", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 34, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 306, + "artists": [ + { + "item_id": 159, + "provider": "library", + "name": "2Pac", + "version": "", + "sort_name": "2pac", + "uri": "library://artist/159", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + }, + { + "item_id": 451, + "provider": "library", + "name": "The Outlawz", + "version": "", + "sort_name": "outlawz, the", + "uri": "library://artist/451", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 487, + "provider": "library", + "name": "Better Dayz", + "version": "", + "sort_name": "better dayz", + "uri": "library://album/487", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/6b7b2b58/5dc2/4d0c/8979/7b30bb779d6f/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 2, + "track_number": 13 + }, + { + "item_id": "487", + "provider": "library", + "name": "Things We Lost In The Fire", + "version": "TORN Remix", + "sort_name": "things we lost in the fire", + "uri": "library://track/487", + "external_ids": [["isrc", "GBUM71304903"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "22627902", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/22627902", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/de277fd3/cc29/4d63/a60f/13b501c5f3d0/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2013 Virgin Records Limited", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 10, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 323, + "artists": [ + { + "item_id": 81, + "provider": "library", + "name": "Bastille", + 
"version": "", + "sort_name": "bastille", + "uri": "library://artist/81", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 488, + "provider": "library", + "name": "Things We Lost In The Fire", + "version": "", + "sort_name": "things we lost in the fire", + "uri": "library://album/488", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/de277fd3/cc29/4d63/a60f/13b501c5f3d0/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 3 + }, + { + "item_id": "488", + "provider": "library", + "name": "Those Nights", + "version": "", + "sort_name": "those nights", + "uri": "library://track/488", + "external_ids": [["isrc", "GBUM71803866"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "110750762", + "provider_domain": "tidal", + "provider_instance": "tidal--Ah76MuMg", + "available": 1, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 24, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://tidal.com/track/110750762", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://resources.tidal.com/images/713805f3/c08c/4c0f/8199/d63e6badac0d/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "℗ 2019 Virgin Records Limited", + "lyrics": null, + "label": null, + "links": null, + "performers": null, + "preview": null, + "popularity": 21, + "release_date": null, + "languages": null, + "last_refresh": null + }, + "favorite": true, + "position": null, + "duration": 270, + "artists": [ + { + "item_id": 81, + "provider": "library", + "name": "Bastille", + "version": "", + "sort_name": "bastille", + "uri": "library://artist/81", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": 489, + "provider": "library", + "name": "Doom Days", + "version": "", + "sort_name": "doom days", + "uri": "library://album/489", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://resources.tidal.com/images/713805f3/c08c/4c0f/8199/d63e6badac0d/750x750.jpg", + "provider": "tidal", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 10 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/player_queues.json b/tests/components/music_assistant/fixtures/player_queues.json new file mode 100644 index 00000000000..5251560365c --- /dev/null +++ b/tests/components/music_assistant/fixtures/player_queues.json @@ -0,0 +1,328 @@ +{ + "player_queues": [ + { + "queue_id": "00:00:00:00:00:01", + "active": false, + "display_name": "Test Player 1", + "available": true, + "items": 0, + "shuffle_enabled": false, + "repeat_mode": "off", + "dont_stop_the_music_enabled": false, + "current_index": null, + "index_in_buffer": null, + "elapsed_time": 0, + "elapsed_time_last_updated": 1730118302.163217, + "state": "idle", + "current_item": null, + "next_item": null, + "radio_source": [], + "flow_mode": false, + "resume_pos": 0 + }, + { + "queue_id": "00:00:00:00:00:02", + "active": false, + "display_name": "My Super Test Player 2", + "available": true, + "items": 0, + "shuffle_enabled": false, + 
"repeat_mode": "off", + "dont_stop_the_music_enabled": false, + "current_index": null, + "index_in_buffer": null, + "elapsed_time": 0, + "elapsed_time_last_updated": 0, + "state": "idle", + "current_item": null, + "next_item": null, + "radio_source": [], + "flow_mode": false, + "resume_pos": 0 + }, + { + "queue_id": "test_group_player_1", + "active": true, + "display_name": "Test Group Player 1", + "available": true, + "items": 1094, + "shuffle_enabled": true, + "repeat_mode": "all", + "dont_stop_the_music_enabled": true, + "current_index": 26, + "index_in_buffer": 26, + "elapsed_time": 232.08810877799988, + "elapsed_time_last_updated": 1730313109.5659513, + "state": "playing", + "current_item": { + "queue_id": "test_group_player_1", + "queue_item_id": "5d95dc5be77e4f7eb4939f62cfef527b", + "name": "Guns N' Roses - November Rain", + "duration": 536, + "sort_index": 2109, + "streamdetails": { + "provider": "spotify", + "item_id": "3YRCqOhFifThpSRFJ1VWFM", + "audio_format": { + "content_type": "ogg", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "ogg", + "bit_rate": 0 + }, + "media_type": "track", + "stream_type": "custom", + "stream_title": null, + "duration": 536, + "size": null, + "can_seek": true, + "loudness": -12.47, + "loudness_album": null, + "prefer_album_loudness": false, + "volume_normalization_mode": "fallback_dynamic", + "target_loudness": -17, + "strip_silence_begin": false, + "strip_silence_end": true, + "stream_error": null + }, + "media_item": { + "item_id": "3YRCqOhFifThpSRFJ1VWFM", + "provider": "spotify", + "name": "November Rain", + "version": "", + "sort_name": "november rain", + "uri": "spotify://track/3YRCqOhFifThpSRFJ1VWFM", + "external_ids": [["isrc", "USGF19141510"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "3YRCqOhFifThpSRFJ1VWFM", + "provider_domain": "spotify", + "provider_instance": "spotify", + "available": true, + "audio_format": { + "content_type": "ogg", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "ogg", + "bit_rate": 320 + }, + "url": "https://open.spotify.com/track/3YRCqOhFifThpSRFJ1VWFM", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": false, + "images": [ + { + "type": "thumb", + "path": "https://i.scdn.co/image/ab67616d0000b273e44963b8bb127552ac761873", + "provider": "spotify", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": null, + "lyrics": null, + "label": null, + "links": null, + "chapters": null, + "performers": null, + "preview": "https://p.scdn.co/mp3-preview/98deb9c370bbaa350be058b3470fbe3bc1e28d9d?cid=2eb96f9b37494be1824999d58028a305", + "popularity": 77, + "last_refresh": null + }, + "favorite": false, + "position": 1372, + "duration": 536, + "artists": [ + { + "item_id": "3qm84nBOXUEQ2vnTfUTTFC", + "provider": "spotify", + "name": "Guns N' Roses", + "version": "", + "sort_name": "guns n' roses", + "uri": "spotify://artist/3qm84nBOXUEQ2vnTfUTTFC", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": "0CxPbTRARqKUYighiEY9Sz", + "provider": "spotify", + "name": "Use Your Illusion I", + "version": "", + "sort_name": "use your illusion i", + "uri": "spotify://album/0CxPbTRARqKUYighiEY9Sz", + "external_ids": [], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://i.scdn.co/image/ab67616d0000b273e44963b8bb127552ac761873", + 
"provider": "spotify", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 10 + }, + "image": { + "type": "thumb", + "path": "https://i.scdn.co/image/ab67616d0000b273e44963b8bb127552ac761873", + "provider": "spotify", + "remotely_accessible": true + }, + "index": 0 + }, + "next_item": { + "queue_id": "test_group_player_1", + "queue_item_id": "990ae8f29cdf4fb588d679b115621f55", + "name": "The Stranglers - Golden Brown", + "duration": 207, + "sort_index": 1138, + "streamdetails": { + "provider": "qobuz", + "item_id": "1004735", + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "media_type": "track", + "stream_type": "http", + "stream_title": null, + "duration": 207, + "size": null, + "can_seek": true, + "loudness": -14.23, + "loudness_album": null, + "prefer_album_loudness": true, + "volume_normalization_mode": "fallback_dynamic", + "target_loudness": -17, + "strip_silence_begin": true, + "strip_silence_end": true, + "stream_error": null + }, + "media_item": { + "item_id": "1004735", + "provider": "qobuz", + "name": "Golden Brown", + "version": "", + "sort_name": "golden brown", + "uri": "qobuz://track/1004735", + "external_ids": [["isrc", "GBAYE8100053"]], + "media_type": "track", + "provider_mappings": [ + { + "item_id": "1004735", + "provider_domain": "qobuz", + "provider_instance": "qobuz", + "available": true, + "audio_format": { + "content_type": "flac", + "sample_rate": 44100, + "bit_depth": 16, + "channels": 2, + "output_format_str": "flac", + "bit_rate": 0 + }, + "url": "https://open.qobuz.com/track/1004735", + "details": null + } + ], + "metadata": { + "description": null, + "review": null, + "explicit": null, + "images": [ + { + "type": "thumb", + "path": "https://static.qobuz.com/images/covers/59/88/0724353468859_600.jpg", + "provider": "qobuz", + "remotely_accessible": true + } + ], + "genres": null, + "mood": null, + "style": null, + "copyright": "© 2001 Parlophone Records Ltd, a Warner Music Group Company ℗ 1981 Parlophone Records Ltd, a Warner Music Group Company", + "lyrics": null, + "label": null, + "links": null, + "chapters": null, + "performers": [ + "Dave Greenfield, Composer, Producer, Keyboards, Vocals", + "Jean", + "Hugh Cornwell, Composer, Producer, Guitar, Vocals", + "Jean Jacques Burnel, Producer, Bass Guitar, Vocals", + "Jet Black, Composer, Producer, Drums, Percussion", + "Jacques Burnell, Composer", + "The Stranglers, MainArtist" + ], + "preview": null, + "popularity": null, + "last_refresh": null + }, + "favorite": false, + "position": 183, + "duration": 207, + "artists": [ + { + "item_id": "26779", + "provider": "qobuz", + "name": "The Stranglers", + "version": "", + "sort_name": "stranglers, the", + "uri": "qobuz://artist/26779", + "external_ids": [], + "media_type": "artist", + "available": true, + "image": null + } + ], + "album": { + "item_id": "0724353468859", + "provider": "qobuz", + "name": "La Folie", + "version": "", + "sort_name": "folie, la", + "uri": "qobuz://album/0724353468859", + "external_ids": [["barcode", "0724353468859"]], + "media_type": "album", + "available": true, + "image": { + "type": "thumb", + "path": "https://static.qobuz.com/images/covers/59/88/0724353468859_600.jpg", + "provider": "qobuz", + "remotely_accessible": true + } + }, + "disc_number": 1, + "track_number": 9 + }, + "image": { + "type": "thumb", + "path": "https://static.qobuz.com/images/covers/59/88/0724353468859_600.jpg", + "provider": 
"qobuz", + "remotely_accessible": true + }, + "index": 0 + }, + "radio_source": [], + "flow_mode": false, + "resume_pos": 0 + } + ] +} diff --git a/tests/components/music_assistant/fixtures/players.json b/tests/components/music_assistant/fixtures/players.json new file mode 100644 index 00000000000..2d8b88d0e8e --- /dev/null +++ b/tests/components/music_assistant/fixtures/players.json @@ -0,0 +1,149 @@ +{ + "players": [ + { + "player_id": "00:00:00:00:00:01", + "provider": "test", + "type": "player", + "name": "Test Player 1", + "available": true, + "powered": false, + "device_info": { + "model": "Test Model", + "address": "192.168.1.1", + "manufacturer": "Test Manufacturer" + }, + "supported_features": [ + "volume_set", + "volume_mute", + "pause", + "set_members", + "power", + "enqueue" + ], + "elapsed_time": 0, + "elapsed_time_last_updated": 0, + "state": "idle", + "volume_level": 20, + "volume_muted": false, + "group_childs": [], + "active_source": "00:00:00:00:00:01", + "active_group": null, + "current_media": null, + "synced_to": null, + "enabled_by_default": true, + "needs_poll": false, + "poll_interval": 30, + "enabled": true, + "hidden": false, + "icon": "mdi-speaker", + "group_volume": 20, + "display_name": "Test Player 1", + "extra_data": {}, + "announcement_in_progress": false + }, + { + "player_id": "00:00:00:00:00:02", + "provider": "test", + "type": "player", + "name": "Test Player 2", + "available": true, + "powered": true, + "device_info": { + "model": "Test Model", + "address": "192.168.1.2", + "manufacturer": "Test Manufacturer" + }, + "supported_features": [ + "volume_set", + "volume_mute", + "pause", + "set_members", + "power", + "enqueue" + ], + "elapsed_time": 0, + "elapsed_time_last_updated": 0, + "state": "playing", + "volume_level": 20, + "volume_muted": false, + "group_childs": [], + "active_source": "spotify", + "active_group": null, + "current_media": { + "uri": "spotify://track/5d95dc5be77e4f7eb4939f62cfef527b", + "media_type": "track", + "title": "Test Track", + "artist": "Test Artist", + "album": "Test Album", + "image_url": null, + "duration": 300, + "queue_id": null, + "queue_item_id": null, + "custom_data": null + }, + "synced_to": null, + "enabled_by_default": true, + "needs_poll": false, + "poll_interval": 30, + "enabled": true, + "hidden": false, + "icon": "mdi-speaker", + "group_volume": 20, + "display_name": "My Super Test Player 2", + "extra_data": {}, + "announcement_in_progress": false + }, + { + "player_id": "test_group_player_1", + "provider": "player_group", + "type": "group", + "name": "Test Group Player 1", + "available": true, + "powered": true, + "device_info": { + "model": "Sync Group", + "address": "", + "manufacturer": "Test" + }, + "supported_features": [ + "volume_set", + "volume_mute", + "pause", + "set_members", + "power", + "enqueue" + ], + "elapsed_time": 0.0, + "elapsed_time_last_updated": 1730315437.9904983, + "state": "idle", + "volume_level": 6, + "volume_muted": false, + "group_childs": ["00:00:00:00:00:01", "00:00:00:00:00:02"], + "active_source": "test_group_player_1", + "active_group": null, + "current_media": { + "uri": "http://192.168.1.1:8097/single/test_group_player_1/5d95dc5be77e4f7eb4939f62cfef527b.flac?ts=1730313038", + "media_type": "unknown", + "title": null, + "artist": null, + "album": null, + "image_url": null, + "duration": null, + "queue_id": "test_group_player_1", + "queue_item_id": "5d95dc5be77e4f7eb4939f62cfef527b", + "custom_data": null + }, + "synced_to": null, + "enabled_by_default": true, + "needs_poll": 
true, + "poll_interval": 30, + "enabled": true, + "hidden": false, + "icon": "mdi-speaker-multiple", + "group_volume": 6, + "display_name": "Test Group Player 1", + "extra_data": {}, + "announcement_in_progress": false + } + ] +} diff --git a/tests/components/music_assistant/fixtures/server_info_message.json b/tests/components/music_assistant/fixtures/server_info_message.json new file mode 100644 index 00000000000..907ec8af820 --- /dev/null +++ b/tests/components/music_assistant/fixtures/server_info_message.json @@ -0,0 +1,9 @@ +{ + "server_id": "1234", + "server_version": "0.0.0", + "schema_version": 23, + "min_supported_schema_version": 23, + "base_url": "http://localhost:8095", + "homeassistant_addon": false, + "onboard_done": false +} diff --git a/tests/components/music_assistant/snapshots/test_media_player.ambr b/tests/components/music_assistant/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..e3d7a4a0cbc --- /dev/null +++ b/tests/components/music_assistant/snapshots/test_media_player.ambr @@ -0,0 +1,190 @@ +# serializer version: 1 +# name: test_media_player[media_player.my_super_test_player_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.my_super_test_player_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:speaker', + 'original_name': None, + 'platform': 'music_assistant', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_media_player[media_player.my_super_test_player_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'active_queue': None, + 'app_id': 'spotify', + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'My Super Test Player 2', + 'group_members': list([ + ]), + 'icon': 'mdi:speaker', + 'is_volume_muted': False, + 'mass_player_type': 'player', + 'media_album_name': 'Test Album', + 'media_artist': 'Test Artist', + 'media_content_id': 'spotify://track/5d95dc5be77e4f7eb4939f62cfef527b', + 'media_content_type': , + 'media_duration': 300, + 'media_position': 0, + 'media_title': 'Test Track', + 'supported_features': , + 'volume_level': 0.2, + }), + 'context': , + 'entity_id': 'media_player.my_super_test_player_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'playing', + }) +# --- +# name: test_media_player[media_player.test_group_player_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.test_group_player_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:speaker-multiple', + 'original_name': None, + 'platform': 'music_assistant', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'test_group_player_1', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_media_player[media_player.test_group_player_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'active_queue': 'test_group_player_1', + 'app_id': 'music_assistant', + 'device_class': 'speaker', + 'entity_picture_local': None, + 'friendly_name': 'Test Group Player 1', + 'group_members': list([ + 'media_player.my_super_test_player_2', + 'media_player.test_player_1', + ]), + 'icon': 'mdi:speaker-multiple', + 'is_volume_muted': False, + 'mass_player_type': 'group', + 'media_album_name': 'Use Your Illusion I', + 'media_artist': "Guns N' Roses", + 'media_content_id': 'spotify://track/3YRCqOhFifThpSRFJ1VWFM', + 'media_content_type': , + 'media_duration': 536, + 'media_position': 232, + 'media_position_updated_at': datetime.datetime(2024, 10, 30, 18, 31, 49, 565951, tzinfo=datetime.timezone.utc), + 'media_title': 'November Rain', + 'repeat': 'all', + 'shuffle': True, + 'supported_features': , + 'volume_level': 0.06, + }), + 'context': , + 'entity_id': 'media_player.test_group_player_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_media_player[media_player.test_player_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.test_player_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:speaker', + 'original_name': None, + 'platform': 'music_assistant', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_media_player[media_player.test_player_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'active_queue': '00:00:00:00:00:01', + 'device_class': 'speaker', + 'friendly_name': 'Test Player 1', + 'group_members': list([ + ]), + 'icon': 'mdi:speaker', + 'mass_player_type': 'player', + 'supported_features': , + }), + 'context': , + 'entity_id': 'media_player.test_player_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/music_assistant/test_config_flow.py b/tests/components/music_assistant/test_config_flow.py new file mode 100644 index 00000000000..c700060889c --- /dev/null +++ b/tests/components/music_assistant/test_config_flow.py @@ -0,0 +1,217 @@ +"""Define tests for the Music Assistant Integration config flow.""" + +from copy import deepcopy +from ipaddress import ip_address +from unittest import mock +from unittest.mock import AsyncMock + +from music_assistant_client.exceptions import ( + CannotConnect, + InvalidServerVersion, + MusicAssistantClientException, +) +from music_assistant_models.api import ServerInfoMessage +import pytest + +from homeassistant.components.music_assistant.config_flow import CONF_URL +from homeassistant.components.music_assistant.const import DEFAULT_NAME, DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry, load_fixture + +SERVER_INFO = { + "server_id": "1234", + "base_url": 
"http://localhost:8095", + "server_version": "0.0.0", + "schema_version": 23, + "min_supported_schema_version": 23, + "homeassistant_addon": True, +} + +ZEROCONF_DATA = ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + hostname="mock_hostname", + port=None, + type=mock.ANY, + name=mock.ANY, + properties=SERVER_INFO, +) + + +async def test_full_flow( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == { + CONF_URL: "http://localhost:8095", + } + assert result["result"].unique_id == "1234" + + +async def test_zero_conf_flow( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == { + CONF_URL: "http://localhost:8095", + } + assert result["result"].unique_id == "1234" + + +async def test_zero_conf_missing_server_id( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test zeroconf flow with missing server id.""" + bad_zero_conf_data = deepcopy(ZEROCONF_DATA) + bad_zero_conf_data.properties.pop("server_id") + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=bad_zero_conf_data, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "missing_server_id" + + +async def test_duplicate_user( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate user flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_duplicate_zeroconf( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate zeroconf flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error_message"), + [ + (InvalidServerVersion("invalid_server_version"), 
"invalid_server_version"), + (CannotConnect("cannot_connect"), "cannot_connect"), + (MusicAssistantClientException("unknown"), "unknown"), + ], +) +async def test_flow_user_server_version_invalid( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, + exception: MusicAssistantClientException, + error_message: str, +) -> None: + """Test user flow when server url is invalid.""" + mock_get_server_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + assert result["errors"] == {"base": error_message} + + mock_get_server_info.side_effect = None + mock_get_server_info.return_value = ServerInfoMessage.from_json( + load_fixture("server_info_message.json", DOMAIN) + ) + + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_URL: "http://localhost:8095"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_flow_zeroconf_connect_issue( + hass: HomeAssistant, + mock_get_server_info: AsyncMock, +) -> None: + """Test zeroconf flow when server connect be reached.""" + mock_get_server_info.side_effect = CannotConnect("cannot_connect") + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DATA, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/music_assistant/test_media_browser.py b/tests/components/music_assistant/test_media_browser.py new file mode 100644 index 00000000000..96fd54962d8 --- /dev/null +++ b/tests/components/music_assistant/test_media_browser.py @@ -0,0 +1,65 @@ +"""Test Music Assistant media browser implementation.""" + +from unittest.mock import MagicMock + +import pytest + +from homeassistant.components.media_player import BrowseError, BrowseMedia, MediaType +from homeassistant.components.music_assistant.const import DOMAIN +from homeassistant.components.music_assistant.media_browser import ( + LIBRARY_ALBUMS, + LIBRARY_ARTISTS, + LIBRARY_PLAYLISTS, + LIBRARY_RADIO, + LIBRARY_TRACKS, + async_browse_media, +) +from homeassistant.core import HomeAssistant + +from .common import setup_integration_from_fixtures + + +@pytest.mark.parametrize( + ("media_content_id", "media_content_type", "expected"), + [ + (LIBRARY_PLAYLISTS, MediaType.PLAYLIST, "library://playlist/40"), + (LIBRARY_ARTISTS, MediaType.ARTIST, "library://artist/127"), + (LIBRARY_ALBUMS, MediaType.ALBUM, "library://album/396"), + (LIBRARY_TRACKS, MediaType.TRACK, "library://track/486"), + (LIBRARY_RADIO, DOMAIN, "library://radio/1"), + ("artist", MediaType.ARTIST, "library://album/115"), + ("album", MediaType.ALBUM, "library://track/247"), + ("playlist", DOMAIN, "tidal--Ah76MuMg://track/77616130"), + (None, None, "artists"), + ], +) +async def test_browse_media_root( + hass: HomeAssistant, + music_assistant_client: MagicMock, + media_content_id: str, + media_content_type: str, + expected: str, +) -> None: + """Test the async_browse_media method.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = 
"media_player.test_player_1" + state = hass.states.get(entity_id) + assert state + browse_item: BrowseMedia = await async_browse_media( + hass, music_assistant_client, media_content_id, media_content_type + ) + assert browse_item.children[0].media_content_id == expected + + +async def test_browse_media_not_found( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test the async_browse_media method when media is not found.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + state = hass.states.get(entity_id) + assert state + + with pytest.raises(BrowseError, match="Media not found: unknown / unknown"): + await async_browse_media(hass, music_assistant_client, "unknown", "unknown") diff --git a/tests/components/music_assistant/test_media_player.py b/tests/components/music_assistant/test_media_player.py new file mode 100644 index 00000000000..13716b6a479 --- /dev/null +++ b/tests/components/music_assistant/test_media_player.py @@ -0,0 +1,585 @@ +"""Test Music Assistant media player entities.""" + +from unittest.mock import MagicMock, call + +from music_assistant_models.enums import MediaType, QueueOption +from music_assistant_models.media_items import Track +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.media_player import ( + ATTR_GROUP_MEMBERS, + ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, + ATTR_MEDIA_VOLUME_MUTED, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, + SERVICE_JOIN, + SERVICE_UNJOIN, +) +from homeassistant.components.music_assistant.const import DOMAIN as MASS_DOMAIN +from homeassistant.components.music_assistant.media_player import ( + ATTR_ALBUM, + ATTR_ANNOUNCE_VOLUME, + ATTR_ARTIST, + ATTR_AUTO_PLAY, + ATTR_MEDIA_ID, + ATTR_MEDIA_TYPE, + ATTR_RADIO_MODE, + ATTR_SOURCE_PLAYER, + ATTR_URL, + ATTR_USE_PRE_ANNOUNCE, + SERVICE_PLAY_ANNOUNCEMENT, + SERVICE_PLAY_MEDIA_ADVANCED, + SERVICE_TRANSFER_QUEUE, +) +from homeassistant.config_entries import HomeAssistantError +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .common import setup_integration_from_fixtures, snapshot_music_assistant_entities + +from tests.common import AsyncMock + +MOCK_TRACK = Track( + item_id="1", + provider="library", + name="Test Track", + provider_mappings={}, +) + + +async def test_media_player( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + music_assistant_client: MagicMock, +) -> None: + """Test media player.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + snapshot_music_assistant_entities( + hass, entity_registry, snapshot, Platform.MEDIA_PLAYER + ) + + +async def test_media_player_basic_actions( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity basic actions (play/stop/pause etc.).""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = 
hass.states.get(entity_id) + assert state + for action, cmd in ( + (SERVICE_MEDIA_PLAY, "play"), + (SERVICE_MEDIA_PAUSE, "pause"), + (SERVICE_MEDIA_STOP, "stop"), + (SERVICE_MEDIA_PREVIOUS_TRACK, "previous"), + (SERVICE_MEDIA_NEXT_TRACK, "next"), + (SERVICE_VOLUME_UP, "volume_up"), + (SERVICE_VOLUME_DOWN, "volume_down"), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + action, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + f"players/cmd/{cmd}", player_id=mass_player_id + ) + music_assistant_client.send_command.reset_mock() + + +async def test_media_player_seek_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity seek action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + "media_seek", + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_SEEK_POSITION: 100, + }, + blocking=True, + ) + + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/seek", player_id=mass_player_id, position=100 + ) + + +async def test_media_player_volume_set_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity volume_set action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_VOLUME_LEVEL: 0.5, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/volume_set", player_id=mass_player_id, volume_level=50 + ) + + +async def test_media_player_volume_mute_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity volume_mute action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_VOLUME_MUTED: True, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/volume_mute", player_id=mass_player_id, muted=True + ) + + +async def test_media_player_turn_on_off_actions( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity turn_on/turn_off actions.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + for action, pwr in ( + (SERVICE_TURN_ON, True), + (SERVICE_TURN_OFF, False), + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + action, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert 
music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/power", player_id=mass_player_id, powered=pwr + ) + music_assistant_client.send_command.reset_mock() + + +async def test_media_player_shuffle_set_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity shuffle_set action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_SHUFFLE_SET, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_SHUFFLE: True, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/shuffle", queue_id=mass_player_id, shuffle_enabled=True + ) + + +async def test_media_player_repeat_set_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity repeat_set action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_REPEAT: "one", + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/repeat", queue_id=mass_player_id, repeat_mode="one" + ) + + +async def test_media_player_join_players_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity join_players action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: entity_id, + ATTR_GROUP_MEMBERS: ["media_player.my_super_test_player_2"], + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/group_many", + target_player=mass_player_id, + child_player_ids=["00:00:00:00:00:02"], + ) + # test again with invalid source player + music_assistant_client.send_command.reset_mock() + with pytest.raises( + HomeAssistantError, match="Entity media_player.blah_blah not found" + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_JOIN, + { + ATTR_ENTITY_ID: entity_id, + ATTR_GROUP_MEMBERS: ["media_player.blah_blah"], + }, + blocking=True, + ) + + +async def test_media_player_unjoin_player_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity unjoin player action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_UNJOIN, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( 
+ "players/cmd/ungroup", player_id=mass_player_id + ) + + +async def test_media_player_clear_playlist_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player entity clear_playlist action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/clear", queue_id=mass_player_id + ) + + +async def test_media_player_play_media_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player (advanced) play_media action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + + # test simple play_media call with URI as media_id and no media type + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "spotify://track/1234", + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/play_media", + queue_id=mass_player_id, + media=["spotify://track/1234"], + option=None, + radio_mode=False, + start_item=None, + ) + + # test simple play_media call with URI and enqueue specified + music_assistant_client.send_command.reset_mock() + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "spotify://track/1234", + ATTR_MEDIA_ENQUEUE: "add", + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/play_media", + queue_id=mass_player_id, + media=["spotify://track/1234"], + option=QueueOption.ADD, + radio_mode=False, + start_item=None, + ) + + # test basic play_media call with URL and radio mode specified + music_assistant_client.send_command.reset_mock() + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "spotify://track/1234", + ATTR_RADIO_MODE: True, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/play_media", + queue_id=mass_player_id, + media=["spotify://track/1234"], + option=None, + radio_mode=True, + start_item=None, + ) + + # test play_media call with media id and media type specified + music_assistant_client.send_command.reset_mock() + music_assistant_client.music.get_item = AsyncMock(return_value=MOCK_TRACK) + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "1", + ATTR_MEDIA_TYPE: "track", + }, + blocking=True, + ) + assert music_assistant_client.music.get_item.call_count == 1 + assert music_assistant_client.music.get_item.call_args == call( + MediaType.TRACK, "1", "library" + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( 
+ "player_queues/play_media", + queue_id=mass_player_id, + media=[MOCK_TRACK.uri], + option=None, + radio_mode=False, + start_item=None, + ) + + # test play_media call by name + music_assistant_client.send_command.reset_mock() + music_assistant_client.music.get_item_by_name = AsyncMock(return_value=MOCK_TRACK) + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_MEDIA_ADVANCED, + { + ATTR_ENTITY_ID: entity_id, + ATTR_MEDIA_ID: "test", + ATTR_ARTIST: "artist", + ATTR_ALBUM: "album", + }, + blocking=True, + ) + assert music_assistant_client.music.get_item_by_name.call_count == 1 + assert music_assistant_client.music.get_item_by_name.call_args == call( + name="test", + artist="artist", + album="album", + media_type=None, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/play_media", + queue_id=mass_player_id, + media=[MOCK_TRACK.uri], + option=None, + radio_mode=False, + start_item=None, + ) + + +async def test_media_player_play_announcement_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player play_announcement action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + mass_player_id = "00:00:00:00:00:01" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_PLAY_ANNOUNCEMENT, + { + ATTR_ENTITY_ID: entity_id, + ATTR_URL: "http://blah.com/announcement.mp3", + ATTR_USE_PRE_ANNOUNCE: True, + ATTR_ANNOUNCE_VOLUME: 50, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "players/cmd/play_announcement", + player_id=mass_player_id, + url="http://blah.com/announcement.mp3", + use_pre_announce=True, + volume_level=50, + ) + + +async def test_media_player_transfer_queue_action( + hass: HomeAssistant, + music_assistant_client: MagicMock, +) -> None: + """Test media_player transfer_queu action.""" + await setup_integration_from_fixtures(hass, music_assistant_client) + entity_id = "media_player.test_player_1" + state = hass.states.get(entity_id) + assert state + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_TRANSFER_QUEUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_SOURCE_PLAYER: "media_player.my_super_test_player_2", + ATTR_AUTO_PLAY: True, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/transfer", + source_queue_id="00:00:00:00:00:02", + target_queue_id="00:00:00:00:00:01", + auto_play=True, + require_schema=25, + ) + # test again with invalid source player + music_assistant_client.send_command.reset_mock() + with pytest.raises(HomeAssistantError, match="Source player not available."): + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_TRANSFER_QUEUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_SOURCE_PLAYER: "media_player.blah_blah", + }, + blocking=True, + ) + # test again with no source player specified (which picks first playing playerqueue) + music_assistant_client.send_command.reset_mock() + await hass.services.async_call( + MASS_DOMAIN, + SERVICE_TRANSFER_QUEUE, + { + ATTR_ENTITY_ID: entity_id, + }, + blocking=True, + ) + assert music_assistant_client.send_command.call_count == 1 + assert music_assistant_client.send_command.call_args == call( + "player_queues/transfer", + 
source_queue_id="test_group_player_1", + target_queue_id="00:00:00:00:00:01", + auto_play=None, + require_schema=25, + ) diff --git a/tests/components/nasweb/__init__.py b/tests/components/nasweb/__init__.py new file mode 100644 index 00000000000..d4906d710d5 --- /dev/null +++ b/tests/components/nasweb/__init__.py @@ -0,0 +1 @@ +"""Tests for the NASweb integration.""" diff --git a/tests/components/nasweb/conftest.py b/tests/components/nasweb/conftest.py new file mode 100644 index 00000000000..7757f40ee44 --- /dev/null +++ b/tests/components/nasweb/conftest.py @@ -0,0 +1,61 @@ +"""Common fixtures for the NASweb tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.nasweb.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +BASE_CONFIG_FLOW = "homeassistant.components.nasweb.config_flow." +BASE_NASWEB_DATA = "homeassistant.components.nasweb.nasweb_data." +BASE_COORDINATOR = "homeassistant.components.nasweb.coordinator." +TEST_SERIAL_NUMBER = "0011223344556677" + + +@pytest.fixture +def validate_input_all_ok() -> Generator[dict[str, AsyncMock | MagicMock]]: + """Yield dictionary of mocked functions required for successful test_form execution.""" + with ( + patch( + BASE_CONFIG_FLOW + "WebioAPI.check_connection", + return_value=True, + ) as check_connection, + patch( + BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info", + return_value=True, + ) as refresh_device_info, + patch( + BASE_NASWEB_DATA + "NASwebData.get_webhook_url", + return_value="http://127.0.0.1:8123/api/webhook/de705e77291402afa0dd961426e9f19bb53631a9f2a106c52cfd2d2266913c04", + ) as get_webhook_url, + patch( + BASE_CONFIG_FLOW + "WebioAPI.get_serial_number", + return_value=TEST_SERIAL_NUMBER, + ) as get_serial, + patch( + BASE_CONFIG_FLOW + "WebioAPI.status_subscription", + return_value=True, + ) as status_subscription, + patch( + BASE_NASWEB_DATA + "NotificationCoordinator.check_connection", + return_value=True, + ) as check_status_confirmation, + ): + yield { + BASE_CONFIG_FLOW + "WebioAPI.check_connection": check_connection, + BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info": refresh_device_info, + BASE_NASWEB_DATA + "NASwebData.get_webhook_url": get_webhook_url, + BASE_CONFIG_FLOW + "WebioAPI.get_serial_number": get_serial, + BASE_CONFIG_FLOW + "WebioAPI.status_subscription": status_subscription, + BASE_NASWEB_DATA + + "NotificationCoordinator.check_connection": check_status_confirmation, + } diff --git a/tests/components/nasweb/test_config_flow.py b/tests/components/nasweb/test_config_flow.py new file mode 100644 index 00000000000..a5f2dca680d --- /dev/null +++ b/tests/components/nasweb/test_config_flow.py @@ -0,0 +1,208 @@ +"""Test the NASweb config flow.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from webio_api.api_client import AuthError + +from homeassistant import config_entries +from homeassistant.components.nasweb.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers.network import NoURLAvailableError + +from .conftest import ( + BASE_CONFIG_FLOW, + BASE_COORDINATOR, + BASE_NASWEB_DATA, + 
TEST_SERIAL_NUMBER,
+)
+
+pytestmark = pytest.mark.usefixtures("mock_setup_entry")
+
+
+TEST_USER_INPUT = {
+ CONF_HOST: "1.1.1.1",
+ CONF_USERNAME: "test-username",
+ CONF_PASSWORD: "test-password",
+}
+
+
+async def _add_test_config_entry(hass: HomeAssistant) -> ConfigFlowResult:
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": config_entries.SOURCE_USER}
+ )
+ assert result.get("type") == FlowResultType.FORM
+ assert not result.get("errors")
+
+ result2 = await hass.config_entries.flow.async_configure(
+ result["flow_id"], TEST_USER_INPUT
+ )
+ await hass.async_block_till_done()
+ return result2
+
+
+async def test_form(
+ hass: HomeAssistant,
+ mock_setup_entry: AsyncMock,
+ validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+ """Test the form."""
+ result = await _add_test_config_entry(hass)
+
+ assert result.get("type") == FlowResultType.CREATE_ENTRY
+ assert result.get("title") == "1.1.1.1"
+ assert result.get("data") == TEST_USER_INPUT
+
+ config_entry = result.get("result")
+ assert config_entry is not None
+ assert config_entry.unique_id == TEST_SERIAL_NUMBER
+ assert len(mock_setup_entry.mock_calls) == 1
+
+
+async def test_form_cannot_connect(
+ hass: HomeAssistant,
+ validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+ """Test cannot connect error."""
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": config_entries.SOURCE_USER}
+ )
+
+ with patch(BASE_CONFIG_FLOW + "WebioAPI.check_connection", return_value=False):
+ result2 = await hass.config_entries.flow.async_configure(
+ result["flow_id"], TEST_USER_INPUT
+ )
+
+ assert result2.get("type") == FlowResultType.FORM
+ assert result2.get("errors") == {"base": "cannot_connect"}
+
+
+async def test_form_invalid_auth(
+ hass: HomeAssistant,
+ validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+ """Test invalid auth."""
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": config_entries.SOURCE_USER}
+ )
+
+ with patch(
+ BASE_CONFIG_FLOW + "WebioAPI.refresh_device_info",
+ side_effect=AuthError,
+ ):
+ result2 = await hass.config_entries.flow.async_configure(
+ result["flow_id"], TEST_USER_INPUT
+ )
+
+ assert result2.get("type") == FlowResultType.FORM
+ assert result2.get("errors") == {"base": "invalid_auth"}
+
+
+async def test_form_missing_internal_url(
+ hass: HomeAssistant,
+ validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+ """Test missing internal url."""
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": config_entries.SOURCE_USER}
+ )
+
+ with patch(
+ BASE_NASWEB_DATA + "NASwebData.get_webhook_url", side_effect=NoURLAvailableError
+ ):
+ result2 = await hass.config_entries.flow.async_configure(
+ result["flow_id"], TEST_USER_INPUT
+ )
+ assert result2.get("type") == FlowResultType.FORM
+ assert result2.get("errors") == {"base": "missing_internal_url"}
+
+
+async def test_form_missing_nasweb_data(
+ hass: HomeAssistant,
+ validate_input_all_ok: dict[str, AsyncMock | MagicMock],
+) -> None:
+ """Test missing NASweb data."""
+ result = await hass.config_entries.flow.async_init(
+ DOMAIN, context={"source": config_entries.SOURCE_USER}
+ )
+
+ with patch(
+ BASE_CONFIG_FLOW + "WebioAPI.get_serial_number",
+ return_value=None,
+ ):
+ result2 = await hass.config_entries.flow.async_configure(
+ result["flow_id"], TEST_USER_INPUT
+ )
+ assert result2.get("type") == FlowResultType.FORM
+ assert result2.get("errors") == {"base": 
"missing_nasweb_data"} + with patch(BASE_CONFIG_FLOW + "WebioAPI.status_subscription", return_value=False): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_nasweb_data"} + + +async def test_missing_status( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test missing status update.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + BASE_COORDINATOR + "NotificationCoordinator.check_connection", + return_value=False, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "missing_status"} + + +async def test_form_exception( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test other exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.nasweb.config_flow.validate_input", + side_effect=Exception, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], TEST_USER_INPUT + ) + assert result2.get("type") == FlowResultType.FORM + assert result2.get("errors") == {"base": "unknown"} + + +async def test_form_already_configured( + hass: HomeAssistant, + validate_input_all_ok: dict[str, AsyncMock | MagicMock], +) -> None: + """Test already configured device.""" + result = await _add_test_config_entry(hass) + config_entry = result.get("result") + assert config_entry is not None + assert config_entry.unique_id == TEST_SERIAL_NUMBER + + result2_1 = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + result2_2 = await hass.config_entries.flow.async_configure( + result2_1["flow_id"], TEST_USER_INPUT + ) + await hass.async_block_till_done() + + assert result2_2.get("type") == FlowResultType.ABORT + assert result2_2.get("reason") == "already_configured" diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index 5d4719918a6..8f1f0a2f074 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -4,8 +4,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Generator import copy -from dataclasses import dataclass, field -import time +from dataclasses import dataclass from typing import Any from google_nest_sdm.auth import AbstractAuth @@ -37,7 +36,6 @@ SUBSCRIPTION_NAME = "projects/cloud-id-9876/subscriptions/subscriber-id-9876" class NestTestConfig: """Holder for integration configuration.""" - config: dict[str, Any] = field(default_factory=dict) config_entry_data: dict[str, Any] | None = None credential: ClientCredential | None = None @@ -54,39 +52,9 @@ TEST_CONFIG_APP_CREDS = NestTestConfig( credential=ClientCredential(CLIENT_ID, CLIENT_SECRET), ) TEST_CONFIGFLOW_APP_CREDS = NestTestConfig( - config=TEST_CONFIG_APP_CREDS.config, credential=ClientCredential(CLIENT_ID, CLIENT_SECRET), ) -TEST_CONFIG_LEGACY = NestTestConfig( - config={ - "nest": { - "client_id": "some-client-id", - "client_secret": "some-client-secret", - }, - }, - config_entry_data={ - "auth_implementation": "local", - "tokens": { - "expires_at": time.time() + 86400, - 
"access_token": { - "token": "some-token", - }, - }, - }, -) -TEST_CONFIG_ENTRY_LEGACY = NestTestConfig( - config_entry_data={ - "auth_implementation": "local", - "tokens": { - "expires_at": time.time() + 86400, - "access_token": { - "token": "some-token", - }, - }, - }, -) - TEST_CONFIG_NEW_SUBSCRIPTION = NestTestConfig( config_entry_data={ "sdm": {}, @@ -107,6 +75,7 @@ class FakeSubscriber(GoogleNestSubscriber): def __init__(self) -> None: # pylint: disable=super-init-not-called """Initialize Fake Subscriber.""" self._device_manager = DeviceManager() + self._subscriber_name = "fake-name" def set_update_callback(self, target: Callable[[EventMessage], Awaitable[None]]): """Capture the callback set by Home Assistant.""" diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index 85c64aff379..84f22e17e78 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -22,6 +22,7 @@ from homeassistant.components.application_credentials import ( ) from homeassistant.components.nest import DOMAIN from homeassistant.components.nest.const import CONF_SUBSCRIBER_ID, SDM_SCOPES +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -201,20 +202,6 @@ def nest_test_config() -> NestTestConfig: return TEST_CONFIG_APP_CREDS -@pytest.fixture -def config( - subscriber_id: str | None, nest_test_config: NestTestConfig -) -> dict[str, Any]: - """Fixture that sets up the configuration.yaml for the test.""" - config = copy.deepcopy(nest_test_config.config) - if CONF_SUBSCRIBER_ID in config.get(DOMAIN, {}): - if subscriber_id: - config[DOMAIN][CONF_SUBSCRIBER_ID] = subscriber_id - else: - del config[DOMAIN][CONF_SUBSCRIBER_ID] - return config - - @pytest.fixture def config_entry_unique_id() -> str: """Fixture to set ConfigEntry unique id.""" @@ -274,19 +261,19 @@ async def credential(hass: HomeAssistant, nest_test_config: NestTestConfig) -> N async def setup_base_platform( hass: HomeAssistant, platforms: list[str], - config: dict[str, Any], config_entry: MockConfigEntry | None, ) -> YieldFixture[PlatformSetup]: """Fixture to setup the integration platform.""" - if config_entry: - config_entry.add_to_hass(hass) + config_entry.add_to_hass(hass) with patch("homeassistant.components.nest.PLATFORMS", platforms): async def _setup_func() -> bool: - assert await async_setup_component(hass, DOMAIN, config) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() yield _setup_func + if config_entry.state == ConfigEntryState.LOADED: + await hass.config_entries.async_unload(config_entry.entry_id) @pytest.fixture diff --git a/tests/components/nest/test_camera.py b/tests/components/nest/test_camera.py index 029879f1413..698e9b7a274 100644 --- a/tests/components/nest/test_camera.py +++ b/tests/components/nest/test_camera.py @@ -176,16 +176,6 @@ async def async_get_image( return image.content -def get_frontend_stream_type_attribute( - hass: HomeAssistant, entity_id: str -) -> StreamType: - """Get the frontend_stream_type camera attribute.""" - cam = hass.states.get(entity_id) - assert cam is not None - assert cam.state == CameraState.STREAMING - return cam.attributes.get("frontend_stream_type") - - async def async_frontend_stream_types( client: MockHAClientWebSocket, entity_id: str ) -> list[str] | None: @@ -268,9 +258,9 @@ async def test_camera_stream( await setup_platform() assert len(hass.states.async_all()) == 1 - assert ( - 
get_frontend_stream_type_attribute(hass, "camera.my_camera") == StreamType.HLS - ) + cam = hass.states.get("camera.my_camera") + assert cam is not None + assert cam.state == CameraState.STREAMING client = await hass_ws_client(hass) frontend_stream_types = await async_frontend_stream_types( client, "camera.my_camera" @@ -294,10 +284,9 @@ async def test_camera_ws_stream( await setup_platform() assert len(hass.states.async_all()) == 1 - assert ( - get_frontend_stream_type_attribute(hass, "camera.my_camera") == StreamType.HLS - ) - + cam = hass.states.get("camera.my_camera") + assert cam is not None + assert cam.state == CameraState.STREAMING client = await hass_ws_client(hass) frontend_stream_types = await async_frontend_stream_types( client, "camera.my_camera" @@ -671,7 +660,10 @@ async def test_camera_web_rtc( cam = hass.states.get("camera.my_camera") assert cam is not None assert cam.state == CameraState.STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC + client = await hass_ws_client(hass) + assert await async_frontend_stream_types(client, "camera.my_camera") == [ + StreamType.WEB_RTC + ] client = await hass_ws_client(hass) await client.send_json_auto_id( @@ -720,17 +712,11 @@ async def test_camera_web_rtc_unsupported( cam = hass.states.get("camera.my_camera") assert cam is not None assert cam.state == CameraState.STREAMING - assert cam.attributes["frontend_stream_type"] == StreamType.HLS client = await hass_ws_client(hass) - await client.send_json_auto_id( - {"type": "camera/capabilities", "entity_id": "camera.my_camera"} - ) - msg = await client.receive_json() - - assert msg["type"] == TYPE_RESULT - assert msg["success"] - assert msg["result"] == {"frontend_stream_types": ["hls"]} + assert await async_frontend_stream_types(client, "camera.my_camera") == [ + StreamType.HLS + ] await client.send_json_auto_id( { @@ -745,7 +731,7 @@ async def test_camera_web_rtc_unsupported( assert not msg["success"] assert msg["error"] == { "code": "webrtc_offer_failed", - "message": "Camera does not support WebRTC, frontend_stream_type=hls", + "message": "Camera does not support WebRTC, frontend_stream_types={}", } @@ -844,6 +830,10 @@ async def test_camera_multiple_streams( assert cam.state == CameraState.STREAMING # Prefer WebRTC over RTSP/HLS assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC + client = await hass_ws_client(hass) + assert await async_frontend_stream_types(client, "camera.my_camera") == [ + StreamType.WEB_RTC + ] # RTSP stream is not supported stream_source = await camera.async_get_stream_source(hass, "camera.my_camera") @@ -919,6 +909,10 @@ async def test_webrtc_refresh_expired_stream( assert cam is not None assert cam.state == CameraState.STREAMING assert cam.attributes["frontend_stream_type"] == StreamType.WEB_RTC + client = await hass_ws_client(hass) + assert await async_frontend_stream_types(client, "camera.my_camera") == [ + StreamType.WEB_RTC + ] client = await hass_ws_client(hass) await client.send_json_auto_id( diff --git a/tests/components/nest/test_config_flow.py b/tests/components/nest/test_config_flow.py index 8b05ace6d4d..807e299b79c 100644 --- a/tests/components/nest/test_config_flow.py +++ b/tests/components/nest/test_config_flow.py @@ -27,7 +27,6 @@ from .common import ( TEST_CONFIGFLOW_APP_CREDS, FakeSubscriber, NestTestConfig, - PlatformSetup, ) from tests.common import MockConfigEntry @@ -350,11 +349,11 @@ def mock_pubsub_api_responses( @pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def 
test_app_credentials( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Check full flow.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -389,12 +388,8 @@ async def test_app_credentials( ("sdm_managed_topic", "device_access_project_id", "cloud_project_id"), [(True, "new-project-id", "new-cloud-project-id")], ) -async def test_config_flow_restart( - hass: HomeAssistant, oauth, subscriber, setup_platform -) -> None: +async def test_config_flow_restart(hass: HomeAssistant, oauth, subscriber) -> None: """Check with auth implementation is re-initialized when aborting the flow.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -447,11 +442,11 @@ async def test_config_flow_restart( @pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_flow_wrong_project_id( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Check the case where the wrong project ids are entered.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -506,12 +501,9 @@ async def test_config_flow_wrong_project_id( async def test_config_flow_pubsub_configuration_error( hass: HomeAssistant, oauth, - setup_platform, mock_subscriber, ) -> None: """Check full flow fails with configuration error.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -554,11 +546,9 @@ async def test_config_flow_pubsub_configuration_error( [(True, HTTPStatus.INTERNAL_SERVER_ERROR)], ) async def test_config_flow_pubsub_subscriber_error( - hass: HomeAssistant, oauth, setup_platform, mock_subscriber + hass: HomeAssistant, oauth, mock_subscriber ) -> None: """Check full flow with a subscriber error.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -707,11 +697,9 @@ async def test_reauth_multiple_config_entries( @pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_pubsub_subscription_strip_whitespace( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, oauth, subscriber ) -> None: """Check that project id has whitespace stripped on entry.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -742,11 +730,9 @@ async def test_pubsub_subscription_strip_whitespace( [(True, HTTPStatus.UNAUTHORIZED)], ) async def test_pubsub_subscription_auth_failure( - hass: HomeAssistant, oauth, setup_platform, mock_subscriber + hass: HomeAssistant, oauth, mock_subscriber ) -> None: """Check flow that creates a pub/sub subscription.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -819,7 +805,7 @@ async def test_pubsub_subscriber_config_entry_reauth( @pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_entry_title_from_home( - hass: HomeAssistant, oauth, setup_platform, subscriber + hass: HomeAssistant, oauth, subscriber ) -> None: """Test that the Google Home name is used for the config entry title.""" @@ -837,8 +823,6 @@ async 
def test_config_entry_title_from_home( ) ) - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -864,7 +848,7 @@ async def test_config_entry_title_from_home( @pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_config_entry_title_multiple_homes( - hass: HomeAssistant, oauth, setup_platform, subscriber + hass: HomeAssistant, oauth, subscriber ) -> None: """Test handling of multiple Google Homes authorized.""" @@ -894,8 +878,6 @@ async def test_config_entry_title_multiple_homes( ) ) - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -911,11 +893,9 @@ async def test_config_entry_title_multiple_homes( @pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_title_failure_fallback( - hass: HomeAssistant, oauth, setup_platform, mock_subscriber + hass: HomeAssistant, oauth, mock_subscriber ) -> None: """Test exception handling when determining the structure names.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -943,9 +923,7 @@ async def test_title_failure_fallback( @pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) -async def test_structure_missing_trait( - hass: HomeAssistant, oauth, setup_platform, subscriber -) -> None: +async def test_structure_missing_trait(hass: HomeAssistant, oauth, subscriber) -> None: """Test handling the case where a structure has no name set.""" device_manager = await subscriber.async_get_device_manager() @@ -959,8 +937,6 @@ async def test_structure_missing_trait( ) ) - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -996,11 +972,11 @@ async def test_dhcp_discovery( @pytest.mark.parametrize(("sdm_managed_topic"), [(True)]) async def test_dhcp_discovery_with_creds( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Exercise discovery dhcp with no config present (can't run).""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -1054,13 +1030,10 @@ async def test_token_error( hass: HomeAssistant, oauth: OAuthFixture, subscriber: FakeSubscriber, - setup_platform: PlatformSetup, status_code: HTTPStatus, error_reason: str, ) -> None: """Check full flow.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -1090,11 +1063,11 @@ async def test_token_error( ], ) async def test_existing_topic_and_subscription( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Test selecting existing user managed topic and subscription.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -1129,11 +1102,11 @@ async def test_existing_topic_and_subscription( async def test_no_eligible_topics( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Test the case where there are no eligible pub/sub topics.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -1153,11 
+1126,11 @@ async def test_no_eligible_topics( ], ) async def test_list_topics_failure( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Test selecting existing user managed topic and subscription.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -1177,11 +1150,11 @@ async def test_list_topics_failure( ], ) async def test_list_subscriptions_failure( - hass: HomeAssistant, oauth, subscriber, setup_platform + hass: HomeAssistant, + oauth, + subscriber, ) -> None: """Test selecting existing user managed topic and subscription.""" - await setup_platform() - result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) diff --git a/tests/components/nest/test_init.py b/tests/components/nest/test_init.py index a17803a6cde..17ddc485e85 100644 --- a/tests/components/nest/test_init.py +++ b/tests/components/nest/test_init.py @@ -24,22 +24,16 @@ import pytest from homeassistant.components.nest import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .common import ( PROJECT_ID, SUBSCRIBER_ID, - TEST_CONFIG_ENTRY_LEGACY, - TEST_CONFIG_LEGACY, TEST_CONFIG_NEW_SUBSCRIPTION, - TEST_CONFIGFLOW_APP_CREDS, FakeSubscriber, PlatformSetup, YieldFixture, ) -from tests.common import MockConfigEntry - PLATFORM = "sensor" @@ -201,18 +195,6 @@ async def test_subscriber_configuration_failure( assert entries[0].state is ConfigEntryState.SETUP_ERROR -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIGFLOW_APP_CREDS]) -async def test_empty_config( - hass: HomeAssistant, error_caplog: pytest.LogCaptureFixture, config, setup_platform -) -> None: - """Test setup is a no-op with not config.""" - await setup_platform() - assert not error_caplog.records - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 0 - - async def test_unload_entry(hass: HomeAssistant, setup_platform) -> None: """Test successful unload of a ConfigEntry.""" await setup_platform() @@ -318,26 +300,3 @@ async def test_migrate_unique_id( assert config_entry.state is ConfigEntryState.LOADED assert config_entry.unique_id == PROJECT_ID - - -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_LEGACY]) -async def test_legacy_works_with_nest_yaml( - hass: HomeAssistant, - config: dict[str, Any], - config_entry: MockConfigEntry, -) -> None: - """Test integration won't start with legacy works with nest yaml config.""" - config_entry.add_to_hass(hass) - assert not await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - -@pytest.mark.parametrize("nest_test_config", [TEST_CONFIG_ENTRY_LEGACY]) -async def test_legacy_works_with_nest_cleanup( - hass: HomeAssistant, setup_platform -) -> None: - """Test legacy works with nest config entries are silently removed once yaml is removed.""" - await setup_platform() - - entries = hass.config_entries.async_entries(DOMAIN) - assert len(entries) == 0 diff --git a/tests/components/nina/test_binary_sensor.py b/tests/components/nina/test_binary_sensor.py index a7f9a980960..6ed1aee7e9d 100644 --- a/tests/components/nina/test_binary_sensor.py +++ b/tests/components/nina/test_binary_sensor.py @@ -17,6 +17,7 @@ from homeassistant.components.nina.const import ( ATTR_SENT, ATTR_SEVERITY, ATTR_START, + ATTR_WEB, DOMAIN, ) from 
homeassistant.config_entries import ConfigEntryState @@ -77,6 +78,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w1.attributes.get(ATTR_SENDER) == "Deutscher Wetterdienst" assert state_w1.attributes.get(ATTR_SEVERITY) == "Minor" assert state_w1.attributes.get(ATTR_RECOMMENDED_ACTIONS) == "" + assert state_w1.attributes.get(ATTR_WEB) == "https://www.wettergefahren.de" assert ( state_w1.attributes.get(ATTR_AFFECTED_AREAS) == "Gemeinde Oberreichenbach, Gemeinde Neuweiler, Stadt Nagold, Stadt Neubulach, Gemeinde Schömberg, Gemeinde Simmersfeld, Gemeinde Simmozheim, Gemeinde Rohrdorf, Gemeinde Ostelsheim, Gemeinde Ebhausen, Gemeinde Egenhausen, Gemeinde Dobel, Stadt Bad Liebenzell, Stadt Solingen, Stadt Haiterbach, Stadt Bad Herrenalb, Gemeinde Höfen an der Enz, Gemeinde Gechingen, Gemeinde Enzklösterle, Gemeinde Gutach (Schwarzwaldbahn) und 3392 weitere." @@ -98,6 +100,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w2.attributes.get(ATTR_SENDER) is None assert state_w2.attributes.get(ATTR_SEVERITY) is None assert state_w2.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w2.attributes.get(ATTR_WEB) is None assert state_w2.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w2.attributes.get(ATTR_ID) is None assert state_w2.attributes.get(ATTR_SENT) is None @@ -116,6 +119,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w3.attributes.get(ATTR_SENDER) is None assert state_w3.attributes.get(ATTR_SEVERITY) is None assert state_w3.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w3.attributes.get(ATTR_WEB) is None assert state_w3.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w3.attributes.get(ATTR_ID) is None assert state_w3.attributes.get(ATTR_SENT) is None @@ -134,6 +138,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w4.attributes.get(ATTR_SENDER) is None assert state_w4.attributes.get(ATTR_SEVERITY) is None assert state_w4.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w4.attributes.get(ATTR_WEB) is None assert state_w4.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w4.attributes.get(ATTR_ID) is None assert state_w4.attributes.get(ATTR_SENT) is None @@ -152,6 +157,7 @@ async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) assert state_w5.attributes.get(ATTR_SENDER) is None assert state_w5.attributes.get(ATTR_SEVERITY) is None assert state_w5.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w5.attributes.get(ATTR_WEB) is None assert state_w5.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w5.attributes.get(ATTR_ID) is None assert state_w5.attributes.get(ATTR_SENT) is None @@ -199,6 +205,7 @@ async def test_sensors_without_corona_filter( state_w1.attributes.get(ATTR_RECOMMENDED_ACTIONS) == "Waschen sich regelmäßig und gründlich die Hände." 
) + assert state_w1.attributes.get(ATTR_WEB) == "" assert ( state_w1.attributes.get(ATTR_AFFECTED_AREAS) == "Bundesland: Freie Hansestadt Bremen, Land Berlin, Land Hessen, Land Nordrhein-Westfalen, Land Brandenburg, Freistaat Bayern, Land Mecklenburg-Vorpommern, Land Rheinland-Pfalz, Freistaat Sachsen, Land Schleswig-Holstein, Freie und Hansestadt Hamburg, Freistaat Thüringen, Land Niedersachsen, Land Saarland, Land Sachsen-Anhalt, Land Baden-Württemberg" @@ -227,6 +234,7 @@ async def test_sensors_without_corona_filter( assert state_w2.attributes.get(ATTR_SENDER) == "Deutscher Wetterdienst" assert state_w2.attributes.get(ATTR_SEVERITY) == "Minor" assert state_w2.attributes.get(ATTR_RECOMMENDED_ACTIONS) == "" + assert state_w2.attributes.get(ATTR_WEB) == "https://www.wettergefahren.de" assert state_w2.attributes.get(ATTR_ID) == "mow.DE-NW-BN-SE030-20201014-30-000" assert state_w2.attributes.get(ATTR_SENT) == "2021-10-11T05:20:00+01:00" assert state_w2.attributes.get(ATTR_START) == "2021-11-01T05:20:00+01:00" @@ -244,6 +252,7 @@ async def test_sensors_without_corona_filter( assert state_w3.attributes.get(ATTR_SENDER) is None assert state_w3.attributes.get(ATTR_SEVERITY) is None assert state_w3.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w3.attributes.get(ATTR_WEB) is None assert state_w3.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w3.attributes.get(ATTR_ID) is None assert state_w3.attributes.get(ATTR_SENT) is None @@ -262,6 +271,7 @@ async def test_sensors_without_corona_filter( assert state_w4.attributes.get(ATTR_SENDER) is None assert state_w4.attributes.get(ATTR_SEVERITY) is None assert state_w4.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w4.attributes.get(ATTR_WEB) is None assert state_w4.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w4.attributes.get(ATTR_ID) is None assert state_w4.attributes.get(ATTR_SENT) is None @@ -280,6 +290,7 @@ async def test_sensors_without_corona_filter( assert state_w5.attributes.get(ATTR_SENDER) is None assert state_w5.attributes.get(ATTR_SEVERITY) is None assert state_w5.attributes.get(ATTR_RECOMMENDED_ACTIONS) is None + assert state_w5.attributes.get(ATTR_WEB) is None assert state_w5.attributes.get(ATTR_AFFECTED_AREAS) is None assert state_w5.attributes.get(ATTR_ID) is None assert state_w5.attributes.get(ATTR_SENT) is None diff --git a/tests/components/nina/test_config_flow.py b/tests/components/nina/test_config_flow.py index cd0904b181d..309c8860c20 100644 --- a/tests/components/nina/test_config_flow.py +++ b/tests/components/nina/test_config_flow.py @@ -8,7 +8,6 @@ from typing import Any from unittest.mock import patch from pynina import ApiError -import pytest from homeassistant.components.nina.const import ( CONF_AREA_FILTER, @@ -279,10 +278,6 @@ async def test_options_flow_connection_error(hass: HomeAssistant) -> None: assert result["errors"] == {"base": "cannot_connect"} -@pytest.mark.parametrize( # Remove when translations fixed - "ignore_translations", - ["component.nina.options.error.unknown"], -) async def test_options_flow_unexpected_exception(hass: HomeAssistant) -> None: """Test config flow options but with an unexpected exception.""" config_entry = MockConfigEntry( diff --git a/tests/components/nordpool/__init__.py b/tests/components/nordpool/__init__.py new file mode 100644 index 00000000000..20d74d38486 --- /dev/null +++ b/tests/components/nordpool/__init__.py @@ -0,0 +1,9 @@ +"""Tests for the Nord Pool integration.""" + +from homeassistant.components.nordpool.const import 
CONF_AREAS +from homeassistant.const import CONF_CURRENCY + +ENTRY_CONFIG = { + CONF_AREAS: ["SE3", "SE4"], + CONF_CURRENCY: "SEK", +} diff --git a/tests/components/nordpool/conftest.py b/tests/components/nordpool/conftest.py new file mode 100644 index 00000000000..d1c1972c568 --- /dev/null +++ b/tests/components/nordpool/conftest.py @@ -0,0 +1,75 @@ +"""Fixtures for the Nord Pool integration.""" + +from __future__ import annotations + +from datetime import datetime +import json +from typing import Any +from unittest.mock import patch + +from pynordpool import NordPoolClient +from pynordpool.const import Currency +from pynordpool.model import DeliveryPeriodData +import pytest + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.util import dt as dt_util + +from . import ENTRY_CONFIG + +from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.fixture +async def load_int( + hass: HomeAssistant, get_data: DeliveryPeriodData +) -> MockConfigEntry: + """Set up the Nord Pool integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry + + +@pytest.fixture(name="get_data") +async def get_data_from_library( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, load_json: dict[str, Any] +) -> DeliveryPeriodData: + """Retrieve data from Nord Pool library.""" + + client = NordPoolClient(aioclient_mock.create_session(hass.loop)) + with patch("pynordpool.NordPoolClient._get", return_value=load_json): + output = await client.async_get_delivery_period( + datetime(2024, 11, 5, 13, tzinfo=dt_util.UTC), Currency.SEK, ["SE3", "SE4"] + ) + await client._session.close() + return output + + +@pytest.fixture(name="load_json") +def load_json_from_fixture(load_data: str) -> dict[str, Any]: + """Load fixture with json data and return.""" + return json.loads(load_data) + + +@pytest.fixture(name="load_data", scope="package") +def load_data_from_fixture() -> str: + """Load fixture with fixture data and return.""" + return load_fixture("delivery_period.json", DOMAIN) diff --git a/tests/components/nordpool/fixtures/delivery_period.json b/tests/components/nordpool/fixtures/delivery_period.json new file mode 100644 index 00000000000..77d51dc9433 --- /dev/null +++ b/tests/components/nordpool/fixtures/delivery_period.json @@ -0,0 +1,272 @@ +{ + "deliveryDateCET": "2024-11-05", + "version": 3, + "updatedAt": "2024-11-04T12:15:03.9456464Z", + "deliveryAreas": ["SE3", "SE4"], + "market": "DayAhead", + "multiAreaEntries": [ + { + "deliveryStart": "2024-11-04T23:00:00Z", + "deliveryEnd": "2024-11-05T00:00:00Z", + "entryPerArea": { + "SE3": 250.73, + "SE4": 283.79 + } + }, + { + "deliveryStart": "2024-11-05T00:00:00Z", + "deliveryEnd": "2024-11-05T01:00:00Z", + "entryPerArea": { + "SE3": 76.36, + "SE4": 81.36 + } + }, + { + "deliveryStart": "2024-11-05T01:00:00Z", + "deliveryEnd": "2024-11-05T02:00:00Z", + "entryPerArea": { + "SE3": 73.92, + "SE4": 79.15 + } + }, + { + "deliveryStart": "2024-11-05T02:00:00Z", + "deliveryEnd": "2024-11-05T03:00:00Z", + 
"entryPerArea": { + "SE3": 61.69, + "SE4": 65.19 + } + }, + { + "deliveryStart": "2024-11-05T03:00:00Z", + "deliveryEnd": "2024-11-05T04:00:00Z", + "entryPerArea": { + "SE3": 64.6, + "SE4": 68.44 + } + }, + { + "deliveryStart": "2024-11-05T04:00:00Z", + "deliveryEnd": "2024-11-05T05:00:00Z", + "entryPerArea": { + "SE3": 453.27, + "SE4": 516.71 + } + }, + { + "deliveryStart": "2024-11-05T05:00:00Z", + "deliveryEnd": "2024-11-05T06:00:00Z", + "entryPerArea": { + "SE3": 996.28, + "SE4": 1240.85 + } + }, + { + "deliveryStart": "2024-11-05T06:00:00Z", + "deliveryEnd": "2024-11-05T07:00:00Z", + "entryPerArea": { + "SE3": 1406.14, + "SE4": 1648.25 + } + }, + { + "deliveryStart": "2024-11-05T07:00:00Z", + "deliveryEnd": "2024-11-05T08:00:00Z", + "entryPerArea": { + "SE3": 1346.54, + "SE4": 1570.5 + } + }, + { + "deliveryStart": "2024-11-05T08:00:00Z", + "deliveryEnd": "2024-11-05T09:00:00Z", + "entryPerArea": { + "SE3": 1150.28, + "SE4": 1345.37 + } + }, + { + "deliveryStart": "2024-11-05T09:00:00Z", + "deliveryEnd": "2024-11-05T10:00:00Z", + "entryPerArea": { + "SE3": 1031.32, + "SE4": 1206.51 + } + }, + { + "deliveryStart": "2024-11-05T10:00:00Z", + "deliveryEnd": "2024-11-05T11:00:00Z", + "entryPerArea": { + "SE3": 927.37, + "SE4": 1085.8 + } + }, + { + "deliveryStart": "2024-11-05T11:00:00Z", + "deliveryEnd": "2024-11-05T12:00:00Z", + "entryPerArea": { + "SE3": 925.05, + "SE4": 1081.72 + } + }, + { + "deliveryStart": "2024-11-05T12:00:00Z", + "deliveryEnd": "2024-11-05T13:00:00Z", + "entryPerArea": { + "SE3": 949.49, + "SE4": 1130.38 + } + }, + { + "deliveryStart": "2024-11-05T13:00:00Z", + "deliveryEnd": "2024-11-05T14:00:00Z", + "entryPerArea": { + "SE3": 1042.03, + "SE4": 1256.91 + } + }, + { + "deliveryStart": "2024-11-05T14:00:00Z", + "deliveryEnd": "2024-11-05T15:00:00Z", + "entryPerArea": { + "SE3": 1258.89, + "SE4": 1765.82 + } + }, + { + "deliveryStart": "2024-11-05T15:00:00Z", + "deliveryEnd": "2024-11-05T16:00:00Z", + "entryPerArea": { + "SE3": 1816.45, + "SE4": 2522.55 + } + }, + { + "deliveryStart": "2024-11-05T16:00:00Z", + "deliveryEnd": "2024-11-05T17:00:00Z", + "entryPerArea": { + "SE3": 2512.65, + "SE4": 3533.03 + } + }, + { + "deliveryStart": "2024-11-05T17:00:00Z", + "deliveryEnd": "2024-11-05T18:00:00Z", + "entryPerArea": { + "SE3": 1819.83, + "SE4": 2524.06 + } + }, + { + "deliveryStart": "2024-11-05T18:00:00Z", + "deliveryEnd": "2024-11-05T19:00:00Z", + "entryPerArea": { + "SE3": 1011.77, + "SE4": 1804.46 + } + }, + { + "deliveryStart": "2024-11-05T19:00:00Z", + "deliveryEnd": "2024-11-05T20:00:00Z", + "entryPerArea": { + "SE3": 835.53, + "SE4": 1112.57 + } + }, + { + "deliveryStart": "2024-11-05T20:00:00Z", + "deliveryEnd": "2024-11-05T21:00:00Z", + "entryPerArea": { + "SE3": 796.19, + "SE4": 1051.69 + } + }, + { + "deliveryStart": "2024-11-05T21:00:00Z", + "deliveryEnd": "2024-11-05T22:00:00Z", + "entryPerArea": { + "SE3": 522.3, + "SE4": 662.44 + } + }, + { + "deliveryStart": "2024-11-05T22:00:00Z", + "deliveryEnd": "2024-11-05T23:00:00Z", + "entryPerArea": { + "SE3": 289.14, + "SE4": 349.21 + } + } + ], + "blockPriceAggregates": [ + { + "blockName": "Off-peak 1", + "deliveryStart": "2024-11-04T23:00:00Z", + "deliveryEnd": "2024-11-05T07:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 422.87, + "min": 61.69, + "max": 1406.14 + }, + "SE4": { + "average": 497.97, + "min": 65.19, + "max": 1648.25 + } + } + }, + { + "blockName": "Peak", + "deliveryStart": "2024-11-05T07:00:00Z", + "deliveryEnd": "2024-11-05T19:00:00Z", + "averagePricePerArea": { + "SE3": { + 
"average": 1315.97, + "min": 925.05, + "max": 2512.65 + }, + "SE4": { + "average": 1735.59, + "min": 1081.72, + "max": 3533.03 + } + } + }, + { + "blockName": "Off-peak 2", + "deliveryStart": "2024-11-05T19:00:00Z", + "deliveryEnd": "2024-11-05T23:00:00Z", + "averagePricePerArea": { + "SE3": { + "average": 610.79, + "min": 289.14, + "max": 835.53 + }, + "SE4": { + "average": 793.98, + "min": 349.21, + "max": 1112.57 + } + } + } + ], + "currency": "SEK", + "exchangeRate": 11.6402, + "areaStates": [ + { + "state": "Final", + "areas": ["SE3", "SE4"] + } + ], + "areaAverages": [ + { + "areaCode": "SE3", + "price": 900.74 + }, + { + "areaCode": "SE4", + "price": 1166.12 + } + ] +} diff --git a/tests/components/nordpool/snapshots/test_diagnostics.ambr b/tests/components/nordpool/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..dde2eca0022 --- /dev/null +++ b/tests/components/nordpool/snapshots/test_diagnostics.ambr @@ -0,0 +1,283 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'raw': dict({ + 'areaAverages': list([ + dict({ + 'areaCode': 'SE3', + 'price': 900.74, + }), + dict({ + 'areaCode': 'SE4', + 'price': 1166.12, + }), + ]), + 'areaStates': list([ + dict({ + 'areas': list([ + 'SE3', + 'SE4', + ]), + 'state': 'Final', + }), + ]), + 'blockPriceAggregates': list([ + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 422.87, + 'max': 1406.14, + 'min': 61.69, + }), + 'SE4': dict({ + 'average': 497.97, + 'max': 1648.25, + 'min': 65.19, + }), + }), + 'blockName': 'Off-peak 1', + 'deliveryEnd': '2024-11-05T07:00:00Z', + 'deliveryStart': '2024-11-04T23:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 1315.97, + 'max': 2512.65, + 'min': 925.05, + }), + 'SE4': dict({ + 'average': 1735.59, + 'max': 3533.03, + 'min': 1081.72, + }), + }), + 'blockName': 'Peak', + 'deliveryEnd': '2024-11-05T19:00:00Z', + 'deliveryStart': '2024-11-05T07:00:00Z', + }), + dict({ + 'averagePricePerArea': dict({ + 'SE3': dict({ + 'average': 610.79, + 'max': 835.53, + 'min': 289.14, + }), + 'SE4': dict({ + 'average': 793.98, + 'max': 1112.57, + 'min': 349.21, + }), + }), + 'blockName': 'Off-peak 2', + 'deliveryEnd': '2024-11-05T23:00:00Z', + 'deliveryStart': '2024-11-05T19:00:00Z', + }), + ]), + 'currency': 'SEK', + 'deliveryAreas': list([ + 'SE3', + 'SE4', + ]), + 'deliveryDateCET': '2024-11-05', + 'exchangeRate': 11.6402, + 'market': 'DayAhead', + 'multiAreaEntries': list([ + dict({ + 'deliveryEnd': '2024-11-05T00:00:00Z', + 'deliveryStart': '2024-11-04T23:00:00Z', + 'entryPerArea': dict({ + 'SE3': 250.73, + 'SE4': 283.79, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T01:00:00Z', + 'deliveryStart': '2024-11-05T00:00:00Z', + 'entryPerArea': dict({ + 'SE3': 76.36, + 'SE4': 81.36, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T02:00:00Z', + 'deliveryStart': '2024-11-05T01:00:00Z', + 'entryPerArea': dict({ + 'SE3': 73.92, + 'SE4': 79.15, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T03:00:00Z', + 'deliveryStart': '2024-11-05T02:00:00Z', + 'entryPerArea': dict({ + 'SE3': 61.69, + 'SE4': 65.19, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T04:00:00Z', + 'deliveryStart': '2024-11-05T03:00:00Z', + 'entryPerArea': dict({ + 'SE3': 64.6, + 'SE4': 68.44, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T05:00:00Z', + 'deliveryStart': '2024-11-05T04:00:00Z', + 'entryPerArea': dict({ + 'SE3': 453.27, + 'SE4': 516.71, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T06:00:00Z', + 'deliveryStart': '2024-11-05T05:00:00Z', + 
'entryPerArea': dict({ + 'SE3': 996.28, + 'SE4': 1240.85, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T07:00:00Z', + 'deliveryStart': '2024-11-05T06:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1406.14, + 'SE4': 1648.25, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T08:00:00Z', + 'deliveryStart': '2024-11-05T07:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1346.54, + 'SE4': 1570.5, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T09:00:00Z', + 'deliveryStart': '2024-11-05T08:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1150.28, + 'SE4': 1345.37, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T10:00:00Z', + 'deliveryStart': '2024-11-05T09:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1031.32, + 'SE4': 1206.51, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T11:00:00Z', + 'deliveryStart': '2024-11-05T10:00:00Z', + 'entryPerArea': dict({ + 'SE3': 927.37, + 'SE4': 1085.8, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T12:00:00Z', + 'deliveryStart': '2024-11-05T11:00:00Z', + 'entryPerArea': dict({ + 'SE3': 925.05, + 'SE4': 1081.72, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T13:00:00Z', + 'deliveryStart': '2024-11-05T12:00:00Z', + 'entryPerArea': dict({ + 'SE3': 949.49, + 'SE4': 1130.38, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T14:00:00Z', + 'deliveryStart': '2024-11-05T13:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1042.03, + 'SE4': 1256.91, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T15:00:00Z', + 'deliveryStart': '2024-11-05T14:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1258.89, + 'SE4': 1765.82, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T16:00:00Z', + 'deliveryStart': '2024-11-05T15:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1816.45, + 'SE4': 2522.55, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T17:00:00Z', + 'deliveryStart': '2024-11-05T16:00:00Z', + 'entryPerArea': dict({ + 'SE3': 2512.65, + 'SE4': 3533.03, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T18:00:00Z', + 'deliveryStart': '2024-11-05T17:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1819.83, + 'SE4': 2524.06, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T19:00:00Z', + 'deliveryStart': '2024-11-05T18:00:00Z', + 'entryPerArea': dict({ + 'SE3': 1011.77, + 'SE4': 1804.46, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T20:00:00Z', + 'deliveryStart': '2024-11-05T19:00:00Z', + 'entryPerArea': dict({ + 'SE3': 835.53, + 'SE4': 1112.57, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T21:00:00Z', + 'deliveryStart': '2024-11-05T20:00:00Z', + 'entryPerArea': dict({ + 'SE3': 796.19, + 'SE4': 1051.69, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T22:00:00Z', + 'deliveryStart': '2024-11-05T21:00:00Z', + 'entryPerArea': dict({ + 'SE3': 522.3, + 'SE4': 662.44, + }), + }), + dict({ + 'deliveryEnd': '2024-11-05T23:00:00Z', + 'deliveryStart': '2024-11-05T22:00:00Z', + 'entryPerArea': dict({ + 'SE3': 289.14, + 'SE4': 349.21, + }), + }), + ]), + 'updatedAt': '2024-11-04T12:15:03.9456464Z', + 'version': 3, + }), + }) +# --- diff --git a/tests/components/nordpool/snapshots/test_sensor.ambr b/tests/components/nordpool/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..01600352861 --- /dev/null +++ b/tests/components/nordpool/snapshots/test_sensor.ambr @@ -0,0 +1,2215 @@ +# serializer version: 1 +# name: test_sensor[sensor.nord_pool_se3_currency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 
'entity_id': 'sensor.nord_pool_se3_currency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Currency', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'currency', + 'unique_id': 'SE3-currency', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_currency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Currency', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_currency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'SEK', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_current_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_current_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_price', + 'unique_id': 'SE3-current_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_current_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Current price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_current_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.01177', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_daily_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_daily_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_average', + 'unique_id': 'SE3-daily_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_daily_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Daily average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_daily_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.90074', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_exchange_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se3_exchange_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Exchange rate', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'exchange_rate', + 'unique_id': 'SE3-exchange_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_exchange_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Exchange rate', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_exchange_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.6402', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_last_updated-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se3_last_updated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last updated', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'updated_at', + 'unique_id': 'SE3-updated_at', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_last_updated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Last updated', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_last_updated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T12:15:03+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_next_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_next_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Next price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_price', + 'unique_id': 'SE3-next_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_next_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Next price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_next_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.83553', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_1-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.42287', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_1-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.40614', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_1-SE3-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 1 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 
'sensor.nord_pool_se3_off_peak_1_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06169', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_1-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 1 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_1-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_1_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 1 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_1_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_2-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: 
test_sensor[sensor.nord_pool_se3_off_peak_2_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.61079', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_2-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.83553', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_2-SE3-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Off-peak 2 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.28914', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_2-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 2 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_2-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_off_peak_2_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Off-peak 2 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_off_peak_2_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'peak-SE3-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.31597', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.nord_pool_se3_peak_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'peak-SE3-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.51265', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'peak-SE3-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Peak lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.92505', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'peak-SE3-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Peak time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_peak_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'peak-SE3-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_peak_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE3 Peak time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_peak_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_previous_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se3_previous_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previous price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_price', + 'unique_id': 'SE3-last_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se3_previous_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE3 Previous price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se3_previous_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.81983', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_currency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se4_currency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Currency', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'currency', + 'unique_id': 'SE4-currency', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_currency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Currency', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_currency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'SEK', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_current_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_current_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Current price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_price', + 'unique_id': 'SE4-current_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_current_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Current price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_current_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.80446', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_daily_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_daily_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Daily average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_average', + 'unique_id': 'SE4-daily_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_daily_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Daily average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_daily_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.16612', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_exchange_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se4_exchange_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Exchange rate', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'exchange_rate', + 'unique_id': 'SE4-exchange_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_exchange_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Exchange rate', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_exchange_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.6402', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_last_updated-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.nord_pool_se4_last_updated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last updated', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'updated_at', + 'unique_id': 'SE4-updated_at', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_last_updated-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Last updated', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_last_updated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T12:15:03+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_next_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_next_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Next price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'next_price', + 'unique_id': 'SE4-next_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_next_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Next price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_next_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.11257', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_1-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '0.49797', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_1-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.64825', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 1 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_1-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 1 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.06519', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_1-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[sensor.nord_pool_se4_off_peak_1_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 1 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-04T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 1 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_1-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_1_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 1 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_1_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'off_peak_2-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.79398', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, 
+ 'original_icon': None, + 'original_name': 'Off-peak 2 highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'off_peak_2-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.11257', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Off-peak 2 lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'off_peak_2-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Off-peak 2 lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.34921', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'off_peak_2-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 2 time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, 
+ 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off-peak 2 time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'off_peak_2-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_off_peak_2_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Off-peak 2 time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_off_peak_2_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T23:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_average-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_average', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak average', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_average', + 'unique_id': 'peak-SE4-block_average', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_average-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Peak average', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_average', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.73559', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak highest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_max', + 'unique_id': 'peak-SE4-block_max', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_highest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Peak highest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_highest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.53303', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Peak lowest price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_min', + 'unique_id': 'peak-SE4-block_min', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_lowest_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Peak lowest price', + 'state_class': , + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_lowest_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.08172', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_from-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_time_from', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time from', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_start_time', + 'unique_id': 'peak-SE4-block_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_from-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Peak time from', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_time_from', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-11-05T07:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_until-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_peak_time_until', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Peak time until', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'block_end_time', + 'unique_id': 'peak-SE4-block_end_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_peak_time_until-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Nord Pool SE4 Peak time until', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_peak_time_until', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'2024-11-05T19:00:00+00:00', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_previous_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.nord_pool_se4_previous_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previous price', + 'platform': 'nordpool', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_price', + 'unique_id': 'SE4-last_price', + 'unit_of_measurement': 'SEK/kWh', + }) +# --- +# name: test_sensor[sensor.nord_pool_se4_previous_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Nord Pool SE4 Previous price', + 'unit_of_measurement': 'SEK/kWh', + }), + 'context': , + 'entity_id': 'sensor.nord_pool_se4_previous_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.52406', + }) +# --- diff --git a/tests/components/nordpool/test_config_flow.py b/tests/components/nordpool/test_config_flow.py new file mode 100644 index 00000000000..cfdfc63aca7 --- /dev/null +++ b/tests/components/nordpool/test_config_flow.py @@ -0,0 +1,206 @@ +"""Test the Nord Pool config flow.""" + +from __future__ import annotations + +from unittest.mock import patch + +from pynordpool import ( + DeliveryPeriodData, + NordPoolConnectionError, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) +import pytest + +from homeassistant import config_entries +from homeassistant.components.nordpool.const import CONF_AREAS, DOMAIN +from homeassistant.const import CONF_CURRENCY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from . 
import ENTRY_CONFIG + +from tests.common import MockConfigEntry + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_form(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + ENTRY_CONFIG, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["title"] == "Nord Pool" + assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_single_config_entry( + hass: HomeAssistant, load_int: None, get_data: DeliveryPeriodData +) -> None: + """Test abort for single config entry.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.parametrize( + ("error_message", "p_error"), + [ + (NordPoolConnectionError, "cannot_connect"), + (NordPoolEmptyResponseError, "no_data"), + (NordPoolError, "cannot_connect"), + (NordPoolResponseError, "cannot_connect"), + ], +) +async def test_cannot_connect( + hass: HomeAssistant, + get_data: DeliveryPeriodData, + error_message: Exception, + p_error: str, +) -> None: + """Test cannot connect error.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == config_entries.SOURCE_USER + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error_message, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=ENTRY_CONFIG, + ) + + assert result["errors"] == {"base": p_error} + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=ENTRY_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Nord Pool" + assert result["data"] == {"areas": ["SE3", "SE4"], "currency": "SEK"} + + +@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +async def test_reconfigure( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, +) -> None: + """Test reconfiguration.""" + + result = await load_int.start_reconfigure_flow(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert load_int.data == { + "areas": [ + "SE3", + ], + "currency": "EUR", + } + + 
+@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00") +@pytest.mark.parametrize( + ("error_message", "p_error"), + [ + (NordPoolConnectionError, "cannot_connect"), + (NordPoolEmptyResponseError, "no_data"), + (NordPoolError, "cannot_connect"), + (NordPoolResponseError, "cannot_connect"), + ], +) +async def test_reconfigure_cannot_connect( + hass: HomeAssistant, + load_int: MockConfigEntry, + get_data: DeliveryPeriodData, + error_message: Exception, + p_error: str, +) -> None: + """Test cannot connect error in a reconfigure flow.""" + + result = await load_int.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + side_effect=error_message, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["errors"] == {"base": p_error} + + with patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + return_value=get_data, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_AREAS: ["SE3"], + CONF_CURRENCY: "EUR", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert load_int.data == { + "areas": [ + "SE3", + ], + "currency": "EUR", + } diff --git a/tests/components/nordpool/test_coordinator.py b/tests/components/nordpool/test_coordinator.py new file mode 100644 index 00000000000..d2d912b1b99 --- /dev/null +++ b/tests/components/nordpool/test_coordinator.py @@ -0,0 +1,106 @@ +"""The test for the Nord Pool coordinator.""" + +from __future__ import annotations + +from datetime import timedelta +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from pynordpool import ( + DeliveryPeriodData, + NordPoolAuthenticationError, + NordPoolEmptyResponseError, + NordPoolError, + NordPoolResponseError, +) +import pytest + +from homeassistant.components.nordpool.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . 
import ENTRY_CONFIG + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.freeze_time("2024-11-05T10:00:00+00:00") +async def test_coordinator( + hass: HomeAssistant, + get_data: DeliveryPeriodData, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the Nord Pool coordinator with errors.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + ) + + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period", + ) as mock_data, + ): + mock_data.return_value = get_data + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "0.92737" + mock_data.reset_mock() + + mock_data.side_effect = NordPoolError("error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + mock_data.reset_mock() + + assert "Authentication error" not in caplog.text + mock_data.side_effect = NordPoolAuthenticationError("Authentication error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Authentication error" in caplog.text + mock_data.reset_mock() + + assert "Empty response" not in caplog.text + mock_data.side_effect = NordPoolEmptyResponseError("Empty response") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Empty response" in caplog.text + mock_data.reset_mock() + + assert "Response error" not in caplog.text + mock_data.side_effect = NordPoolResponseError("Response error") + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == STATE_UNAVAILABLE + assert "Response error" in caplog.text + mock_data.reset_mock() + + mock_data.return_value = get_data + mock_data.side_effect = None + freezer.tick(timedelta(hours=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_data.assert_called_once() + state = hass.states.get("sensor.nord_pool_se3_current_price") + assert state.state == "1.81645" diff --git a/tests/components/nordpool/test_diagnostics.py b/tests/components/nordpool/test_diagnostics.py new file mode 100644 index 00000000000..4639186ecf1 --- /dev/null +++ b/tests/components/nordpool/test_diagnostics.py @@ -0,0 +1,23 @@ +"""Test Nord Pool diagnostics.""" + +from __future__ import annotations + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async 
def test_diagnostics(
+    hass: HomeAssistant,
+    hass_client: ClientSessionGenerator,
+    load_int: ConfigEntry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test generating diagnostics for a config entry."""
+    assert (
+        await get_diagnostics_for_config_entry(hass, hass_client, load_int) == snapshot
+    )
diff --git a/tests/components/nordpool/test_init.py b/tests/components/nordpool/test_init.py
new file mode 100644
index 00000000000..ebebb8b60c1
--- /dev/null
+++ b/tests/components/nordpool/test_init.py
@@ -0,0 +1,78 @@
+"""Test for Nord Pool component Init."""
+
+from __future__ import annotations
+
+from unittest.mock import patch
+
+from pynordpool import (
+    DeliveryPeriodData,
+    NordPoolConnectionError,
+    NordPoolEmptyResponseError,
+    NordPoolError,
+    NordPoolResponseError,
+)
+import pytest
+
+from homeassistant.components.nordpool.const import DOMAIN
+from homeassistant.config_entries import SOURCE_USER, ConfigEntryState
+from homeassistant.core import HomeAssistant
+
+from . import ENTRY_CONFIG
+
+from tests.common import MockConfigEntry
+
+
+async def test_unload_entry(hass: HomeAssistant, get_data: DeliveryPeriodData) -> None:
+    """Test load and unload an entry."""
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        source=SOURCE_USER,
+        data=ENTRY_CONFIG,
+    )
+    entry.add_to_hass(hass)
+
+    with (
+        patch(
+            "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period",
+            return_value=get_data,
+        ),
+    ):
+        await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert entry.state is ConfigEntryState.LOADED
+    assert await hass.config_entries.async_unload(entry.entry_id)
+    await hass.async_block_till_done()
+    assert entry.state is ConfigEntryState.NOT_LOADED
+
+
+@pytest.mark.parametrize(
+    ("error"),
+    [
+        (NordPoolConnectionError),
+        (NordPoolEmptyResponseError),
+        (NordPoolError),
+        (NordPoolResponseError),
+    ],
+)
+async def test_initial_startup_fails(
+    hass: HomeAssistant, get_data: DeliveryPeriodData, error: Exception
+) -> None:
+    """Test that the config entry enters setup retry when the first refresh fails."""
+    entry = MockConfigEntry(
+        domain=DOMAIN,
+        source=SOURCE_USER,
+        data=ENTRY_CONFIG,
+    )
+    entry.add_to_hass(hass)
+
+    with (
+        patch(
+            "homeassistant.components.nordpool.coordinator.NordPoolClient.async_get_delivery_period",
+            side_effect=error,
+        ),
+    ):
+        await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert entry.state is ConfigEntryState.SETUP_RETRY
diff --git a/tests/components/nordpool/test_sensor.py b/tests/components/nordpool/test_sensor.py
new file mode 100644
index 00000000000..c7a305c8a40
--- /dev/null
+++ b/tests/components/nordpool/test_sensor.py
@@ -0,0 +1,25 @@
+"""The test for the Nord Pool sensor platform."""
+
+from __future__ import annotations
+
+import pytest
+from syrupy.assertion import SnapshotAssertion
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from tests.common import snapshot_platform
+
+
+@pytest.mark.freeze_time("2024-11-05T18:00:00+00:00")
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+async def test_sensor(
+    hass: HomeAssistant,
+    load_int: ConfigEntry,
+    entity_registry: er.EntityRegistry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test the Nord Pool sensor."""
+
+    await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id)
diff --git 
a/tests/components/number/test_init.py b/tests/components/number/test_init.py
index 721b531e8cd..31d99dc55d7 100644
--- a/tests/components/number/test_init.py
+++ b/tests/components/number/test_init.py
@@ -2,7 +2,7 @@
 
 from collections.abc import Generator
 from typing import Any
-from unittest.mock import MagicMock
+from unittest.mock import MagicMock, patch
 
 import pytest
 
@@ -836,6 +836,69 @@ async def test_custom_unit_change(
 
     assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == default_unit
 
 
+async def test_translated_unit(
+    hass: HomeAssistant,
+) -> None:
+    """Test translated unit."""
+
+    with patch(
+        "homeassistant.helpers.service.translation.async_get_translations",
+        return_value={
+            "component.test.entity.number.test_translation_key.unit_of_measurement": "Tests"
+        },
+    ):
+        entity0 = common.MockNumberEntity(
+            name="Test",
+            native_value=123,
+            unique_id="very_unique",
+        )
+        entity0.entity_description = NumberEntityDescription(
+            "test",
+            translation_key="test_translation_key",
+        )
+        setup_test_component_platform(hass, DOMAIN, [entity0])
+
+        assert await async_setup_component(
+            hass, "number", {"number": {"platform": "test"}}
+        )
+        await hass.async_block_till_done()
+
+        entity_id = entity0.entity_id
+        state = hass.states.get(entity_id)
+        assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == "Tests"
+
+
+async def test_translated_unit_with_native_unit_raises(
+    hass: HomeAssistant,
+) -> None:
+    """Test that setup fails when a translated unit is combined with a native unit."""
+
+    with patch(
+        "homeassistant.helpers.service.translation.async_get_translations",
+        return_value={
+            "component.test.entity.number.test_translation_key.unit_of_measurement": "Tests"
+        },
+    ):
+        entity0 = common.MockNumberEntity(
+            name="Test",
+            native_value=123,
+            unique_id="very_unique",
+        )
+        entity0.entity_description = NumberEntityDescription(
+            "test",
+            translation_key="test_translation_key",
+            native_unit_of_measurement="bad_unit",
+        )
+        setup_test_component_platform(hass, DOMAIN, [entity0])
+
+        assert await async_setup_component(
+            hass, "number", {"number": {"platform": "test"}}
+        )
+        await hass.async_block_till_done()
+        # Setup fails so entity_id is None
+        assert entity0.entity_id is None
+
+
 def test_device_classes_aligned() -> None:
     """Make sure all sensor device classes are also available in NumberDeviceClass."""
diff --git a/tests/components/nut/test_init.py b/tests/components/nut/test_init.py
index 61a5187407b..d5d85daa336 100644
--- a/tests/components/nut/test_init.py
+++ b/tests/components/nut/test_init.py
@@ -8,8 +8,9 @@
 from homeassistant.components.nut.const import DOMAIN
 from homeassistant.config_entries import ConfigEntryState
 from homeassistant.const import CONF_HOST, CONF_PORT, STATE_UNAVAILABLE
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr
 
-from .util import _get_mock_nutclient
+from .util import _get_mock_nutclient, async_init_integration
 
 from tests.common import MockConfigEntry
@@ -96,3 +97,53 @@ async def test_auth_fails(hass: HomeAssistant) -> None:
     flows = hass.config_entries.flow.async_progress()
     assert len(flows) == 1
     assert flows[0]["context"]["source"] == "reauth"
+
+
+async def test_serial_number(hass: HomeAssistant) -> None:
+    """Test for serial number set on device."""
+    mock_serial_number = "A00000000000"
+    await async_init_integration(
+        hass,
+        username="someuser",
+        password="somepassword",
+        list_vars={"ups.serial": mock_serial_number},
+        list_ups={"ups1": "UPS 1"},
+        list_commands_return_value=[],
+    )
+
+    device_registry = dr.async_get(hass)
+    assert 
device_registry is not None + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_serial_number)} + ) + + assert device_entry is not None + assert device_entry.serial_number == mock_serial_number + + +async def test_device_location(hass: HomeAssistant) -> None: + """Test for suggested location on device.""" + mock_serial_number = "A00000000000" + mock_device_location = "XYZ Location" + await async_init_integration( + hass, + username="someuser", + password="somepassword", + list_vars={ + "ups.serial": mock_serial_number, + "device.location": mock_device_location, + }, + list_ups={"ups1": "UPS 1"}, + list_commands_return_value=[], + ) + + device_registry = dr.async_get(hass) + assert device_registry is not None + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_serial_number)} + ) + + assert device_entry is not None + assert device_entry.suggested_area == mock_device_location diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index 6df3951249b..35f6b7d739c 100644 --- a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -72,23 +72,11 @@ async def mock_supervisor_fixture( aioclient_mock: AiohttpClientMocker, store_info: AsyncMock, supervisor_is_connected: AsyncMock, + resolution_info: AsyncMock, ) -> AsyncGenerator[None]: """Mock supervisor.""" aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"}) aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"}) - aioclient_mock.get( - "http://127.0.0.1/resolution/info", - json={ - "result": "ok", - "data": { - "unsupported": [], - "unhealthy": [], - "suggestions": [], - "issues": [], - "checks": [], - }, - }, - ) aioclient_mock.get( "http://127.0.0.1/network/info", json={ diff --git a/tests/components/p1_monitor/test_init.py b/tests/components/p1_monitor/test_init.py index 20714740385..3b7426051d4 100644 --- a/tests/components/p1_monitor/test_init.py +++ b/tests/components/p1_monitor/test_init.py @@ -26,7 +26,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/palazzetti/conftest.py b/tests/components/palazzetti/conftest.py index 33dca845098..ec58afc324a 100644 --- a/tests/components/palazzetti/conftest.py +++ b/tests/components/palazzetti/conftest.py @@ -1,12 +1,14 @@ """Fixtures for Palazzetti integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch +from pypalazzetti.temperature import TemperatureDefinition, TemperatureDescriptionKey import pytest from homeassistant.components.palazzetti.const import DOMAIN from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -50,18 +52,32 @@ def mock_palazzetti_client() -> Generator[AsyncMock]: mock_client.name = "Stove" mock_client.sw_version = "0.0.0" mock_client.hw_version = "1.1.1" + mock_client.to_dict.return_value = { + "host": "XXXXXXXXXX", + "connected": True, + "properties": {}, + "attributes": {}, + } mock_client.fan_speed_min = 1 mock_client.fan_speed_max = 5 mock_client.has_fan_silent = True mock_client.has_fan_high = True mock_client.has_fan_auto = True mock_client.has_on_off_switch = True + 
mock_client.has_pellet_level = False mock_client.connected = True mock_client.is_heating = True mock_client.room_temperature = 18 + mock_client.T1 = 21.5 + mock_client.T2 = 25.1 + mock_client.T3 = 45 + mock_client.T4 = 0 + mock_client.T5 = 0 mock_client.target_temperature = 21 mock_client.target_temperature_min = 5 mock_client.target_temperature_max = 50 + mock_client.pellet_quantity = 1248 + mock_client.pellet_level = 0 mock_client.fan_speed = 3 mock_client.connect.return_value = True mock_client.update_state.return_value = True @@ -71,4 +87,48 @@ def mock_palazzetti_client() -> Generator[AsyncMock]: mock_client.set_fan_silent.return_value = True mock_client.set_fan_high.return_value = True mock_client.set_fan_auto.return_value = True + mock_client.list_temperatures.return_value = [ + TemperatureDefinition( + description_key=TemperatureDescriptionKey.ROOM_TEMP, + state_property="T1", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.RETURN_WATER_TEMP, + state_property="T4", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.TANK_WATER_TEMP, + state_property="T5", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.WOOD_COMBUSTION_TEMP, + state_property="T3", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.AIR_OUTLET_TEMP, + state_property="T2", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.T1_HYDRO_TEMP, + state_property="T1", + ), + TemperatureDefinition( + description_key=TemperatureDescriptionKey.T2_HYDRO_TEMP, + state_property="T2", + ), + ] yield mock_client + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_palazzetti_client: MagicMock, +) -> MockConfigEntry: + """Set up the Palazzetti integration for testing.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry diff --git a/tests/components/palazzetti/snapshots/test_climate.ambr b/tests/components/palazzetti/snapshots/test_climate.ambr index eb3b323272e..e7cea3749a1 100644 --- a/tests/components/palazzetti/snapshots/test_climate.ambr +++ b/tests/components/palazzetti/snapshots/test_climate.ambr @@ -66,6 +66,7 @@ 'auto', ]), 'friendly_name': 'Stove', + 'hvac_action': , 'hvac_modes': list([ , , diff --git a/tests/components/palazzetti/snapshots/test_diagnostics.ambr b/tests/components/palazzetti/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e3f2d7430e5 --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_diagnostics.ambr @@ -0,0 +1,13 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'api_data': dict({ + 'attributes': dict({ + }), + 'connected': True, + 'host': 'XXXXXXXXXX', + 'properties': dict({ + }), + }), + }) +# --- diff --git a/tests/components/palazzetti/snapshots/test_sensor.ambr b/tests/components/palazzetti/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..107b818f195 --- /dev/null +++ b/tests/components/palazzetti/snapshots/test_sensor.ambr @@ -0,0 +1,409 @@ +# serializer version: 1 +# name: test_all_entities[sensor.stove_air_outlet_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.stove_air_outlet_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Air outlet temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_outlet_temperature', + 'unique_id': '11:22:33:44:55:66-air_outlet_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_air_outlet_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Air outlet temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_air_outlet_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.1', + }) +# --- +# name: test_all_entities[sensor.stove_hydro_temperature_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_hydro_temperature_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hydro temperature 1', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 't1_hydro', + 'unique_id': '11:22:33:44:55:66-t1_hydro', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_hydro_temperature_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Hydro temperature 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_hydro_temperature_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.5', + }) +# --- +# name: test_all_entities[sensor.stove_hydro_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_hydro_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Hydro temperature 2', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 't2_hydro', + 'unique_id': '11:22:33:44:55:66-t2_hydro', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_hydro_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Hydro temperature 2', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_hydro_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.1', + }) +# --- +# name: test_all_entities[sensor.stove_pellet_quantity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': 
, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_pellet_quantity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pellet quantity', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pellet_quantity', + 'unique_id': '11:22:33:44:55:66-pellet_quantity', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_pellet_quantity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'weight', + 'friendly_name': 'Stove Pellet quantity', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_pellet_quantity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1248', + }) +# --- +# name: test_all_entities[sensor.stove_return_water_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_return_water_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Return water temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'return_water_temperature', + 'unique_id': '11:22:33:44:55:66-return_water_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_return_water_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Return water temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_return_water_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.stove_room_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_room_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'room_temperature', + 'unique_id': '11:22:33:44:55:66-room_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_room_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Room temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_room_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.5', + }) +# --- +# name: 
test_all_entities[sensor.stove_tank_water_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_tank_water_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tank water temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tank_water_temperature', + 'unique_id': '11:22:33:44:55:66-tank_water_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_tank_water_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Tank water temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_tank_water_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.stove_wood_combustion_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stove_wood_combustion_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wood combustion temperature', + 'platform': 'palazzetti', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wood_combustion_temperature', + 'unique_id': '11:22:33:44:55:66-wood_combustion_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.stove_wood_combustion_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Stove Wood combustion temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stove_wood_combustion_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45', + }) +# --- diff --git a/tests/components/palazzetti/test_config_flow.py b/tests/components/palazzetti/test_config_flow.py index 960ad7a1184..03c56c33d0c 100644 --- a/tests/components/palazzetti/test_config_flow.py +++ b/tests/components/palazzetti/test_config_flow.py @@ -4,8 +4,9 @@ from unittest.mock import AsyncMock from pypalazzetti.exceptions import CommunicationError +from homeassistant.components import dhcp from homeassistant.components.palazzetti.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -92,3 +93,48 @@ async def test_duplicate( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_dhcp_flow( + hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: 
AsyncMock
+) -> None:
+    """Test the DHCP flow."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        data=dhcp.DhcpServiceInfo(
+            hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66"
+        ),
+        context={"source": SOURCE_DHCP},
+    )
+
+    await hass.async_block_till_done()
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "discovery_confirm"
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {},
+    )
+
+    await hass.async_block_till_done()
+    assert result["type"] is FlowResultType.CREATE_ENTRY
+    assert result["title"] == "Stove"
+    assert result["result"].unique_id == "11:22:33:44:55:66"
+
+
+async def test_dhcp_flow_error(
+    hass: HomeAssistant, mock_palazzetti_client: AsyncMock, mock_setup_entry: AsyncMock
+) -> None:
+    """Test the DHCP flow when connecting to the device fails."""
+    mock_palazzetti_client.connect.side_effect = CommunicationError()
+
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        data=dhcp.DhcpServiceInfo(
+            hostname="connbox1234", ip="192.168.1.1", macaddress="11:22:33:44:55:66"
+        ),
+        context={"source": SOURCE_DHCP},
+    )
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "cannot_connect"
diff --git a/tests/components/palazzetti/test_diagnostics.py b/tests/components/palazzetti/test_diagnostics.py
new file mode 100644
index 00000000000..80d021be511
--- /dev/null
+++ b/tests/components/palazzetti/test_diagnostics.py
@@ -0,0 +1,22 @@
+"""Test Palazzetti diagnostics."""
+
+from syrupy import SnapshotAssertion
+
+from homeassistant.core import HomeAssistant
+
+from tests.common import MockConfigEntry
+from tests.components.diagnostics import get_diagnostics_for_config_entry
+from tests.typing import ClientSessionGenerator
+
+
+async def test_entry_diagnostics(
+    hass: HomeAssistant,
+    init_integration: MockConfigEntry,
+    hass_client: ClientSessionGenerator,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test config entry diagnostics."""
+    assert (
+        await get_diagnostics_for_config_entry(hass, hass_client, init_integration)
+        == snapshot
+    )
diff --git a/tests/components/palazzetti/test_sensor.py b/tests/components/palazzetti/test_sensor.py
new file mode 100644
index 00000000000..c7d7317bb0b
--- /dev/null
+++ b/tests/components/palazzetti/test_sensor.py
@@ -0,0 +1,27 @@
+"""Tests for the Palazzetti sensor platform."""
+
+from unittest.mock import AsyncMock, patch
+
+from syrupy import SnapshotAssertion
+
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_palazzetti_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.palazzetti.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/pegel_online/test_init.py b/tests/components/pegel_online/test_init.py index ee2e78af7cf..c1b8f1861c4 100644 --- a/tests/components/pegel_online/test_init.py +++ b/tests/components/pegel_online/test_init.py @@ -3,6 +3,7 @@ from unittest.mock import patch from aiohttp.client_exceptions import ClientError +import pytest from homeassistant.components.pegel_online.const import ( CONF_STATION, @@ -23,7 +24,9 @@ from .const import ( from tests.common import MockConfigEntry, async_fire_time_changed -async def test_update_error(hass: HomeAssistant) -> None: +async def test_update_error( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Tests error during update entity.""" entry = MockConfigEntry( domain=DOMAIN, @@ -43,9 +46,11 @@ async def test_update_error(hass: HomeAssistant) -> None: state = hass.states.get("sensor.dresden_elbe_water_level") assert state - pegelonline().override_side_effect(ClientError) + pegelonline().override_side_effect(ClientError("Boom")) async_fire_time_changed(hass, utcnow() + MIN_TIME_BETWEEN_UPDATES) await hass.async_block_till_done() + assert "Failed to communicate with API: Boom" in caplog.text + state = hass.states.get("sensor.dresden_elbe_water_level") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/plugwise/conftest.py b/tests/components/plugwise/conftest.py index f18c96d36c5..dead58e0581 100644 --- a/tests/components/plugwise/conftest.py +++ b/tests/components/plugwise/conftest.py @@ -93,7 +93,7 @@ def mock_smile_adam() -> Generator[MagicMock]: smile.connect.return_value = Version("3.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -120,7 +120,7 @@ def mock_smile_adam_2() -> Generator[MagicMock]: smile.connect.return_value = Version("3.6.4") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -147,7 +147,7 @@ def mock_smile_adam_3() -> Generator[MagicMock]: smile.connect.return_value = Version("3.6.4") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -174,7 +174,7 @@ def mock_smile_adam_4() -> Generator[MagicMock]: smile.connect.return_value = Version("3.2.8") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -200,7 +200,7 @@ def mock_smile_anna() -> Generator[MagicMock]: smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + 
all_data["devices"], all_data["gateway"] ) yield smile @@ -226,7 +226,7 @@ def mock_smile_anna_2() -> Generator[MagicMock]: smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -252,7 +252,7 @@ def mock_smile_anna_3() -> Generator[MagicMock]: smile.connect.return_value = Version("4.0.15") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -278,7 +278,7 @@ def mock_smile_p1() -> Generator[MagicMock]: smile.connect.return_value = Version("4.4.2") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -304,7 +304,7 @@ def mock_smile_p1_2() -> Generator[MagicMock]: smile.connect.return_value = Version("4.4.2") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -330,7 +330,7 @@ def mock_smile_legacy_anna() -> Generator[MagicMock]: smile.connect.return_value = Version("1.8.22") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile @@ -356,7 +356,7 @@ def mock_stretch() -> Generator[MagicMock]: smile.connect.return_value = Version("3.1.11") all_data = _read_json(chosen_env, "all_data") smile.async_update.return_value = PlugwiseData( - all_data["gateway"], all_data["devices"] + all_data["devices"], all_data["gateway"] ) yield smile diff --git a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json index b767f5531f2..5fc2a114b2f 100644 --- a/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json +++ b/tests/components/plugwise/fixtures/anna_heatpump_heating/all_data.json @@ -61,11 +61,11 @@ "3cb70739631c4d17a86b8b12e8a5161b": { "active_preset": "home", "available_schedules": ["standaard", "off"], + "climate_mode": "auto", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", "location": "c784ee9fdab44e1395b8dee7d7a497d5", - "mode": "auto", "model": "ThermoTouch", "name": "Anna", "preset_modes": ["no_frost", "home", "away", "asleep", "vacation"], diff --git a/tests/components/plugwise/fixtures/legacy_anna/all_data.json b/tests/components/plugwise/fixtures/legacy_anna/all_data.json index 1eca4e285cc..2cb439950af 100644 --- a/tests/components/plugwise/fixtures/legacy_anna/all_data.json +++ b/tests/components/plugwise/fixtures/legacy_anna/all_data.json @@ -36,16 +36,16 @@ }, "0d266432d64443e283b5d708ae98b455": { "active_preset": "home", + "climate_mode": "heat", "dev_class": "thermostat", "firmware": "2017-03-13T11:54:58+01:00", "hardware": "6539-1301-500", "location": "0000aaaa0000aaaa0000aaaa0000aa00", - "mode": "heat", "model": "ThermoTouch", "name": "Anna", "preset_modes": ["away", "vacation", "asleep", "home", "no_frost"], "sensors": { - "illuminance": 151, + "illuminance": 150.8, "setpoint": 20.5, "temperature": 20.4 }, diff --git a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json 
b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json index 166b13b84ff..9c40e50278b 100644 --- a/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_cooling/all_data.json @@ -31,7 +31,7 @@ "binary_sensors": { "low_battery": false }, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", @@ -40,6 +40,7 @@ "name": "Tom Badkamer", "sensors": { "battery": 99, + "setpoint": 18.0, "temperature": 21.6, "temperature_difference": -0.2, "valve_position": 100 @@ -54,34 +55,16 @@ "zigbee_mac_address": "000D6F000C8FF5EE" }, "ad4838d7d35c4d6ea796ee12ae5aedf8": { - "active_preset": "home", "available": true, - "available_schedules": [ - "Badkamer", - "Test", - "Vakantie", - "Weekschema", - "off" - ], - "control_state": "cooling", "dev_class": "thermostat", "location": "f2bf9048bef64cc5b6d5110154e33c81", - "mode": "cool", "model": "ThermoTouch", "model_id": "143.1", "name": "Anna", - "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "off", "sensors": { "setpoint": 23.5, "temperature": 25.8 }, - "thermostat": { - "lower_bound": 1.0, - "resolution": 0.01, - "setpoint": 23.5, - "upper_bound": 35.0 - }, "vendor": "Plugwise" }, "da224107914542988a88561b4452b0f6": { @@ -113,29 +96,17 @@ "zigbee_mac_address": "000D6F000D5A168D" }, "e2f4322d57924fa090fbbc48b3a140dc": { - "active_preset": "home", "available": true, - "available_schedules": [ - "Badkamer", - "Test", - "Vakantie", - "Weekschema", - "off" - ], "binary_sensors": { "low_battery": true }, - "control_state": "preheating", "dev_class": "zone_thermostat", "firmware": "2016-10-10T02:00:00+02:00", "hardware": "255", "location": "f871b8c4d63549319221e294e4f88074", - "mode": "auto", "model": "Lisa", "model_id": "158-01", "name": "Lisa Badkamer", - "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "Badkamer", "sensors": { "battery": 14, "setpoint": 23.5, @@ -147,12 +118,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 25.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "000D6F000C869B61" }, @@ -166,14 +131,81 @@ "name": "Test", "switches": { "relay": true - } + }, + "vendor": "Plugwise" + }, + "f2bf9048bef64cc5b6d5110154e33c81": { + "active_preset": "home", + "available_schedules": [ + "Badkamer", + "Test", + "Vakantie", + "Weekschema", + "off" + ], + "climate_mode": "cool", + "control_state": "cooling", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Living room", + "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], + "select_schedule": "off", + "sensors": { + "electricity_consumed": 149.9, + "electricity_produced": 0.0, + "temperature": 25.8 + }, + "thermostat": { + "lower_bound": 1.0, + "resolution": 0.01, + "setpoint": 23.5, + "upper_bound": 35.0 + }, + "thermostats": { + "primary": ["ad4838d7d35c4d6ea796ee12ae5aedf8"], + "secondary": [] + }, + "vendor": "Plugwise" + }, + "f871b8c4d63549319221e294e4f88074": { + "active_preset": "home", + "available_schedules": [ + "Badkamer", + "Test", + "Vakantie", + "Weekschema", + "off" + ], + "climate_mode": "cool", + "control_state": "auto", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Bathroom", + "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], + "select_schedule": "Badkamer", + 
"sensors": { + "electricity_consumed": 0.0, + "electricity_produced": 0.0, + "temperature": 23.9 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 25.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["e2f4322d57924fa090fbbc48b3a140dc"], + "secondary": ["1772a4ea304041adb83f357b751341ff"] + }, + "vendor": "Plugwise" } }, "gateway": { "cooling_present": true, "gateway_id": "da224107914542988a88561b4452b0f6", "heater_id": "056ee145a816487eaa69243c3280f8bf", - "item_count": 157, + "item_count": 89, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json index 61935f1306a..fab2cea5fdc 100644 --- a/tests/components/plugwise/fixtures/m_adam_heating/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_heating/all_data.json @@ -36,7 +36,7 @@ "binary_sensors": { "low_battery": false }, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", @@ -45,6 +45,7 @@ "name": "Tom Badkamer", "sensors": { "battery": 99, + "setpoint": 18.0, "temperature": 18.6, "temperature_difference": -0.2, "valve_position": 100 @@ -59,34 +60,16 @@ "zigbee_mac_address": "000D6F000C8FF5EE" }, "ad4838d7d35c4d6ea796ee12ae5aedf8": { - "active_preset": "home", "available": true, - "available_schedules": [ - "Badkamer", - "Test", - "Vakantie", - "Weekschema", - "off" - ], - "control_state": "preheating", "dev_class": "thermostat", "location": "f2bf9048bef64cc5b6d5110154e33c81", - "mode": "heat", "model": "ThermoTouch", "model_id": "143.1", "name": "Anna", - "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "off", "sensors": { "setpoint": 20.0, "temperature": 19.1 }, - "thermostat": { - "lower_bound": 1.0, - "resolution": 0.01, - "setpoint": 20.0, - "upper_bound": 35.0 - }, "vendor": "Plugwise" }, "da224107914542988a88561b4452b0f6": { @@ -112,29 +95,17 @@ "zigbee_mac_address": "000D6F000D5A168D" }, "e2f4322d57924fa090fbbc48b3a140dc": { - "active_preset": "home", "available": true, - "available_schedules": [ - "Badkamer", - "Test", - "Vakantie", - "Weekschema", - "off" - ], "binary_sensors": { "low_battery": true }, - "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-10T02:00:00+02:00", "hardware": "255", "location": "f871b8c4d63549319221e294e4f88074", - "mode": "auto", "model": "Lisa", "model_id": "158-01", "name": "Lisa Badkamer", - "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], - "select_schedule": "Badkamer", "sensors": { "battery": 14, "setpoint": 15.0, @@ -146,12 +117,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 15.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "000D6F000C869B61" }, @@ -165,14 +130,81 @@ "name": "Test", "switches": { "relay": true - } + }, + "vendor": "Plugwise" + }, + "f2bf9048bef64cc5b6d5110154e33c81": { + "active_preset": "home", + "available_schedules": [ + "Badkamer", + "Test", + "Vakantie", + "Weekschema", + "off" + ], + "climate_mode": "heat", + "control_state": "preheating", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Living room", + "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], + "select_schedule": "off", + "sensors": { + "electricity_consumed": 149.9, + 
"electricity_produced": 0.0, + "temperature": 19.1 + }, + "thermostat": { + "lower_bound": 1.0, + "resolution": 0.01, + "setpoint": 20.0, + "upper_bound": 35.0 + }, + "thermostats": { + "primary": ["ad4838d7d35c4d6ea796ee12ae5aedf8"], + "secondary": [] + }, + "vendor": "Plugwise" + }, + "f871b8c4d63549319221e294e4f88074": { + "active_preset": "home", + "available_schedules": [ + "Badkamer", + "Test", + "Vakantie", + "Weekschema", + "off" + ], + "climate_mode": "auto", + "control_state": "off", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Bathroom", + "preset_modes": ["no_frost", "asleep", "vacation", "home", "away"], + "select_schedule": "Badkamer", + "sensors": { + "electricity_consumed": 0.0, + "electricity_produced": 0.0, + "temperature": 17.9 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 15.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["e2f4322d57924fa090fbbc48b3a140dc"], + "secondary": ["1772a4ea304041adb83f357b751341ff"] + }, + "vendor": "Plugwise" } }, "gateway": { "cooling_present": false, "gateway_id": "da224107914542988a88561b4452b0f6", "heater_id": "056ee145a816487eaa69243c3280f8bf", - "item_count": 157, + "item_count": 89, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json index ec2095648b8..4516ce2c2d0 100644 --- a/tests/components/plugwise/fixtures/m_adam_jip/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_jip/all_data.json @@ -1,21 +1,63 @@ { "devices": { - "1346fbd8498d4dbcab7e18d51b771f3d": { + "06aecb3d00354375924f50c47af36bd2": { "active_preset": "no_frost", + "climate_mode": "off", + "control_state": "off", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Slaapkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 24.2 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 13.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["1346fbd8498d4dbcab7e18d51b771f3d"], + "secondary": ["356b65335e274d769c338223e7af9c33"] + }, + "vendor": "Plugwise" + }, + "13228dab8ce04617af318a2888b3c548": { + "active_preset": "home", + "climate_mode": "heat", + "control_state": "off", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Woonkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 27.4 + }, + "thermostat": { + "lower_bound": 4.0, + "resolution": 0.01, + "setpoint": 9.0, + "upper_bound": 30.0 + }, + "thermostats": { + "primary": ["f61f1a2535f54f52ad006a3d18e459ca"], + "secondary": ["833de10f269c4deab58fb9df69901b4e"] + }, + "vendor": "Plugwise" + }, + "1346fbd8498d4dbcab7e18d51b771f3d": { "available": true, "binary_sensors": { "low_battery": false }, - "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "06aecb3d00354375924f50c47af36bd2", - "mode": "off", "model": "Lisa", "model_id": "158-01", "name": "Slaapkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 92, "setpoint": 13.0, @@ -27,18 +69,12 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 13.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A03" }, "1da4d325838e4ad8aac12177214505c9": { "available": true, - 
"dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "d58fec52899f4f1c92e4f8fad6d8c48c", @@ -62,7 +98,7 @@ }, "356b65335e274d769c338223e7af9c33": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "06aecb3d00354375924f50c47af36bd2", @@ -102,21 +138,17 @@ "zigbee_mac_address": "ABCD012345670A06" }, "6f3e9d7084214c21b9dfa46f6eeb8700": { - "active_preset": "home", "available": true, "binary_sensors": { "low_battery": false }, - "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "d27aede973b54be484f6842d1b2802ad", - "mode": "heat", "model": "Lisa", "model_id": "158-01", "name": "Kinderkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 79, "setpoint": 13.0, @@ -128,18 +160,12 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 13.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A02" }, "833de10f269c4deab58fb9df69901b4e": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "13228dab8ce04617af318a2888b3c548", @@ -162,21 +188,17 @@ "zigbee_mac_address": "ABCD012345670A09" }, "a6abc6a129ee499c88a4d420cc413b47": { - "active_preset": "home", "available": true, "binary_sensors": { "low_battery": false }, - "control_state": "off", "dev_class": "zone_thermostat", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "d58fec52899f4f1c92e4f8fad6d8c48c", - "mode": "heat", "model": "Lisa", "model_id": "158-01", "name": "Logeerkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 80, "setpoint": 13.0, @@ -188,12 +210,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 13.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A01" }, @@ -219,9 +235,32 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670101" }, + "d27aede973b54be484f6842d1b2802ad": { + "active_preset": "home", + "climate_mode": "heat", + "control_state": "off", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Kinderkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 30.0 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 13.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["6f3e9d7084214c21b9dfa46f6eeb8700"], + "secondary": ["d4496250d0e942cfa7aea3476e9070d5"] + }, + "vendor": "Plugwise" + }, "d4496250d0e942cfa7aea3476e9070d5": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "d27aede973b54be484f6842d1b2802ad", @@ -243,6 +282,29 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A04" }, + "d58fec52899f4f1c92e4f8fad6d8c48c": { + "active_preset": "home", + "climate_mode": "heat", + "control_state": "off", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Logeerkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 
30.0 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 13.0, + "upper_bound": 99.9 + }, + "thermostats": { + "primary": ["a6abc6a129ee499c88a4d420cc413b47"], + "secondary": ["1da4d325838e4ad8aac12177214505c9"] + }, + "vendor": "Plugwise" + }, "e4684553153b44afbef2200885f379dc": { "available": true, "binary_sensors": { @@ -280,21 +342,17 @@ "vendor": "Remeha B.V." }, "f61f1a2535f54f52ad006a3d18e459ca": { - "active_preset": "home", "available": true, "binary_sensors": { "low_battery": false }, - "control_state": "off", "dev_class": "zone_thermometer", "firmware": "2020-09-01T02:00:00+02:00", "hardware": "1", "location": "13228dab8ce04617af318a2888b3c548", - "mode": "heat", "model": "Jip", "model_id": "168-01", "name": "Woonkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 100, "humidity": 56.2, @@ -307,12 +365,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 4.0, - "resolution": 0.01, - "setpoint": 9.0, - "upper_bound": 30.0 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A08" } @@ -321,7 +373,7 @@ "cooling_present": false, "gateway_id": "b5c2386c6f6342669e50fe49dd05b188", "heater_id": "e4684553153b44afbef2200885f379dc", - "item_count": 228, + "item_count": 244, "notifications": {}, "reboot": true, "smile_name": "Adam" diff --git a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json index a182b1ac8dd..67e8c235cc3 100644 --- a/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json +++ b/tests/components/plugwise/fixtures/m_adam_multiple_devices_per_zone/all_data.json @@ -21,6 +21,73 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A15" }, + "08963fec7c53423ca5680aa4cb502c63": { + "active_preset": "away", + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], + "climate_mode": "auto", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Badkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "Badkamer Schema", + "sensors": { + "temperature": 18.9 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 14.0, + "upper_bound": 100.0 + }, + "thermostats": { + "primary": [ + "f1fee6043d3642a9b0a65297455f008e", + "680423ff840043738f42cc7f1ff97a36" + ], + "secondary": [] + }, + "vendor": "Plugwise" + }, + "12493538af164a409c6a1c79e38afe1c": { + "active_preset": "away", + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], + "climate_mode": "heat", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Bios", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "off", + "sensors": { + "electricity_consumed": 0.0, + "electricity_produced": 0.0, + "temperature": 16.5 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 13.0, + "upper_bound": 100.0 + }, + "thermostats": { + "primary": ["df4a4a8169904cdb9c03d61a21f42140"], + "secondary": ["a2c3583e0a6349358998b760cea82d2a"] + }, + "vendor": "Plugwise" + }, "21f2b542c49845e6bb416884c55778d6": { "available": true, "dev_class": "game_console_plug", @@ -42,6 +109,28 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A12" }, + 
"446ac08dd04d4eff8ac57489757b7314": { + "active_preset": "no_frost", + "climate_mode": "heat", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Garage", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "sensors": { + "temperature": 15.6 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 5.5, + "upper_bound": 100.0 + }, + "thermostats": { + "primary": ["e7693eb9582644e5b865dba8d4447cf1"], + "secondary": [] + }, + "vendor": "Plugwise" + }, "4a810418d5394b3f82727340b91ba740": { "available": true, "dev_class": "router_plug", @@ -89,13 +178,13 @@ "binary_sensors": { "low_battery": false }, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "08963fec7c53423ca5680aa4cb502c63", "model": "Tom/Floor", "model_id": "106-03", - "name": "Thermostatic Radiator Badkamer", + "name": "Thermostatic Radiator Badkamer 1", "sensors": { "battery": 51, "setpoint": 14.0, @@ -113,16 +202,7 @@ "zigbee_mac_address": "ABCD012345670A17" }, "6a3bf693d05e48e0b460c815a4fdd09d": { - "active_preset": "asleep", "available": true, - "available_schedules": [ - "CV Roan", - "Bios Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], "binary_sensors": { "low_battery": false }, @@ -130,12 +210,9 @@ "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "82fa13f017d240daa0d0ea1775420f24", - "mode": "auto", "model": "Lisa", "model_id": "158-01", "name": "Zone Thermostat Jessie", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "CV Jessie", "sensors": { "battery": 37, "setpoint": 15.0, @@ -147,12 +224,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 15.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A03" }, @@ -176,6 +247,37 @@ "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A05" }, + "82fa13f017d240daa0d0ea1775420f24": { + "active_preset": "asleep", + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], + "climate_mode": "auto", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Jessie", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "CV Jessie", + "sensors": { + "temperature": 17.2 + }, + "thermostat": { + "lower_bound": 0.0, + "resolution": 0.01, + "setpoint": 15.0, + "upper_bound": 100.0 + }, + "thermostats": { + "primary": ["6a3bf693d05e48e0b460c815a4fdd09d"], + "secondary": ["d3da73bde12a47d5a6b8f9dad971f2ec"] + }, + "vendor": "Plugwise" + }, "90986d591dcd426cae3ec3e8111ff730": { "binary_sensors": { "heating_state": true @@ -216,7 +318,7 @@ "binary_sensors": { "low_battery": false }, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "12493538af164a409c6a1c79e38afe1c", @@ -241,7 +343,7 @@ }, "b310b72a0e354bfab43089919b9a88bf": { "available": true, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "c50f167537524366a5af7aa3942feb1e", @@ -264,16 +366,7 @@ "zigbee_mac_address": "ABCD012345670A02" }, "b59bcebaf94b499ea7d46e4a66fb62d8": { - "active_preset": "home", "available": true, - "available_schedules": [ - "CV Roan", - "Bios 
Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], "binary_sensors": { "low_battery": false }, @@ -281,12 +374,9 @@ "firmware": "2016-08-02T02:00:00+02:00", "hardware": "255", "location": "c50f167537524366a5af7aa3942feb1e", - "mode": "auto", "model": "Lisa", "model_id": "158-01", "name": "Zone Lisa WK", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "GF7 Woonkamer", "sensors": { "battery": 34, "setpoint": 21.5, @@ -298,14 +388,41 @@ "setpoint": 0.0, "upper_bound": 2.0 }, + "vendor": "Plugwise", + "zigbee_mac_address": "ABCD012345670A07" + }, + "c50f167537524366a5af7aa3942feb1e": { + "active_preset": "home", + "available_schedules": [ + "CV Roan", + "Bios Schema met Film Avond", + "GF7 Woonkamer", + "Badkamer Schema", + "CV Jessie", + "off" + ], + "climate_mode": "auto", + "dev_class": "climate", + "model": "ThermoZone", + "name": "Woonkamer", + "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], + "select_schedule": "GF7 Woonkamer", + "sensors": { + "electricity_consumed": 35.6, + "electricity_produced": 0.0, + "temperature": 20.9 + }, "thermostat": { "lower_bound": 0.0, "resolution": 0.01, "setpoint": 21.5, - "upper_bound": 99.9 + "upper_bound": 100.0 }, - "vendor": "Plugwise", - "zigbee_mac_address": "ABCD012345670A07" + "thermostats": { + "primary": ["b59bcebaf94b499ea7d46e4a66fb62d8"], + "secondary": ["b310b72a0e354bfab43089919b9a88bf"] + }, + "vendor": "Plugwise" }, "cd0ddb54ef694e11ac18ed1cbce5dbbd": { "available": true, @@ -333,7 +450,7 @@ "binary_sensors": { "low_battery": false }, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "82fa13f017d240daa0d0ea1775420f24", @@ -357,16 +474,7 @@ "zigbee_mac_address": "ABCD012345670A10" }, "df4a4a8169904cdb9c03d61a21f42140": { - "active_preset": "away", "available": true, - "available_schedules": [ - "CV Roan", - "Bios Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], "binary_sensors": { "low_battery": false }, @@ -374,12 +482,9 @@ "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "12493538af164a409c6a1c79e38afe1c", - "mode": "heat", "model": "Lisa", "model_id": "158-01", "name": "Zone Lisa Bios", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "off", "sensors": { "battery": 67, "setpoint": 13.0, @@ -391,17 +496,10 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 13.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A06" }, "e7693eb9582644e5b865dba8d4447cf1": { - "active_preset": "no_frost", "available": true, "binary_sensors": { "low_battery": false @@ -410,11 +508,9 @@ "firmware": "2019-03-27T01:00:00+01:00", "hardware": "1", "location": "446ac08dd04d4eff8ac57489757b7314", - "mode": "heat", "model": "Tom/Floor", "model_id": "106-03", "name": "CV Kraan Garage", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], "sensors": { "battery": 68, "setpoint": 5.5, @@ -428,39 +524,21 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 5.5, - "upper_bound": 100.0 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A11" }, "f1fee6043d3642a9b0a65297455f008e": { - "active_preset": "away", "available": true, - "available_schedules": [ - "CV Roan", - "Bios 
Schema met Film Avond", - "GF7 Woonkamer", - "Badkamer Schema", - "CV Jessie", - "off" - ], "binary_sensors": { "low_battery": false }, - "dev_class": "zone_thermostat", + "dev_class": "thermostatic_radiator_valve", "firmware": "2016-10-27T02:00:00+02:00", "hardware": "255", "location": "08963fec7c53423ca5680aa4cb502c63", - "mode": "auto", "model": "Lisa", "model_id": "158-01", - "name": "Zone Thermostat Badkamer", - "preset_modes": ["home", "asleep", "away", "vacation", "no_frost"], - "select_schedule": "Badkamer Schema", + "name": "Thermostatic Radiator Badkamer 2", "sensors": { "battery": 92, "setpoint": 14.0, @@ -472,12 +550,6 @@ "setpoint": 0.0, "upper_bound": 2.0 }, - "thermostat": { - "lower_bound": 0.0, - "resolution": 0.01, - "setpoint": 14.0, - "upper_bound": 99.9 - }, "vendor": "Plugwise", "zigbee_mac_address": "ABCD012345670A08" }, @@ -505,7 +577,7 @@ "cooling_present": false, "gateway_id": "fe799307f1624099878210aa0b9f1475", "heater_id": "90986d591dcd426cae3ec3e8111ff730", - "item_count": 340, + "item_count": 364, "notifications": { "af82e4ccf9c548528166d38e560662a4": { "warning": "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device." diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json index 05f5e0ffa46..74f20379d68 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_cooling/all_data.json @@ -61,11 +61,11 @@ "3cb70739631c4d17a86b8b12e8a5161b": { "active_preset": "home", "available_schedules": ["standaard", "off"], + "climate_mode": "auto", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", "location": "c784ee9fdab44e1395b8dee7d7a497d5", - "mode": "auto", "model": "ThermoTouch", "name": "Anna", "preset_modes": ["no_frost", "home", "away", "asleep", "vacation"], diff --git a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json index 327a87f9409..3b1e9bf8cac 100644 --- a/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json +++ b/tests/components/plugwise/fixtures/m_anna_heatpump_idle/all_data.json @@ -61,11 +61,11 @@ "3cb70739631c4d17a86b8b12e8a5161b": { "active_preset": "home", "available_schedules": ["standaard", "off"], + "climate_mode": "auto", "dev_class": "thermostat", "firmware": "2018-02-08T11:15:53+01:00", "hardware": "6539-1301-5002", "location": "c784ee9fdab44e1395b8dee7d7a497d5", - "mode": "auto", "model": "ThermoTouch", "name": "Anna", "preset_modes": ["no_frost", "home", "away", "asleep", "vacation"], diff --git a/tests/components/plugwise/fixtures/stretch_v31/all_data.json b/tests/components/plugwise/fixtures/stretch_v31/all_data.json index a875324fc13..b1675116bdf 100644 --- a/tests/components/plugwise/fixtures/stretch_v31/all_data.json +++ b/tests/components/plugwise/fixtures/stretch_v31/all_data.json @@ -96,7 +96,8 @@ "name": "Schakel", "switches": { "relay": true - } + }, + "vendor": "Plugwise" }, "d950b314e9d8499f968e6db8d82ef78c": { "dev_class": "report", @@ -111,7 +112,8 @@ "name": "Stroomvreters", "switches": { "relay": true - } + }, + "vendor": "Plugwise" }, "e1c884e7dede431dadee09506ec4f859": { "dev_class": "refrigerator", diff --git a/tests/components/plugwise/snapshots/test_diagnostics.ambr 
b/tests/components/plugwise/snapshots/test_diagnostics.ambr index d187e0355bf..bf7d4260a32 100644 --- a/tests/components/plugwise/snapshots/test_diagnostics.ambr +++ b/tests/components/plugwise/snapshots/test_diagnostics.ambr @@ -23,6 +23,90 @@ 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A15', }), + '08963fec7c53423ca5680aa4cb502c63': dict({ + 'active_preset': 'away', + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), + 'climate_mode': 'auto', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Badkamer', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'select_schedule': 'Badkamer Schema', + 'sensors': dict({ + 'temperature': 18.9, + }), + 'thermostat': dict({ + 'lower_bound': 0.0, + 'resolution': 0.01, + 'setpoint': 14.0, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + 'f1fee6043d3642a9b0a65297455f008e', + '680423ff840043738f42cc7f1ff97a36', + ]), + 'secondary': list([ + ]), + }), + 'vendor': 'Plugwise', + }), + '12493538af164a409c6a1c79e38afe1c': dict({ + 'active_preset': 'away', + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), + 'climate_mode': 'heat', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Bios', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'select_schedule': 'off', + 'sensors': dict({ + 'electricity_consumed': 0.0, + 'electricity_produced': 0.0, + 'temperature': 16.5, + }), + 'thermostat': dict({ + 'lower_bound': 0.0, + 'resolution': 0.01, + 'setpoint': 13.0, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + 'df4a4a8169904cdb9c03d61a21f42140', + ]), + 'secondary': list([ + 'a2c3583e0a6349358998b760cea82d2a', + ]), + }), + 'vendor': 'Plugwise', + }), '21f2b542c49845e6bb416884c55778d6': dict({ 'available': True, 'dev_class': 'game_console_plug', @@ -44,6 +128,37 @@ 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A12', }), + '446ac08dd04d4eff8ac57489757b7314': dict({ + 'active_preset': 'no_frost', + 'climate_mode': 'heat', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Garage', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'sensors': dict({ + 'temperature': 15.6, + }), + 'thermostat': dict({ + 'lower_bound': 0.0, + 'resolution': 0.01, + 'setpoint': 5.5, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + 'e7693eb9582644e5b865dba8d4447cf1', + ]), + 'secondary': list([ + ]), + }), + 'vendor': 'Plugwise', + }), '4a810418d5394b3f82727340b91ba740': dict({ 'available': True, 'dev_class': 'router_plug', @@ -91,13 +206,13 @@ 'binary_sensors': dict({ 'low_battery': False, }), - 'dev_class': 'thermo_sensor', + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '08963fec7c53423ca5680aa4cb502c63', 'model': 'Tom/Floor', 'model_id': '106-03', - 'name': 'Thermostatic Radiator Badkamer', + 'name': 'Thermostatic Radiator Badkamer 1', 'sensors': dict({ 'battery': 51, 'setpoint': 14.0, @@ -115,16 +230,7 @@ 'zigbee_mac_address': 'ABCD012345670A17', }), '6a3bf693d05e48e0b460c815a4fdd09d': dict({ - 'active_preset': 'asleep', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', 
- 'off', - ]), 'binary_sensors': dict({ 'low_battery': False, }), @@ -132,18 +238,9 @@ 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '82fa13f017d240daa0d0ea1775420f24', - 'mode': 'auto', 'model': 'Lisa', 'model_id': '158-01', 'name': 'Zone Thermostat Jessie', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'CV Jessie', 'sensors': dict({ 'battery': 37, 'setpoint': 15.0, @@ -155,12 +252,6 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), - 'thermostat': dict({ - 'lower_bound': 0.0, - 'resolution': 0.01, - 'setpoint': 15.0, - 'upper_bound': 99.9, - }), 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A03', }), @@ -184,6 +275,47 @@ 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A05', }), + '82fa13f017d240daa0d0ea1775420f24': dict({ + 'active_preset': 'asleep', + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), + 'climate_mode': 'auto', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Jessie', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'select_schedule': 'CV Jessie', + 'sensors': dict({ + 'temperature': 17.2, + }), + 'thermostat': dict({ + 'lower_bound': 0.0, + 'resolution': 0.01, + 'setpoint': 15.0, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + '6a3bf693d05e48e0b460c815a4fdd09d', + ]), + 'secondary': list([ + 'd3da73bde12a47d5a6b8f9dad971f2ec', + ]), + }), + 'vendor': 'Plugwise', + }), '90986d591dcd426cae3ec3e8111ff730': dict({ 'binary_sensors': dict({ 'heating_state': True, @@ -224,7 +356,7 @@ 'binary_sensors': dict({ 'low_battery': False, }), - 'dev_class': 'thermo_sensor', + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '12493538af164a409c6a1c79e38afe1c', @@ -249,7 +381,7 @@ }), 'b310b72a0e354bfab43089919b9a88bf': dict({ 'available': True, - 'dev_class': 'thermo_sensor', + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': 'c50f167537524366a5af7aa3942feb1e', @@ -272,16 +404,7 @@ 'zigbee_mac_address': 'ABCD012345670A02', }), 'b59bcebaf94b499ea7d46e4a66fb62d8': dict({ - 'active_preset': 'home', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', - 'off', - ]), 'binary_sensors': dict({ 'low_battery': False, }), @@ -289,18 +412,9 @@ 'firmware': '2016-08-02T02:00:00+02:00', 'hardware': '255', 'location': 'c50f167537524366a5af7aa3942feb1e', - 'mode': 'auto', 'model': 'Lisa', 'model_id': '158-01', 'name': 'Zone Lisa WK', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'GF7 Woonkamer', 'sensors': dict({ 'battery': 34, 'setpoint': 21.5, @@ -312,14 +426,51 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), + 'vendor': 'Plugwise', + 'zigbee_mac_address': 'ABCD012345670A07', + }), + 'c50f167537524366a5af7aa3942feb1e': dict({ + 'active_preset': 'home', + 'available_schedules': list([ + 'CV Roan', + 'Bios Schema met Film Avond', + 'GF7 Woonkamer', + 'Badkamer Schema', + 'CV Jessie', + 'off', + ]), + 'climate_mode': 'auto', + 'dev_class': 'climate', + 'model': 'ThermoZone', + 'name': 'Woonkamer', + 'preset_modes': list([ + 'home', + 'asleep', + 'away', + 'vacation', + 'no_frost', + ]), + 'select_schedule': 'GF7 Woonkamer', + 'sensors': 
dict({ + 'electricity_consumed': 35.6, + 'electricity_produced': 0.0, + 'temperature': 20.9, + }), 'thermostat': dict({ 'lower_bound': 0.0, 'resolution': 0.01, 'setpoint': 21.5, - 'upper_bound': 99.9, + 'upper_bound': 100.0, + }), + 'thermostats': dict({ + 'primary': list([ + 'b59bcebaf94b499ea7d46e4a66fb62d8', + ]), + 'secondary': list([ + 'b310b72a0e354bfab43089919b9a88bf', + ]), }), 'vendor': 'Plugwise', - 'zigbee_mac_address': 'ABCD012345670A07', }), 'cd0ddb54ef694e11ac18ed1cbce5dbbd': dict({ 'available': True, @@ -347,7 +498,7 @@ 'binary_sensors': dict({ 'low_battery': False, }), - 'dev_class': 'thermo_sensor', + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '82fa13f017d240daa0d0ea1775420f24', @@ -371,16 +522,7 @@ 'zigbee_mac_address': 'ABCD012345670A10', }), 'df4a4a8169904cdb9c03d61a21f42140': dict({ - 'active_preset': 'away', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', - 'off', - ]), 'binary_sensors': dict({ 'low_battery': False, }), @@ -388,18 +530,9 @@ 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '12493538af164a409c6a1c79e38afe1c', - 'mode': 'heat', 'model': 'Lisa', 'model_id': '158-01', 'name': 'Zone Lisa Bios', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'off', 'sensors': dict({ 'battery': 67, 'setpoint': 13.0, @@ -411,17 +544,10 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), - 'thermostat': dict({ - 'lower_bound': 0.0, - 'resolution': 0.01, - 'setpoint': 13.0, - 'upper_bound': 99.9, - }), 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A06', }), 'e7693eb9582644e5b865dba8d4447cf1': dict({ - 'active_preset': 'no_frost', 'available': True, 'binary_sensors': dict({ 'low_battery': False, @@ -430,17 +556,9 @@ 'firmware': '2019-03-27T01:00:00+01:00', 'hardware': '1', 'location': '446ac08dd04d4eff8ac57489757b7314', - 'mode': 'heat', 'model': 'Tom/Floor', 'model_id': '106-03', 'name': 'CV Kraan Garage', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), 'sensors': dict({ 'battery': 68, 'setpoint': 5.5, @@ -454,45 +572,21 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), - 'thermostat': dict({ - 'lower_bound': 0.0, - 'resolution': 0.01, - 'setpoint': 5.5, - 'upper_bound': 100.0, - }), 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A11', }), 'f1fee6043d3642a9b0a65297455f008e': dict({ - 'active_preset': 'away', 'available': True, - 'available_schedules': list([ - 'CV Roan', - 'Bios Schema met Film Avond', - 'GF7 Woonkamer', - 'Badkamer Schema', - 'CV Jessie', - 'off', - ]), 'binary_sensors': dict({ 'low_battery': False, }), - 'dev_class': 'zone_thermostat', + 'dev_class': 'thermostatic_radiator_valve', 'firmware': '2016-10-27T02:00:00+02:00', 'hardware': '255', 'location': '08963fec7c53423ca5680aa4cb502c63', - 'mode': 'auto', 'model': 'Lisa', 'model_id': '158-01', - 'name': 'Zone Thermostat Badkamer', - 'preset_modes': list([ - 'home', - 'asleep', - 'away', - 'vacation', - 'no_frost', - ]), - 'select_schedule': 'Badkamer Schema', + 'name': 'Thermostatic Radiator Badkamer 2', 'sensors': dict({ 'battery': 92, 'setpoint': 14.0, @@ -504,12 +598,6 @@ 'setpoint': 0.0, 'upper_bound': 2.0, }), - 'thermostat': dict({ - 'lower_bound': 0.0, - 'resolution': 0.01, - 'setpoint': 14.0, - 'upper_bound': 99.9, - }), 'vendor': 'Plugwise', 'zigbee_mac_address': 'ABCD012345670A08', }), @@ 
-537,7 +625,7 @@ 'cooling_present': False, 'gateway_id': 'fe799307f1624099878210aa0b9f1475', 'heater_id': '90986d591dcd426cae3ec3e8111ff730', - 'item_count': 340, + 'item_count': 364, 'notifications': dict({ 'af82e4ccf9c548528166d38e560662a4': dict({ 'warning': "Node Plug (with MAC address 000D6F000D13CB01, in room 'n.a.') has been unreachable since 23:03 2020-01-18. Please check the connection and restart the device.", diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index f846e818b6e..c0c1c00c68d 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -28,7 +28,7 @@ async def test_adam_climate_entity_attributes( hass: HomeAssistant, mock_smile_adam: MagicMock, init_integration: MockConfigEntry ) -> None: """Test creation of adam climate device environment.""" - state = hass.states.get("climate.zone_lisa_wk") + state = hass.states.get("climate.woonkamer") assert state assert state.state == HVACMode.AUTO assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] @@ -46,7 +46,7 @@ async def test_adam_climate_entity_attributes( assert state.attributes["max_temp"] == 35.0 assert state.attributes["target_temp_step"] == 0.1 - state = hass.states.get("climate.zone_thermostat_jessie") + state = hass.states.get("climate.jessie") assert state assert state.state == HVACMode.AUTO assert state.attributes["hvac_modes"] == [HVACMode.AUTO, HVACMode.HEAT] @@ -68,7 +68,7 @@ async def test_adam_2_climate_entity_attributes( hass: HomeAssistant, mock_smile_adam_2: MagicMock, init_integration: MockConfigEntry ) -> None: """Test creation of adam climate device environment.""" - state = hass.states.get("climate.anna") + state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.HEAT assert state.attributes["hvac_action"] == "preheating" @@ -78,7 +78,7 @@ async def test_adam_2_climate_entity_attributes( HVACMode.HEAT, ] - state = hass.states.get("climate.lisa_badkamer") + state = hass.states.get("climate.bathroom") assert state assert state.state == HVACMode.AUTO assert state.attributes["hvac_action"] == "idle" @@ -96,7 +96,7 @@ async def test_adam_3_climate_entity_attributes( freezer: FrozenDateTimeFactory, ) -> None: """Test creation of adam climate device environment.""" - state = hass.states.get("climate.anna") + state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.COOL assert state.attributes["hvac_action"] == "cooling" @@ -109,7 +109,7 @@ async def test_adam_3_climate_entity_attributes( data.devices["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = ( "heating" ) - data.devices["ad4838d7d35c4d6ea796ee12ae5aedf8"]["control_state"] = "heating" + data.devices["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = "heating" data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ "cooling_state" ] = False @@ -121,7 +121,7 @@ async def test_adam_3_climate_entity_attributes( async_fire_time_changed(hass) await hass.async_block_till_done() - state = hass.states.get("climate.anna") + state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.HEAT assert state.attributes["hvac_action"] == "heating" @@ -135,7 +135,7 @@ async def test_adam_3_climate_entity_attributes( data.devices["da224107914542988a88561b4452b0f6"]["select_regulation_mode"] = ( "cooling" ) - data.devices["ad4838d7d35c4d6ea796ee12ae5aedf8"]["control_state"] = "cooling" + 
data.devices["f2bf9048bef64cc5b6d5110154e33c81"]["control_state"] = "cooling" data.devices["056ee145a816487eaa69243c3280f8bf"]["binary_sensors"][ "cooling_state" ] = True @@ -147,7 +147,7 @@ async def test_adam_3_climate_entity_attributes( async_fire_time_changed(hass) await hass.async_block_till_done() - state = hass.states.get("climate.anna") + state = hass.states.get("climate.living_room") assert state assert state.state == HVACMode.COOL assert state.attributes["hvac_action"] == "cooling" @@ -168,7 +168,7 @@ async def test_adam_climate_adjust_negative_testing( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.zone_lisa_wk", "temperature": 25}, + {"entity_id": "climate.woonkamer", "temperature": 25}, blocking=True, ) @@ -180,7 +180,7 @@ async def test_adam_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.zone_lisa_wk", "temperature": 25}, + {"entity_id": "climate.woonkamer", "temperature": 25}, blocking=True, ) assert mock_smile_adam.set_temperature.call_count == 1 @@ -192,7 +192,7 @@ async def test_adam_climate_entity_climate_changes( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - "entity_id": "climate.zone_lisa_wk", + "entity_id": "climate.woonkamer", "hvac_mode": "heat", "temperature": 25, }, @@ -207,14 +207,14 @@ async def test_adam_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {"entity_id": "climate.zone_lisa_wk", "temperature": 150}, + {"entity_id": "climate.woonkamer", "temperature": 150}, blocking=True, ) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {"entity_id": "climate.zone_lisa_wk", "preset_mode": "away"}, + {"entity_id": "climate.woonkamer", "preset_mode": "away"}, blocking=True, ) assert mock_smile_adam.set_preset.call_count == 1 @@ -225,7 +225,7 @@ async def test_adam_climate_entity_climate_changes( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, - {"entity_id": "climate.zone_lisa_wk", "hvac_mode": "heat"}, + {"entity_id": "climate.woonkamer", "hvac_mode": "heat"}, blocking=True, ) assert mock_smile_adam.set_schedule_state.call_count == 2 @@ -238,7 +238,7 @@ async def test_adam_climate_entity_climate_changes( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - "entity_id": "climate.zone_thermostat_jessie", + "entity_id": "climate.jessie", "hvac_mode": "dry", }, blocking=True, diff --git a/tests/components/plugwise/test_init.py b/tests/components/plugwise/test_init.py index 5b276d5018d..3b9881c9e3d 100644 --- a/tests/components/plugwise/test_init.py +++ b/tests/components/plugwise/test_init.py @@ -34,17 +34,18 @@ SECONDARY_ID = ( TOM = { "01234567890abcdefghijklmnopqrstu": { "available": True, - "dev_class": "thermo_sensor", + "dev_class": "thermostatic_radiator_valve", "firmware": "2020-11-04T01:00:00+01:00", "hardware": "1", "location": "f871b8c4d63549319221e294e4f88074", "model": "Tom/Floor", - "name": "Tom Zolder", + "name": "Tom Badkamer 2", "binary_sensors": { "low_battery": False, }, "sensors": { "battery": 99, + "setpoint": 18.0, "temperature": 18.6, "temperature_difference": 2.3, "valve_position": 0.0, @@ -246,7 +247,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 31 + == 38 ) assert ( len( @@ -254,11 +255,19 @@ async def test_update_device( device_registry, mock_config_entry.entry_id ) ) - == 6 + == 8 ) # Add a 2nd Tom/Floor data.devices.update(TOM) + 
data.devices["f871b8c4d63549319221e294e4f88074"]["thermostats"].update( + { + "secondary": [ + "01234567890abcdefghijklmnopqrstu", + "1772a4ea304041adb83f357b751341ff", + ] + } + ) with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) @@ -270,7 +279,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 37 + == 45 ) assert ( len( @@ -278,7 +287,7 @@ async def test_update_device( device_registry, mock_config_entry.entry_id ) ) - == 7 + == 9 ) item_list: list[str] = [] for device_entry in list(device_registry.devices.values()): @@ -286,6 +295,9 @@ async def test_update_device( assert "01234567890abcdefghijklmnopqrstu" in item_list # Remove the existing Tom/Floor + data.devices["f871b8c4d63549319221e294e4f88074"]["thermostats"].update( + {"secondary": ["01234567890abcdefghijklmnopqrstu"]} + ) data.devices.pop("1772a4ea304041adb83f357b751341ff") with patch(HA_PLUGWISE_SMILE_ASYNC_UPDATE, return_value=data): freezer.tick(timedelta(minutes=1)) @@ -298,7 +310,7 @@ async def test_update_device( entity_registry, mock_config_entry.entry_id ) ) - == 31 + == 38 ) assert ( len( @@ -306,7 +318,7 @@ async def test_update_device( device_registry, mock_config_entry.entry_id ) ) - == 6 + == 8 ) item_list: list[str] = [] for device_entry in list(device_registry.devices.values()): diff --git a/tests/components/plugwise/test_select.py b/tests/components/plugwise/test_select.py index f521787714b..0fab41cdbae 100644 --- a/tests/components/plugwise/test_select.py +++ b/tests/components/plugwise/test_select.py @@ -18,7 +18,7 @@ async def test_adam_select_entities( ) -> None: """Test a thermostat Select.""" - state = hass.states.get("select.zone_lisa_wk_thermostat_schedule") + state = hass.states.get("select.woonkamer_thermostat_schedule") assert state assert state.state == "GF7 Woonkamer" @@ -32,7 +32,7 @@ async def test_adam_change_select_entity( SELECT_DOMAIN, SERVICE_SELECT_OPTION, { - ATTR_ENTITY_ID: "select.zone_lisa_wk_thermostat_schedule", + ATTR_ENTITY_ID: "select.woonkamer_thermostat_schedule", ATTR_OPTION: "Badkamer Schema", }, blocking=True, diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 3f0e0b92056..37940df437b 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -5,6 +5,7 @@ from functools import lru_cache import logging import os from pathlib import Path +import sys from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory @@ -70,6 +71,9 @@ async def test_basic_usage(hass: HomeAssistant, tmp_path: Path) -> None: await hass.async_block_till_done() +@pytest.mark.skipif( + sys.version_info >= (3, 13), reason="not yet available on Python 3.13" +) async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: """Test we can setup and the service is registered.""" test_dir = tmp_path / "profiles" @@ -101,6 +105,24 @@ async def test_memory_usage(hass: HomeAssistant, tmp_path: Path) -> None: await hass.async_block_till_done() +@pytest.mark.skipif(sys.version_info < (3, 13), reason="still works on python 3.12") +async def test_memory_usage_py313(hass: HomeAssistant, tmp_path: Path) -> None: + """Test raise an error on python3.13.""" + entry = MockConfigEntry(domain=DOMAIN) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert hass.services.has_service(DOMAIN, SERVICE_MEMORY) + with 
pytest.raises( + HomeAssistantError, + match="Memory profiling is not supported on Python 3.13. Please use Python 3.12.", + ): + await hass.services.async_call( + DOMAIN, SERVICE_MEMORY, {CONF_SECONDS: 0.000001}, blocking=True + ) + + async def test_object_growth_logging( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, diff --git a/tests/components/pure_energie/test_init.py b/tests/components/pure_energie/test_init.py index 0dbd8a753e6..c0d07248664 100644 --- a/tests/components/pure_energie/test_init.py +++ b/tests/components/pure_energie/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from gridnet import GridNetConnectionError import pytest -from homeassistant.components.pure_energie.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -32,7 +31,6 @@ async def test_load_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert not hass.data.get(DOMAIN) assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/rachio/test_config_flow.py b/tests/components/rachio/test_config_flow.py index 1eaec1bc46e..586b31b092f 100644 --- a/tests/components/rachio/test_config_flow.py +++ b/tests/components/rachio/test_config_flow.py @@ -183,3 +183,16 @@ async def test_form_homekit_ignored(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_options_flow(hass: HomeAssistant) -> None: + """Test option flow.""" + entry = MockConfigEntry(domain=DOMAIN, data={CONF_API_KEY: "api_key"}) + entry.add_to_hass(hass) + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + # This should be improved at a later stage to increase test coverage + hass.config_entries.options.async_abort(result["flow_id"]) diff --git a/tests/components/rainbird/test_config_flow.py b/tests/components/rainbird/test_config_flow.py index 87506ad656c..6e76943f202 100644 --- a/tests/components/rainbird/test_config_flow.py +++ b/tests/components/rainbird/test_config_flow.py @@ -56,7 +56,7 @@ async def mock_setup() -> AsyncGenerator[AsyncMock]: yield mock_setup -async def complete_flow(hass: HomeAssistant) -> FlowResult: +async def complete_flow(hass: HomeAssistant, password: str = PASSWORD) -> FlowResult: """Start the config flow and enter the host and password.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -268,6 +268,59 @@ async def test_controller_cannot_connect( assert not mock_setup.mock_calls +async def test_controller_invalid_auth( + hass: HomeAssistant, + mock_setup: Mock, + responses: list[AiohttpClientMockResponse], + aioclient_mock: AiohttpClientMocker, +) -> None: + """Test an invalid password.""" + + responses.clear() + responses.extend( + [ + # Incorrect password response + AiohttpClientMockResponse("POST", URL, status=HTTPStatus.FORBIDDEN), + AiohttpClientMockResponse("POST", URL, status=HTTPStatus.FORBIDDEN), + # Second attempt with the correct password + mock_response(SERIAL_RESPONSE), + mock_json_response(WIFI_PARAMS_RESPONSE), + ] + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert 
not result.get("errors") + assert "flow_id" in result + + # Simulate authentication error + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_PASSWORD: "wrong-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert result.get("errors") == {"base": "invalid_auth"} + + assert not mock_setup.mock_calls + + # Correct the form and enter the password again and setup completes + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_PASSWORD: PASSWORD}, + ) + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == HOST + assert "result" in result + assert dict(result["result"].data) == CONFIG_ENTRY_DATA + assert result["result"].unique_id == MAC_ADDRESS_UNIQUE_ID + + assert len(mock_setup.mock_calls) == 1 + + async def test_controller_timeout( hass: HomeAssistant, mock_setup: Mock, @@ -286,6 +339,67 @@ assert not mock_setup.mock_calls +@pytest.mark.parametrize( + ("responses", "config_entry_data"), + [ + ( + [ + # First attempt simulate the wrong password + AiohttpClientMockResponse("POST", URL, status=HTTPStatus.FORBIDDEN), + AiohttpClientMockResponse("POST", URL, status=HTTPStatus.FORBIDDEN), + # Second attempt simulate the correct password + mock_response(SERIAL_RESPONSE), + mock_json_response(WIFI_PARAMS_RESPONSE), + ], + { + **CONFIG_ENTRY_DATA, + CONF_PASSWORD: "old-password", + }, + ), + ], +) +async def test_reauth_flow( + hass: HomeAssistant, + mock_setup: Mock, + config_entry: MockConfigEntry, +) -> None: + """Test the reauth flow with a wrong password followed by the correct one.""" + assert config_entry.data.get(CONF_PASSWORD) == "old-password" + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result.get("step_id") == "reauth_confirm" + assert not result.get("errors") + + # Simulate the wrong password + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: "incorrect_password"}, + ) + assert result.get("type") == FlowResultType.FORM + assert result.get("step_id") == "reauth_confirm" + assert result.get("errors") == {"base": "invalid_auth"} + + # Enter the correct password and complete the flow + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_PASSWORD: PASSWORD}, + ) + assert result.get("type") == FlowResultType.ABORT + assert result.get("reason") == "reauth_successful" + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + entry = entries[0] + assert entry.unique_id == MAC_ADDRESS_UNIQUE_ID + assert entry.data.get(CONF_PASSWORD) == PASSWORD + + assert len(mock_setup.mock_calls) == 1 + + async def test_options_flow(hass: HomeAssistant, mock_setup: Mock) -> None: """Test config flow options.""" diff --git a/tests/components/rainbird/test_init.py b/tests/components/rainbird/test_init.py index 5b2e2ea6d1b..01e0c4458e4 100644 --- a/tests/components/rainbird/test_init.py +++ b/tests/components/rainbird/test_init.py @@ -45,17 +45,19 @@ async def test_init_success( @pytest.mark.parametrize( - ("config_entry_data", "responses", "config_entry_state"), + ("config_entry_data", "responses", "config_entry_state", "config_flow_steps"), [ ( CONFIG_ENTRY_DATA, [mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE)], ConfigEntryState.SETUP_RETRY, + [], ), ( 
CONFIG_ENTRY_DATA, [mock_response_error(HTTPStatus.INTERNAL_SERVER_ERROR)], ConfigEntryState.SETUP_RETRY, + [], ), ( CONFIG_ENTRY_DATA, @@ -64,6 +66,7 @@ async def test_init_success( mock_response_error(HTTPStatus.SERVICE_UNAVAILABLE), ], ConfigEntryState.SETUP_RETRY, + [], ), ( CONFIG_ENTRY_DATA, @@ -72,6 +75,13 @@ async def test_init_success( mock_response_error(HTTPStatus.INTERNAL_SERVER_ERROR), ], ConfigEntryState.SETUP_RETRY, + [], + ), + ( + CONFIG_ENTRY_DATA, + [mock_response_error(HTTPStatus.FORBIDDEN)], + ConfigEntryState.SETUP_ERROR, + ["reauth_confirm"], ), ], ids=[ @@ -79,17 +89,22 @@ async def test_init_success( "server-error", "coordinator-unavailable", "coordinator-server-error", + "forbidden", ], ) async def test_communication_failure( hass: HomeAssistant, config_entry: MockConfigEntry, config_entry_state: list[ConfigEntryState], + config_flow_steps: list[str], ) -> None: """Test unable to talk to device on startup, which fails setup.""" await hass.config_entries.async_setup(config_entry.entry_id) assert config_entry.state == config_entry_state + flows = hass.config_entries.flow.async_progress() + assert [flow["step_id"] for flow in flows] == config_flow_steps + @pytest.mark.parametrize( ("config_entry_unique_id", "config_entry_data"), diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index e0b3f7ca8a8..f721a260c14 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -112,6 +112,9 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" + assert recorder_mock.states_manager.oldest_ts is None + oldest_ts = recorder_mock.states_manager.oldest_ts + await _add_test_states(hass) # make sure we start with 6 states @@ -127,6 +130,10 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 + assert recorder_mock.states_manager.oldest_ts != oldest_ts + assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts + oldest_ts = recorder_mock.states_manager.oldest_ts + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id purge_before = dt_util.utcnow() - timedelta(days=4) @@ -140,6 +147,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished + # states_manager.oldest_ts is not updated until after the purge is complete + assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -162,6 +171,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> finished = purge_old_data(recorder_mock, purge_before, repack=False) assert finished + # states_manager.oldest_ts should now be updated + assert recorder_mock.states_manager.oldest_ts != oldest_ts with session_scope(hass=hass) as session: states = session.query(States) @@ -169,6 +180,10 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> assert states.count() == 2 assert state_attributes.count() == 1 + assert recorder_mock.states_manager.oldest_ts != oldest_ts + assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts + oldest_ts = recorder_mock.states_manager.oldest_ts + assert "test.recorder2" in 
recorder_mock.states_manager._last_committed_id # run purge_old_data again @@ -181,6 +196,8 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> repack=False, ) assert not finished + # states_manager.oldest_ts is not updated until after the purge is complete + assert recorder_mock.states_manager.oldest_ts == oldest_ts with session_scope(hass=hass) as session: assert states.count() == 0 @@ -352,6 +369,8 @@ async def test_purge_old_recorder_runs( with session_scope(hass=hass) as session: recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 7 + # Make sure we have a run that is not closed + assert sum(run.end is None for run in recorder_runs) == 1 purge_before = dt_util.utcnow() @@ -376,7 +395,9 @@ async def test_purge_old_recorder_runs( with session_scope(hass=hass) as session: recorder_runs = session.query(RecorderRuns) - assert recorder_runs.count() == 1 + assert recorder_runs.count() == 3 + # Make sure we did not purge the unclosed run + assert sum(run.end is None for run in recorder_runs) == 1 async def test_purge_old_statistics_runs( diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index 0754b2e911c..468fd38c855 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -347,7 +347,7 @@ async def test_purge_old_recorder_runs( with session_scope(hass=hass) as session: recorder_runs = session.query(RecorderRuns) - assert recorder_runs.count() == 1 + assert recorder_runs.count() == 3 async def test_purge_old_statistics_runs( diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 4904bdecc4d..7b8eef6b16f 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -9,6 +9,7 @@ import threading from typing import Any from unittest.mock import MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from sqlalchemy import lambda_stmt, text from sqlalchemy.engine.result import ChunkedIteratorResult @@ -1052,55 +1053,94 @@ async def test_execute_stmt_lambda_element( assert rows == ["mock_row"] -@pytest.mark.freeze_time(datetime(2022, 10, 21, 7, 25, tzinfo=UTC)) -async def test_resolve_period(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("start_time", "periods"), + [ + ( + # Test 00:25 local time, during DST + datetime(2022, 10, 21, 7, 25, 50, 123, tzinfo=UTC), + { + "hour": ["2022-10-21T07:00:00+00:00", "2022-10-21T08:00:00+00:00"], + "hour-1": ["2022-10-21T06:00:00+00:00", "2022-10-21T07:00:00+00:00"], + "day": ["2022-10-21T07:00:00+00:00", "2022-10-22T07:00:00+00:00"], + "day-1": ["2022-10-20T07:00:00+00:00", "2022-10-21T07:00:00+00:00"], + "week": ["2022-10-17T07:00:00+00:00", "2022-10-24T07:00:00+00:00"], + "week-1": ["2022-10-10T07:00:00+00:00", "2022-10-17T07:00:00+00:00"], + "month": ["2022-10-01T07:00:00+00:00", "2022-11-01T07:00:00+00:00"], + "month-1": ["2022-09-01T07:00:00+00:00", "2022-10-01T07:00:00+00:00"], + "year": ["2022-01-01T08:00:00+00:00", "2023-01-01T08:00:00+00:00"], + "year-1": ["2021-01-01T08:00:00+00:00", "2022-01-01T08:00:00+00:00"], + }, + ), + ( + # Test 00:25 local time, standard time, February 28th a leap year + datetime(2024, 2, 28, 8, 25, 50, 123, tzinfo=UTC), + { + "hour": ["2024-02-28T08:00:00+00:00", "2024-02-28T09:00:00+00:00"], + "hour-1": ["2024-02-28T07:00:00+00:00", "2024-02-28T08:00:00+00:00"], + "day": ["2024-02-28T08:00:00+00:00", 
"2024-02-29T08:00:00+00:00"], + "day-1": ["2024-02-27T08:00:00+00:00", "2024-02-28T08:00:00+00:00"], + "week": ["2024-02-26T08:00:00+00:00", "2024-03-04T08:00:00+00:00"], + "week-1": ["2024-02-19T08:00:00+00:00", "2024-02-26T08:00:00+00:00"], + "month": ["2024-02-01T08:00:00+00:00", "2024-03-01T08:00:00+00:00"], + "month-1": ["2024-01-01T08:00:00+00:00", "2024-02-01T08:00:00+00:00"], + "year": ["2024-01-01T08:00:00+00:00", "2025-01-01T08:00:00+00:00"], + "year-1": ["2023-01-01T08:00:00+00:00", "2024-01-01T08:00:00+00:00"], + }, + ), + ], +) +async def test_resolve_period( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + start_time: datetime, + periods: dict[str, tuple[str, str]], +) -> None: """Test statistic_during_period.""" + assert hass.config.time_zone == "US/Pacific" + freezer.move_to(start_time) now = dt_util.utcnow() start_t, end_t = resolve_period({"calendar": {"period": "hour"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T08:00:00+00:00" - - start_t, end_t = resolve_period({"calendar": {"period": "hour"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T08:00:00+00:00" + assert start_t.isoformat() == periods["hour"][0] + assert end_t.isoformat() == periods["hour"][1] start_t, end_t = resolve_period({"calendar": {"period": "hour", "offset": -1}}) - assert start_t.isoformat() == "2022-10-21T06:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T07:00:00+00:00" + assert start_t.isoformat() == periods["hour-1"][0] + assert end_t.isoformat() == periods["hour-1"][1] start_t, end_t = resolve_period({"calendar": {"period": "day"}}) - assert start_t.isoformat() == "2022-10-21T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-22T07:00:00+00:00" + assert start_t.isoformat() == periods["day"][0] + assert end_t.isoformat() == periods["day"][1] start_t, end_t = resolve_period({"calendar": {"period": "day", "offset": -1}}) - assert start_t.isoformat() == "2022-10-20T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-21T07:00:00+00:00" + assert start_t.isoformat() == periods["day-1"][0] + assert end_t.isoformat() == periods["day-1"][1] start_t, end_t = resolve_period({"calendar": {"period": "week"}}) - assert start_t.isoformat() == "2022-10-17T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-24T07:00:00+00:00" + assert start_t.isoformat() == periods["week"][0] + assert end_t.isoformat() == periods["week"][1] start_t, end_t = resolve_period({"calendar": {"period": "week", "offset": -1}}) - assert start_t.isoformat() == "2022-10-10T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-17T07:00:00+00:00" + assert start_t.isoformat() == periods["week-1"][0] + assert end_t.isoformat() == periods["week-1"][1] start_t, end_t = resolve_period({"calendar": {"period": "month"}}) - assert start_t.isoformat() == "2022-10-01T07:00:00+00:00" - assert end_t.isoformat() == "2022-11-01T07:00:00+00:00" + assert start_t.isoformat() == periods["month"][0] + assert end_t.isoformat() == periods["month"][1] start_t, end_t = resolve_period({"calendar": {"period": "month", "offset": -1}}) - assert start_t.isoformat() == "2022-09-01T07:00:00+00:00" - assert end_t.isoformat() == "2022-10-01T07:00:00+00:00" + assert start_t.isoformat() == periods["month-1"][0] + assert end_t.isoformat() == periods["month-1"][1] start_t, end_t = resolve_period({"calendar": {"period": "year"}}) - assert start_t.isoformat() == "2022-01-01T08:00:00+00:00" - assert end_t.isoformat() == 
"2023-01-01T08:00:00+00:00" + assert start_t.isoformat() == periods["year"][0] + assert end_t.isoformat() == periods["year"][1] start_t, end_t = resolve_period({"calendar": {"period": "year", "offset": -1}}) - assert start_t.isoformat() == "2021-01-01T08:00:00+00:00" - assert end_t.isoformat() == "2022-01-01T08:00:00+00:00" + assert start_t.isoformat() == periods["year-1"][0] + assert end_t.isoformat() == periods["year-1"][1] # Fixed period assert resolve_period({}) == (None, None) diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index 547288d1cc3..403384aee9f 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -51,6 +51,16 @@ async def mock_recorder_before_hass( """Set up recorder.""" +AREA_SENSOR_FT_ATTRIBUTES = { + "device_class": "area", + "state_class": "measurement", + "unit_of_measurement": "ft²", +} +AREA_SENSOR_M_ATTRIBUTES = { + "device_class": "area", + "state_class": "measurement", + "unit_of_measurement": "m²", +} DISTANCE_SENSOR_FT_ATTRIBUTES = { "device_class": "distance", "state_class": "measurement", @@ -1247,6 +1257,9 @@ async def test_statistic_during_period_calendar( @pytest.mark.parametrize( ("attributes", "state", "value", "custom_units", "converted_value"), [ + (AREA_SENSOR_M_ATTRIBUTES, 10, 10, {"area": "cm²"}, 100000), + (AREA_SENSOR_M_ATTRIBUTES, 10, 10, {"area": "m²"}, 10), + (AREA_SENSOR_M_ATTRIBUTES, 10, 10, {"area": "ft²"}, 107.639), (DISTANCE_SENSOR_M_ATTRIBUTES, 10, 10, {"distance": "cm"}, 1000), (DISTANCE_SENSOR_M_ATTRIBUTES, 10, 10, {"distance": "m"}, 10), (DISTANCE_SENSOR_M_ATTRIBUTES, 10, 10, {"distance": "in"}, 10 / 0.0254), @@ -1434,6 +1447,7 @@ async def test_sum_statistics_during_period_unit_conversion( "custom_units", [ {"distance": "L"}, + {"area": "L"}, {"energy": "W"}, {"power": "Pa"}, {"pressure": "K"}, @@ -1678,6 +1692,8 @@ async def test_statistics_during_period_empty_statistic_ids( @pytest.mark.parametrize( ("units", "attributes", "display_unit", "statistics_unit", "unit_class"), [ + (US_CUSTOMARY_SYSTEM, AREA_SENSOR_M_ATTRIBUTES, "m²", "m²", "area"), + (METRIC_SYSTEM, AREA_SENSOR_M_ATTRIBUTES, "m²", "m²", "area"), (US_CUSTOMARY_SYSTEM, DISTANCE_SENSOR_M_ATTRIBUTES, "m", "m", "distance"), (METRIC_SYSTEM, DISTANCE_SENSOR_M_ATTRIBUTES, "m", "m", "distance"), ( @@ -1852,6 +1868,13 @@ async def test_list_statistic_ids( @pytest.mark.parametrize( ("attributes", "attributes2", "display_unit", "statistics_unit", "unit_class"), [ + ( + AREA_SENSOR_M_ATTRIBUTES, + AREA_SENSOR_FT_ATTRIBUTES, + "ft²", + "m²", + "area", + ), ( DISTANCE_SENSOR_M_ATTRIBUTES, DISTANCE_SENSOR_FT_ATTRIBUTES, diff --git a/tests/components/renault/test_config_flow.py b/tests/components/renault/test_config_flow.py index 234d1dca069..56e0c8a99d7 100644 --- a/tests/components/renault/test_config_flow.py +++ b/tests/components/renault/test_config_flow.py @@ -2,6 +2,7 @@ from unittest.mock import AsyncMock, PropertyMock, patch +import aiohttp import pytest from renault_api.gigya.exceptions import InvalidCredentialsException from renault_api.kamereon import schemas @@ -23,20 +24,35 @@ from tests.common import MockConfigEntry, load_fixture pytestmark = pytest.mark.usefixtures("mock_setup_entry") +@pytest.mark.parametrize( + ("exception", "error"), + [ + (Exception, "unknown"), + (aiohttp.ClientConnectionError, "cannot_connect"), + ( + InvalidCredentialsException(403042, "invalid loginID or password"), + "invalid_credentials", + ), + ], +) async def 
test_config_flow_single_account( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + exception: Exception | type[Exception], + error: str, ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} + assert result["step_id"] == "user" + assert not result["errors"] - # Failed credentials + # Raise error with patch( "renault_api.renault_session.RenaultSession.login", - side_effect=InvalidCredentialsException(403042, "invalid loginID or password"), + side_effect=exception, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -48,7 +64,8 @@ async def test_config_flow_single_account( ) assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "invalid_credentials"} + assert result["step_id"] == "user" + assert result["errors"] == {"base": error} renault_account = AsyncMock() type(renault_account).account_id = PropertyMock(return_value="account_id_1") @@ -256,3 +273,6 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non assert result3["type"] is FlowResultType.ABORT assert result3["reason"] == "reauth_successful" + + assert config_entry.data[CONF_USERNAME] == "email@test.com" + assert config_entry.data[CONF_PASSWORD] == "any" diff --git a/tests/components/renault/test_services.py b/tests/components/renault/test_services.py index bdb233f4d97..970d7cf4ad8 100644 --- a/tests/components/renault/test_services.py +++ b/tests/components/renault/test_services.py @@ -30,7 +30,7 @@ from homeassistant.const import ( ATTR_NAME, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from .const import MOCK_VEHICLES @@ -341,12 +341,14 @@ async def test_service_invalid_device_id( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - data = {ATTR_VEHICLE: "VF1AAAAA555777999"} + data = {ATTR_VEHICLE: "some_random_id"} - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError) as err: await hass.services.async_call( DOMAIN, SERVICE_AC_CANCEL, service_data=data, blocking=True ) + assert err.value.translation_key == "invalid_device_id" + assert err.value.translation_placeholders == {"device_id": "some_random_id"} async def test_service_invalid_device_id2( @@ -372,7 +374,9 @@ async def test_service_invalid_device_id2( data = {ATTR_VEHICLE: device_id} - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError) as err: await hass.services.async_call( DOMAIN, SERVICE_AC_CANCEL, service_data=data, blocking=True ) + assert err.value.translation_key == "no_config_entry_for_device" + assert err.value.translation_placeholders == {"device_id": "REG-NUMBER"} diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index 94192c3502e..81865d98801 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -86,6 +86,7 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.sw_version_update_required = False host_mock.hardware_version = "IPC_00000" host_mock.sw_version = "v1.0.0.0.0.0000" + host_mock.sw_upload_progress.return_value = 100 host_mock.manufacturer = "Reolink" host_mock.model = 
TEST_HOST_MODEL host_mock.item_number = TEST_ITEM_NUMBER diff --git a/tests/components/reolink/snapshots/test_diagnostics.ambr b/tests/components/reolink/snapshots/test_diagnostics.ambr index 33e9c78c550..71c5397fbd1 100644 --- a/tests/components/reolink/snapshots/test_diagnostics.ambr +++ b/tests/components/reolink/snapshots/test_diagnostics.ambr @@ -118,8 +118,8 @@ 'null': 2, }), 'GetPtzCurPos': dict({ - '0': 1, - 'null': 1, + '0': 2, + 'null': 2, }), 'GetPtzGuard': dict({ '0': 2, diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py index 2286ca5d266..c777e4064f0 100644 --- a/tests/components/reolink/test_host.py +++ b/tests/components/reolink/test_host.py @@ -21,13 +21,15 @@ from homeassistant.components.reolink.host import ( ) from homeassistant.components.webhook import async_handle_webhook from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.network import NoURLAvailableError from homeassistant.util.aiohttp import MockRequest +from .conftest import TEST_NVR_NAME + from tests.common import MockConfigEntry, async_fire_time_changed from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -92,23 +94,32 @@ async def test_webhook_callback( entity_registry: er.EntityRegistry, ) -> None: """Test webhook callback with motion sensor.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) + reolink_connect.motion_detected.return_value = False + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion" webhook_id = config_entry.runtime_data.host.webhook_id + unique_id = config_entry.runtime_data.host.unique_id signal_all = MagicMock() signal_ch = MagicMock() - async_dispatcher_connect(hass, f"{webhook_id}_all", signal_all) - async_dispatcher_connect(hass, f"{webhook_id}_0", signal_ch) + async_dispatcher_connect(hass, f"{unique_id}_all", signal_all) + async_dispatcher_connect(hass, f"{unique_id}_0", signal_ch) client = await hass_client_no_auth() + assert hass.states.get(entity_id).state == STATE_OFF + # test webhook callback success all channels + reolink_connect.motion_detected.return_value = True reolink_connect.ONVIF_event_callback.return_value = None await client.post(f"/api/webhook/{webhook_id}") signal_all.assert_called_once() + assert hass.states.get(entity_id).state == STATE_ON freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) async_fire_time_changed(hass) @@ -120,10 +131,14 @@ async def test_webhook_callback( await client.post(f"/api/webhook/{webhook_id}") signal_all.assert_not_called() + assert hass.states.get(entity_id).state == STATE_ON + # test webhook callback success single channel + reolink_connect.motion_detected.return_value = False reolink_connect.ONVIF_event_callback.return_value = [0] await client.post(f"/api/webhook/{webhook_id}", data="test_data") signal_ch.assert_called_once() + assert hass.states.get(entity_id).state == STATE_OFF # test webhook callback single channel with error in event callback 
signal_ch.reset_mock() diff --git a/tests/components/reolink/test_update.py b/tests/components/reolink/test_update.py index a13009204d7..a6cfe862963 100644 --- a/tests/components/reolink/test_update.py +++ b/tests/components/reolink/test_update.py @@ -1,5 +1,7 @@ """Test the Reolink update platform.""" +import asyncio +from datetime import timedelta from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory @@ -7,12 +9,13 @@ import pytest from reolink_aio.exceptions import ReolinkError from reolink_aio.software_version import NewSoftwareVersion -from homeassistant.components.reolink.update import POLL_AFTER_INSTALL +from homeassistant.components.reolink.update import POLL_AFTER_INSTALL, POLL_PROGRESS from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.util.dt import utcnow from .conftest import TEST_CAM_NAME, TEST_NVR_NAME @@ -73,6 +76,7 @@ async def test_update_firm( ) -> None: """Test update state when update available with firmware info from reolink.com.""" reolink_connect.camera_name.return_value = TEST_CAM_NAME + reolink_connect.sw_upload_progress.return_value = 100 reolink_connect.camera_sw_version.return_value = "v1.1.0.0.0.0000" new_firmware = NewSoftwareVersion( version_string="v3.3.0.226_23031644", @@ -88,6 +92,8 @@ async def test_update_firm( entity_id = f"{Platform.UPDATE}.{entity_name}_firmware" assert hass.states.get(entity_id).state == STATE_ON + assert not hass.states.get(entity_id).attributes["in_progress"] + assert hass.states.get(entity_id).attributes["update_percentage"] is None # release notes client = await hass_ws_client(hass) @@ -113,6 +119,22 @@ async def test_update_firm( ) reolink_connect.update_firmware.assert_called() + reolink_connect.sw_upload_progress.return_value = 50 + freezer.tick(POLL_PROGRESS) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).attributes["in_progress"] + assert hass.states.get(entity_id).attributes["update_percentage"] == 50 + + reolink_connect.sw_upload_progress.return_value = 100 + freezer.tick(POLL_AFTER_INSTALL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert not hass.states.get(entity_id).attributes["in_progress"] + assert hass.states.get(entity_id).attributes["update_percentage"] is None + reolink_connect.update_firmware.side_effect = ReolinkError("Test error") with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -132,3 +154,53 @@ async def test_update_firm( assert hass.states.get(entity_id).state == STATE_OFF reolink_connect.update_firmware.side_effect = None + + +@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME]) +async def test_update_firm_keeps_available( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + hass_ws_client: WebSocketGenerator, + entity_name: str, +) -> None: + """Test update entity keeps being available during update.""" + reolink_connect.camera_name.return_value = TEST_CAM_NAME + reolink_connect.camera_sw_version.return_value = "v1.1.0.0.0.0000" + new_firmware = NewSoftwareVersion( + version_string="v3.3.0.226_23031644", + download_url=TEST_DOWNLOAD_URL, + release_notes=TEST_RELEASE_NOTES, + ) + 
reolink_connect.firmware_update_available.return_value = new_firmware + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.UPDATE}.{entity_name}_firmware" + assert hass.states.get(entity_id).state == STATE_ON + + async def mock_update_firmware(*args, **kwargs) -> None: + await asyncio.sleep(0.000005) + + reolink_connect.update_firmware = mock_update_firmware + + # test install + with patch("homeassistant.components.reolink.update.POLL_PROGRESS", 0.000001): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + reolink_connect.session_active = False + async_fire_time_changed(hass, utcnow() + timedelta(seconds=1)) + await hass.async_block_till_done() + + # still available + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.session_active = True diff --git a/tests/components/repairs/test_init.py b/tests/components/repairs/test_init.py index edb6e509841..e78563503f1 100644 --- a/tests/components/repairs/test_init.py +++ b/tests/components/repairs/test_init.py @@ -21,6 +21,16 @@ from tests.common import mock_platform from tests.typing import WebSocketGenerator +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.test.issues.even_worse.title", + "component.test.issues.even_worse.description", + "component.test.issues.abc_123.title", + ] + ], +) @pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_create_update_issue( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -160,6 +170,14 @@ async def test_create_issue_invalid_version( assert msg["result"] == {"issues": []} +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.test.issues.abc_123.title", + ] + ], +) @pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_ignore_issue( hass: HomeAssistant, hass_ws_client: WebSocketGenerator @@ -329,6 +347,10 @@ async def test_ignore_issue( } +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) @pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_delete_issue( hass: HomeAssistant, @@ -483,6 +505,10 @@ async def test_non_compliant_platform( assert list(hass.data[DOMAIN]["platforms"].keys()) == ["fake_integration"] +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) @pytest.mark.freeze_time("2022-07-21 08:22:00") async def test_sync_methods( hass: HomeAssistant, diff --git a/tests/components/repairs/test_websocket_api.py b/tests/components/repairs/test_websocket_api.py index bb3d50f9eb5..399292fb83f 100644 --- a/tests/components/repairs/test_websocket_api.py +++ b/tests/components/repairs/test_websocket_api.py @@ -151,6 +151,10 @@ async def mock_repairs_integration(hass: HomeAssistant) -> None: ) +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) async def test_dismiss_issue( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -234,6 +238,10 @@ async def test_dismiss_issue( } +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) async def test_fix_non_existing_issue( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -281,10 +289,20 @@ async def test_fix_non_existing_issue( 
@pytest.mark.parametrize( - ("domain", "step", "description_placeholders"), + ("domain", "step", "description_placeholders", "ignore_translations"), [ - ("fake_integration", "custom_step", None), - ("fake_integration_default_handler", "confirm", {"abc": "123"}), + ( + "fake_integration", + "custom_step", + None, + ["component.fake_integration.issues.abc_123.title"], + ), + ( + "fake_integration_default_handler", + "confirm", + {"abc": "123"}, + ["component.fake_integration_default_handler.issues.abc_123.title"], + ), ], ) async def test_fix_issue( @@ -380,6 +398,10 @@ async def test_fix_issue_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) async def test_get_progress_unauth( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -411,6 +433,10 @@ async def test_get_progress_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED +@pytest.mark.parametrize( + "ignore_translations", + ["component.fake_integration.issues.abc_123.title"], +) async def test_step_unauth( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -442,6 +468,16 @@ async def test_step_unauth( assert resp.status == HTTPStatus.UNAUTHORIZED +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.test.issues.even_worse.title", + "component.test.issues.even_worse.description", + "component.test.issues.abc_123.title", + ] + ], +) @pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_list_issues( hass: HomeAssistant, @@ -533,6 +569,15 @@ async def test_list_issues( } +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.fake_integration.issues.abc_123.title", + "component.fake_integration.issues.abc_123.fix_flow.abort.not_given", + ] + ], +) async def test_fix_issue_aborted( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -594,6 +639,16 @@ async def test_fix_issue_aborted( assert msg["result"]["issues"][0] == first_issue +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.test.issues.abc_123.title", + "component.test.issues.even_worse.title", + "component.test.issues.even_worse.description", + ] + ], +) @pytest.mark.freeze_time("2022-07-19 07:53:05") async def test_get_issue_data( hass: HomeAssistant, hass_ws_client: WebSocketGenerator diff --git a/tests/components/ring/snapshots/test_camera.ambr b/tests/components/ring/snapshots/test_camera.ambr index 4347f302c72..ec285b438b3 100644 --- a/tests/components/ring/snapshots/test_camera.ambr +++ b/tests/components/ring/snapshots/test_camera.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_states[camera.front-entry] +# name: test_states[camera.front_door_last_recording-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -11,7 +11,7 @@ 'disabled_by': None, 'domain': 'camera', 'entity_category': None, - 'entity_id': 'camera.front', + 'entity_id': 'camera.front_door_last_recording', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -23,88 +23,36 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Last recording', 'platform': 'ring', 'previous_unique_id': None, 'supported_features': 0, - 'translation_key': None, - 'unique_id': '765432', + 'translation_key': 'last_recording', + 'unique_id': '987654-last_recording', 'unit_of_measurement': None, }) # --- -# name: test_states[camera.front-state] +# name: test_states[camera.front_door_last_recording-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 
'access_token': '1caab5c3b3', 'attribution': 'Data provided by Ring.com', - 'entity_picture': '/api/camera_proxy/camera.front?token=1caab5c3b3', - 'friendly_name': 'Front', - 'last_video_id': None, - 'supported_features': , - 'video_url': None, - }), - 'context': , - 'entity_id': 'camera.front', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'idle', - }) -# --- -# name: test_states[camera.front_door-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'camera', - 'entity_category': None, - 'entity_id': 'camera.front_door', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'ring', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '987654', - 'unit_of_measurement': None, - }) -# --- -# name: test_states[camera.front_door-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'access_token': '1caab5c3b3', - 'attribution': 'Data provided by Ring.com', - 'entity_picture': '/api/camera_proxy/camera.front_door?token=1caab5c3b3', - 'friendly_name': 'Front Door', + 'entity_picture': '/api/camera_proxy/camera.front_door_last_recording?token=1caab5c3b3', + 'friendly_name': 'Front Door Last recording', 'last_video_id': None, 'motion_detection': True, 'supported_features': , 'video_url': None, }), 'context': , - 'entity_id': 'camera.front_door', + 'entity_id': 'camera.front_door_last_recording', 'last_changed': , 'last_reported': , 'last_updated': , 'state': 'idle', }) # --- -# name: test_states[camera.internal-entry] +# name: test_states[camera.front_door_live_view-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -116,7 +64,7 @@ 'disabled_by': None, 'domain': 'camera', 'entity_category': None, - 'entity_id': 'camera.internal', + 'entity_id': 'camera.front_door_live_view', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -128,29 +76,240 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': None, + 'original_name': 'Live view', 'platform': 'ring', 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '345678', + 'supported_features': , + 'translation_key': 'live_view', + 'unique_id': '987654-live_view', 'unit_of_measurement': None, }) # --- -# name: test_states[camera.internal-state] +# name: test_states[camera.front_door_live_view-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'access_token': '1caab5c3b3', 'attribution': 'Data provided by Ring.com', - 'entity_picture': '/api/camera_proxy/camera.internal?token=1caab5c3b3', - 'friendly_name': 'Internal', + 'entity_picture': '/api/camera_proxy/camera.front_door_live_view?token=1caab5c3b3', + 'friendly_name': 'Front Door Live view', + 'frontend_stream_type': , 'last_video_id': None, - 'motion_detection': True, - 'supported_features': , + 'supported_features': , 'video_url': None, }), 'context': , - 'entity_id': 'camera.internal', + 'entity_id': 'camera.front_door_live_view', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.front_last_recording-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.front_last_recording', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Last recording', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_recording', + 'unique_id': '765432-last_recording', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.front_last_recording-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.front_last_recording?token=1caab5c3b3', + 'friendly_name': 'Front Last recording', + 'last_video_id': None, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.front_last_recording', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.front_live_view-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.front_live_view', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Live view', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'live_view', + 'unique_id': '765432-live_view', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.front_live_view-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.front_live_view?token=1caab5c3b3', + 'friendly_name': 'Front Live view', + 'frontend_stream_type': , + 'last_video_id': None, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.front_live_view', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.internal_last_recording-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.internal_last_recording', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Last recording', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_recording', + 'unique_id': '345678-last_recording', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.internal_last_recording-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.internal_last_recording?token=1caab5c3b3', + 'friendly_name': 'Internal Last recording', + 'last_video_id': 
None, + 'motion_detection': True, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.internal_last_recording', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_states[camera.internal_live_view-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.internal_live_view', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Live view', + 'platform': 'ring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'live_view', + 'unique_id': '345678-live_view', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[camera.internal_live_view-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1caab5c3b3', + 'attribution': 'Data provided by Ring.com', + 'entity_picture': '/api/camera_proxy/camera.internal_live_view?token=1caab5c3b3', + 'friendly_name': 'Internal Live view', + 'frontend_stream_type': , + 'last_video_id': None, + 'supported_features': , + 'video_url': None, + }), + 'context': , + 'entity_id': 'camera.internal_live_view', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/ring/test_camera.py b/tests/components/ring/test_camera.py index 94ddc335dac..4b4f019fdf7 100644 --- a/tests/components/ring/test_camera.py +++ b/tests/components/ring/test_camera.py @@ -1,14 +1,22 @@ """The tests for the Ring switch platform.""" +import logging from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import make_mocked_request from freezegun.api import FrozenDateTimeFactory import pytest import ring_doorbell +from ring_doorbell.webrtcstream import RingWebRtcMessage from syrupy.assertion import SnapshotAssertion -from homeassistant.components import camera +from homeassistant.components.camera import ( + CameraEntityFeature, + StreamType, + async_get_image, + async_get_mjpeg_stream, + get_camera_from_entity_id, +) from homeassistant.components.ring.camera import FORCE_REFRESH_INTERVAL from homeassistant.components.ring.const import SCAN_INTERVAL from homeassistant.config_entries import SOURCE_REAUTH @@ -19,8 +27,10 @@ from homeassistant.helpers import entity_registry as er from homeassistant.util.aiohttp import MockStreamReader from .common import MockConfigEntry, setup_platform +from .device_mocks import FRONT_DEVICE_ID from tests.common import async_fire_time_changed, snapshot_platform +from tests.typing import WebSocketGenerator SMALLEST_VALID_JPEG = ( "ffd8ffe000104a46494600010101004800480000ffdb00430003020202020203020202030303030406040404040408060" @@ -30,6 +40,7 @@ SMALLEST_VALID_JPEG = ( SMALLEST_VALID_JPEG_BYTES = bytes.fromhex(SMALLEST_VALID_JPEG) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_states( hass: HomeAssistant, mock_ring_client: Mock, @@ -48,11 +59,12 @@ async def test_states( @pytest.mark.parametrize( ("entity_name", "expected_state", "friendly_name"), [ - ("camera.internal", True, "Internal"), - ("camera.front", None, "Front"), + ("camera.internal_last_recording", True, "Internal Last recording"), + ("camera.front_last_recording", None, "Front Last recording"), ], ids=["On", 
"Off"], ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_camera_motion_detection_state_reports_correctly( hass: HomeAssistant, mock_ring_client, @@ -68,40 +80,43 @@ async def test_camera_motion_detection_state_reports_correctly( assert state.attributes.get("friendly_name") == friendly_name +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_camera_motion_detection_can_be_turned_on_and_off( - hass: HomeAssistant, mock_ring_client + hass: HomeAssistant, + mock_ring_client, ) -> None: """Tests the siren turns on correctly.""" await setup_platform(hass, Platform.CAMERA) - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is not True await hass.services.async_call( "camera", "enable_motion_detection", - {"entity_id": "camera.front"}, + {"entity_id": "camera.front_last_recording"}, blocking=True, ) await hass.async_block_till_done() - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is True await hass.services.async_call( "camera", "disable_motion_detection", - {"entity_id": "camera.front"}, + {"entity_id": "camera.front_last_recording"}, blocking=True, ) await hass.async_block_till_done() - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is None +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_camera_motion_detection_not_supported( hass: HomeAssistant, mock_ring_client, @@ -121,21 +136,22 @@ async def test_camera_motion_detection_not_supported( await setup_platform(hass, Platform.CAMERA) - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is None await hass.services.async_call( "camera", "enable_motion_detection", - {"entity_id": "camera.front"}, + {"entity_id": "camera.front_last_recording"}, blocking=True, ) await hass.async_block_till_done() - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert state.attributes.get("motion_detection") is None assert ( - "Entity camera.front does not have motion detection capability" in caplog.text + "Entity camera.front_last_recording does not have motion detection capability" + in caplog.text ) @@ -148,6 +164,7 @@ async def test_camera_motion_detection_not_supported( ], ids=["Authentication", "Timeout", "Other"], ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_motion_detection_errors_when_turned_on( hass: HomeAssistant, mock_ring_client, @@ -168,7 +185,7 @@ async def test_motion_detection_errors_when_turned_on( await hass.services.async_call( "camera", "enable_motion_detection", - {"entity_id": "camera.front"}, + {"entity_id": "camera.front_last_recording"}, blocking=True, ) await hass.async_block_till_done() @@ -183,6 +200,7 @@ async def test_motion_detection_errors_when_turned_on( ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_camera_handle_mjpeg_stream( hass: HomeAssistant, mock_ring_client, @@ -195,7 +213,7 @@ async def test_camera_handle_mjpeg_stream( front_camera_mock = mock_ring_devices.get_device(765432) front_camera_mock.async_recording_url.return_value = None - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_last_recording") assert 
state is not None mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) @@ -203,7 +221,9 @@ async def test_camera_handle_mjpeg_stream( # history not updated yet front_camera_mock.async_history.assert_not_called() front_camera_mock.async_recording_url.assert_not_called() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is None # Video url will be none so no stream @@ -211,9 +231,11 @@ async def test_camera_handle_mjpeg_stream( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) front_camera_mock.async_history.assert_called_once() - front_camera_mock.async_recording_url.assert_called_once() + front_camera_mock.async_recording_url.assert_called() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is None # Stop the history updating so we can update the values manually @@ -222,8 +244,10 @@ async def test_camera_handle_mjpeg_stream( freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.async_recording_url.assert_called_once() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + front_camera_mock.async_recording_url.assert_called() + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is None # If the history id hasn't changed the camera will not check again for the video url @@ -235,13 +259,15 @@ async def test_camera_handle_mjpeg_stream( await hass.async_block_till_done(wait_background_tasks=True) front_camera_mock.async_recording_url.assert_not_called() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is None freezer.tick(FORCE_REFRESH_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.async_recording_url.assert_called_once() + front_camera_mock.async_recording_url.assert_called() # Now the stream should be returned stream_reader = MockStreamReader(SMALLEST_VALID_JPEG_BYTES) @@ -250,7 +276,9 @@ async def test_camera_handle_mjpeg_stream( mock_camera.return_value.open_camera = AsyncMock() mock_camera.return_value.close = AsyncMock() - stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + stream = await async_get_mjpeg_stream( + hass, mock_request, "camera.front_last_recording" + ) assert stream is not None # Check the stream has been read assert not await stream_reader.read(-1) @@ -267,7 +295,7 @@ async def test_camera_image( front_camera_mock = mock_ring_devices.get_device(765432) - state = hass.states.get("camera.front") + state = hass.states.get("camera.front_live_view") assert state is not None # history not updated yet @@ -280,7 +308,7 @@ async def test_camera_image( ), pytest.raises(HomeAssistantError), ): - image = await camera.async_get_image(hass, "camera.front") + image = await async_get_image(hass, "camera.front_live_view") freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -293,5 +321,145 @@ async def test_camera_image( "homeassistant.components.ring.camera.ffmpeg.async_get_image", return_value=SMALLEST_VALID_JPEG_BYTES, ): - 
image = await camera.async_get_image(hass, "camera.front") + image = await async_get_image(hass, "camera.front_live_view") assert image.content == SMALLEST_VALID_JPEG_BYTES + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_camera_stream_attributes( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test stream attributes.""" + await setup_platform(hass, Platform.CAMERA) + + # Live view + state = hass.states.get("camera.front_live_view") + supported_features = state.attributes.get("supported_features") + assert supported_features is CameraEntityFeature.STREAM + camera = get_camera_from_entity_id(hass, "camera.front_live_view") + assert camera.camera_capabilities.frontend_stream_types == {StreamType.WEB_RTC} + + # Last recording + state = hass.states.get("camera.front_last_recording") + supported_features = state.attributes.get("supported_features") + assert supported_features is CameraEntityFeature(0) + camera = get_camera_from_entity_id(hass, "camera.front_last_recording") + assert camera.camera_capabilities.frontend_stream_types == set() + + +async def test_camera_webrtc( + hass: HomeAssistant, + mock_ring_client: Mock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_ring_devices, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test WebRTC interactions.""" + caplog.set_level(logging.ERROR) + await setup_platform(hass, Platform.CAMERA) + client = await hass_ws_client(hass) + + # sdp offer + await client.send_json_auto_id( + { + "type": "camera/webrtc/offer", + "entity_id": "camera.front_live_view", + "offer": "v=0\r\n", + } + ) + response = await client.receive_json() + assert response + assert response.get("success") is True + subscription_id = response["id"] + assert not caplog.text + + front_camera_mock = mock_ring_devices.get_device(FRONT_DEVICE_ID) + front_camera_mock.generate_async_webrtc_stream.assert_called_once() + args = front_camera_mock.generate_async_webrtc_stream.call_args.args + session_id = args[1] + on_message = args[2] + + # receive session + response = await client.receive_json() + event = response.get("event") + assert event + assert event.get("type") == "session" + assert not caplog.text + + # Ring candidate + on_message(RingWebRtcMessage(candidate="candidate", sdp_m_line_index=1)) + response = await client.receive_json() + event = response.get("event") + assert event + assert event.get("type") == "candidate" + assert not caplog.text + + # Error message + on_message(RingWebRtcMessage(error_code=1, error_message="error")) + response = await client.receive_json() + event = response.get("event") + assert event + assert event.get("type") == "error" + assert not caplog.text + + # frontend candidate + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.front_live_view", + "session_id": session_id, + "candidate": {"candidate": "candidate", "sdpMLineIndex": 1}, + } + ) + response = await client.receive_json() + assert response + assert response.get("success") is True + assert not caplog.text + front_camera_mock.on_webrtc_candidate.assert_called_once() + + # Invalid frontend candidate + await client.send_json_auto_id( + { + "type": "camera/webrtc/candidate", + "entity_id": "camera.front_live_view", + "session_id": session_id, + "candidate": {"candidate": "candidate", "sdpMid": "1"}, + } + ) + response = 
await client.receive_json() + assert response + assert response.get("success") is False + assert response["error"]["code"] == "home_assistant_error" + msg = "The sdp_m_line_index is required for ring webrtc streaming" + assert msg in response["error"].get("message") + assert msg in caplog.text + front_camera_mock.on_webrtc_candidate.assert_called_once() + + # Answer message + caplog.clear() + on_message(RingWebRtcMessage(answer="v=0\r\n")) + response = await client.receive_json() + event = response.get("event") + assert event + assert event.get("type") == "answer" + assert not caplog.text + + # Unsubscribe/Close session + front_camera_mock.sync_close_webrtc_stream.assert_not_called() + await client.send_json_auto_id( + { + "type": "unsubscribe_events", + "subscription": subscription_id, + } + ) + + response = await client.receive_json() + assert response + assert response.get("success") is True + front_camera_mock.sync_close_webrtc_stream.assert_called_once() diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index 1b5ee68c659..27d4813f02d 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -11,7 +11,11 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAI from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.ring import DOMAIN -from homeassistant.components.ring.const import CONF_LISTEN_CREDENTIALS, SCAN_INTERVAL +from homeassistant.components.ring.const import ( + CONF_CONFIG_ENTRY_MINOR_VERSION, + CONF_LISTEN_CREDENTIALS, + SCAN_INTERVAL, +) from homeassistant.components.ring.coordinator import RingEventListener from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import CONF_DEVICE_ID, CONF_TOKEN, CONF_USERNAME @@ -237,15 +241,14 @@ async def test_error_on_device_update( @pytest.mark.parametrize( - ("domain", "old_unique_id"), + ("domain", "old_unique_id", "new_unique_id"), [ - ( - LIGHT_DOMAIN, - 123456, - ), - ( + pytest.param(LIGHT_DOMAIN, 123456, "123456", id="Light integer"), + pytest.param( CAMERA_DOMAIN, 654321, + "654321-last_recording", + id="Camera integer", ), ], ) @@ -256,6 +259,7 @@ async def test_update_unique_id( mock_ring_client, domain: str, old_unique_id: int | str, + new_unique_id: str, ) -> None: """Test unique_id update of integration.""" entry = MockConfigEntry( @@ -266,6 +270,7 @@ async def test_update_unique_id( "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", + minor_version=1, ) entry.add_to_hass(hass) @@ -281,8 +286,9 @@ async def test_update_unique_id( entity_migrated = entity_registry.async_get(entity.entity_id) assert entity_migrated - assert entity_migrated.unique_id == str(old_unique_id) + assert entity_migrated.unique_id == new_unique_id assert (f"Fixing non string unique id {old_unique_id}") in caplog.text + assert entry.minor_version == CONF_CONFIG_ENTRY_MINOR_VERSION async def test_update_unique_id_existing( @@ -301,6 +307,7 @@ async def test_update_unique_id_existing( "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", + minor_version=1, ) entry.add_to_hass(hass) @@ -331,16 +338,17 @@ async def test_update_unique_id_existing( f"already exists for '{entity_existing.entity_id}', " "You may have to delete unavailable ring entities" ) in caplog.text + assert entry.minor_version == CONF_CONFIG_ENTRY_MINOR_VERSION -async def test_update_unique_id_no_update( +async 
def test_update_unique_id_camera_update( hass: HomeAssistant, entity_registry: er.EntityRegistry, caplog: pytest.LogCaptureFixture, mock_ring_client, ) -> None: - """Test unique_id update of integration.""" - correct_unique_id = "123456" + """Test camera unique id with no suffix is updated.""" + correct_unique_id = "123456-last_recording" entry = MockConfigEntry( title="Ring", domain=DOMAIN, @@ -349,6 +357,7 @@ async def test_update_unique_id_no_update( "token": {"access_token": "mock-token"}, }, unique_id="foo@bar.com", + minor_version=1, ) entry.add_to_hass(hass) @@ -358,14 +367,16 @@ async def test_update_unique_id_no_update( unique_id="123456", config_entry=entry, ) - assert entity.unique_id == correct_unique_id + assert entity.unique_id == "123456" assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() entity_migrated = entity_registry.async_get(entity.entity_id) assert entity_migrated assert entity_migrated.unique_id == correct_unique_id + assert entity.disabled is False assert "Fixing non string unique id" not in caplog.text + assert entry.minor_version == CONF_CONFIG_ENTRY_MINOR_VERSION async def test_token_updated( @@ -477,7 +488,7 @@ async def test_migrate_create_device_id( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert entry.minor_version == 2 + assert entry.minor_version == CONF_CONFIG_ENTRY_MINOR_VERSION assert CONF_DEVICE_ID in entry.data assert entry.data[CONF_DEVICE_ID] == MOCK_HARDWARE_ID diff --git a/tests/components/rituals_perfume_genie/test_select.py b/tests/components/rituals_perfume_genie/test_select.py index 17612edfd97..a4d97ab83fd 100644 --- a/tests/components/rituals_perfume_genie/test_select.py +++ b/tests/components/rituals_perfume_genie/test_select.py @@ -9,10 +9,10 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, ) from homeassistant.const import ( - AREA_SQUARE_METERS, ATTR_ENTITY_ID, SERVICE_SELECT_OPTION, EntityCategory, + UnitOfArea, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -38,7 +38,7 @@ async def test_select_entity( entry = entity_registry.async_get("select.genie_room_size") assert entry assert entry.unique_id == f"{diffuser.hublot}-room_size_square_meter" - assert entry.unit_of_measurement == AREA_SQUARE_METERS + assert entry.unit_of_measurement == UnitOfArea.SQUARE_METERS assert entry.entity_category == EntityCategory.CONFIG diff --git a/tests/components/rtsp_to_webrtc/test_init.py b/tests/components/rtsp_to_webrtc/test_init.py index 85155855a09..985e76fa1d1 100644 --- a/tests/components/rtsp_to_webrtc/test_init.py +++ b/tests/components/rtsp_to_webrtc/test_init.py @@ -14,10 +14,12 @@ from homeassistant.components.rtsp_to_webrtc import DOMAIN from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from .conftest import SERVER_URL, STREAM_SOURCE, ComponentSetup +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator @@ -33,15 +35,28 @@ async def setup_homeassistant(hass: HomeAssistant): await async_setup_component(hass, "homeassistant", {}) +@pytest.mark.usefixtures("rtsp_to_webrtc_client") async def test_setup_success( - hass: HomeAssistant, rtsp_to_webrtc_client: Any, 
setup_integration: ComponentSetup + hass: HomeAssistant, + config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, ) -> None: """Test successful setup and unload.""" - await setup_integration() + config_entry.add_to_hass(hass) + + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + assert issue_registry.async_get_issue(DOMAIN, "deprecated") entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 assert entries[0].state is ConfigEntryState.LOADED + await hass.config_entries.async_unload(entries[0].entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert entries[0].state is ConfigEntryState.NOT_LOADED + assert not issue_registry.async_get_issue(DOMAIN, "deprecated") @pytest.mark.parametrize("config_entry_data", [{}]) diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py index 96171071907..d0e6d77f1ee 100644 --- a/tests/components/russound_rio/__init__.py +++ b/tests/components/russound_rio/__init__.py @@ -1 +1,13 @@ """Tests for the Russound RIO integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index 91d009f13f4..09cccd7d83f 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -1,16 +1,19 @@ """Test fixtures for Russound RIO integration.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch +from aiorussound import Controller, RussoundTcpConnectionHandler, Source +from aiorussound.rio import ZoneControlSurface +from aiorussound.util import controller_device_str, zone_device_str import pytest from homeassistant.components.russound_rio.const import DOMAIN from homeassistant.core import HomeAssistant -from .const import HARDWARE_MAC, MOCK_CONFIG, MOCK_CONTROLLERS, MODEL +from .const import HARDWARE_MAC, HOST, MOCK_CONFIG, MODEL, PORT -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -25,15 +28,13 @@ def mock_setup_entry(): @pytest.fixture def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a Russound RIO config entry.""" - entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, data=MOCK_CONFIG, unique_id=HARDWARE_MAC, title=MODEL ) - entry.add_to_hass(hass) - return entry @pytest.fixture -def mock_russound() -> Generator[AsyncMock]: +def mock_russound_client() -> Generator[AsyncMock]: """Mock the Russound RIO client.""" with ( patch( @@ -41,8 +42,32 @@ def mock_russound() -> Generator[AsyncMock]: ) as mock_client, patch( "homeassistant.components.russound_rio.config_flow.RussoundClient", - return_value=mock_client, + new=mock_client, ), ): - mock_client.controllers = MOCK_CONTROLLERS - yield mock_client + client = mock_client.return_value + zones = { + int(k): ZoneControlSurface.from_dict(v) + for k, v in load_json_object_fixture("get_zones.json", DOMAIN).items() + } + client.sources = { + int(k): Source.from_dict(v) + for k, v in load_json_object_fixture("get_sources.json", 
DOMAIN).items() + } + for k, v in zones.items(): + v.device_str = zone_device_str(1, k) + v.fetch_current_source = Mock( + side_effect=lambda current_source=v.current_source: client.sources.get( + int(current_source) + ) + ) + + client.controllers = { + 1: Controller( + 1, "MCA-C5", client, controller_device_str(1), HARDWARE_MAC, None, zones + ) + } + client.connection_handler = RussoundTcpConnectionHandler(HOST, PORT) + client.is_connected = Mock(return_value=True) + client.unregister_state_update_callbacks.return_value = True + yield client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index 527f4fe3377..3d2924693d2 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -2,6 +2,8 @@ from collections import namedtuple +from homeassistant.components.media_player import DOMAIN as MP_DOMAIN + HOST = "127.0.0.1" PORT = 9621 MODEL = "MCA-C5" @@ -14,3 +16,7 @@ MOCK_CONFIG = { _CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} + +DEVICE_NAME = "mca_c5" +NAME_ZONE_1 = "backyard" +ENTITY_ID_ZONE_1 = f"{MP_DOMAIN}.{DEVICE_NAME}_{NAME_ZONE_1}" diff --git a/tests/components/russound_rio/fixtures/get_sources.json b/tests/components/russound_rio/fixtures/get_sources.json new file mode 100644 index 00000000000..e39d702b8a1 --- /dev/null +++ b/tests/components/russound_rio/fixtures/get_sources.json @@ -0,0 +1,10 @@ +{ + "1": { + "name": "Aux", + "type": "Miscellaneous Audio" + }, + "2": { + "name": "Spotify", + "type": "Russound Media Streamer" + } +} diff --git a/tests/components/russound_rio/fixtures/get_zones.json b/tests/components/russound_rio/fixtures/get_zones.json new file mode 100644 index 00000000000..396310339b3 --- /dev/null +++ b/tests/components/russound_rio/fixtures/get_zones.json @@ -0,0 +1,22 @@ +{ + "1": { + "name": "Backyard", + "volume": "10", + "status": "ON", + "enabled": "True", + "current_source": "1" + }, + "2": { + "name": "Kitchen", + "volume": "50", + "status": "OFF", + "enabled": "True", + "current_source": "2" + }, + "3": { + "name": "Bedroom", + "volume": "10", + "status": "OFF", + "enabled": "False" + } +} diff --git a/tests/components/russound_rio/snapshots/test_init.ambr b/tests/components/russound_rio/snapshots/test_init.ambr new file mode 100644 index 00000000000..fcd59dd06f7 --- /dev/null +++ b/tests/components/russound_rio/snapshots/test_init.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://127.0.0.1', + 'connections': set({ + tuple( + 'mac', + '00:11:22:33:44:55', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'russound_rio', + '00:11:22:33:44:55', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Russound', + 'model': 'MCA-C5', + 'model_id': None, + 'name': 'MCA-C5', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py index 9461fe1d5be..cf754852731 100644 --- a/tests/components/russound_rio/test_config_flow.py +++ b/tests/components/russound_rio/test_config_flow.py @@ -11,7 +11,7 @@ from .const import 
MOCK_CONFIG, MODEL async def test_form( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -32,13 +32,13 @@ async def test_form( async def test_form_cannot_connect( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - mock_russound.connect.side_effect = TimeoutError + mock_russound_client.connect.side_effect = TimeoutError result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, @@ -48,7 +48,7 @@ async def test_form_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} # Recover with correct information - mock_russound.connect.side_effect = None + mock_russound_client.connect.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], MOCK_CONFIG, @@ -61,7 +61,7 @@ async def test_form_cannot_connect( async def test_import( - hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound_client: AsyncMock ) -> None: """Test we import a config entry.""" result = await hass.config_entries.flow.async_init( @@ -77,10 +77,10 @@ async def test_import( async def test_import_cannot_connect( - hass: HomeAssistant, mock_russound: AsyncMock + hass: HomeAssistant, mock_russound_client: AsyncMock ) -> None: """Test we handle import cannot connect error.""" - mock_russound.connect.side_effect = TimeoutError + mock_russound_client.connect.side_effect = TimeoutError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG diff --git a/tests/components/russound_rio/test_init.py b/tests/components/russound_rio/test_init.py new file mode 100644 index 00000000000..6787ee37c79 --- /dev/null +++ b/tests/components/russound_rio/test_init.py @@ -0,0 +1,44 @@ +"""Tests for the Russound RIO integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_russound_client: AsyncMock, +) -> None: + """Test the Russound RIO configuration entry not ready.""" + mock_russound_client.connect.side_effect = TimeoutError + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + mock_russound_client.connect = AsyncMock(return_value=True) + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot diff --git a/tests/components/russound_rio/test_media_player.py b/tests/components/russound_rio/test_media_player.py new file mode 100644 index 00000000000..e720e2c7f65 --- /dev/null +++ b/tests/components/russound_rio/test_media_player.py @@ -0,0 +1,58 @@ +"""Tests for the Russound RIO media player.""" + +from unittest.mock import AsyncMock + +from aiorussound.models import CallbackType, PlayStatus +import pytest + +from homeassistant.const import ( + STATE_BUFFERING, + STATE_IDLE, + STATE_OFF, + STATE_ON, + STATE_PAUSED, + STATE_PLAYING, +) +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .const import ENTITY_ID_ZONE_1 + +from tests.common import MockConfigEntry + + +async def mock_state_update(client: AsyncMock) -> None: + """Trigger a callback in the media player.""" + for callback in client.register_state_update_callbacks.call_args_list: + await callback[0][0](client, CallbackType.STATE) + + +@pytest.mark.parametrize( + ("zone_status", "source_play_status", "media_player_state"), + [ + (True, None, STATE_ON), + (True, PlayStatus.PLAYING, STATE_PLAYING), + (True, PlayStatus.PAUSED, STATE_PAUSED), + (True, PlayStatus.TRANSITIONING, STATE_BUFFERING), + (True, PlayStatus.STOPPED, STATE_IDLE), + (False, None, STATE_OFF), + (False, PlayStatus.STOPPED, STATE_OFF), + ], +) +async def test_entity_state( + hass: HomeAssistant, + mock_russound_client: AsyncMock, + mock_config_entry: MockConfigEntry, + zone_status: bool, + source_play_status: PlayStatus | None, + media_player_state: str, +) -> None: + """Test media player state.""" + await setup_integration(hass, mock_config_entry) + mock_russound_client.controllers[1].zones[1].status = zone_status + mock_russound_client.sources[1].play_status = source_play_status + await mock_state_update(mock_russound_client) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID_ZONE_1) + assert state.state == media_player_state diff --git a/tests/components/sabnzbd/conftest.py b/tests/components/sabnzbd/conftest.py index b5450e5134f..6fa3d14e880 100644 --- a/tests/components/sabnzbd/conftest.py +++ b/tests/components/sabnzbd/conftest.py @@ -5,6 +5,13 @@ from unittest.mock import AsyncMock, patch import pytest +from homeassistant.components.sabnzbd import DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry, load_json_object_fixture + @pytest.fixture def 
mock_setup_entry() -> Generator[AsyncMock]: @@ -13,3 +20,41 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.sabnzbd.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture(name="sabnzbd", autouse=True) +def mock_sabnzbd() -> Generator[AsyncMock]: + """Mock the Sabnzbd API.""" + with patch( + "homeassistant.components.sabnzbd.helpers.SabnzbdApi", autospec=True + ) as mock_sabnzbd: + mock = mock_sabnzbd.return_value + mock.return_value.check_available = True + mock.queue = load_json_object_fixture("queue.json", DOMAIN) + yield mock + + +@pytest.fixture(name="config_entry") +async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return a MockConfigEntry for testing.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + title="Sabnzbd", + entry_id="01JD2YVVPBC62D620DGYNG2R8H", + data={ + CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", + CONF_URL: "http://localhost:8080", + }, + ) + config_entry.add_to_hass(hass) + + return config_entry + + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Fixture for setting up the component.""" + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() diff --git a/tests/components/sabnzbd/fixtures/queue.json b/tests/components/sabnzbd/fixtures/queue.json new file mode 100644 index 00000000000..7acef65f2e9 --- /dev/null +++ b/tests/components/sabnzbd/fixtures/queue.json @@ -0,0 +1,39 @@ +{ + "total_size": 1638.4, + "month_size": 38.8, + "week_size": 9.4, + "day_size": 9.4, + "version": "4.3.3", + "paused": true, + "pause_int": "0", + "paused_all": false, + "diskspace1": "444.95", + "diskspace2": "3127.88", + "diskspace1_norm": "445.0 G", + "diskspace2_norm": "3.1 T", + "diskspacetotal1": "465.76", + "diskspacetotal2": "7448.42", + "speedlimit": "85", + "speedlimit_abs": "22282240", + "have_warnings": "1", + "finishaction": null, + "quota": "0 ", + "have_quota": false, + "left_quota": "0 ", + "cache_art": "0", + "cache_size": "0 B", + "kbpersec": "0.00", + "speed": "0 ", + "mbleft": "0.00", + "mb": "0.00", + "sizeleft": "0 B", + "size": "0 B", + "noofslots_total": 0, + "noofslots": 0, + "start": 0, + "limit": 10, + "finish": 10, + "status": "Paused", + "timeleft": "0:00:00", + "slots": [] +} diff --git a/tests/components/sabnzbd/snapshots/test_binary_sensor.ambr b/tests/components/sabnzbd/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..9f3087df3d1 --- /dev/null +++ b/tests/components/sabnzbd/snapshots/test_binary_sensor.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_sensor[binary_sensor.sabnzbd_warnings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.sabnzbd_warnings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Warnings', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'warnings', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_warnings', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[binary_sensor.sabnzbd_warnings-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Sabnzbd Warnings', + }), + 'context': , + 'entity_id': 'binary_sensor.sabnzbd_warnings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/sabnzbd/snapshots/test_button.ambr b/tests/components/sabnzbd/snapshots/test_button.ambr new file mode 100644 index 00000000000..9b965e10518 --- /dev/null +++ b/tests/components/sabnzbd/snapshots/test_button.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_button_setup[button.sabnzbd_pause-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.sabnzbd_pause', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pause', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pause', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_pause', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.sabnzbd_pause-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Pause', + }), + 'context': , + 'entity_id': 'button.sabnzbd_pause', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.sabnzbd_resume-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.sabnzbd_resume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Resume', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'resume', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_resume', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.sabnzbd_resume-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Resume', + }), + 'context': , + 'entity_id': 'button.sabnzbd_resume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/sabnzbd/snapshots/test_number.ambr b/tests/components/sabnzbd/snapshots/test_number.ambr new file mode 100644 index 00000000000..6a370797264 --- /dev/null +++ b/tests/components/sabnzbd/snapshots/test_number.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_number_setup[number.sabnzbd_speedlimit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.sabnzbd_speedlimit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Speedlimit', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'speedlimit', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_speedlimit', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number_setup[number.sabnzbd_speedlimit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Speedlimit', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.sabnzbd_speedlimit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '85', + }) +# --- diff --git a/tests/components/sabnzbd/snapshots/test_sensor.ambr b/tests/components/sabnzbd/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..8b977e69aa6 --- /dev/null +++ b/tests/components/sabnzbd/snapshots/test_sensor.ambr @@ -0,0 +1,576 @@ +# serializer version: 1 +# name: test_sensor[sensor.sabnzbd_daily_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_daily_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Daily total', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_total', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_day_size', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_daily_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Daily total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_daily_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.4', + }) +# --- +# name: test_sensor[sensor.sabnzbd_free_disk_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_free_disk_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free disk space', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'free_disk_space', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_diskspace1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_free_disk_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Free disk space', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_free_disk_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '444.95', + }) +# --- +# name: test_sensor[sensor.sabnzbd_left_to_download-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': 
dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_left_to_download', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Left to download', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'left', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_mbleft', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_left_to_download-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Left to download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_left_to_download', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00', + }) +# --- +# name: test_sensor[sensor.sabnzbd_monthly_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_monthly_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Monthly total', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'monthly_total', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_month_size', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_monthly_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Monthly total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_monthly_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.8', + }) +# --- +# name: test_sensor[sensor.sabnzbd_overall_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_overall_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Overall total', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'overall_total', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_total_size', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_overall_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Overall total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_overall_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'1638.4', + }) +# --- +# name: test_sensor[sensor.sabnzbd_queue-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_queue', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Queue', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'queue', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_mb', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_queue-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Queue', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_queue', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00', + }) +# --- +# name: test_sensor[sensor.sabnzbd_queue_count-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_queue_count', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Queue count', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'queue_count', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_noofslots_total', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sabnzbd_queue_count-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Queue count', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_queue_count', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[sensor.sabnzbd_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'speed', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_kbpersec', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Sabnzbd Speed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_speed', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[sensor.sabnzbd_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'status', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.sabnzbd_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sabnzbd Status', + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Paused', + }) +# --- +# name: test_sensor[sensor.sabnzbd_total_disk_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_total_disk_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total disk space', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_disk_space', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_diskspacetotal1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_total_disk_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Total disk space', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_total_disk_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '465.76', + }) +# --- +# name: test_sensor[sensor.sabnzbd_weekly_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sabnzbd_weekly_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Weekly total', + 'platform': 'sabnzbd', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'weekly_total', + 'unique_id': '01JD2YVVPBC62D620DGYNG2R8H_week_size', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.sabnzbd_weekly_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Sabnzbd Weekly total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.sabnzbd_weekly_total', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '9.4', + }) +# --- diff --git a/tests/components/sabnzbd/test_binary_sensor.py b/tests/components/sabnzbd/test_binary_sensor.py new file mode 100644 index 00000000000..48a3c006488 --- /dev/null +++ b/tests/components/sabnzbd/test_binary_sensor.py @@ -0,0 +1,23 @@ +"""Binary sensor tests for the Sabnzbd component.""" + +from unittest.mock import patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@patch("homeassistant.components.sabnzbd.PLATFORMS", [Platform.BINARY_SENSOR]) +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test binary sensor setup.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) diff --git a/tests/components/sabnzbd/test_button.py b/tests/components/sabnzbd/test_button.py new file mode 100644 index 00000000000..199d8eb03a0 --- /dev/null +++ b/tests/components/sabnzbd/test_button.py @@ -0,0 +1,116 @@ +"""Button tests for the SABnzbd component.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pysabnzbd import SabnzbdApiException +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@patch("homeassistant.components.sabnzbd.PLATFORMS", [Platform.BUTTON]) +async def test_button_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test button setup.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("button", "called_function"), + [("resume", "resume_queue"), ("pause", "pause_queue")], +) +@pytest.mark.usefixtures("setup_integration") +async def test_button_presses( + hass: HomeAssistant, + sabnzbd: AsyncMock, + button: str, + called_function: str, +) -> None: + """Test the sabnzbd button presses.""" + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.sabnzbd_{button}", + }, + blocking=True, + ) + + function = getattr(sabnzbd, called_function) + function.assert_called_once() + + +@pytest.mark.parametrize( + ("button", "called_function"), + [("resume", "resume_queue"), ("pause", "pause_queue")], +) +@pytest.mark.usefixtures("setup_integration") +async def test_buttons_exception( + hass: HomeAssistant, + sabnzbd: AsyncMock, + button: str, + called_function: str, +) -> None: + """Test the button handles errors.""" + function = getattr(sabnzbd, called_function) + function.side_effect = SabnzbdApiException("Boom") + + with pytest.raises( + HomeAssistantError, + match="Unable to send command to SABnzbd due to a connection error, try again later", + 
): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: f"button.sabnzbd_{button}", + }, + blocking=True, + ) + + function.assert_called_once() + + +@pytest.mark.parametrize( + "button", + ["resume", "pause"], +) +@pytest.mark.usefixtures("setup_integration") +async def test_buttons_unavailable( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + sabnzbd: AsyncMock, + button: str, +) -> None: + """Test the button is unavailable when coordinator can't update data.""" + state = hass.states.get(f"button.sabnzbd_{button}") + assert state + assert state.state == STATE_UNKNOWN + + sabnzbd.refresh_data.side_effect = Exception("Boom") + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(f"button.sabnzbd_{button}") + assert state + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/sabnzbd/test_config_flow.py b/tests/components/sabnzbd/test_config_flow.py index 7f5394902b4..797af63c096 100644 --- a/tests/components/sabnzbd/test_config_flow.py +++ b/tests/components/sabnzbd/test_config_flow.py @@ -1,38 +1,24 @@ """Define tests for the Sabnzbd config flow.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock from pysabnzbd import SabnzbdApiException import pytest from homeassistant import config_entries from homeassistant.components.sabnzbd import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_NAME, - CONF_PORT, - CONF_SSL, - CONF_URL, -) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY, CONF_URL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + VALID_CONFIG = { - CONF_NAME: "Sabnzbd", CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", CONF_URL: "http://localhost:8080", } -VALID_CONFIG_OLD = { - CONF_NAME: "Sabnzbd", - CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", - CONF_HOST: "localhost", - CONF_PORT: 8080, - CONF_SSL: False, -} - pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -44,57 +30,140 @@ async def test_create_entry(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch( - "homeassistant.components.sabnzbd.sab.SabnzbdApi.check_available", - return_value=True, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - VALID_CONFIG, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "edc3eee7330e" - assert result2["data"] == { - CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", - CONF_NAME: "Sabnzbd", - CONF_URL: "http://localhost:8080", - } - assert len(mock_setup_entry.mock_calls) == 1 + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "localhost" + assert result["data"] == { + CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", + CONF_URL: "http://localhost:8080", + } + assert len(mock_setup_entry.mock_calls) == 1 -async def test_auth_error(hass: HomeAssistant) -> None: - """Test that the user step fails.""" - with patch( - "homeassistant.components.sabnzbd.sab.SabnzbdApi.check_available", - 
side_effect=SabnzbdApiException("Some error"), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data=VALID_CONFIG, - ) +async def test_auth_error(hass: HomeAssistant, sabnzbd: AsyncMock) -> None: + """Test when the user step fails and if we can recover.""" + sabnzbd.check_available.side_effect = SabnzbdApiException("Some error") - assert result["errors"] == {"base": "cannot_connect"} + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data=VALID_CONFIG, + ) + + assert result["errors"] == {"base": "cannot_connect"} + + # reset side effect and check if we can recover + sabnzbd.check_available.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + await hass.async_block_till_done() + + assert "errors" not in result + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "localhost" + assert result["data"] == { + CONF_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0", + CONF_URL: "http://localhost:8080", + } -async def test_import_flow(hass: HomeAssistant) -> None: - """Test the import configuration flow.""" - with patch( - "homeassistant.components.sabnzbd.sab.SabnzbdApi.check_available", - return_value=True, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=VALID_CONFIG_OLD, - ) +async def test_reconfigure_successful( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test reconfiguring a SABnzbd entry.""" + result = await config_entry.start_reconfigure_flow(hass) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "edc3eee7330e" - assert result["data"][CONF_NAME] == "Sabnzbd" - assert result["data"][CONF_API_KEY] == "edc3eee7330e4fdda04489e3fbc283d0" - assert result["data"][CONF_HOST] == "localhost" - assert result["data"][CONF_PORT] == 8080 - assert result["data"][CONF_SSL] is False + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_URL: "http://10.10.10.10:8080", CONF_API_KEY: "new_key"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert config_entry.data == { + CONF_URL: "http://10.10.10.10:8080", + CONF_API_KEY: "new_key", + } + + +async def test_reconfigure_error( + hass: HomeAssistant, config_entry: MockConfigEntry, sabnzbd: AsyncMock +) -> None: + """Test reconfiguring a SABnzbd entry.""" + result = await config_entry.start_reconfigure_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + # set side effect and check if error is handled + sabnzbd.check_available.side_effect = SabnzbdApiException("Some error") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_URL: "http://10.10.10.10:8080", CONF_API_KEY: "new_key"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + # reset side effect and check if we can recover + sabnzbd.check_available.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_URL: "http://10.10.10.10:8080", CONF_API_KEY: "new_key"}, + ) + + assert "errors" not in result + assert result["type"] is 
FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert config_entry.data == { + CONF_URL: "http://10.10.10.10:8080", + CONF_API_KEY: "new_key", + } + + +async def test_abort_already_configured( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test that the flow aborts if SABnzbd instance is already configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_abort_reconfigure_already_configured( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test that the reconfigure flow aborts if SABnzbd instance is already configured.""" + result = await config_entry.start_reconfigure_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/sabnzbd/test_init.py b/tests/components/sabnzbd/test_init.py index e666f9f1d3e..9b833875bbc 100644 --- a/tests/components/sabnzbd/test_init.py +++ b/tests/components/sabnzbd/test_init.py @@ -1,77 +1,42 @@ """Tests for the SABnzbd Integration.""" -from unittest.mock import patch +import pytest -from homeassistant.components.sabnzbd import DEFAULT_NAME, DOMAIN, OLD_SENSOR_KEYS -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_URL -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er - -from tests.common import MockConfigEntry - -MOCK_ENTRY_ID = "mock_entry_id" - -MOCK_UNIQUE_ID = "someuniqueid" - -MOCK_DEVICE_ID = "somedeviceid" - -MOCK_DATA_VERSION_1 = { - CONF_API_KEY: "api_key", - CONF_URL: "http://127.0.0.1:8080", - CONF_NAME: "name", -} - -MOCK_ENTRY_VERSION_1 = MockConfigEntry( - domain=DOMAIN, data=MOCK_DATA_VERSION_1, entry_id=MOCK_ENTRY_ID, version=1 +from homeassistant.components.sabnzbd.const import ( + ATTR_API_KEY, + DOMAIN, + SERVICE_PAUSE, + SERVICE_RESUME, + SERVICE_SET_SPEED, ) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir -async def test_unique_id_migrate( +@pytest.mark.parametrize( + ("service", "issue_id"), + [ + (SERVICE_RESUME, "resume_action_deprecated"), + (SERVICE_PAUSE, "pause_action_deprecated"), + (SERVICE_SET_SPEED, "set_speed_action_deprecated"), + ], +) +@pytest.mark.usefixtures("setup_integration") +async def test_deprecated_service_creates_issue( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + service: str, + issue_id: str, ) -> None: - """Test that config flow entry is migrated correctly.""" - # Start with the config entry at Version 1. 
- mock_entry = MOCK_ENTRY_VERSION_1 - mock_entry.add_to_hass(hass) - - mock_d_entry = device_registry.async_get_or_create( - config_entry_id=mock_entry.entry_id, - identifiers={(DOMAIN, DOMAIN)}, - name=DEFAULT_NAME, - entry_type=dr.DeviceEntryType.SERVICE, + """Test that deprecated actions create an issue.""" + await hass.services.async_call( + DOMAIN, + service, + {ATTR_API_KEY: "edc3eee7330e4fdda04489e3fbc283d0"}, + blocking=True, ) - entity_id_sensor_key = [] - - for sensor_key in OLD_SENSOR_KEYS: - mock_entity_id = f"{SENSOR_DOMAIN}.{DOMAIN}_{sensor_key}" - entity_registry.async_get_or_create( - SENSOR_DOMAIN, - DOMAIN, - unique_id=sensor_key, - config_entry=mock_entry, - device_id=mock_d_entry.id, - ) - entity = entity_registry.async_get(mock_entity_id) - assert entity.entity_id == mock_entity_id - assert entity.unique_id == sensor_key - entity_id_sensor_key.append((mock_entity_id, sensor_key)) - - with patch( - "homeassistant.components.sabnzbd.sab.SabnzbdApi.check_available", - return_value=True, - ): - await hass.config_entries.async_setup(mock_entry.entry_id) - - await hass.async_block_till_done() - - for mock_entity_id, sensor_key in entity_id_sensor_key: - entity = entity_registry.async_get(mock_entity_id) - assert entity.unique_id == f"{MOCK_ENTRY_ID}_{sensor_key}" - - assert device_registry.async_get(mock_d_entry.id).identifiers == { - (DOMAIN, MOCK_ENTRY_ID) - } + issue = issue_registry.async_get_issue(domain=DOMAIN, issue_id=issue_id) + assert issue + assert issue.severity == ir.IssueSeverity.WARNING + assert issue.breaks_in_ha_version == "2025.6" diff --git a/tests/components/sabnzbd/test_number.py b/tests/components/sabnzbd/test_number.py new file mode 100644 index 00000000000..61f7ea45ab1 --- /dev/null +++ b/tests/components/sabnzbd/test_number.py @@ -0,0 +1,123 @@ +"""Number tests for the SABnzbd component.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pysabnzbd import SabnzbdApiException +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@patch("homeassistant.components.sabnzbd.PLATFORMS", [Platform.NUMBER]) +async def test_number_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test number setup.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("number", "input_number", "called_function", "expected_state"), + [ + ("speedlimit", 50.0, "set_speed_limit", 50), + ], +) +@pytest.mark.usefixtures("setup_integration") +async def test_number_set( + hass: HomeAssistant, + sabnzbd: AsyncMock, + number: str, + input_number: float, + called_function: str, + expected_state: str, +) -> None: + """Test the sabnzbd number set.""" + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_VALUE: input_number, + ATTR_ENTITY_ID: f"number.sabnzbd_{number}", + }, + blocking=True, + ) + + function = 
getattr(sabnzbd, called_function) + function.assert_called_with(int(input_number)) + + +@pytest.mark.parametrize( + ("number", "input_number", "called_function"), + [("speedlimit", 55.0, "set_speed_limit")], +) +@pytest.mark.usefixtures("setup_integration") +async def test_number_exception( + hass: HomeAssistant, + sabnzbd: AsyncMock, + number: str, + input_number: float, + called_function: str, +) -> None: + """Test the number entity handles errors.""" + function = getattr(sabnzbd, called_function) + function.side_effect = SabnzbdApiException("Boom") + + with pytest.raises( + HomeAssistantError, + match="Unable to send command to SABnzbd due to a connection error, try again later", + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_VALUE: input_number, + ATTR_ENTITY_ID: f"number.sabnzbd_{number}", + }, + blocking=True, + ) + + function.assert_called_once() + + +@pytest.mark.parametrize( + ("number", "initial_state"), + [("speedlimit", "85")], +) +@pytest.mark.usefixtures("setup_integration") +async def test_number_unavailable( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + sabnzbd: AsyncMock, + number: str, + initial_state: str, +) -> None: + """Test the number is unavailable when coordinator can't update data.""" + state = hass.states.get(f"number.sabnzbd_{number}") + assert state + assert state.state == initial_state + + sabnzbd.refresh_data.side_effect = Exception("Boom") + freezer.tick(timedelta(minutes=10)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(f"number.sabnzbd_{number}") + assert state + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/sabnzbd/test_sensor.py b/tests/components/sabnzbd/test_sensor.py new file mode 100644 index 00000000000..31c0868a5a7 --- /dev/null +++ b/tests/components/sabnzbd/test_sensor.py @@ -0,0 +1,25 @@ +"""Sensor tests for the Sabnzbd component.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@patch("homeassistant.components.sabnzbd.PLATFORMS", [Platform.SENSOR]) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test sensor setup.""" + await hass.config_entries.async_setup(config_entry.entry_id) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) diff --git a/tests/components/samsungtv/test_config_flow.py b/tests/components/samsungtv/test_config_flow.py index 7e707376b6f..32e169ffb24 100644 --- a/tests/components/samsungtv/test_config_flow.py +++ b/tests/components/samsungtv/test_config_flow.py @@ -14,6 +14,9 @@ from samsungtvws.exceptions import ( UnauthorizedError, ) from websockets import frames + +# WebSocketProtocolError was deprecated in websockets '14.0' +# pylint: disable-next=no-name-in-module from websockets.exceptions import ( ConnectionClosedError, WebSocketException, diff --git a/tests/components/sensibo/conftest.py b/tests/components/sensibo/conftest.py index 1c835cd8001..eaa42e47257 100644 --- a/tests/components/sensibo/conftest.py +++ b/tests/components/sensibo/conftest.py @@ -10,8 +10,9 @@ from pysensibo import SensiboClient from pysensibo.model import 
SensiboData import pytest -from homeassistant.components.sensibo.const import DOMAIN +from homeassistant.components.sensibo.const import DOMAIN, PLATFORMS from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from . import ENTRY_CONFIG @@ -20,8 +21,18 @@ from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.fixture(name="load_platforms") +async def patch_platform_constant() -> list[Platform]: + """Return list of platforms to load.""" + return PLATFORMS + + @pytest.fixture -async def load_int(hass: HomeAssistant, get_data: SensiboData) -> MockConfigEntry: +async def load_int( + hass: HomeAssistant, + get_data: SensiboData, + load_platforms: list[Platform], +) -> MockConfigEntry: """Set up the Sensibo integration in Home Assistant.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -35,6 +46,7 @@ async def load_int(hass: HomeAssistant, get_data: SensiboData) -> MockConfigEntr config_entry.add_to_hass(hass) with ( + patch("homeassistant.components.sensibo.PLATFORMS", load_platforms), patch( "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", return_value=get_data, diff --git a/tests/components/sensibo/snapshots/test_binary_sensor.ambr b/tests/components/sensibo/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..110a6ae8174 --- /dev/null +++ b/tests/components/sensibo/snapshots/test_binary_sensor.ambr @@ -0,0 +1,705 @@ +# serializer version: 1 +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_filter_clean_required-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.bedroom_filter_clean_required', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter clean required', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_clean', + 'unique_id': 'BBZZBBZZ-filter_clean', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_filter_clean_required-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Bedroom Filter clean required', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_filter_clean_required', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_ac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_ac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with AC', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'pure_ac_integration', + 'unique_id': 'BBZZBBZZ-pure_ac_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_ac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Bedroom Pure Boost linked with AC', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_ac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_indoor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_indoor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with indoor air quality', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_measure_integration', + 'unique_id': 'BBZZBBZZ-pure_measure_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_indoor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Bedroom Pure Boost linked with indoor air quality', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_indoor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_outdoor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_outdoor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with outdoor air quality', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_prime_integration', + 'unique_id': 'BBZZBBZZ-pure_prime_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_outdoor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Bedroom Pure Boost linked with outdoor air quality', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_outdoor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_presence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with presence', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_geo_integration', + 'unique_id': 'BBZZBBZZ-pure_geo_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.bedroom_pure_boost_linked_with_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Bedroom Pure Boost linked with presence', + }), + 'context': , + 'entity_id': 'binary_sensor.bedroom_pure_boost_linked_with_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_filter_clean_required-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.hallway_filter_clean_required', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter clean required', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_clean', + 'unique_id': 'ABC999111-filter_clean', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_filter_clean_required-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Hallway Filter clean required', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_filter_clean_required', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_connectivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connectivity', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AABBCC-alive', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_connectivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Hallway Motion Sensor Connectivity', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', 
+ }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_main_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_main_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Main sensor', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'is_main_sensor', + 'unique_id': 'AABBCC-is_main_sensor', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_main_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Motion Sensor Main sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_main_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_motion-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.hallway_motion_sensor_motion', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AABBCC-motion', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_motion_sensor_motion-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'Hallway Motion Sensor Motion', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_motion_sensor_motion', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_room_occupied-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.hallway_room_occupied', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Room occupied', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'room_occupied', + 'unique_id': 'ABC999111-room_occupied', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.hallway_room_occupied-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'Hallway Room occupied', + }), + 'context': , + 'entity_id': 'binary_sensor.hallway_room_occupied', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_filter_clean_required-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.kitchen_filter_clean_required', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter clean required', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_clean', + 'unique_id': 'AAZZAAZZ-filter_clean', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_filter_clean_required-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Kitchen Filter clean required', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_filter_clean_required', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_ac-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_ac', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with AC', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_ac_integration', + 'unique_id': 'AAZZAAZZ-pure_ac_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_ac-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Kitchen Pure Boost linked with AC', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_ac', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with indoor air quality', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_measure_integration', + 'unique_id': 'AAZZAAZZ-pure_measure_integration', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Kitchen Pure Boost linked with indoor air quality', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with outdoor air quality', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_prime_integration', + 'unique_id': 'AAZZAAZZ-pure_prime_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Kitchen Pure Boost linked with outdoor air quality', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_presence', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost linked with presence', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_geo_integration', + 'unique_id': 'AAZZAAZZ-pure_geo_integration', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[load_platforms0][binary_sensor.kitchen_pure_boost_linked_with_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Kitchen Pure Boost linked with presence', + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_pure_boost_linked_with_presence', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_button.ambr b/tests/components/sensibo/snapshots/test_button.ambr new file mode 100644 index 00000000000..7ef6d56c714 --- /dev/null +++ b/tests/components/sensibo/snapshots/test_button.ambr @@ -0,0 +1,139 @@ +# serializer version: 1 +# name: test_button[load_platforms0][button.bedroom_reset_filter-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.bedroom_reset_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter', + 'unique_id': 'BBZZBBZZ-reset_filter', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[load_platforms0][button.bedroom_reset_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Bedroom Reset filter', + }), + 'context': , + 'entity_id': 'button.bedroom_reset_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[load_platforms0][button.hallway_reset_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.hallway_reset_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter', + 'unique_id': 'ABC999111-reset_filter', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[load_platforms0][button.hallway_reset_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Reset filter', + }), + 'context': , + 'entity_id': 'button.hallway_reset_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[load_platforms0][button.kitchen_reset_filter-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.kitchen_reset_filter', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filter', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filter', + 'unique_id': 'AAZZAAZZ-reset_filter', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[load_platforms0][button.kitchen_reset_filter-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kitchen Reset filter', + }), + 'context': , + 'entity_id': 'button.kitchen_reset_filter', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_climate.ambr b/tests/components/sensibo/snapshots/test_climate.ambr index 1e02ee63a9a..e3b27332932 100644 --- a/tests/components/sensibo/snapshots/test_climate.ambr +++ b/tests/components/sensibo/snapshots/test_climate.ambr @@ -1,33 +1,230 @@ # serializer version: 1 
-# name: test_climate - ReadOnlyDict({ - 'current_humidity': 32.9, - 'current_temperature': 21.2, - 'fan_mode': 'high', - 'fan_modes': list([ - 'quiet', - 'low', - 'medium', - ]), - 'friendly_name': 'Hallway', - 'hvac_modes': list([ - , - , - , - , - , - , - ]), - 'max_temp': 20, - 'min_temp': 10, - 'supported_features': , - 'swing_mode': 'stopped', - 'swing_modes': list([ - 'stopped', - 'fixedtop', - 'fixedmiddletop', - ]), - 'target_temp_step': 1, - 'temperature': 25, +# name: test_climate[load_platforms0][climate.bedroom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + ]), + 'max_temp': 1, + 'min_temp': 0, + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bedroom', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_device', + 'unique_id': 'BBZZBBZZ', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[load_platforms0][climate.bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Bedroom', + 'hvac_modes': list([ + , + ]), + 'max_temp': 1, + 'min_temp': 0, + 'supported_features': , + 'target_temp_step': 1, + }), + 'context': , + 'entity_id': 'climate.bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climate[load_platforms0][climate.hallway-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'quiet', + 'low', + 'medium', + ]), + 'hvac_modes': list([ + , + , + , + , + , + , + ]), + 'max_temp': 20, + 'min_temp': 10, + 'swing_modes': list([ + 'stopped', + 'fixedtop', + 'fixedmiddletop', + ]), + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.hallway', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_device', + 'unique_id': 'ABC999111', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[load_platforms0][climate.hallway-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_humidity': 32.9, + 'current_temperature': 21.2, + 'fan_mode': 'high', + 'fan_modes': list([ + 'quiet', + 'low', + 'medium', + ]), + 'friendly_name': 'Hallway', + 'hvac_modes': list([ + , + , + , + , + , + , + ]), + 'max_temp': 20, + 'min_temp': 10, + 'supported_features': , + 'swing_mode': 'stopped', + 'swing_modes': list([ + 'stopped', + 'fixedtop', + 'fixedmiddletop', + ]), + 'target_temp_step': 1, + 'temperature': 25, + }), + 'context': , + 'entity_id': 'climate.hallway', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate[load_platforms0][climate.kitchen-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'low', + 'high', + ]), + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 1, + 'min_temp': 0, + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.kitchen', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_device', + 'unique_id': 'AAZZAAZZ', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[load_platforms0][climate.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'fan_mode': 'low', + 'fan_modes': list([ + 'low', + 'high', + ]), + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 1, + 'min_temp': 0, + 'supported_features': , + 'target_temp_step': 1, + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', }) # --- diff --git a/tests/components/sensibo/snapshots/test_number.ambr b/tests/components/sensibo/snapshots/test_number.ambr new file mode 100644 index 00000000000..b632b95f1be --- /dev/null +++ b/tests/components/sensibo/snapshots/test_number.ambr @@ -0,0 +1,343 @@ +# serializer version: 1 +# name: test_number[load_platforms0][number.bedroom_humidity_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.bedroom_humidity_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_humidity', + 'unique_id': 'BBZZBBZZ-calibration_hum', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[load_platforms0][number.bedroom_humidity_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Bedroom Humidity calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.bedroom_humidity_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_number[load_platforms0][number.bedroom_temperature_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.bedroom_temperature_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , 
+ 'original_icon': None, + 'original_name': 'Temperature calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_temperature', + 'unique_id': 'BBZZBBZZ-calibration_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_number[load_platforms0][number.bedroom_temperature_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Bedroom Temperature calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.bedroom_temperature_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_number[load_platforms0][number.hallway_humidity_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.hallway_humidity_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_humidity', + 'unique_id': 'ABC999111-calibration_hum', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[load_platforms0][number.hallway_humidity_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Hallway Humidity calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.hallway_humidity_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_number[load_platforms0][number.hallway_temperature_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.hallway_temperature_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_temperature', + 'unique_id': 'ABC999111-calibration_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_number[load_platforms0][number.hallway_temperature_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Hallway Temperature calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.hallway_temperature_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1', + }) +# --- +# name: test_number[load_platforms0][number.kitchen_humidity_calibration-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.kitchen_humidity_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_humidity', + 'unique_id': 'AAZZAAZZ-calibration_hum', + 'unit_of_measurement': '%', + }) +# --- +# name: test_number[load_platforms0][number.kitchen_humidity_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Kitchen Humidity calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.kitchen_humidity_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_number[load_platforms0][number.kitchen_temperature_calibration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.kitchen_temperature_calibration', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature calibration', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'calibration_temperature', + 'unique_id': 'AAZZAAZZ-calibration_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_number[load_platforms0][number.kitchen_temperature_calibration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Kitchen Temperature calibration', + 'max': 10, + 'min': -10, + 'mode': , + 'step': 0.1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.kitchen_temperature_calibration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_select.ambr b/tests/components/sensibo/snapshots/test_select.ambr new file mode 100644 index 00000000000..bdafc8654ff --- /dev/null +++ b/tests/components/sensibo/snapshots/test_select.ambr @@ -0,0 +1,170 @@ +# serializer version: 1 +# name: test_select[load_platforms0][select.hallway_horizontal_swing-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'stopped', + 'fixedleft', + 'fixedcenterleft', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.hallway_horizontal_swing', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Horizontal swing', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'horizontalswing', + 'unique_id': 'ABC999111-horizontalSwing', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[load_platforms0][select.hallway_horizontal_swing-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Horizontal swing', + 'options': list([ + 'stopped', + 'fixedleft', + 'fixedcenterleft', + ]), + }), + 'context': , + 'entity_id': 'select.hallway_horizontal_swing', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: test_select[load_platforms0][select.hallway_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.hallway_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'ABC999111-light', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[load_platforms0][select.hallway_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Light', + 'options': list([ + 'on', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.hallway_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_select[load_platforms0][select.kitchen_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'on', + 'dim', + 'off', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.kitchen_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'AAZZAAZZ-light', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[load_platforms0][select.kitchen_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kitchen Light', + 'options': list([ + 'on', + 'dim', + 'off', + ]), + }), + 'context': , + 'entity_id': 'select.kitchen_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_sensor.ambr b/tests/components/sensibo/snapshots/test_sensor.ambr index cd8d510b6cc..31e579d9929 100644 --- a/tests/components/sensibo/snapshots/test_sensor.ambr +++ b/tests/components/sensibo/snapshots/test_sensor.ambr @@ -1,28 +1,818 @@ # serializer version: 1 -# name: test_sensor - ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Kitchen Pure AQI', - 'options': list([ - 'good', - 'moderate', - 'bad', - ]), +# name: test_sensor[load_platforms0][sensor.bedroom_filter_last_reset-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bedroom_filter_last_reset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter last reset', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_last_reset', + 'unique_id': 'BBZZBBZZ-filter_last_reset', + 'unit_of_measurement': None, }) # --- -# name: test_sensor.1 - ReadOnlyDict({ - 'device_class': 'temperature', - 'fanlevel': 'low', - 'friendly_name': 'Hallway Climate React low temperature threshold', - 'horizontalswing': 'stopped', - 'light': 'on', - 'mode': 'heat', - 'on': True, - 'state_class': , - 'swing': 'stopped', - 'targettemperature': 21, - 'temperatureunit': 'c', +# name: test_sensor[load_platforms0][sensor.bedroom_filter_last_reset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Bedroom Filter last reset', + }), + 'context': , + 'entity_id': 'sensor.bedroom_filter_last_reset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2022-04-23T15:58:45+00:00', + }) +# --- +# name: test_sensor[load_platforms0][sensor.bedroom_pure_aqi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bedroom_pure_aqi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure AQI', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pm25_pure', + 'unique_id': 'BBZZBBZZ-pm25', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.bedroom_pure_aqi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Bedroom Pure AQI', + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), + }), + 'context': , + 'entity_id': 'sensor.bedroom_pure_aqi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[load_platforms0][sensor.bedroom_pure_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bedroom_pure_sensitivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pure sensitivity', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sensitivity', + 'unique_id': 'BBZZBBZZ-pure_sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[load_platforms0][sensor.bedroom_pure_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Bedroom Pure sensitivity', + }), + 'context': , + 'entity_id': 'sensor.bedroom_pure_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'n', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_high_temperature_threshold-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_climate_react_high_temperature_threshold', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Climate React high temperature threshold', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_react_high', + 'unique_id': 'ABC999111-climate_react_high', 'unit_of_measurement': , }) # --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_high_temperature_threshold-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'fanlevel': 'high', + 'friendly_name': 'Hallway Climate React high temperature threshold', + 'horizontalswing': 'stopped', + 'light': 'on', + 'mode': 'cool', + 'on': True, + 'state_class': , + 'swing': 'stopped', + 'targettemperature': 21, + 'temperatureunit': 'c', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_climate_react_high_temperature_threshold', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '27.5', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_low_temperature_threshold-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_climate_react_low_temperature_threshold', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Climate React low temperature threshold', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_react_low', + 'unique_id': 'ABC999111-climate_react_low', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_low_temperature_threshold-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'fanlevel': 'low', + 'friendly_name': 'Hallway Climate React low temperature threshold', + 'horizontalswing': 'stopped', + 'light': 'on', + 'mode': 'heat', + 'on': True, + 'state_class': , + 'swing': 'stopped', + 'targettemperature': 21, + 'temperatureunit': 'c', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_climate_react_low_temperature_threshold', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_type-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_climate_react_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate React type', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'smart_type', + 'unique_id': 'ABC999111-climate_react_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_climate_react_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Hallway Climate React type', + }), + 'context': , + 'entity_id': 'sensor.hallway_climate_react_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'temperature', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_filter_last_reset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_filter_last_reset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter last reset', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_last_reset', + 'unique_id': 'ABC999111-filter_last_reset', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_filter_last_reset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Hallway Filter last reset', + }), + 'context': , + 'entity_id': 'sensor.hallway_filter_last_reset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2022-03-12T15:24:26+00:00', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_battery_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hallway_motion_sensor_battery_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery voltage', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_voltage', + 'unique_id': 'AABBCC-battery_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_battery_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Hallway Motion Sensor Battery voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_motion_sensor_battery_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 
'state': '3000', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_motion_sensor_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AABBCC-humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Hallway Motion Sensor Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.hallway_motion_sensor_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '57', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_rssi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.hallway_motion_sensor_rssi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RSSI', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rssi', + 'unique_id': 'AABBCC-rssi', + 'unit_of_measurement': 'dBm', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_rssi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'signal_strength', + 'friendly_name': 'Hallway Motion Sensor RSSI', + 'state_class': , + 'unit_of_measurement': 'dBm', + }), + 'context': , + 'entity_id': 'sensor.hallway_motion_sensor_rssi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-72', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_motion_sensor_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AABBCC-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_motion_sensor_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Hallway Motion Sensor Temperature', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_motion_sensor_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.9', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_temperature_feels_like-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_temperature_feels_like', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature feels like', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'feels_like', + 'unique_id': 'ABC999111-feels_like', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_temperature_feels_like-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Hallway Temperature feels like', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.hallway_temperature_feels_like', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.2', + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_timer_end_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.hallway_timer_end_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timer end time', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_time', + 'unique_id': 'ABC999111-timer_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.hallway_timer_end_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Hallway Timer end time', + 'id': None, + 'turn_on': None, + }), + 'context': , + 'entity_id': 'sensor.hallway_timer_end_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_filter_last_reset-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kitchen_filter_last_reset', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filter last reset', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'filter_last_reset', + 'unique_id': 'AAZZAAZZ-filter_last_reset', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[load_platforms0][sensor.kitchen_filter_last_reset-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Kitchen Filter last reset', + }), + 'context': , + 'entity_id': 'sensor.kitchen_filter_last_reset', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2022-04-23T15:58:45+00:00', + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_pure_aqi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kitchen_pure_aqi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure AQI', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pm25_pure', + 'unique_id': 'AAZZAAZZ-pm25', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_pure_aqi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kitchen Pure AQI', + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), + }), + 'context': , + 'entity_id': 'sensor.kitchen_pure_aqi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'good', + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_pure_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kitchen_pure_sensitivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pure sensitivity', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sensitivity', + 'unique_id': 'AAZZAAZZ-pure_sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[load_platforms0][sensor.kitchen_pure_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kitchen Pure sensitivity', + }), + 'context': , + 'entity_id': 'sensor.kitchen_pure_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'n', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_switch.ambr b/tests/components/sensibo/snapshots/test_switch.ambr new file mode 100644 index 00000000000..13cb73cef7a --- /dev/null +++ b/tests/components/sensibo/snapshots/test_switch.ambr @@ -0,0 +1,192 @@ +# serializer version: 1 +# name: test_switch[load_platforms0][switch.bedroom_pure_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.bedroom_pure_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Pure Boost', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_boost_switch', + 'unique_id': 'BBZZBBZZ-pure_boost_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.bedroom_pure_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Bedroom Pure Boost', + }), + 'context': , + 'entity_id': 'switch.bedroom_pure_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[load_platforms0][switch.hallway_climate_react-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.hallway_climate_react', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Climate React', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_react_switch', + 'unique_id': 'ABC999111-climate_react_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.hallway_climate_react-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Hallway Climate React', + 'type': 'temperature', + }), + 'context': , + 'entity_id': 'switch.hallway_climate_react', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[load_platforms0][switch.hallway_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.hallway_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timer', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_on_switch', + 'unique_id': 'ABC999111-timer_on_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.hallway_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Hallway Timer', + 'id': None, + 'turn_on': None, + }), + 'context': , + 'entity_id': 'switch.hallway_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[load_platforms0][switch.kitchen_pure_boost-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.kitchen_pure_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pure Boost', + 'platform': 'sensibo', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pure_boost_switch', + 'unique_id': 'AAZZAAZZ-pure_boost_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[load_platforms0][switch.kitchen_pure_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Kitchen Pure Boost', + }), + 'context': , + 'entity_id': 'switch.kitchen_pure_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/sensibo/snapshots/test_update.ambr b/tests/components/sensibo/snapshots/test_update.ambr new file mode 100644 index 00000000000..3eb69c9a812 --- /dev/null +++ b/tests/components/sensibo/snapshots/test_update.ambr @@ -0,0 +1,178 @@ +# serializer version: 1 +# name: test_update[load_platforms0][update.bedroom_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.bedroom_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'BBZZBBZZ-fw_ver_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_update[load_platforms0][update.bedroom_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/sensibo/icon.png', + 'friendly_name': 'Bedroom Firmware', + 'in_progress': False, + 'installed_version': 'PUR00111', + 'latest_version': 'PUR00111', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': 'pure', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.bedroom_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_update[load_platforms0][update.hallway_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.hallway_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ABC999111-fw_ver_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_update[load_platforms0][update.hallway_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/sensibo/icon.png', + 'friendly_name': 'Hallway Firmware', + 'in_progress': False, + 'installed_version': 'SKY30046', + 'latest_version': 'SKY30048', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 
'supported_features': , + 'title': 'skyv2', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.hallway_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update[load_platforms0][update.kitchen_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.kitchen_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'sensibo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AAZZAAZZ-fw_ver_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_update[load_platforms0][update.kitchen_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'display_precision': 0, + 'entity_picture': 'https://brands.home-assistant.io/_/sensibo/icon.png', + 'friendly_name': 'Kitchen Firmware', + 'in_progress': False, + 'installed_version': 'PUR00111', + 'latest_version': 'PUR00111', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': 'pure', + 'update_percentage': None, + }), + 'context': , + 'entity_id': 'update.kitchen_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/sensibo/test_binary_sensor.py b/tests/components/sensibo/test_binary_sensor.py index 61b62226679..dbc3e87a236 100644 --- a/tests/components/sensibo/test_binary_sensor.py +++ b/tests/components/sensibo/test_binary_sensor.py @@ -7,39 +7,33 @@ from unittest.mock import patch from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.BINARY_SENSOR]], +) async def test_binary_sensor( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Sensibo binary sensor.""" - state1 = hass.states.get("binary_sensor.hallway_motion_sensor_connectivity") - state2 = hass.states.get("binary_sensor.hallway_motion_sensor_main_sensor") - state3 = hass.states.get("binary_sensor.hallway_motion_sensor_motion") - state4 = hass.states.get("binary_sensor.hallway_room_occupied") - state5 = hass.states.get( - "binary_sensor.kitchen_pure_boost_linked_with_indoor_air_quality" - ) - state6 = hass.states.get( - "binary_sensor.kitchen_pure_boost_linked_with_outdoor_air_quality" - ) - assert state1.state == "on" - assert state2.state == "on" - assert state3.state == "on" - assert state4.state == "on" - assert state5.state == "on" - assert state6.state == "off" + await 
snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr( get_data.parsed["ABC999111"].motion_sensors["AABBCC"], "alive", False diff --git a/tests/components/sensibo/test_button.py b/tests/components/sensibo/test_button.py index 6d7ce442562..5c36fe9e94d 100644 --- a/tests/components/sensibo/test_button.py +++ b/tests/components/sensibo/test_button.py @@ -5,21 +5,47 @@ from __future__ import annotations from datetime import datetime, timedelta from unittest.mock import patch +from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +@freeze_time("2022-03-12T15:24:26+00:00") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.BUTTON]], +) async def test_button( + hass: HomeAssistant, + load_int: ConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Sensibo button.""" + + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) + + +async def test_button_update( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, diff --git a/tests/components/sensibo/test_climate.py b/tests/components/sensibo/test_climate.py index b5a7be7bde0..8be9f4a60e4 100644 --- a/tests/components/sensibo/test_climate.py +++ b/tests/components/sensibo/test_climate.py @@ -54,12 +54,14 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_UNKNOWN, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform async def test_climate_find_valid_targets() -> None: @@ -77,26 +79,22 @@ async def test_climate_find_valid_targets() -> None: assert _find_valid_target_temp(25, valid_targets) == 20 +@pytest.mark.parametrize( + "load_platforms", + [[Platform.CLIMATE]], +) async def test_climate( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, get_data: SensiboData, load_int: ConfigEntry, + entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: """Test the Sensibo climate.""" - state1 = hass.states.get("climate.hallway") - state2 = hass.states.get("climate.kitchen") - state3 = hass.states.get("climate.bedroom") + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) - assert state1.state == "heat" - assert state1.attributes == snapshot - - assert state2.state == "off" - - assert state3 - assert state3.state == "off" found_log = False logs = caplog.get_records("setup") for log in logs: diff --git 
a/tests/components/sensibo/test_number.py b/tests/components/sensibo/test_number.py index de369698f50..95836ba023c 100644 --- a/tests/components/sensibo/test_number.py +++ b/tests/components/sensibo/test_number.py @@ -7,6 +7,7 @@ from unittest.mock import patch from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.number import ( ATTR_VALUE, @@ -14,27 +15,31 @@ from homeassistant.components.number import ( SERVICE_SET_VALUE, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +@pytest.mark.parametrize( + "load_platforms", + [[Platform.NUMBER]], +) @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_number( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Sensibo number.""" - state1 = hass.states.get("number.hallway_temperature_calibration") - state2 = hass.states.get("number.hallway_humidity_calibration") - assert state1.state == "0.1" - assert state2.state == "0.0" + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr(get_data.parsed["ABC999111"], "calibration_temp", 0.2) diff --git a/tests/components/sensibo/test_select.py b/tests/components/sensibo/test_select.py index 7a9c89ef612..2e4a1cb507c 100644 --- a/tests/components/sensibo/test_select.py +++ b/tests/components/sensibo/test_select.py @@ -7,6 +7,7 @@ from unittest.mock import patch from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.select import ( ATTR_OPTION, @@ -14,24 +15,30 @@ from homeassistant.components.select import ( SERVICE_SELECT_OPTION, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +@pytest.mark.parametrize( + "load_platforms", + [[Platform.SELECT]], +) async def test_select( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test the Sensibo select.""" - state1 = hass.states.get("select.hallway_horizontal_swing") - assert state1.state == "stopped" + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr( get_data.parsed["ABC999111"], "horizontal_swing_mode", "fixedleft" diff --git a/tests/components/sensibo/test_sensor.py b/tests/components/sensibo/test_sensor.py index 5fc761f178a..32794e266b0 100644 --- a/tests/components/sensibo/test_sensor.py +++ b/tests/components/sensibo/test_sensor.py @@ -5,37 +5,37 @@ from __future__ import 
annotations from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from pysensibo.model import PureAQI, SensiboData import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.SENSOR]], +) async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, ) -> None: """Test the Sensibo sensor.""" - state1 = hass.states.get("sensor.hallway_motion_sensor_battery_voltage") - state2 = hass.states.get("sensor.kitchen_pure_aqi") - state3 = hass.states.get("sensor.kitchen_pure_sensitivity") - state4 = hass.states.get("sensor.hallway_climate_react_low_temperature_threshold") - assert state1.state == "3000" - assert state2.state == "good" - assert state3.state == "n" - assert state4.state == "0.0" - assert state2.attributes == snapshot - assert state4.attributes == snapshot + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr(get_data.parsed["AAZZAAZZ"], "pm25_pure", PureAQI(2)) diff --git a/tests/components/sensibo/test_switch.py b/tests/components/sensibo/test_switch.py index cc3c8881bec..f260af7baaa 100644 --- a/tests/components/sensibo/test_switch.py +++ b/tests/components/sensibo/test_switch.py @@ -7,6 +7,7 @@ from unittest.mock import patch from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntry @@ -16,12 +17,29 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_OFF, STATE_ON, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.SWITCH]], +) +async def test_switch( + hass: HomeAssistant, + load_int: ConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Sensibo switch.""" + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) async def test_switch_timer( diff --git a/tests/components/sensibo/test_update.py b/tests/components/sensibo/test_update.py index 23b2719d5b5..a4eb9751243 100644 --- a/tests/components/sensibo/test_update.py +++ b/tests/components/sensibo/test_update.py @@ -5,32 +5,36 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from pysensibo.model import SensiboData import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import ConfigEntry -from 
homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.const import STATE_OFF, Platform from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util +from homeassistant.helpers import entity_registry as er -from tests.common import async_fire_time_changed +from tests.common import async_fire_time_changed, snapshot_platform +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + "load_platforms", + [[Platform.UPDATE]], +) async def test_update( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, get_data: SensiboData, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, ) -> None: """Test the Sensibo update.""" - state1 = hass.states.get("update.hallway_firmware") - state2 = hass.states.get("update.kitchen_firmware") - assert state1.state == STATE_ON - assert state1.attributes["installed_version"] == "SKY30046" - assert state1.attributes["latest_version"] == "SKY30048" - assert state1.attributes["title"] == "skyv2" - assert state2.state == STATE_OFF + await snapshot_platform(hass, entity_registry, snapshot, load_int.entry_id) monkeypatch.setattr(get_data.parsed["ABC999111"], "fw_ver", "SKY30048") @@ -38,10 +42,8 @@ async def test_update( "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", return_value=get_data, ): - async_fire_time_changed( - hass, - dt_util.utcnow() + timedelta(minutes=5), - ) + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) await hass.async_block_till_done() state1 = hass.states.get("update.hallway_firmware") diff --git a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 2504ea80d84..d53818e77b3 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -7,6 +7,7 @@ from datetime import UTC, date, datetime from decimal import Decimal from types import ModuleType from typing import Any +from unittest.mock import patch import pytest @@ -30,6 +31,7 @@ from homeassistant.const import ( PERCENTAGE, STATE_UNKNOWN, EntityCategory, + UnitOfArea, UnitOfDataRate, UnitOfEnergy, UnitOfLength, @@ -483,6 +485,108 @@ async def test_restore_sensor_restore_state( assert entity0.native_unit_of_measurement == uom +async def test_translated_unit( + hass: HomeAssistant, +) -> None: + """Test translated unit.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = MockSensor( + name="Test", + native_value="123", + unique_id="very_unique", + ) + entity0.entity_description = SensorEntityDescription( + "test", + translation_key="test_translation_key", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "sensor", {"sensor": {"platform": "test"}} + ) + await hass.async_block_till_done() + + entity_id = entity0.entity_id + state = hass.states.get(entity_id) + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == "Tests" + + +async def test_translated_unit_with_native_unit_raises( + hass: HomeAssistant, +) -> None: + """Test that translated unit.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = MockSensor( + name="Test", + native_value="123", + 
unique_id="very_unique", + ) + entity0.entity_description = SensorEntityDescription( + "test", + translation_key="test_translation_key", + native_unit_of_measurement="bad_unit", + ) + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "sensor", {"sensor": {"platform": "test"}} + ) + await hass.async_block_till_done() + # Setup fails so entity_id is None + assert entity0.entity_id is None + + +async def test_unit_translation_key_without_platform_raises( + hass: HomeAssistant, +) -> None: + """Test that unit translation key property raises if the entity has no platform yet.""" + + with patch( + "homeassistant.helpers.service.translation.async_get_translations", + return_value={ + "component.test.entity.sensor.test_translation_key.unit_of_measurement": "Tests" + }, + ): + entity0 = MockSensor( + name="Test", + native_value="123", + unique_id="very_unique", + ) + entity0.entity_description = SensorEntityDescription( + "test", + translation_key="test_translation_key", + ) + with pytest.raises( + ValueError, + match="cannot have a translation key for unit of measurement before " + "being added to the entity platform", + ): + unit = entity0.unit_of_measurement # noqa: F841 + + setup_test_component_platform(hass, sensor.DOMAIN, [entity0]) + + assert await async_setup_component( + hass, "sensor", {"sensor": {"platform": "test"}} + ) + await hass.async_block_till_done() + + # Should not raise after being added to the platform + unit = entity0.unit_of_measurement # noqa: F841 + assert unit == "Tests" + + @pytest.mark.parametrize( ( "device_class", @@ -651,6 +755,34 @@ async def test_custom_unit( "device_class", ), [ + # Area + ( + UnitOfArea.SQUARE_KILOMETERS, + UnitOfArea.SQUARE_MILES, + UnitOfArea.SQUARE_MILES, + 1000, + "1000", + "386", + SensorDeviceClass.AREA, + ), + ( + UnitOfArea.SQUARE_CENTIMETERS, + UnitOfArea.SQUARE_INCHES, + UnitOfArea.SQUARE_INCHES, + 7.24, + "7.24", + "1.12", + SensorDeviceClass.AREA, + ), + ( + UnitOfArea.SQUARE_KILOMETERS, + "peer_distance", + UnitOfArea.SQUARE_KILOMETERS, + 1000, + "1000", + "1000", + SensorDeviceClass.AREA, + ), # Distance ( UnitOfLength.KILOMETERS, @@ -1834,6 +1966,7 @@ async def test_non_numeric_device_class_with_unit_of_measurement( [ SensorDeviceClass.APPARENT_POWER, SensorDeviceClass.AQI, + SensorDeviceClass.AREA, SensorDeviceClass.ATMOSPHERIC_PRESSURE, SensorDeviceClass.BATTERY, SensorDeviceClass.CO, diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 37f080d2de2..44eaa9fde0d 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -227,6 +227,8 @@ async def assert_validation_result( ), [ (None, "%", "%", "%", "unitless", 13.050847, -10, 30), + ("area", "m²", "m²", "m²", "area", 13.050847, -10, 30), + ("area", "mi²", "mi²", "mi²", "area", 13.050847, -10, 30), ("battery", "%", "%", "%", "unitless", 13.050847, -10, 30), ("battery", None, None, None, "unitless", 13.050847, -10, 30), ("distance", "m", "m", "m", "distance", 13.050847, -10, 30), @@ -914,6 +916,8 @@ async def test_compile_hourly_statistics_wrong_unit( "factor", ), [ + (US_CUSTOMARY_SYSTEM, "area", "m²", "m²", "m²", "area", 1), + (US_CUSTOMARY_SYSTEM, "area", "mi²", "mi²", "mi²", "area", 1), (US_CUSTOMARY_SYSTEM, "distance", "m", "m", "m", "distance", 1), (US_CUSTOMARY_SYSTEM, "distance", "mi", "mi", "mi", "distance", 1), (US_CUSTOMARY_SYSTEM, "energy", "kWh", "kWh", "kWh", "energy", 1), @@ -926,6 +930,8 @@ async def 
test_compile_hourly_statistics_wrong_unit( (US_CUSTOMARY_SYSTEM, "volume", "ft³", "ft³", "ft³", "volume", 1), (US_CUSTOMARY_SYSTEM, "weight", "g", "g", "g", "mass", 1), (US_CUSTOMARY_SYSTEM, "weight", "oz", "oz", "oz", "mass", 1), + (METRIC_SYSTEM, "area", "m²", "m²", "m²", "area", 1), + (METRIC_SYSTEM, "area", "mi²", "mi²", "mi²", "area", 1), (METRIC_SYSTEM, "distance", "m", "m", "m", "distance", 1), (METRIC_SYSTEM, "distance", "mi", "mi", "mi", "distance", 1), (METRIC_SYSTEM, "energy", "kWh", "kWh", "kWh", "energy", 1), @@ -2228,6 +2234,8 @@ async def test_compile_hourly_energy_statistics_multiple( [ ("battery", "%", 30), ("battery", None, 30), + ("area", "m²", 30), + ("area", "mi²", 30), ("distance", "m", 30), ("distance", "mi", 30), ("humidity", "%", 30), @@ -2336,6 +2344,8 @@ async def test_compile_hourly_statistics_partially_unavailable( [ ("battery", "%", 30), ("battery", None, 30), + ("area", "m²", 30), + ("area", "mi²", 30), ("distance", "m", 30), ("distance", "mi", 30), ("humidity", "%", 30), @@ -2438,6 +2448,10 @@ async def test_compile_hourly_statistics_fails( "statistic_type", ), [ + ("measurement", "area", "m²", "m²", "m²", "area", "mean"), + ("measurement", "area", "mi²", "mi²", "mi²", "area", "mean"), + ("total", "area", "m²", "m²", "m²", "area", "sum"), + ("total", "area", "mi²", "mi²", "mi²", "area", "sum"), ("measurement", "battery", "%", "%", "%", "unitless", "mean"), ("measurement", "battery", None, None, None, "unitless", "mean"), ("measurement", "distance", "m", "m", "m", "distance", "mean"), @@ -4233,8 +4247,8 @@ async def async_record_states( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -4445,8 +4459,8 @@ async def test_validate_statistics_unit_ignore_device_class( @pytest.mark.parametrize( ("units", "attributes", "unit", "unit2", "supported_unit"), [ - (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), - (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "W, kW"), + (US_CUSTOMARY_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), + (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W", "kW", "GW, MW, TW, W, kW"), ( US_CUSTOMARY_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, @@ -5432,6 +5446,17 @@ async def test_exclude_attributes(hass: HomeAssistant) -> None: assert ATTR_FRIENDLY_NAME in states[0].attributes +@pytest.mark.parametrize( + "ignore_translations", + [ + [ + "component.test.issues..title", + "component.test.issues..description", + "component.sensor.issues..title", + "component.sensor.issues..description", + ] + ], +) async def test_clean_up_repairs( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index 93b3a46910c..d9945706182 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -16,7 +16,7 @@ import pytest from homeassistant import config_entries from homeassistant.components import zeroconf -from homeassistant.components.shelly import config_flow +from homeassistant.components.shelly import MacAddressMismatchError, config_flow from homeassistant.components.shelly.const import 
( CONF_BLE_SCANNER_MODE, DOMAIN, @@ -331,6 +331,7 @@ async def test_form_missing_model_key_zeroconf( ("exc", "base_error"), [ (DeviceConnectionError, "cannot_connect"), + (MacAddressMismatchError, "mac_address_mismatch"), (ValueError, "unknown"), ], ) @@ -436,6 +437,7 @@ async def test_user_setup_ignored_device( [ (InvalidAuthError, "invalid_auth"), (DeviceConnectionError, "cannot_connect"), + (MacAddressMismatchError, "mac_address_mismatch"), (ValueError, "unknown"), ], ) @@ -473,6 +475,7 @@ async def test_form_auth_errors_test_connection_gen1( [ (DeviceConnectionError, "cannot_connect"), (InvalidAuthError, "invalid_auth"), + (MacAddressMismatchError, "mac_address_mismatch"), (ValueError, "unknown"), ], ) @@ -844,8 +847,19 @@ async def test_reauth_successful( (3, {"password": "test2 password"}), ], ) +@pytest.mark.parametrize( + ("exc", "abort_reason"), + [ + (DeviceConnectionError, "reauth_unsuccessful"), + (MacAddressMismatchError, "mac_address_mismatch"), + ], +) async def test_reauth_unsuccessful( - hass: HomeAssistant, gen: int, user_input: dict[str, str] + hass: HomeAssistant, + gen: int, + user_input: dict[str, str], + exc: Exception, + abort_reason: str, ) -> None: """Test reauthentication flow failed.""" entry = MockConfigEntry( @@ -862,13 +876,9 @@ async def test_reauth_unsuccessful( return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ), patch( - "aioshelly.block_device.BlockDevice.create", - new=AsyncMock(side_effect=InvalidAuthError), - ), - patch( - "aioshelly.rpc_device.RpcDevice.create", - new=AsyncMock(side_effect=InvalidAuthError), + "aioshelly.block_device.BlockDevice.create", new=AsyncMock(side_effect=exc) ), + patch("aioshelly.rpc_device.RpcDevice.create", new=AsyncMock(side_effect=exc)), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -876,7 +886,7 @@ async def test_reauth_unsuccessful( ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_unsuccessful" + assert result["reason"] == abort_reason async def test_reauth_get_info_error(hass: HomeAssistant) -> None: diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 47c338e3fad..090c5e7207f 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -10,6 +10,7 @@ import pytest from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.shelly import MacAddressMismatchError from homeassistant.components.shelly.const import ( ATTR_CHANNEL, ATTR_CLICK_TYPE, @@ -254,11 +255,13 @@ async def test_block_polling_connection_error( assert get_entity_state(hass, "switch.test_name_channel_1") == STATE_UNAVAILABLE +@pytest.mark.parametrize("exc", [DeviceConnectionError, MacAddressMismatchError]) async def test_block_rest_update_connection_error( hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, + exc: Exception, ) -> None: """Test block REST update connection error.""" entity_id = register_entity(hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud") @@ -269,11 +272,7 @@ async def test_block_rest_update_connection_error( await mock_rest_update(hass, freezer) assert get_entity_state(hass, entity_id) == STATE_ON - monkeypatch.setattr( - mock_block_device, - "update_shelly", - AsyncMock(side_effect=DeviceConnectionError), - ) + 
monkeypatch.setattr(mock_block_device, "update_shelly", AsyncMock(side_effect=exc)) await mock_rest_update(hass, freezer) assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE @@ -702,11 +701,13 @@ async def test_rpc_polling_auth_error( assert flow["context"].get("entry_id") == entry.entry_id +@pytest.mark.parametrize("exc", [DeviceConnectionError, MacAddressMismatchError]) async def test_rpc_reconnect_error( hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, + exc: Exception, ) -> None: """Test RPC reconnect error.""" await init_integration(hass, 2) @@ -714,13 +715,7 @@ async def test_rpc_reconnect_error( assert get_entity_state(hass, "switch.test_switch_0") == STATE_ON monkeypatch.setattr(mock_rpc_device, "connected", False) - monkeypatch.setattr( - mock_rpc_device, - "initialize", - AsyncMock( - side_effect=DeviceConnectionError, - ), - ) + monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock(side_effect=exc)) # Move time to generate reconnect freezer.tick(timedelta(seconds=RPC_RECONNECT_INTERVAL)) diff --git a/tests/components/sky_remote/__init__.py b/tests/components/sky_remote/__init__.py new file mode 100644 index 00000000000..83d68330d5b --- /dev/null +++ b/tests/components/sky_remote/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Sky Remote component.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_mock_entry(hass: HomeAssistant, entry: MockConfigEntry): + """Initialize a mock config entry.""" + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + await hass.async_block_till_done() diff --git a/tests/components/sky_remote/conftest.py b/tests/components/sky_remote/conftest.py new file mode 100644 index 00000000000..d6c453d81f7 --- /dev/null +++ b/tests/components/sky_remote/conftest.py @@ -0,0 +1,47 @@ +"""Test mocks and fixtures.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from homeassistant.components.sky_remote.const import DEFAULT_PORT, DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT + +from tests.common import MockConfigEntry + +SAMPLE_CONFIG = {CONF_HOST: "example.com", CONF_PORT: DEFAULT_PORT} + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry(domain=DOMAIN, data=SAMPLE_CONFIG) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Stub out setup function.""" + with patch( + "homeassistant.components.sky_remote.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_remote_control(request: pytest.FixtureRequest) -> Generator[MagicMock]: + """Mock skyboxremote library.""" + with ( + patch( + "homeassistant.components.sky_remote.RemoteControl" + ) as mock_remote_control, + patch( + "homeassistant.components.sky_remote.config_flow.RemoteControl", + mock_remote_control, + ), + ): + mock_remote_control._instance_mock = MagicMock(host="example.com") + mock_remote_control._instance_mock.check_connectable = AsyncMock(True) + mock_remote_control.return_value = mock_remote_control._instance_mock + yield mock_remote_control diff --git a/tests/components/sky_remote/test_config_flow.py b/tests/components/sky_remote/test_config_flow.py new file mode 100644 index 00000000000..aaeda20788c --- /dev/null +++ b/tests/components/sky_remote/test_config_flow.py @@ -0,0 +1,125 @@ +"""Test 
the Sky Remote config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +import pytest +from skyboxremote import LEGACY_PORT, SkyBoxConnectionError + +from homeassistant.components.sky_remote.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import SAMPLE_CONFIG + + +async def test_user_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_remote_control +) -> None: + """Test we can setup an entry.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == SAMPLE_CONFIG + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_device_exists_abort( + hass: HomeAssistant, mock_config_entry, mock_remote_control +) -> None: + """Test we abort flow if device already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: mock_config_entry.data[CONF_HOST]}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize("mock_remote_control", [LEGACY_PORT], indirect=True) +async def test_user_flow_legacy_device( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_remote_control, +) -> None: + """Test we can setup an entry with a legacy port.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + async def mock_check_connectable(): + if mock_remote_control.call_args[0][1] == LEGACY_PORT: + return True + raise SkyBoxConnectionError("Wrong port") + + mock_remote_control._instance_mock.check_connectable = mock_check_connectable + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {**SAMPLE_CONFIG, CONF_PORT: LEGACY_PORT} + + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize("mock_remote_control", [6], indirect=True) +async def test_user_flow_unconnectable( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_remote_control, +) -> None: + """Test we can setup an entry.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + mock_remote_control._instance_mock.check_connectable = AsyncMock( + side_effect=SkyBoxConnectionError("Example") + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + assert len(mock_setup_entry.mock_calls) == 0 + + mock_remote_control._instance_mock.check_connectable = AsyncMock(True) + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + {CONF_HOST: SAMPLE_CONFIG[CONF_HOST]}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == SAMPLE_CONFIG + + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/sky_remote/test_init.py b/tests/components/sky_remote/test_init.py new file mode 100644 index 00000000000..fe316baa6bf --- /dev/null +++ b/tests/components/sky_remote/test_init.py @@ -0,0 +1,59 @@ +"""Tests for the Sky Remote component.""" + +from unittest.mock import AsyncMock + +from skyboxremote import SkyBoxConnectionError + +from homeassistant.components.sky_remote.const import DEFAULT_PORT, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_mock_entry + +from tests.common import MockConfigEntry + + +async def test_setup_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_remote_control, + device_registry: dr.DeviceRegistry, +) -> None: + """Test successful setup of entry.""" + await setup_mock_entry(hass, mock_config_entry) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + mock_remote_control.assert_called_once_with("example.com", DEFAULT_PORT) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.entry_id)} + ) + assert device_entry is not None + assert device_entry.name == "example.com" + + +async def test_setup_unconnectable_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_remote_control, +) -> None: + """Test unsuccessful setup of entry.""" + mock_remote_control._instance_mock.check_connectable = AsyncMock( + side_effect=SkyBoxConnectionError() + ) + + await setup_mock_entry(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_unload_entry( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_remote_control +) -> None: + """Test unload an entry.""" + await setup_mock_entry(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/sky_remote/test_remote.py b/tests/components/sky_remote/test_remote.py new file mode 100644 index 00000000000..301375bc039 --- /dev/null +++ b/tests/components/sky_remote/test_remote.py @@ -0,0 +1,46 @@ +"""Test sky_remote remote.""" + +import pytest + +from homeassistant.components.remote import ( + ATTR_COMMAND, + DOMAIN as REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError + +from . 
import setup_mock_entry + +ENTITY_ID = "remote.example_com" + + +async def test_send_command( + hass: HomeAssistant, mock_config_entry, mock_remote_control +) -> None: + """Test "send_command" method.""" + await setup_mock_entry(hass, mock_config_entry) + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_COMMAND: ["sky"]}, + blocking=True, + ) + mock_remote_control._instance_mock.send_keys.assert_called_once_with(["sky"]) + + +async def test_send_invalid_command( + hass: HomeAssistant, mock_config_entry, mock_remote_control +) -> None: + """Test "send_command" method.""" + await setup_mock_entry(hass, mock_config_entry) + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_COMMAND: ["apple"]}, + blocking=True, + ) + mock_remote_control._instance_mock.send_keys.assert_not_called() diff --git a/tests/components/smarty/conftest.py b/tests/components/smarty/conftest.py index c62097f0516..a9b518d88f4 100644 --- a/tests/components/smarty/conftest.py +++ b/tests/components/smarty/conftest.py @@ -40,6 +40,8 @@ def mock_smarty() -> Generator[AsyncMock]: client.warning = False client.alarm = False client.boost = False + client.enable_boost.return_value = True + client.disable_boost.return_value = True client.supply_air_temperature = 20 client.extract_air_temperature = 23 client.outdoor_air_temperature = 24 @@ -48,6 +50,7 @@ def mock_smarty() -> Generator[AsyncMock]: client.filter_timer = 31 client.get_configuration_version.return_value = 111 client.get_software_version.return_value = 127 + client.reset_filters_timer.return_value = True yield client diff --git a/tests/components/smarty/snapshots/test_button.ambr b/tests/components/smarty/snapshots/test_button.ambr new file mode 100644 index 00000000000..38849bd2b2e --- /dev/null +++ b/tests/components/smarty/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[button.mock_title_reset_filters_timer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.mock_title_reset_filters_timer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Reset filters timer', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reset_filters_timer', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_reset_filters_timer', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.mock_title_reset_filters_timer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Reset filters timer', + }), + 'context': , + 'entity_id': 'button.mock_title_reset_filters_timer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/smarty/snapshots/test_switch.ambr b/tests/components/smarty/snapshots/test_switch.ambr new file mode 100644 index 00000000000..be1da7c6961 --- /dev/null +++ b/tests/components/smarty/snapshots/test_switch.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[switch.mock_title_boost-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_boost', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Boost', + 'platform': 'smarty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boost', + 'unique_id': '01JAZ5DPW8C62D620DGYNG2R8H_boost', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.mock_title_boost-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Boost', + }), + 'context': , + 'entity_id': 'switch.mock_title_boost', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smarty/test_button.py b/tests/components/smarty/test_button.py new file mode 100644 index 00000000000..0a7b67f2be6 --- /dev/null +++ b/tests/components/smarty/test_button.py @@ -0,0 +1,45 @@ +"""Tests for the Smarty button platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.BUTTON]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + target={ATTR_ENTITY_ID: "button.mock_title_reset_filters_timer"}, + blocking=True, + ) + mock_smarty.reset_filters_timer.assert_called_once_with() diff --git a/tests/components/smarty/test_switch.py b/tests/components/smarty/test_switch.py new file mode 100644 index 00000000000..1a6748e2d23 --- /dev/null +++ b/tests/components/smarty/test_switch.py @@ -0,0 +1,58 @@ +"""Tests for the Smarty switch platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.smarty.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_setting_value( + hass: HomeAssistant, + mock_smarty: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting value.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + target={ATTR_ENTITY_ID: "switch.mock_title_boost"}, + blocking=True, + ) + mock_smarty.enable_boost.assert_called_once_with() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + target={ATTR_ENTITY_ID: "switch.mock_title_boost"}, + blocking=True, + ) + mock_smarty.disable_boost.assert_called_once_with() diff --git a/tests/components/sonarr/__init__.py b/tests/components/sonarr/__init__.py index b6050808a34..660102ed082 100644 --- a/tests/components/sonarr/__init__.py +++ b/tests/components/sonarr/__init__.py @@ -5,6 +5,6 @@ from homeassistant.const import CONF_API_KEY, CONF_URL MOCK_REAUTH_INPUT = {CONF_API_KEY: "test-api-key-reauth"} MOCK_USER_INPUT = { - CONF_URL: "http://192.168.1.189:8989", + CONF_URL: "http://192.168.1.189:8989/", CONF_API_KEY: "MOCK_API_KEY", } diff --git a/tests/components/sonarr/test_config_flow.py b/tests/components/sonarr/test_config_flow.py index 118d5020cba..efbfbd749b3 100644 --- a/tests/components/sonarr/test_config_flow.py +++ b/tests/components/sonarr/test_config_flow.py @@ -50,6 +50,34 @@ async def test_cannot_connect( assert result["errors"] == {"base": "cannot_connect"} +async def test_url_rewrite( + hass: HomeAssistant, + mock_sonarr_config_flow: MagicMock, + mock_setup_entry: None, +) -> None: + """Test the full manual user flow from start to finish.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={CONF_SOURCE: SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = MOCK_USER_INPUT.copy() + user_input[CONF_URL] = "https://192.168.1.189" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "192.168.1.189" + + assert result["data"] + assert result["data"][CONF_URL] == "https://192.168.1.189:443/" + + async def test_invalid_auth( hass: HomeAssistant, mock_sonarr_config_flow: MagicMock ) -> None: @@ -145,7 +173,7 @@ async def test_full_user_flow_implementation( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" async def test_full_user_flow_advanced_options( @@ -175,7 +203,7 @@ async def test_full_user_flow_advanced_options( assert result["title"] == "192.168.1.189" assert result["data"] - assert result["data"][CONF_URL] == "http://192.168.1.189:8989" + assert result["data"][CONF_URL] == "http://192.168.1.189:8989/" assert result["data"][CONF_VERIFY_SSL] diff --git a/tests/components/sonos/fixtures/sonos_queue.json 
b/tests/components/sonos/fixtures/sonos_queue.json index 50689a00e1d..ffe08fc2b08 100644 --- a/tests/components/sonos/fixtures/sonos_queue.json +++ b/tests/components/sonos/fixtures/sonos_queue.json @@ -26,5 +26,17 @@ "protocol_info": "file:*:audio/mpegurl:*" } ] + }, + { + "title": "Track with no album or creator", + "item_id": "Q:0/3", + "parent_id": "Q:0", + "original_track_number": 1, + "resources": [ + { + "uri": "x-file-cifs://192.168.42.10/music/TrackWithNoAlbumOrCreator.mp3", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] } ] diff --git a/tests/components/sonos/snapshots/test_media_player.ambr b/tests/components/sonos/snapshots/test_media_player.ambr index f382d341de6..8ef298de3db 100644 --- a/tests/components/sonos/snapshots/test_media_player.ambr +++ b/tests/components/sonos/snapshots/test_media_player.ambr @@ -71,6 +71,12 @@ 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3', 'media_title': 'Come Together', }), + dict({ + 'media_album_name': None, + 'media_artist': None, + 'media_content_id': 'x-file-cifs://192.168.42.10/music/TrackWithNoAlbumOrCreator.mp3', + 'media_title': 'Track with no album or creator', + }), ]), }) # --- diff --git a/tests/components/spotify/conftest.py b/tests/components/spotify/conftest.py index d3fc418f1cd..67d4eac3960 100644 --- a/tests/components/spotify/conftest.py +++ b/tests/components/spotify/conftest.py @@ -9,12 +9,7 @@ from spotifyaio.models import ( Album, Artist, ArtistResponse, - AudioFeatures, - CategoriesResponse, - Category, - CategoryPlaylistResponse, Devices, - FeaturedPlaylistResponse, NewReleasesResponse, NewReleasesResponseInner, PlaybackState, @@ -135,12 +130,10 @@ def mock_spotify() -> Generator[AsyncMock]: PlaybackState, ), ("current_user.json", "get_current_user", UserProfile), - ("category.json", "get_category", Category), ("playlist.json", "get_playlist", Playlist), ("album.json", "get_album", Album), ("artist.json", "get_artist", Artist), ("show.json", "get_show", Show), - ("audio_features.json", "get_audio_features", AudioFeatures), ): getattr(client, method).return_value = obj.from_json( load_fixture(fixture, DOMAIN) @@ -148,15 +141,6 @@ def mock_spotify() -> Generator[AsyncMock]: client.get_followed_artists.return_value = ArtistResponse.from_json( load_fixture("followed_artists.json", DOMAIN) ).artists.items - client.get_featured_playlists.return_value = FeaturedPlaylistResponse.from_json( - load_fixture("featured_playlists.json", DOMAIN) - ).playlists.items - client.get_categories.return_value = CategoriesResponse.from_json( - load_fixture("categories.json", DOMAIN) - ).categories.items - client.get_category_playlists.return_value = CategoryPlaylistResponse.from_json( - load_fixture("category_playlists.json", DOMAIN) - ).playlists.items client.get_new_releases.return_value = NewReleasesResponse.from_json( load_fixture("new_releases.json", DOMAIN) ).albums.items diff --git a/tests/components/spotify/fixtures/audio_features.json b/tests/components/spotify/fixtures/audio_features.json deleted file mode 100644 index 52dfee060f7..00000000000 --- a/tests/components/spotify/fixtures/audio_features.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "danceability": 0.696, - "energy": 0.905, - "key": 3, - "loudness": -2.743, - "mode": 1, - "speechiness": 0.103, - "acousticness": 0.011, - "instrumentalness": 0.000905, - "liveness": 0.302, - "valence": 0.625, - "tempo": 114.944, - "type": "audio_features", - "id": "11dFghVXANMlKmJXsNCbNl", - "uri": 
"spotify:track:11dFghVXANMlKmJXsNCbNl", - "track_href": "https://api.spotify.com/v1/tracks/11dFghVXANMlKmJXsNCbNl", - "analysis_url": "https://api.spotify.com/v1/audio-analysis/11dFghVXANMlKmJXsNCbNl", - "duration_ms": 207960, - "time_signature": 4 -} diff --git a/tests/components/spotify/fixtures/categories.json b/tests/components/spotify/fixtures/categories.json deleted file mode 100644 index ed873c95c30..00000000000 --- a/tests/components/spotify/fixtures/categories.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "categories": { - "href": "https://api.spotify.com/v1/browse/categories?offset=0&limit=20&locale=en-US,en;q%3D0.5", - "items": [ - { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAt0tbjZptfcdMSKl3", - "id": "0JQ5DAt0tbjZptfcdMSKl3", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg", - "width": 274 - } - ], - "name": "Made For You" - }, - { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFz6FAsUtgAab", - "id": "0JQ5DAqbMKFz6FAsUtgAab", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg", - "width": 274 - } - ], - "name": "New Releases" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/categories?offset=20&limit=20&locale=en-US,en;q%3D0.5", - "offset": 0, - "previous": null, - "total": 56 - } -} diff --git a/tests/components/spotify/fixtures/category.json b/tests/components/spotify/fixtures/category.json deleted file mode 100644 index d60605cf94f..00000000000 --- a/tests/components/spotify/fixtures/category.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0", - "id": "0JQ5DAqbMKFRY5ok2pxXJ0", - "icons": [ - { - "height": 274, - "url": "https://t.scdn.co/media/original/dinner_1b6506abba0ba52c54e6d695c8571078_274x274.jpg", - "width": 274 - } - ], - "name": "Cooking & Dining" -} diff --git a/tests/components/spotify/fixtures/category_playlists.json b/tests/components/spotify/fixtures/category_playlists.json deleted file mode 100644 index c2262708d5a..00000000000 --- a/tests/components/spotify/fixtures/category_playlists.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "playlists": { - "href": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0/playlists?country=NL&offset=0&limit=20", - "items": [ - { - "collaborative": false, - "description": "Lekker eten en lang natafelen? 
Daar hoort muziek bij.", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DX7yhuKT9G4qk" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX7yhuKT9G4qk", - "id": "37i9dQZF1DX7yhuKT9G4qk", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f0000000343319faa9428405f3312b588", - "width": null - } - ], - "name": "eten met vrienden", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTcwMTY5Njk3NywwMDAwMDAwMDkyY2JjZDA1MjA2YTBmNzMxMmFlNGI0YzRhMjg0ZWZl", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX7yhuKT9G4qk/tracks", - "total": 313 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DX7yhuKT9G4qk" - }, - { - "collaborative": false, - "description": "From new retro to classic country blues, honky tonk, rockabilly, and more.", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DXbvE0SE0Cczh" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DXbvE0SE0Cczh", - "id": "37i9dQZF1DXbvE0SE0Cczh", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f00000003b93c270883619dde61725fc8", - "width": null - } - ], - "name": "Jukebox Joint", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTY4NjkxODgwMiwwMDAwMDAwMGUwNWRkNjY5N2UzM2Q4NzI4NzRiZmNhMGVmMzAyZTA5", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DXbvE0SE0Cczh/tracks", - "total": 60 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DXbvE0SE0Cczh" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/categories/0JQ5DAqbMKFRY5ok2pxXJ0/playlists?country=NL&offset=20&limit=20", - "offset": 0, - "previous": null, - "total": 46 - } -} diff --git a/tests/components/spotify/fixtures/featured_playlists.json b/tests/components/spotify/fixtures/featured_playlists.json deleted file mode 100644 index 5e6e53a7ee1..00000000000 --- a/tests/components/spotify/fixtures/featured_playlists.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "message": "Popular Playlists", - "playlists": { - "href": "https://api.spotify.com/v1/browse/featured-playlists?country=NL×tamp=2023-12-18T18%3A35%3A35&offset=0&limit=20", - "items": [ - { - "collaborative": false, - "description": "De ideale playlist voor het fijne kerstgevoel bij de boom!", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DX4dopZ9vOp1t" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX4dopZ9vOp1t", - "id": "37i9dQZF1DX4dopZ9vOp1t", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f000000037d14c267b8ee5fea2246a8fe", - "width": null - } - ], - "name": "Kerst Hits 2023", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": 
"MTcwMjU2ODI4MSwwMDAwMDAwMDE1ZGRiNzI3OGY4OGU2MzA1MWNkZGMyNTdmNDUwMTc1", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DX4dopZ9vOp1t/tracks", - "total": 298 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DX4dopZ9vOp1t" - }, - { - "collaborative": false, - "description": "De 50 populairste hits van Nederland. Cover: Jack Harlow", - "external_urls": { - "spotify": "https://open.spotify.com/playlist/37i9dQZF1DWSBi5svWQ9Nk" - }, - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DWSBi5svWQ9Nk", - "id": "37i9dQZF1DWSBi5svWQ9Nk", - "images": [ - { - "height": null, - "url": "https://i.scdn.co/image/ab67706f00000003f7b99051789611a49101c1cf", - "width": null - } - ], - "name": "Top Hits NL", - "owner": { - "display_name": "Spotify", - "external_urls": { - "spotify": "https://open.spotify.com/user/spotify" - }, - "href": "https://api.spotify.com/v1/users/spotify", - "id": "spotify", - "type": "user", - "uri": "spotify:user:spotify" - }, - "primary_color": null, - "public": null, - "snapshot_id": "MTcwMjU5NDgwMCwwMDAwMDAwMDU4NWY2MTE4NmU4NmIwMDdlMGE4ZGRkOTZkN2U2MzAx", - "tracks": { - "href": "https://api.spotify.com/v1/playlists/37i9dQZF1DWSBi5svWQ9Nk/tracks", - "total": 50 - }, - "type": "playlist", - "uri": "spotify:playlist:37i9dQZF1DWSBi5svWQ9Nk" - } - ], - "limit": 20, - "next": "https://api.spotify.com/v1/browse/featured-playlists?country=NL×tamp=2023-12-18T18%3A35%3A35&offset=20&limit=20", - "offset": 0, - "previous": null, - "total": 24 - } -} diff --git a/tests/components/spotify/fixtures/playlist.json b/tests/components/spotify/fixtures/playlist.json index 36c28cc814b..5680ac9109c 100644 --- a/tests/components/spotify/fixtures/playlist.json +++ b/tests/components/spotify/fixtures/playlist.json @@ -514,6 +514,472 @@ "uri": "spotify:track:2E2znCPaS8anQe21GLxcvJ", "is_local": false } + }, + { + "added_at": "2024-11-28T11:20:58Z", + "added_by": { + "external_urls": { + "spotify": "https://open.spotify.com/user/1112264649" + }, + "href": "https://api.spotify.com/v1/users/1112264649", + "id": "1112264649", + "type": "user", + "uri": "spotify:user:1112264649" + }, + "is_local": false, + "primary_color": null, + "track": { + "explicit": false, + "audio_preview_url": "https://podz-content.spotifycdn.com/audio/clips/06lRxUmh8UNVTByuyxLYqh/clip_132296_192296.mp3", + "description": "Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy", + "duration_ms": 3690161, + "episode": true, + "external_urls": { + "spotify": "https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW" + }, + "href": "https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW", + "html_description": "
Patreon: https://www.patreon.com/safetythird Merch: https://safetythird.shop YouTube: https://www.youtube.com/@safetythird/ Advertising Inquiries: https://redcircle.com/brands
Privacy & Opt-Out: https://redcircle.com/privacy", + "id": "3o0RYoo5iOMKSmEbunsbvW", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "language": "en-US", + "languages": ["en-US"], + "name": "My Squirrel Has Brain Damage - Safety Third 119", + "release_date": "2024-07-26", + "release_date_precision": "day", + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", + "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "show": { + "available_markets": [ + "AR", + "AU", + "AT", + "BE", + "BO", + "BR", + "BG", + "CA", + "CL", + "CO", + "CR", + "CY", + "CZ", + "DK", + "DO", + "DE", + "EC", + "EE", + "SV", + "FI", + "FR", + "GR", + "GT", + "HN", + "HK", + "HU", + "IS", + "IE", + "IT", + "LV", + "LT", + "LU", + "MY", + "MT", + "MX", + "NL", + "NZ", + "NI", + "NO", + "PA", + "PY", + "PE", + "PH", + "PL", + "PT", + "SG", + "SK", + "ES", + "SE", + "CH", + "TW", + "TR", + "UY", + "US", + "GB", + "AD", + "LI", + "MC", + "ID", + "JP", + "TH", + "VN", + "RO", + "IL", + "ZA", + "SA", + "AE", + "BH", + "QA", + "OM", + "KW", + "EG", + "MA", + "DZ", + "TN", + "LB", + "JO", + "PS", + "IN", + "BY", + "KZ", + "MD", + "UA", + "AL", + "BA", + "HR", + "ME", + "MK", + "RS", + "SI", + "KR", + "BD", + "PK", + "LK", + "GH", + "KE", + "NG", + "TZ", + "UG", + "AG", + "AM", + "BS", + "BB", + "BZ", + "BT", + "BW", + "BF", + "CV", + "CW", + "DM", + "FJ", + "GM", + "GE", + "GD", + "GW", + "GY", + "HT", + "JM", + "KI", + "LS", + "LR", + "MW", + "MV", + "ML", + "MH", + "FM", + "NA", + "NR", + "NE", + "PW", + "PG", + "PR", + "WS", + "SM", + "ST", + "SN", + "SC", + "SL", + "SB", + "KN", + "LC", + "VC", + "SR", + "TL", + "TO", + "TT", + "TV", + "VU", + "AZ", + "BN", + "BI", + "KH", + "CM", + "TD", + "KM", + "GQ", + "SZ", + "GA", + "GN", + "KG", + "LA", + "MO", + "MR", + "MN", + "NP", + "RW", + "TG", + "UZ", 
+ "ZW", + "BJ", + "MG", + "MU", + "MZ", + "AO", + "CI", + "DJ", + "ZM", + "CD", + "CG", + "IQ", + "LY", + "TJ", + "VE", + "ET", + "XK" + ], + "copyrights": [], + "description": "Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube \"Scientists\". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.", + "explicit": true, + "external_urls": { + "spotify": "https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD" + }, + "href": "https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD", + "html_description": "
Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". Sometimes we have guests, sometimes it's just us, but always: safety is our number three priority.
", + "id": "1Y9ExMgMxoBVrgrfU7u0nD", + "images": [ + { + "height": 640, + "url": "https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a", + "width": 640 + }, + { + "height": 300, + "url": "https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a", + "width": 300 + }, + { + "height": 64, + "url": "https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a", + "width": 64 + } + ], + "is_externally_hosted": false, + "languages": ["en-US"], + "media_type": "audio", + "name": "Safety Third", + "publisher": "Safety Third ", + "total_episodes": 120, + "type": "show", + "uri": "spotify:show:1Y9ExMgMxoBVrgrfU7u0nD" + }, + "track": false, + "type": "episode", + "uri": "spotify:episode:3o0RYoo5iOMKSmEbunsbvW" + }, + "video_thumbnail": { + "url": null + } } ] } diff --git a/tests/components/spotify/snapshots/test_diagnostics.ambr b/tests/components/spotify/snapshots/test_diagnostics.ambr index 161b6025ff3..0ac375d18e3 100644 --- a/tests/components/spotify/snapshots/test_diagnostics.ambr +++ b/tests/components/spotify/snapshots/test_diagnostics.ambr @@ -14,20 +14,6 @@ }), ]), 'playback': dict({ - 'audio_features': dict({ - 'acousticness': 0.011, - 'danceability': 0.696, - 'energy': 0.905, - 'instrumentalness': 0.000905, - 'key': 3, - 'liveness': 0.302, - 'loudness': -2.743, - 'mode': 1, - 'speechiness': 0.103, - 'tempo': 114.944, - 'time_signature': 4, - 'valence': 0.625, - }), 'current_playback': dict({ 'context': dict({ 'context_type': 'playlist', @@ -423,6 +409,69 @@ 'uri': 'spotify:track:2E2znCPaS8anQe21GLxcvJ', }), }), + dict({ + 'track': dict({ + 'description': 'Patreon: https://www.patreon.com/safetythirdMerch: https://safetythird.shopYouTube: https://www.youtube.com/@safetythird/Advertising Inquiries: https://redcircle.com/brandsPrivacy & Opt-Out: https://redcircle.com/privacy', + 'duration_ms': 3690161, + 'episode_id': '3o0RYoo5iOMKSmEbunsbvW', + 'explicit': False, + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/episode/3o0RYoo5iOMKSmEbunsbvW', + }), + 'href': 'https://api.spotify.com/v1/episodes/3o0RYoo5iOMKSmEbunsbvW', + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a', + 'width': 64, + }), + ]), + 'name': 'My Squirrel Has Brain Damage - Safety Third 119', + 'release_date': '2024-07-26', + 'release_date_precision': 'day', + 'show': dict({ + 'description': 'Safety Third is a weekly show hosted by William Osman, NileRed, The Backyard Scientist, Allen Pan, and a couple other YouTube "Scientists". 
Sometimes we have guests, sometimes it\'s just us, but always: safety is our number three priority.', + 'external_urls': dict({ + 'spotify': 'https://open.spotify.com/show/1Y9ExMgMxoBVrgrfU7u0nD', + }), + 'href': 'https://api.spotify.com/v1/shows/1Y9ExMgMxoBVrgrfU7u0nD', + 'images': list([ + dict({ + 'height': 640, + 'url': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'width': 640, + }), + dict({ + 'height': 300, + 'url': 'https://i.scdn.co/image/ab67656300005f1fc7bedd27a4413b1abf926d8a', + 'width': 300, + }), + dict({ + 'height': 64, + 'url': 'https://i.scdn.co/image/ab6765630000f68dc7bedd27a4413b1abf926d8a', + 'width': 64, + }), + ]), + 'name': 'Safety Third', + 'publisher': 'Safety Third ', + 'show_id': '1Y9ExMgMxoBVrgrfU7u0nD', + 'total_episodes': 120, + 'uri': 'spotify:show:1Y9ExMgMxoBVrgrfU7u0nD', + }), + 'type': 'episode', + 'uri': 'spotify:episode:3o0RYoo5iOMKSmEbunsbvW', + }), + }), ]), }), 'uri': 'spotify:playlist:3cEYpjA9oz9GiPac4AsH4n', diff --git a/tests/components/spotify/snapshots/test_media_browser.ambr b/tests/components/spotify/snapshots/test_media_browser.ambr index e1ff42cb7c8..6b217977227 100644 --- a/tests/components/spotify/snapshots/test_media_browser.ambr +++ b/tests/components/spotify/snapshots/test_media_browser.ambr @@ -84,26 +84,6 @@ 'thumbnail': None, 'title': 'Top Tracks', }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', - 'media_content_type': 'spotify://categories', - 'thumbnail': None, - 'title': 'Categories', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', - 'media_content_type': 'spotify://featured_playlists', - 'thumbnail': None, - 'title': 'Featured Playlists', - }), dict({ 'can_expand': True, 'can_play': False, @@ -299,76 +279,6 @@ 'title': 'Pitbull', }) # --- -# name: test_browsing[categories-categories] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/0JQ5DAt0tbjZptfcdMSKl3', - 'media_content_type': 'spotify://category_playlists', - 'thumbnail': 'https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg', - 'title': 'Made For You', - }), - dict({ - 'can_expand': True, - 'can_play': False, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/0JQ5DAqbMKFz6FAsUtgAab', - 'media_content_type': 'spotify://category_playlists', - 'thumbnail': 'https://t.scdn.co/images/728ed47fc1674feb95f7ac20236eb6d7.jpeg', - 'title': 'New Releases', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', - 'media_content_type': 'spotify://categories', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Categories', - }) -# --- -# name: test_browsing[category_playlists-dinner] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DX7yhuKT9G4qk', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f0000000343319faa9428405f3312b588', - 
'title': 'eten met vrienden', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DXbvE0SE0Cczh', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f00000003b93c270883619dde61725fc8', - 'title': 'Jukebox Joint', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/dinner', - 'media_content_type': 'spotify://category_playlists', - 'not_shown': 0, - 'thumbnail': 'https://t.scdn.co/media/original/dinner_1b6506abba0ba52c54e6d695c8571078_274x274.jpg', - 'title': 'Cooking & Dining', - }) -# --- # name: test_browsing[current_user_followed_artists-current_user_followed_artists] dict({ 'can_expand': True, @@ -649,41 +559,6 @@ 'title': 'Top Tracks', }) # --- -# name: test_browsing[featured_playlists-featured_playlists] - dict({ - 'can_expand': True, - 'can_play': False, - 'children': list([ - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DX4dopZ9vOp1t', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f000000037d14c267b8ee5fea2246a8fe', - 'title': 'Kerst Hits 2023', - }), - dict({ - 'can_expand': True, - 'can_play': True, - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:37i9dQZF1DWSBi5svWQ9Nk', - 'media_content_type': 'spotify://playlist', - 'thumbnail': 'https://i.scdn.co/image/ab67706f00000003f7b99051789611a49101c1cf', - 'title': 'Top Hits NL', - }), - ]), - 'children_media_class': , - 'media_class': , - 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', - 'media_content_type': 'spotify://featured_playlists', - 'not_shown': 0, - 'thumbnail': None, - 'title': 'Featured Playlists', - }) -# --- # name: test_browsing[new_releases-new_releases] dict({ 'can_expand': True, @@ -774,6 +649,16 @@ 'thumbnail': 'https://i.scdn.co/image/ab67616d0000b27304e57d181ff062f8339d6c71', 'title': 'You Are So Beautiful', }), + dict({ + 'can_expand': False, + 'can_play': True, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:episode:3o0RYoo5iOMKSmEbunsbvW', + 'media_content_type': 'spotify://episode', + 'thumbnail': 'https://i.scdn.co/image/ab6765630000ba8ac7bedd27a4413b1abf926d8a', + 'title': 'My Squirrel Has Brain Damage - Safety Third 119', + }), ]), 'children_media_class': , 'media_class': , diff --git a/tests/components/spotify/snapshots/test_sensor.ambr b/tests/components/spotify/snapshots/test_sensor.ambr deleted file mode 100644 index ce77dda479f..00000000000 --- a/tests/components/spotify/snapshots/test_sensor.ambr +++ /dev/null @@ -1,595 +0,0 @@ -# serializer version: 1 -# name: test_entities[sensor.spotify_spotify_1_song_acousticness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_acousticness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - 
}), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song acousticness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'acousticness', - 'unique_id': '1112264111_acousticness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_acousticness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song acousticness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_acousticness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.1', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_danceability-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_danceability', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song danceability', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'danceability', - 'unique_id': '1112264111_danceability', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_danceability-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song danceability', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_danceability', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '69.6', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_energy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_energy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song energy', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy', - 'unique_id': '1112264111_energy', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_energy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song energy', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_energy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '90.5', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_instrumentalness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_instrumentalness', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song instrumentalness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'instrumentalness', - 'unique_id': '1112264111_instrumentalness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_instrumentalness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song instrumentalness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_instrumentalness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0905', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_key-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'C', - 'C♯/D♭', - 'D', - 'D♯/E♭', - 'E', - 'F', - 'F♯/G♭', - 'G', - 'G♯/A♭', - 'A', - 'A♯/B♭', - 'B', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_key', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song key', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'key', - 'unique_id': '1112264111_key', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_key-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song key', - 'options': list([ - 'C', - 'C♯/D♭', - 'D', - 'D♯/E♭', - 'E', - 'F', - 'F♯/G♭', - 'G', - 'G♯/A♭', - 'A', - 'A♯/B♭', - 'B', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_key', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'D♯/E♭', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_liveness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_liveness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song liveness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'liveness', - 'unique_id': '1112264111_liveness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_liveness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song liveness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_liveness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30.2', - }) -# --- -# name: 
test_entities[sensor.spotify_spotify_1_song_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'major', - 'minor', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song mode', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'mode', - 'unique_id': '1112264111_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song mode', - 'options': list([ - 'major', - 'minor', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'major', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_speechiness-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_speechiness', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song speechiness', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'speechiness', - 'unique_id': '1112264111_speechiness', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_speechiness-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song speechiness', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_speechiness', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.3', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_tempo-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_tempo', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song tempo', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'song_tempo', - 'unique_id': '1112264111_bpm', - 'unit_of_measurement': 'bpm', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_tempo-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song tempo', - 'unit_of_measurement': 'bpm', - }), - 'context': , - 'entity_id': 
'sensor.spotify_spotify_1_song_tempo', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '114.944', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_time_signature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - '3/4', - '4/4', - '5/4', - '6/4', - '7/4', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_time_signature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Song time signature', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'time_signature', - 'unique_id': '1112264111_time_signature', - 'unit_of_measurement': None, - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_time_signature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Spotify spotify_1 Song time signature', - 'options': list([ - '3/4', - '4/4', - '5/4', - '6/4', - '7/4', - ]), - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_time_signature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4/4', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_valence-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.spotify_spotify_1_song_valence', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Song valence', - 'platform': 'spotify', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'valence', - 'unique_id': '1112264111_valence', - 'unit_of_measurement': '%', - }) -# --- -# name: test_entities[sensor.spotify_spotify_1_song_valence-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Spotify spotify_1 Song valence', - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.spotify_spotify_1_song_valence', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '62.5', - }) -# --- diff --git a/tests/components/spotify/test_media_browser.py b/tests/components/spotify/test_media_browser.py index dcacc23bbee..ff3404dcfe9 100644 --- a/tests/components/spotify/test_media_browser.py +++ b/tests/components/spotify/test_media_browser.py @@ -112,9 +112,6 @@ async def test_browse_media_playlists( ("current_user_recently_played", "current_user_recently_played"), ("current_user_top_artists", "current_user_top_artists"), ("current_user_top_tracks", "current_user_top_tracks"), - ("featured_playlists", "featured_playlists"), - ("categories", "categories"), - ("category_playlists", "dinner"), ("new_releases", "new_releases"), ("playlist", "spotify:playlist:3cEYpjA9oz9GiPac4AsH4n"), ("album", "spotify:album:3IqzqH6ShrRtie9Yd2ODyG"), diff --git a/tests/components/spotify/test_media_player.py 
b/tests/components/spotify/test_media_player.py index b03424f8459..55e0ea8f1d8 100644 --- a/tests/components/spotify/test_media_player.py +++ b/tests/components/spotify/test_media_player.py @@ -10,6 +10,7 @@ from spotifyaio import ( ProductType, RepeatMode as SpotifyRepeatMode, SpotifyConnectionError, + SpotifyNotFoundError, ) from syrupy import SnapshotAssertion @@ -142,6 +143,7 @@ async def test_spotify_dj_list( hass: HomeAssistant, mock_spotify: MagicMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test the Spotify entities with a Spotify DJ playlist.""" mock_spotify.return_value.get_playback.return_value.context.uri = ( @@ -152,12 +154,67 @@ async def test_spotify_dj_list( assert state assert state.attributes["media_playlist"] == "DJ" + mock_spotify.return_value.get_playlist.assert_not_called() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "DJ" + + mock_spotify.return_value.get_playlist.assert_not_called() + + +@pytest.mark.usefixtures("setup_credentials") +async def test_normal_playlist( + hass: HomeAssistant, + mock_spotify: MagicMock, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, +) -> None: + """Test normal playlist switching.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "Spotify Web API Testing playlist" + + mock_spotify.return_value.get_playlist.assert_called_once_with( + "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" + ) + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert state.attributes["media_playlist"] == "Spotify Web API Testing playlist" + + mock_spotify.return_value.get_playlist.assert_called_once_with( + "spotify:user:rushofficial:playlist:2r35vbe6hHl6yDSMfjKgmm" + ) + + mock_spotify.return_value.get_playback.return_value.context.uri = ( + "spotify:playlist:123123123123123" + ) + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_spotify.return_value.get_playlist.assert_called_with( + "spotify:playlist:123123123123123" + ) + @pytest.mark.usefixtures("setup_credentials") async def test_fetching_playlist_does_not_fail( hass: HomeAssistant, mock_spotify: MagicMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test failing fetching playlist does not fail update.""" mock_spotify.return_value.get_playlist.side_effect = SpotifyConnectionError @@ -166,6 +223,42 @@ async def test_fetching_playlist_does_not_fail( assert state assert "media_playlist" not in state.attributes + mock_spotify.return_value.get_playlist.assert_called_once() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_spotify.return_value.get_playlist.call_count == 2 + + +@pytest.mark.usefixtures("setup_credentials") +async def test_fetching_playlist_once( + hass: HomeAssistant, + mock_spotify: MagicMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that not being able to find a playlist doesn't retry.""" + mock_spotify.return_value.get_playlist.side_effect = 
SpotifyNotFoundError + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert "media_playlist" not in state.attributes + + mock_spotify.return_value.get_playlist.assert_called_once() + + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("media_player.spotify_spotify_1") + assert state + assert "media_playlist" not in state.attributes + + mock_spotify.return_value.get_playlist.assert_called_once() + @pytest.mark.usefixtures("setup_credentials") async def test_idle( diff --git a/tests/components/spotify/test_sensor.py b/tests/components/spotify/test_sensor.py deleted file mode 100644 index 11ce361034a..00000000000 --- a/tests/components/spotify/test_sensor.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Tests for the Spotify sensor platform.""" - -from unittest.mock import MagicMock, patch - -import pytest -from spotifyaio import PlaybackState -from syrupy import SnapshotAssertion - -from homeassistant.components.spotify import DOMAIN -from homeassistant.const import STATE_UNKNOWN, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - -from . import setup_integration - -from tests.common import MockConfigEntry, load_fixture, snapshot_platform - - -@pytest.mark.usefixtures("setup_credentials") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_entities( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify entities.""" - with patch("homeassistant.components.spotify.PLATFORMS", [Platform.SENSOR]): - await setup_integration(hass, mock_config_entry) - - await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - - -@pytest.mark.usefixtures("setup_credentials") -async def test_audio_features_unavailable( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify entities.""" - mock_spotify.return_value.get_audio_features.return_value = None - - await setup_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.spotify_spotify_1_song_tempo").state == STATE_UNKNOWN - - -@pytest.mark.usefixtures("setup_credentials") -async def test_audio_features_unknown_during_podcast( - hass: HomeAssistant, - mock_spotify: MagicMock, - mock_config_entry: MockConfigEntry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test the Spotify audio features sensor during a podcast.""" - mock_spotify.return_value.get_playback.return_value = PlaybackState.from_json( - load_fixture("playback_episode.json", DOMAIN) - ) - - await setup_integration(hass, mock_config_entry) - - assert hass.states.get("sensor.spotify_spotify_1_song_tempo").state == STATE_UNKNOWN diff --git a/tests/components/sql/test_config_flow.py b/tests/components/sql/test_config_flow.py index cb990e454b7..3f2400c0a32 100644 --- a/tests/components/sql/test_config_flow.py +++ b/tests/components/sql/test_config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from pathlib import Path from unittest.mock import patch from sqlalchemy.exc import SQLAlchemyError @@ -597,9 +598,6 @@ async def test_options_flow_db_url_empty( "homeassistant.components.sql.async_setup_entry", 
return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result = await hass.config_entries.options.async_configure( result["flow_id"], @@ -621,7 +619,9 @@ async def test_options_flow_db_url_empty( async def test_full_flow_not_recorder_db( - recorder_mock: Recorder, hass: HomeAssistant + recorder_mock: Recorder, + hass: HomeAssistant, + tmp_path: Path, ) -> None: """Test full config flow with not using recorder db.""" result = await hass.config_entries.flow.async_init( @@ -629,20 +629,19 @@ async def test_full_flow_not_recorder_db( ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} + db_path = tmp_path / "db.db" + db_path_str = f"sqlite:///{db_path}" with ( patch( "homeassistant.components.sql.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "name": "Get Value", "query": "SELECT 5 as value", "column": "value", @@ -654,7 +653,7 @@ async def test_full_flow_not_recorder_db( assert result2["title"] == "Get Value" assert result2["options"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", } @@ -671,15 +670,12 @@ async def test_full_flow_not_recorder_db( "homeassistant.components.sql.async_setup_entry", return_value=True, ), - patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ), ): result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ "query": "SELECT 5 as value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "column": "value", "unit_of_measurement": "MiB", }, @@ -689,7 +685,7 @@ async def test_full_flow_not_recorder_db( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MiB", @@ -697,24 +693,22 @@ async def test_full_flow_not_recorder_db( # Need to test same again to mitigate issue with db_url removal result = await hass.config_entries.options.async_init(entry.entry_id) - with patch( - "homeassistant.components.sql.config_flow.sqlalchemy.create_engine", - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "query": "SELECT 5 as value", - "db_url": "sqlite://path/to/db.db", - "column": "value", - "unit_of_measurement": "MB", - }, - ) - await hass.async_block_till_done() + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + "query": "SELECT 5 as value", + "db_url": db_path_str, + "column": "value", + "unit_of_measurement": "MB", + }, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MB", @@ -722,7 +716,7 @@ async def test_full_flow_not_recorder_db( assert entry.options == { "name": "Get Value", - "db_url": "sqlite://path/to/db.db", + "db_url": db_path_str, "query": "SELECT 5 as value", "column": "value", "unit_of_measurement": "MB", diff --git a/tests/components/sql/test_sensor.py b/tests/components/sql/test_sensor.py index 
b219ad47f3a..6b4032323d0 100644 --- a/tests/components/sql/test_sensor.py +++ b/tests/components/sql/test_sensor.py @@ -3,12 +3,13 @@ from __future__ import annotations from datetime import timedelta +from pathlib import Path +import sqlite3 from typing import Any from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from sqlalchemy import text as sql_text from sqlalchemy.exc import SQLAlchemyError from homeassistant.components.recorder import Recorder @@ -143,29 +144,37 @@ async def test_query_no_value( assert text in caplog.text -async def test_query_mssql_no_result( - recorder_mock: Recorder, hass: HomeAssistant, caplog: pytest.LogCaptureFixture +async def test_query_on_disk_sqlite_no_result( + recorder_mock: Recorder, + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + tmp_path: Path, ) -> None: """Test the SQL sensor with a query that returns no value.""" - config = { - "db_url": "mssql://", - "query": "SELECT 5 as value where 1=2", - "column": "value", - "name": "count_tables", - } - with ( - patch("homeassistant.components.sql.sensor.sqlalchemy"), - patch( - "homeassistant.components.sql.sensor.sqlalchemy.text", - return_value=sql_text("SELECT TOP 1 5 as value where 1=2"), - ), - ): - await init_integration(hass, config) + db_path = tmp_path / "test.db" + db_path_str = f"sqlite:///{db_path}" - state = hass.states.get("sensor.count_tables") + def make_test_db(): + """Create a test database.""" + conn = sqlite3.connect(db_path) + conn.execute("CREATE TABLE users (value INTEGER)") + conn.commit() + conn.close() + + await hass.async_add_executor_job(make_test_db) + + config = { + "db_url": db_path_str, + "query": "SELECT value from users", + "column": "value", + "name": "count_users", + } + await init_integration(hass, config) + + state = hass.states.get("sensor.count_users") assert state.state == STATE_UNKNOWN - text = "SELECT TOP 1 5 AS VALUE WHERE 1=2 returned no results" + text = "SELECT value from users LIMIT 1; returned no results" assert text in caplog.text diff --git a/tests/components/statistics/test_sensor.py b/tests/components/statistics/test_sensor.py index 7e2bc1cb16b..1dff13bb21a 100644 --- a/tests/components/statistics/test_sensor.py +++ b/tests/components/statistics/test_sensor.py @@ -118,7 +118,6 @@ async def test_sensor_defaults_numeric(hass: HomeAssistant) -> None: assert state.attributes.get("buffer_usage_ratio") == round(9 / 20, 2) assert state.attributes.get("source_value_valid") is True assert "age_coverage_ratio" not in state.attributes - # Source sensor turns unavailable, then available with valid value, # statistics sensor should follow state = hass.states.get("sensor.test") @@ -576,7 +575,7 @@ async def test_age_limit_expiry(hass: HomeAssistant) -> None: assert state is not None assert state.state == STATE_UNKNOWN assert state.attributes.get("buffer_usage_ratio") == round(0 / 20, 2) - assert state.attributes.get("age_coverage_ratio") is None + assert state.attributes.get("age_coverage_ratio") == 0 async def test_age_limit_expiry_with_keep_last_sample(hass: HomeAssistant) -> None: @@ -2032,3 +2031,61 @@ async def test_not_valid_device_class(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None assert state.attributes.get(ATTR_DEVICE_CLASS) is None assert state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + + +async def test_attributes_remains(recorder_mock: Recorder, hass: HomeAssistant) -> None: + """Test attributes are always present.""" + for value in 
VALUES_NUMERIC: + hass.states.async_set( + "sensor.test_monitored", + str(value), + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + await hass.async_block_till_done() + await async_wait_recording_done(hass) + + current_time = dt_util.utcnow() + with freeze_time(current_time) as freezer: + assert await async_setup_component( + hass, + "sensor", + { + "sensor": [ + { + "platform": "statistics", + "name": "test", + "entity_id": "sensor.test_monitored", + "state_characteristic": "mean", + "max_age": {"seconds": 10}, + }, + ] + }, + ) + await hass.async_block_till_done() + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == str(round(sum(VALUES_NUMERIC) / len(VALUES_NUMERIC), 2)) + assert state.attributes == { + "age_coverage_ratio": 0.0, + "friendly_name": "test", + "icon": "mdi:calculator", + "source_value_valid": True, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": "°C", + } + + freezer.move_to(current_time + timedelta(minutes=1)) + async_fire_time_changed(hass) + + state = hass.states.get("sensor.test") + assert state is not None + assert state.state == STATE_UNKNOWN + assert state.attributes == { + "age_coverage_ratio": 0, + "friendly_name": "test", + "icon": "mdi:calculator", + "source_value_valid": True, + "state_class": SensorStateClass.MEASUREMENT, + "unit_of_measurement": "°C", + } diff --git a/tests/components/stookwijzer/conftest.py b/tests/components/stookwijzer/conftest.py new file mode 100644 index 00000000000..3f7303e97f6 --- /dev/null +++ b/tests/components/stookwijzer/conftest.py @@ -0,0 +1,99 @@ +"""Fixtures for Stookwijzer integration tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from homeassistant.components.stookwijzer.const import DOMAIN +from homeassistant.const import CONF_LATITUDE, CONF_LOCATION, CONF_LONGITUDE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="Stookwijzer", + domain=DOMAIN, + data={ + CONF_LATITUDE: 200000.1234567890, + CONF_LONGITUDE: 450000.1234567890, + }, + version=2, + entry_id="12345", + ) + + +@pytest.fixture +def mock_v1_config_entry() -> MockConfigEntry: + """Return the default mocked version 1 config entry.""" + return MockConfigEntry( + title="Stookwijzer", + domain=DOMAIN, + data={ + CONF_LOCATION: { + CONF_LATITUDE: 1.0, + CONF_LONGITUDE: 1.1, + }, + }, + version=1, + entry_id="12345", + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setting up a config entry.""" + with patch( + "homeassistant.components.stookwijzer.async_setup_entry", return_value=True + ) as mock_setup: + yield mock_setup + + +@pytest.fixture +def mock_stookwijzer() -> Generator[MagicMock]: + """Return a mocked Stookwijzer client.""" + with ( + patch( + "homeassistant.components.stookwijzer.Stookwijzer", + autospec=True, + ) as stookwijzer_mock, + patch( + "homeassistant.components.stookwijzer.coordinator.Stookwijzer", + new=stookwijzer_mock, + ), + patch( + "homeassistant.components.stookwijzer.config_flow.Stookwijzer", + new=stookwijzer_mock, + ), + ): + stookwijzer_mock.async_transform_coordinates.return_value = ( + 200000.123456789, + 450000.123456789, + ) + + client = stookwijzer_mock.return_value + client.lki = 2 + client.windspeed_ms = 2.5 + client.windspeed_bft = 2 + client.advice = 
"code_yellow" + + yield stookwijzer_mock + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, +) -> MockConfigEntry: + """Set up the Stookwijzer integration for testing.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry diff --git a/tests/components/stookwijzer/snapshots/test_diagnostics.ambr b/tests/components/stookwijzer/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e2535d54466 --- /dev/null +++ b/tests/components/stookwijzer/snapshots/test_diagnostics.ambr @@ -0,0 +1,8 @@ +# serializer version: 1 +# name: test_get_diagnostics + dict({ + 'advice': 'code_yellow', + 'air_quality_index': 2, + 'windspeed_ms': 2.5, + }) +# --- diff --git a/tests/components/stookwijzer/snapshots/test_sensor.ambr b/tests/components/stookwijzer/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..f6751a84f22 --- /dev/null +++ b/tests/components/stookwijzer/snapshots/test_sensor.ambr @@ -0,0 +1,169 @@ +# serializer version: 1 +# name: test_entities[sensor.stookwijzer_advice_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'code_yellow', + 'code_orange', + 'code_red', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stookwijzer_advice_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Advice code', + 'platform': 'stookwijzer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'advice', + 'unique_id': '12345_advice', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.stookwijzer_advice_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by atlasleefomgeving.nl', + 'device_class': 'enum', + 'friendly_name': 'Stookwijzer Advice code', + 'options': list([ + 'code_yellow', + 'code_orange', + 'code_red', + ]), + }), + 'context': , + 'entity_id': 'sensor.stookwijzer_advice_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'code_yellow', + }) +# --- +# name: test_entities[sensor.stookwijzer_air_quality_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stookwijzer_air_quality_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Air quality index', + 'platform': 'stookwijzer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345_air_quality_index', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[sensor.stookwijzer_air_quality_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by atlasleefomgeving.nl', + 'device_class': 'aqi', + 'friendly_name': 'Stookwijzer Air 
quality index', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.stookwijzer_air_quality_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_entities[sensor.stookwijzer_wind_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.stookwijzer_wind_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind speed', + 'platform': 'stookwijzer', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '12345_windspeed', + 'unit_of_measurement': , + }) +# --- +# name: test_entities[sensor.stookwijzer_wind_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by atlasleefomgeving.nl', + 'device_class': 'wind_speed', + 'friendly_name': 'Stookwijzer Wind speed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.stookwijzer_wind_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- diff --git a/tests/components/stookwijzer/test_config_flow.py b/tests/components/stookwijzer/test_config_flow.py index 732e8abfc98..6dddf83c27a 100644 --- a/tests/components/stookwijzer/test_config_flow.py +++ b/tests/components/stookwijzer/test_config_flow.py @@ -1,6 +1,8 @@ """Tests for the Stookwijzer config flow.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, MagicMock + +import pytest from homeassistant.components.stookwijzer.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -9,35 +11,65 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -async def test_full_user_flow(hass: HomeAssistant) -> None: +async def test_full_user_flow( + hass: HomeAssistant, + mock_stookwijzer: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test the full user configuration flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) - assert result.get("type") is FlowResultType.FORM - assert result.get("step_id") == "user" - assert "flow_id" in result + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" - with patch( - "homeassistant.components.stookwijzer.async_setup_entry", return_value=True - ) as mock_setup_entry: - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_LOCATION: { - CONF_LATITUDE: 1.0, - CONF_LONGITUDE: 1.1, - } - }, - ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LOCATION: {CONF_LATITUDE: 1.0, CONF_LONGITUDE: 1.1}}, + ) - assert result2.get("type") is FlowResultType.CREATE_ENTRY - assert result2.get("data") == { - "location": { - "latitude": 1.0, - "longitude": 1.1, - }, + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Stookwijzer" + assert result["data"] == { + CONF_LATITUDE: 200000.123456789, + CONF_LONGITUDE: 450000.123456789, } 
assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_stookwijzer.async_transform_coordinates.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_connection_error( + hass: HomeAssistant, + mock_stookwijzer: MagicMock, +) -> None: + """Test user configuration flow while connection fails.""" + original_return_value = mock_stookwijzer.async_transform_coordinates.return_value + mock_stookwijzer.async_transform_coordinates.return_value = (None, None) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LOCATION: {CONF_LATITUDE: 1.0, CONF_LONGITUDE: 1.1}}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unknown"} + + # Ensure we can continue the flow, when it now works + mock_stookwijzer.async_transform_coordinates.return_value = original_return_value + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_LOCATION: {CONF_LATITUDE: 1.0, CONF_LONGITUDE: 1.1}}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY diff --git a/tests/components/stookwijzer/test_diagnostics.py b/tests/components/stookwijzer/test_diagnostics.py new file mode 100644 index 00000000000..f40165020c1 --- /dev/null +++ b/tests/components/stookwijzer/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Test the Stookwijzer diagnostics.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_get_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Stookwijzer diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) diff --git a/tests/components/stookwijzer/test_init.py b/tests/components/stookwijzer/test_init.py new file mode 100644 index 00000000000..0df9b55d1a9 --- /dev/null +++ b/tests/components/stookwijzer/test_init.py @@ -0,0 +1,134 @@ +"""Test the Stookwijzer init.""" + +from unittest.mock import MagicMock + +import pytest + +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.components.stookwijzer.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir + +from tests.common import MockConfigEntry + + +async def test_load_unload_config_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, +) -> None: + """Test the Stookwijzer configuration entry loading and unloading.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.LOADED + assert len(mock_stookwijzer.return_value.async_update.mock_calls) == 1 + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert 
mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_config_entry_not_ready( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, +) -> None: + """Test the Stookwijzer configuration entry retrying setup when no advice is available.""" + mock_stookwijzer.return_value.advice = None + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + assert len(mock_stookwijzer.return_value.async_update.mock_calls) == 1 + + +async def test_migrate_entry( + hass: HomeAssistant, + mock_v1_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, +) -> None: + """Test successful migration of entry data.""" + assert mock_v1_config_entry.version == 1 + + mock_v1_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_v1_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_v1_config_entry.state is ConfigEntryState.LOADED + assert len(mock_stookwijzer.async_transform_coordinates.mock_calls) == 1 + + assert mock_v1_config_entry.version == 2 + assert mock_v1_config_entry.data == { + CONF_LATITUDE: 200000.123456789, + CONF_LONGITUDE: 450000.123456789, + } + + +async def test_entry_migration_failure( + hass: HomeAssistant, + mock_v1_config_entry: MockConfigEntry, + mock_stookwijzer: MagicMock, + issue_registry: ir.IssueRegistry, +) -> None: + """Test failed migration of entry data.""" + assert mock_v1_config_entry.version == 1 + + # Failed getting the transformed coordinates + mock_stookwijzer.async_transform_coordinates.return_value = (None, None) + + mock_v1_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_v1_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_v1_config_entry.state is ConfigEntryState.MIGRATION_ERROR + assert issue_registry.async_get_issue(DOMAIN, "location_migration_failed") + + assert len(mock_stookwijzer.async_transform_coordinates.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_stookwijzer") +async def test_entity_entry_migration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test migration of the entity registry unique ID.""" + entity = entity_registry.async_get_or_create( + suggested_object_id="advice", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=mock_config_entry.entry_id, + config_entry=mock_config_entry, + ) + + assert entity.unique_id == mock_config_entry.entry_id + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, + DOMAIN, + mock_config_entry.entry_id, + ) + is None + ) + + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, + DOMAIN, + f"{mock_config_entry.entry_id}_advice", + ) + == "sensor.advice" + ) diff --git a/tests/components/stookwijzer/test_sensor.py b/tests/components/stookwijzer/test_sensor.py new file mode 100644 index 00000000000..10eeef72d74 --- /dev/null +++ b/tests/components/stookwijzer/test_sensor.py @@ -0,0 +1,20 @@ +"""Tests for the Stookwijzer sensor platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + 
+@pytest.mark.usefixtures("init_integration") +async def test_entities( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Stookwijzer entities.""" + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index 92225123995..3d5daab2bec 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -34,6 +34,7 @@ from tests.common import ( mock_integration, mock_platform, mock_restore_cache, + reset_translation_cache, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -518,6 +519,9 @@ async def test_default_engine_prefer_cloud_entity( assert provider_engine.name == "test" assert async_default_engine(hass) == "stt.cloud_stt_entity" + # Reset the `cloud` translations cache to avoid flaky translation checks + reset_translation_cache(hass, ["cloud"]) + async def test_get_engine_legacy( hass: HomeAssistant, tmp_path: Path, mock_provider: MockSTTProvider diff --git a/tests/components/suez_water/__init__.py b/tests/components/suez_water/__init__.py index 4605e06344a..a90df738454 100644 --- a/tests/components/suez_water/__init__.py +++ b/tests/components/suez_water/__init__.py @@ -1 +1,15 @@ """Tests for the Suez Water integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Init suez water integration.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/suez_water/conftest.py b/tests/components/suez_water/conftest.py index f218fb7d833..f634a053c65 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -3,8 +3,31 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from pysuez import AggregatedData, PriceResult +from pysuez.const import ATTRIBUTION import pytest +from homeassistant.components.suez_water.const import DOMAIN + +from tests.common import MockConfigEntry + +MOCK_DATA = { + "username": "test-username", + "password": "test-password", + "counter_id": "test-counter", +} + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create mock config_entry needed by suez_water integration.""" + return MockConfigEntry( + unique_id=MOCK_DATA["username"], + domain=DOMAIN, + title="Suez mock device", + data=MOCK_DATA, + ) + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -13,3 +36,45 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.suez_water.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture(name="suez_client") +def mock_suez_client() -> Generator[AsyncMock]: + """Create mock for suez_water external api.""" + with ( + patch( + "homeassistant.components.suez_water.coordinator.SuezClient", autospec=True + ) as mock_client, + patch( + "homeassistant.components.suez_water.config_flow.SuezClient", + new=mock_client, + ), + ): + suez_client = mock_client.return_value + suez_client.check_credentials.return_value = True + + result = AggregatedData( + value=160, + current_month={ + "2024-01-01": 130, + "2024-01-02": 145, + }, + previous_month={ + "2024-12-01": 154, + 
"2024-12-02": 166, + }, + current_year=1500, + previous_year=1000, + attribution=ATTRIBUTION, + highest_monthly_consumption=2558, + history={ + "2024-01-01": 130, + "2024-01-02": 145, + "2024-12-01": 154, + "2024-12-02": 166, + }, + ) + + suez_client.fetch_aggregated_data.return_value = result + suez_client.get_price.return_value = PriceResult("4.74") + yield suez_client diff --git a/tests/components/suez_water/snapshots/test_sensor.ambr b/tests/components/suez_water/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..da0ed3df7dd --- /dev/null +++ b/tests/components/suez_water/snapshots/test_sensor.ambr @@ -0,0 +1,116 @@ +# serializer version: 1 +# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.suez_mock_device_water_price', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water price', + 'platform': 'suez_water', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_price', + 'unique_id': 'test-counter_water_price', + 'unit_of_measurement': '€', + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_price-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by toutsurmoneau.fr', + 'device_class': 'monetary', + 'friendly_name': 'Suez mock device Water price', + 'unit_of_measurement': '€', + }), + 'context': , + 'entity_id': 'sensor.suez_mock_device_water_price', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.74', + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.suez_mock_device_water_usage_yesterday', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Water usage yesterday', + 'platform': 'suez_water', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_usage_yesterday', + 'unique_id': 'test-counter_water_usage_yesterday', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors_valid_state[sensor.suez_mock_device_water_usage_yesterday-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by toutsurmoneau.fr', + 'device_class': 'water', + 'friendly_name': 'Suez mock device Water usage yesterday', + 'highest_monthly_consumption': 2558, + 'history': dict({ + '2024-01-01': 130, + '2024-01-02': 145, + '2024-12-01': 154, + '2024-12-02': 166, + }), + 'last_year_overall': 1000, + 'previous_month_consumption': dict({ + '2024-12-01': 154, + '2024-12-02': 166, + }), + 'this_month_consumption': dict({ + '2024-01-01': 130, + '2024-01-02': 145, + }), + 'this_year_overall': 1500, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.suez_mock_device_water_usage_yesterday', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '160', + }) +# --- diff --git a/tests/components/suez_water/test_config_flow.py b/tests/components/suez_water/test_config_flow.py index 3170a6779f0..6779b4c7d02 100644 --- a/tests/components/suez_water/test_config_flow.py +++ b/tests/components/suez_water/test_config_flow.py @@ -1,25 +1,23 @@ """Test the Suez Water config flow.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock -from pysuez.client import PySuezError +from pysuez.exception import PySuezError import pytest from homeassistant import config_entries -from homeassistant.components.suez_water.const import DOMAIN +from homeassistant.components.suez_water.const import CONF_COUNTER_ID, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import MOCK_DATA + from tests.common import MockConfigEntry -MOCK_DATA = { - "username": "test-username", - "password": "test-password", - "counter_id": "test-counter", -} - -async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: +async def test_form( + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -27,12 +25,11 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch("homeassistant.components.suez_water.config_flow.SuezClient"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -42,37 +39,28 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: async def test_form_invalid_auth( - hass: HomeAssistant, mock_setup_entry: AsyncMock + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock ) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "homeassistant.components.suez_water.config_flow.SuezClient.__init__", - return_value=None, - ), - patch( - "homeassistant.components.suez_water.config_flow.SuezClient.check_credentials", - return_value=False, - ), - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.return_value = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} - with patch("homeassistant.components.suez_water.config_flow.SuezClient"): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) - await hass.async_block_till_done() + suez_client.check_credentials.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" @@ -108,34 +96,71 @@ async def test_form_already_configured(hass: 
HomeAssistant) -> None: ("exception", "error"), [(PySuezError, "cannot_connect"), (Exception, "unknown")] ) async def test_form_error( - hass: HomeAssistant, mock_setup_entry: AsyncMock, exception: Exception, error: str + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + exception: Exception, + suez_client: AsyncMock, + error: str, ) -> None: """Test we handle errors.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with patch( - "homeassistant.components.suez_water.config_flow.SuezClient", - side_effect=exception, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.side_effect = exception + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": error} - with patch( - "homeassistant.components.suez_water.config_flow.SuezClient", - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - MOCK_DATA, - ) + suez_client.check_credentials.return_value = True + suez_client.check_credentials.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_DATA, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "test-username" assert result["data"] == MOCK_DATA assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_auto_counter( + hass: HomeAssistant, mock_setup_entry: AsyncMock, suez_client: AsyncMock +) -> None: + """Test form set counter if not set by user.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + partial_form = {**MOCK_DATA} + partial_form.pop(CONF_COUNTER_ID) + suez_client.find_counter.side_effect = PySuezError("test counter not found") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + partial_form, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "counter_not_found"} + + suez_client.find_counter.side_effect = None + suez_client.find_counter.return_value = MOCK_DATA[CONF_COUNTER_ID] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + partial_form, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["result"].unique_id == "test-username" + assert result["data"] == MOCK_DATA + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/suez_water/test_init.py b/tests/components/suez_water/test_init.py new file mode 100644 index 00000000000..78d086af38f --- /dev/null +++ b/tests/components/suez_water/test_init.py @@ -0,0 +1,37 @@ +"""Test Suez_water integration initialization.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.suez_water.coordinator import PySuezError +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_initialization_invalid_credentials( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that suez_water can't be loaded with invalid credentials.""" + + suez_client.check_credentials.return_value = False + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_initialization_setup_api_error( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that suez_water needs to retry loading if api failed to connect.""" + + suez_client.check_credentials.side_effect = PySuezError("Test failure") + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/suez_water/test_sensor.py b/tests/components/suez_water/test_sensor.py new file mode 100644 index 00000000000..cb578432f62 --- /dev/null +++ b/tests/components/suez_water/test_sensor.py @@ -0,0 +1,67 @@ +"""Test Suez_water sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.suez_water.const import DATA_REFRESH_INTERVAL +from homeassistant.components.suez_water.coordinator import PySuezError +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_sensors_valid_state( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test that suez_water sensor is loaded and in a valid state.""" + with patch("homeassistant.components.suez_water.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize("method", [("fetch_aggregated_data"), ("get_price")]) +async def test_sensors_failed_update( + hass: HomeAssistant, + suez_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + method: str, +) -> None: + """Test that suez_water sensor reflect failure when api fails.""" + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + entity_ids = await hass.async_add_executor_job(hass.states.entity_ids) + assert len(entity_ids) == 2 + + for entity in entity_ids: + state = hass.states.get(entity) + assert entity + assert state.state != STATE_UNAVAILABLE + + getattr(suez_client, method).side_effect = PySuezError("Should fail to update") + + freezer.tick(DATA_REFRESH_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(True) + + for entity in entity_ids: + state = hass.states.get(entity) + assert entity + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/swiss_public_transport/test_config_flow.py b/tests/components/swiss_public_transport/test_config_flow.py index 027336e28a6..7c17b0d4c30 100644 --- 
a/tests/components/swiss_public_transport/test_config_flow.py +++ b/tests/components/swiss_public_transport/test_config_flow.py @@ -12,6 +12,10 @@ from homeassistant.components.swiss_public_transport import config_flow from homeassistant.components.swiss_public_transport.const import ( CONF_DESTINATION, CONF_START, + CONF_TIME_FIXED, + CONF_TIME_MODE, + CONF_TIME_OFFSET, + CONF_TIME_STATION, CONF_VIA, MAX_VIA, ) @@ -23,40 +27,86 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -MOCK_DATA_STEP = { +MOCK_USER_DATA_STEP = { CONF_START: "test_start", CONF_DESTINATION: "test_destination", + CONF_TIME_STATION: "departure", + CONF_TIME_MODE: "now", } -MOCK_DATA_STEP_ONE_VIA = { - **MOCK_DATA_STEP, +MOCK_USER_DATA_STEP_ONE_VIA = { + **MOCK_USER_DATA_STEP, CONF_VIA: ["via_station"], } -MOCK_DATA_STEP_MANY_VIA = { - **MOCK_DATA_STEP, +MOCK_USER_DATA_STEP_MANY_VIA = { + **MOCK_USER_DATA_STEP, CONF_VIA: ["via_station_1", "via_station_2", "via_station_3"], } -MOCK_DATA_STEP_TOO_MANY_STATIONS = { - **MOCK_DATA_STEP, - CONF_VIA: MOCK_DATA_STEP_ONE_VIA[CONF_VIA] * (MAX_VIA + 1), +MOCK_USER_DATA_STEP_TOO_MANY_STATIONS = { + **MOCK_USER_DATA_STEP, + CONF_VIA: MOCK_USER_DATA_STEP_ONE_VIA[CONF_VIA] * (MAX_VIA + 1), +} + +MOCK_USER_DATA_STEP_ARRIVAL = { + **MOCK_USER_DATA_STEP, + CONF_TIME_STATION: "arrival", +} + +MOCK_USER_DATA_STEP_TIME_FIXED = { + **MOCK_USER_DATA_STEP, + CONF_TIME_MODE: "fixed", +} + +MOCK_USER_DATA_STEP_TIME_FIXED_OFFSET = { + **MOCK_USER_DATA_STEP, + CONF_TIME_MODE: "offset", +} + +MOCK_USER_DATA_STEP_BAD = { + **MOCK_USER_DATA_STEP, + CONF_TIME_MODE: "bad", +} + +MOCK_ADVANCED_DATA_STEP_TIME = { + CONF_TIME_FIXED: "18:03:00", +} + +MOCK_ADVANCED_DATA_STEP_TIME_OFFSET = { + CONF_TIME_OFFSET: {"hours": 0, "minutes": 10, "seconds": 0}, } @pytest.mark.parametrize( - ("user_input", "config_title"), + ("user_input", "time_mode_input", "config_title"), [ - (MOCK_DATA_STEP, "test_start test_destination"), - (MOCK_DATA_STEP_ONE_VIA, "test_start test_destination via via_station"), + (MOCK_USER_DATA_STEP, None, "test_start test_destination"), ( - MOCK_DATA_STEP_MANY_VIA, + MOCK_USER_DATA_STEP_ONE_VIA, + None, + "test_start test_destination via via_station", + ), + ( + MOCK_USER_DATA_STEP_MANY_VIA, + None, "test_start test_destination via via_station_1, via_station_2, via_station_3", ), + (MOCK_USER_DATA_STEP_ARRIVAL, None, "test_start test_destination arrival"), + ( + MOCK_USER_DATA_STEP_TIME_FIXED, + MOCK_ADVANCED_DATA_STEP_TIME, + "test_start test_destination at 18:03:00", + ), + ( + MOCK_USER_DATA_STEP_TIME_FIXED_OFFSET, + MOCK_ADVANCED_DATA_STEP_TIME_OFFSET, + "test_start test_destination in 00:10:00", + ), ], ) async def test_flow_user_init_data_success( - hass: HomeAssistant, user_input, config_title + hass: HomeAssistant, user_input, time_mode_input, config_title ) -> None: """Test success response.""" result = await hass.config_entries.flow.async_init( @@ -66,48 +116,56 @@ async def test_flow_user_init_data_success( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["handler"] == "swiss_public_transport" - assert result["data_schema"] == config_flow.DATA_SCHEMA + assert result["data_schema"] == config_flow.USER_DATA_SCHEMA with patch( "homeassistant.components.swiss_public_transport.config_flow.OpendataTransport.async_get_data", autospec=True, return_value=True, ): - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, context={"source": "user"} - ) result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, ) + if time_mode_input: + assert result["type"] == FlowResultType.FORM + if CONF_TIME_FIXED in time_mode_input: + assert result["step_id"] == "time_fixed" + if CONF_TIME_OFFSET in time_mode_input: + assert result["step_id"] == "time_offset" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=time_mode_input, + ) + assert result["type"] == FlowResultType.CREATE_ENTRY assert result["result"].title == config_title - assert result["data"] == user_input + assert result["data"] == {**user_input, **(time_mode_input or {})} @pytest.mark.parametrize( ("raise_error", "text_error", "user_input_error"), [ - (OpendataTransportConnectionError(), "cannot_connect", MOCK_DATA_STEP), - (OpendataTransportError(), "bad_config", MOCK_DATA_STEP), - (None, "too_many_via_stations", MOCK_DATA_STEP_TOO_MANY_STATIONS), - (IndexError(), "unknown", MOCK_DATA_STEP), + (OpendataTransportConnectionError(), "cannot_connect", MOCK_USER_DATA_STEP), + (OpendataTransportError(), "bad_config", MOCK_USER_DATA_STEP), + (None, "too_many_via_stations", MOCK_USER_DATA_STEP_TOO_MANY_STATIONS), + (IndexError(), "unknown", MOCK_USER_DATA_STEP), ], ) -async def test_flow_user_init_data_error_and_recover( +async def test_flow_user_init_data_error_and_recover_on_step_1( hass: HomeAssistant, raise_error, text_error, user_input_error ) -> None: - """Test unknown errors.""" + """Test errors in user step.""" + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, context={"source": "user"} + ) with patch( "homeassistant.components.swiss_public_transport.config_flow.OpendataTransport.async_get_data", autospec=True, side_effect=raise_error, ) as mock_OpendataTransport: - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, context={"source": "user"} - ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input_error, @@ -121,13 +179,75 @@ async def test_flow_user_init_data_error_and_recover( mock_OpendataTransport.return_value = True result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=MOCK_DATA_STEP, + user_input=MOCK_USER_DATA_STEP, ) assert result["type"] == FlowResultType.CREATE_ENTRY assert result["result"].title == "test_start test_destination" - assert result["data"] == MOCK_DATA_STEP + assert result["data"] == MOCK_USER_DATA_STEP + + +@pytest.mark.parametrize( + ("raise_error", "text_error", "user_input"), + [ + ( + OpendataTransportConnectionError(), + "cannot_connect", + MOCK_ADVANCED_DATA_STEP_TIME, + ), + (OpendataTransportError(), "bad_config", MOCK_ADVANCED_DATA_STEP_TIME), + (IndexError(), "unknown", MOCK_ADVANCED_DATA_STEP_TIME), + ], +) +async def test_flow_user_init_data_error_and_recover_on_step_2( + hass: HomeAssistant, raise_error, text_error, user_input +) -> None: + """Test errors in time mode step.""" + result = await hass.config_entries.flow.async_init( + config_flow.DOMAIN, context={"source": "user"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["handler"] == "swiss_public_transport" + assert result["data_schema"] == config_flow.USER_DATA_SCHEMA + + with patch( + "homeassistant.components.swiss_public_transport.config_flow.OpendataTransport.async_get_data", + autospec=True, + return_value=True, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_USER_DATA_STEP_TIME_FIXED, 
+ ) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "time_fixed" + + with patch( + "homeassistant.components.swiss_public_transport.config_flow.OpendataTransport.async_get_data", + autospec=True, + side_effect=raise_error, + ) as mock_OpendataTransport: + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"]["base"] == text_error + + # Recover + mock_OpendataTransport.side_effect = None + mock_OpendataTransport.return_value = True + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=user_input, + ) + + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["result"].title == "test_start test_destination at 18:03:00" async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> None: @@ -135,8 +255,8 @@ async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> No entry = MockConfigEntry( domain=config_flow.DOMAIN, - data=MOCK_DATA_STEP, - unique_id=unique_id_from_config(MOCK_DATA_STEP), + data=MOCK_USER_DATA_STEP, + unique_id=unique_id_from_config(MOCK_USER_DATA_STEP), ) entry.add_to_hass(hass) @@ -151,7 +271,7 @@ async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> No result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input=MOCK_DATA_STEP, + user_input=MOCK_USER_DATA_STEP, ) assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/swiss_public_transport/test_init.py b/tests/components/swiss_public_transport/test_init.py index 9ad4a8d50b0..963f5e6fa40 100644 --- a/tests/components/swiss_public_transport/test_init.py +++ b/tests/components/swiss_public_transport/test_init.py @@ -7,6 +7,9 @@ import pytest from homeassistant.components.swiss_public_transport.const import ( CONF_DESTINATION, CONF_START, + CONF_TIME_FIXED, + CONF_TIME_OFFSET, + CONF_TIME_STATION, CONF_VIA, DOMAIN, ) @@ -28,6 +31,17 @@ MOCK_DATA_STEP_VIA = { CONF_VIA: ["via_station"], } +MOCK_DATA_STEP_TIME_FIXED = { + **MOCK_DATA_STEP_VIA, + CONF_TIME_FIXED: "18:03:00", +} + +MOCK_DATA_STEP_TIME_OFFSET = { + **MOCK_DATA_STEP_VIA, + CONF_TIME_OFFSET: {"hours": 0, "minutes": 10, "seconds": 0}, + CONF_TIME_STATION: "arrival", +} + CONNECTIONS = [ { "departure": "2024-01-06T18:03:00+0100", @@ -70,6 +84,8 @@ CONNECTIONS = [ (1, 1, MOCK_DATA_STEP_BASE, "None_departure"), (1, 2, MOCK_DATA_STEP_BASE, None), (2, 1, MOCK_DATA_STEP_VIA, None), + (3, 1, MOCK_DATA_STEP_TIME_FIXED, None), + (3, 1, MOCK_DATA_STEP_TIME_OFFSET, None), ], ) async def test_migration_from( @@ -113,7 +129,7 @@ async def test_migration_from( ) # Check change in config entry and verify most recent version - assert config_entry.version == 2 + assert config_entry.version == 3 assert config_entry.minor_version == 1 assert config_entry.unique_id == unique_id @@ -130,7 +146,7 @@ async def test_migrate_error_from_future(hass: HomeAssistant) -> None: mock_entry = MockConfigEntry( domain=DOMAIN, - version=3, + version=4, minor_version=1, unique_id="some_crazy_future_unique_id", data=MOCK_DATA_STEP_BASE, diff --git a/tests/components/switchbot_cloud/test_init.py b/tests/components/switchbot_cloud/test_init.py index 25ea370efe5..43431ae04c0 100644 --- a/tests/components/switchbot_cloud/test_init.py +++ b/tests/components/switchbot_cloud/test_init.py @@ -50,6 +50,18 @@ async def test_setup_entry_success( remoteType="DIY Plug", hubDeviceId="test-hub-id", ), + 
Remote( + deviceId="meter-pro-1", + deviceName="meter-pro-name-1", + deviceType="MeterPro(CO2)", + hubDeviceId="test-hub-id", + ), + Remote( + deviceId="hub2-1", + deviceName="hub2-name-1", + deviceType="Hub 2", + hubDeviceId="test-hub-id", + ), ] mock_get_status.return_value = {"power": PowerState.ON.value} entry = configure_integration(hass) diff --git a/tests/components/switcher_kis/conftest.py b/tests/components/switcher_kis/conftest.py index 2cf123af2b0..518c36616ee 100644 --- a/tests/components/switcher_kis/conftest.py +++ b/tests/components/switcher_kis/conftest.py @@ -60,19 +60,19 @@ def mock_api(): patchers = [ patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.connect", + "homeassistant.components.switcher_kis.switch.SwitcherApi.connect", new=api_mock, ), patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.disconnect", + "homeassistant.components.switcher_kis.switch.SwitcherApi.disconnect", new=api_mock, ), patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.connect", + "homeassistant.components.switcher_kis.climate.SwitcherApi.connect", new=api_mock, ), patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.disconnect", + "homeassistant.components.switcher_kis.climate.SwitcherApi.disconnect", new=api_mock, ), ] diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index ab0bef4e335..e9d96673e24 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -5,6 +5,7 @@ from aioswitcher.device import ( DeviceType, ShutterDirection, SwitcherDualShutterSingleLight, + SwitcherLight, SwitcherPowerPlug, SwitcherShutter, SwitcherSingleShutterDualLight, @@ -23,18 +24,27 @@ DUMMY_DEVICE_ID3 = "bada77" DUMMY_DEVICE_ID4 = "bbd164" DUMMY_DEVICE_ID5 = "bcdb64" DUMMY_DEVICE_ID6 = "bcdc64" +DUMMY_DEVICE_ID7 = "bcdd64" +DUMMY_DEVICE_ID8 = "bcde64" +DUMMY_DEVICE_ID9 = "bcdf64" DUMMY_DEVICE_KEY1 = "18" DUMMY_DEVICE_KEY2 = "01" DUMMY_DEVICE_KEY3 = "12" DUMMY_DEVICE_KEY4 = "07" DUMMY_DEVICE_KEY5 = "15" DUMMY_DEVICE_KEY6 = "16" +DUMMY_DEVICE_KEY7 = "17" +DUMMY_DEVICE_KEY8 = "18" +DUMMY_DEVICE_KEY9 = "19" DUMMY_DEVICE_NAME1 = "Plug 23BC" DUMMY_DEVICE_NAME2 = "Heater FE12" DUMMY_DEVICE_NAME3 = "Breeze AB39" DUMMY_DEVICE_NAME4 = "Runner DD77" DUMMY_DEVICE_NAME5 = "RunnerS11 6CF5" DUMMY_DEVICE_NAME6 = "RunnerS12 A9BE" +DUMMY_DEVICE_NAME7 = "Light 36BB" +DUMMY_DEVICE_NAME8 = "Light 36CB" +DUMMY_DEVICE_NAME9 = "Light 36DB" DUMMY_DEVICE_PASSWORD = "12345678" DUMMY_ELECTRIC_CURRENT1 = 0.5 DUMMY_ELECTRIC_CURRENT2 = 12.8 @@ -44,18 +54,27 @@ DUMMY_IP_ADDRESS3 = "192.168.100.159" DUMMY_IP_ADDRESS4 = "192.168.100.160" DUMMY_IP_ADDRESS5 = "192.168.100.161" DUMMY_IP_ADDRESS6 = "192.168.100.162" +DUMMY_IP_ADDRESS7 = "192.168.100.163" +DUMMY_IP_ADDRESS8 = "192.168.100.164" +DUMMY_IP_ADDRESS9 = "192.168.100.165" DUMMY_MAC_ADDRESS1 = "A1:B2:C3:45:67:D8" DUMMY_MAC_ADDRESS2 = "A1:B2:C3:45:67:D9" DUMMY_MAC_ADDRESS3 = "A1:B2:C3:45:67:DA" DUMMY_MAC_ADDRESS4 = "A1:B2:C3:45:67:DB" DUMMY_MAC_ADDRESS5 = "A1:B2:C3:45:67:DC" DUMMY_MAC_ADDRESS6 = "A1:B2:C3:45:67:DD" +DUMMY_MAC_ADDRESS7 = "A1:B2:C3:45:67:DE" +DUMMY_MAC_ADDRESS8 = "A1:B2:C3:45:67:DF" +DUMMY_MAC_ADDRESS9 = "A1:B2:C3:45:67:DG" DUMMY_TOKEN_NEEDED1 = False DUMMY_TOKEN_NEEDED2 = False DUMMY_TOKEN_NEEDED3 = False DUMMY_TOKEN_NEEDED4 = False DUMMY_TOKEN_NEEDED5 = True DUMMY_TOKEN_NEEDED6 = True +DUMMY_TOKEN_NEEDED7 = True +DUMMY_TOKEN_NEEDED8 = True +DUMMY_TOKEN_NEEDED9 = True DUMMY_PHONE_ID = "1234" 
DUMMY_POWER_CONSUMPTION1 = 100 DUMMY_POWER_CONSUMPTION2 = 2780 @@ -75,6 +94,7 @@ DUMMY_USERNAME = "email" DUMMY_TOKEN = "zvVvd7JxtN7CgvkD1Psujw==" DUMMY_LIGHT = [DeviceState.ON] DUMMY_LIGHT_2 = [DeviceState.ON, DeviceState.ON] +DUMMY_LIGHT_3 = [DeviceState.ON, DeviceState.ON, DeviceState.ON] DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DeviceType.POWER_PLUG, @@ -162,4 +182,46 @@ DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat( DUMMY_REMOTE_ID, ) +DUMMY_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL01, + DeviceState.ON, + DUMMY_DEVICE_ID7, + DUMMY_DEVICE_KEY7, + DUMMY_IP_ADDRESS7, + DUMMY_MAC_ADDRESS7, + DUMMY_DEVICE_NAME7, + DUMMY_TOKEN_NEEDED7, + DUMMY_LIGHT, +) + +DUMMY_DUAL_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL02, + DeviceState.ON, + DUMMY_DEVICE_ID8, + DUMMY_DEVICE_KEY8, + DUMMY_IP_ADDRESS8, + DUMMY_MAC_ADDRESS8, + DUMMY_DEVICE_NAME8, + DUMMY_TOKEN_NEEDED8, + DUMMY_LIGHT_2, +) + +DUMMY_TRIPLE_LIGHT_DEVICE = SwitcherLight( + DeviceType.LIGHT_SL03, + DeviceState.ON, + DUMMY_DEVICE_ID9, + DUMMY_DEVICE_KEY9, + DUMMY_IP_ADDRESS9, + DUMMY_MAC_ADDRESS9, + DUMMY_DEVICE_NAME9, + DUMMY_TOKEN_NEEDED9, + DUMMY_LIGHT_3, +) + DUMMY_SWITCHER_DEVICES = [DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE] + +DUMMY_SWITCHER_SENSORS_DEVICES = [ + DUMMY_PLUG_DEVICE, + DUMMY_WATER_HEATER_DEVICE, + DUMMY_THERMOSTAT_DEVICE, +] diff --git a/tests/components/switcher_kis/test_button.py b/tests/components/switcher_kis/test_button.py index d0604487370..50c015b4024 100644 --- a/tests/components/switcher_kis/test_button.py +++ b/tests/components/switcher_kis/test_button.py @@ -42,7 +42,7 @@ async def test_assume_button( assert hass.states.get(SWING_OFF_EID) is None with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( BUTTON_DOMAIN, @@ -79,7 +79,7 @@ async def test_swing_button( assert hass.states.get(SWING_OFF_EID) is not None with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( BUTTON_DOMAIN, @@ -103,7 +103,7 @@ async def test_control_device_fail( # Test exception during set hvac mode with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -130,7 +130,7 @@ async def test_control_device_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_climate.py b/tests/components/switcher_kis/test_climate.py index c9f7abf34dc..72e1a93d1c3 100644 --- a/tests/components/switcher_kis/test_climate.py +++ b/tests/components/switcher_kis/test_climate.py @@ -49,7 +49,7 @@ async def test_climate_hvac_mode( # Test set hvac mode heat with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + 
"homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -71,7 +71,7 @@ async def test_climate_hvac_mode( # Test set hvac mode off with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -108,7 +108,7 @@ async def test_climate_temperature( # Test set target temperature with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -128,7 +128,7 @@ async def test_climate_temperature( # Test set target temperature - incorrect params with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -160,7 +160,7 @@ async def test_climate_fan_level( # Test set fan level to high with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -195,7 +195,7 @@ async def test_climate_swing( # Test set swing mode on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -218,7 +218,7 @@ async def test_climate_swing( # Test set swing mode off with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", ) as mock_control_device: await hass.services.async_call( CLIMATE_DOMAIN, @@ -249,7 +249,7 @@ async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) - # Test exception during set hvac mode with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -276,7 +276,7 @@ async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) - # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.climate.SwitcherType2Api.control_breeze_device", + "homeassistant.components.switcher_kis.climate.SwitcherApi.control_breeze_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index d26fff8754c..2936cafdd53 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -115,7 +115,7 @@ async def test_cover( # Test set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position" + 
"homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -136,7 +136,7 @@ async def test_cover( # Test open with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position" + "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -156,7 +156,7 @@ async def test_cover( # Test close with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position" + "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -176,7 +176,7 @@ async def test_cover( # Test stop with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop_shutter" + "homeassistant.components.switcher_kis.cover.SwitcherApi.stop_shutter" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, @@ -232,7 +232,7 @@ async def test_cover_control_fail( # Test exception during set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position", + "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -257,7 +257,7 @@ async def test_cover_control_fail( # Test error response during set position with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_position", + "homeassistant.components.switcher_kis.cover.SwitcherApi.set_position", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_light.py b/tests/components/switcher_kis/test_light.py index d360cb11291..aa7d6551d75 100644 --- a/tests/components/switcher_kis/test_light.py +++ b/tests/components/switcher_kis/test_light.py @@ -21,26 +21,43 @@ from homeassistant.util import slugify from . 
import init_integration from .consts import ( + DUMMY_DUAL_LIGHT_DEVICE as DEVICE4, DUMMY_DUAL_SHUTTER_SINGLE_LIGHT_DEVICE as DEVICE2, + DUMMY_LIGHT_DEVICE as DEVICE3, DUMMY_SINGLE_SHUTTER_DUAL_LIGHT_DEVICE as DEVICE, DUMMY_TOKEN as TOKEN, + DUMMY_TRIPLE_LIGHT_DEVICE as DEVICE5, DUMMY_USERNAME as USERNAME, ) ENTITY_ID = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_1" -ENTITY_ID2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_2" -ENTITY_ID3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE2.name)}" +ENTITY_ID_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE.name)}_light_2" +ENTITY_ID2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE2.name)}" +ENTITY_ID3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE3.name)}" +ENTITY_ID4 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_1" +ENTITY_ID4_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE4.name)}_light_2" +ENTITY_ID5 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_1" +ENTITY_ID5_2 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_2" +ENTITY_ID5_3 = f"{LIGHT_DOMAIN}.{slugify(DEVICE5.name)}_light_3" @pytest.mark.parametrize( ("device", "entity_id", "light_id", "device_state"), [ (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE, ENTITY_ID2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE2, ENTITY_ID3, 0, [DeviceState.OFF]), + (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), + (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), + (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), ], ) -@pytest.mark.parametrize("mock_bridge", [[DEVICE, DEVICE2]], indirect=True) +@pytest.mark.parametrize( + "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True +) async def test_light( hass: HomeAssistant, mock_bridge, @@ -69,7 +86,7 @@ async def test_light( # Test turning on light with patch( - "homeassistant.components.switcher_kis.light.SwitcherType2Api.set_light", + "homeassistant.components.switcher_kis.light.SwitcherApi.set_light", ) as mock_set_light: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -82,7 +99,7 @@ async def test_light( # Test turning off light with patch( - "homeassistant.components.switcher_kis.light.SwitcherType2Api.set_light" + "homeassistant.components.switcher_kis.light.SwitcherApi.set_light" ) as mock_set_light: await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -98,11 +115,19 @@ async def test_light( ("device", "entity_id", "light_id", "device_state"), [ (DEVICE, ENTITY_ID, 0, [DeviceState.OFF, DeviceState.ON]), - (DEVICE, ENTITY_ID2, 1, [DeviceState.ON, DeviceState.OFF]), - (DEVICE2, ENTITY_ID3, 0, [DeviceState.OFF]), + (DEVICE, ENTITY_ID_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE2, ENTITY_ID2, 0, [DeviceState.OFF]), + (DEVICE3, ENTITY_ID3, 0, [DeviceState.OFF]), + (DEVICE4, ENTITY_ID4, 0, [DeviceState.OFF, DeviceState.ON]), + (DEVICE4, ENTITY_ID4_2, 1, [DeviceState.ON, DeviceState.OFF]), + (DEVICE5, ENTITY_ID5, 0, [DeviceState.OFF, DeviceState.ON, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_2, 1, [DeviceState.ON, DeviceState.OFF, DeviceState.ON]), + (DEVICE5, ENTITY_ID5_3, 2, [DeviceState.ON, DeviceState.ON, DeviceState.OFF]), ], ) -@pytest.mark.parametrize("mock_bridge", 
[[DEVICE]], indirect=True) +@pytest.mark.parametrize( + "mock_bridge", [[DEVICE, DEVICE2, DEVICE3, DEVICE4, DEVICE5]], indirect=True +) async def test_light_control_fail( hass: HomeAssistant, mock_bridge, @@ -128,7 +153,7 @@ async def test_light_control_fail( # Test exception during turn on with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_light", + "homeassistant.components.switcher_kis.cover.SwitcherApi.set_light", side_effect=RuntimeError("fake error"), ) as mock_control_device: with pytest.raises(HomeAssistantError): @@ -153,7 +178,7 @@ async def test_light_control_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.set_light", + "homeassistant.components.switcher_kis.cover.SwitcherApi.set_light", return_value=SwitcherBaseResponse(None), ) as mock_control_device: with pytest.raises(HomeAssistantError): diff --git a/tests/components/switcher_kis/test_sensor.py b/tests/components/switcher_kis/test_sensor.py index 8ccc33f2d37..f99d91bd9a3 100644 --- a/tests/components/switcher_kis/test_sensor.py +++ b/tests/components/switcher_kis/test_sensor.py @@ -7,7 +7,12 @@ from homeassistant.helpers import entity_registry as er from homeassistant.util import slugify from . import init_integration -from .consts import DUMMY_PLUG_DEVICE, DUMMY_SWITCHER_DEVICES, DUMMY_WATER_HEATER_DEVICE +from .consts import ( + DUMMY_PLUG_DEVICE, + DUMMY_SWITCHER_SENSORS_DEVICES, + DUMMY_THERMOSTAT_DEVICE, + DUMMY_WATER_HEATER_DEVICE, +) DEVICE_SENSORS_TUPLE = ( ( @@ -25,17 +30,23 @@ DEVICE_SENSORS_TUPLE = ( ("remaining_time", "remaining_time"), ], ), + ( + DUMMY_THERMOSTAT_DEVICE, + [ + ("current_temperature", "temperature"), + ], + ), ) -@pytest.mark.parametrize("mock_bridge", [DUMMY_SWITCHER_DEVICES], indirect=True) +@pytest.mark.parametrize("mock_bridge", [DUMMY_SWITCHER_SENSORS_DEVICES], indirect=True) async def test_sensor_platform(hass: HomeAssistant, mock_bridge) -> None: """Test sensor platform.""" entry = await init_integration(hass) assert mock_bridge assert mock_bridge.is_running is True - assert len(entry.runtime_data) == 2 + assert len(entry.runtime_data) == 3 for device, sensors in DEVICE_SENSORS_TUPLE: for sensor, field in sensors: diff --git a/tests/components/switcher_kis/test_services.py b/tests/components/switcher_kis/test_services.py index 26c54ee53ed..65e1967cbac 100644 --- a/tests/components/switcher_kis/test_services.py +++ b/tests/components/switcher_kis/test_services.py @@ -48,7 +48,7 @@ async def test_turn_on_with_timer_service( assert state.state == STATE_OFF with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device" + "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device" ) as mock_control_device: await hass.services.async_call( DOMAIN, @@ -78,7 +78,7 @@ async def test_set_auto_off_service(hass: HomeAssistant, mock_bridge, mock_api) entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.set_auto_shutdown" + "homeassistant.components.switcher_kis.switch.SwitcherApi.set_auto_shutdown" ) as mock_set_auto_shutdown: await hass.services.async_call( DOMAIN, @@ -105,7 +105,7 @@ async def test_set_auto_off_service_fail( entity_id = f"{SWITCH_DOMAIN}.{slugify(device.name)}" with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.set_auto_shutdown", + "homeassistant.components.switcher_kis.switch.SwitcherApi.set_auto_shutdown", return_value=None, ) as 
mock_set_auto_shutdown: await hass.services.async_call( diff --git a/tests/components/switcher_kis/test_switch.py b/tests/components/switcher_kis/test_switch.py index f14a8f5b1ca..443c7bc930d 100644 --- a/tests/components/switcher_kis/test_switch.py +++ b/tests/components/switcher_kis/test_switch.py @@ -47,7 +47,7 @@ async def test_switch( # Test turning on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device", + "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", ) as mock_control_device: await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -60,7 +60,7 @@ async def test_switch( # Test turning off with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device" + "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device" ) as mock_control_device: await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True @@ -97,7 +97,7 @@ async def test_switch_control_fail( # Test exception during turn on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device", + "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", side_effect=RuntimeError("fake error"), ) as mock_control_device: await hass.services.async_call( @@ -121,7 +121,7 @@ async def test_switch_control_fail( # Test error response during turn on with patch( - "homeassistant.components.switcher_kis.switch.SwitcherType1Api.control_device", + "homeassistant.components.switcher_kis.switch.SwitcherApi.control_device", return_value=SwitcherBaseResponse(None), ) as mock_control_device: await hass.services.async_call( diff --git a/tests/components/tado/fixtures/home.json b/tests/components/tado/fixtures/home.json new file mode 100644 index 00000000000..3431c1c2471 --- /dev/null +++ b/tests/components/tado/fixtures/home.json @@ -0,0 +1,47 @@ +{ + "id": 1, + "name": "My Home", + "dateTimeZone": "Europe/Berlin", + "dateCreated": "2019-03-24T16:16:19.541Z", + "temperatureUnit": "CELSIUS", + "partner": null, + "simpleSmartScheduleEnabled": true, + "awayRadiusInMeters": 100.0, + "installationCompleted": true, + "incidentDetection": { "supported": true, "enabled": true }, + "generation": "PRE_LINE_X", + "zonesCount": 7, + "language": "de-DE", + "skills": ["AUTO_ASSIST"], + "christmasModeEnabled": true, + "showAutoAssistReminders": true, + "contactDetails": { + "name": "Max Mustermann", + "email": "max@example.com", + "phone": "+493023125431" + }, + "address": { + "addressLine1": "Musterstrasse 123", + "addressLine2": null, + "zipCode": "12345", + "city": "Berlin", + "state": null, + "country": "DEU" + }, + "geolocation": { "latitude": 52.0, "longitude": 13.0 }, + "consentGrantSkippable": true, + "enabledFeatures": [ + "EIQ_SETTINGS_AS_WEBVIEW", + "HIDE_BOILER_REPAIR_SERVICE", + "INTERCOM_ENABLED", + "MORE_AS_WEBVIEW", + "OWD_SETTINGS_AS_WEBVIEW", + "SETTINGS_OVERVIEW_AS_WEBVIEW" + ], + "isAirComfortEligible": true, + "isBalanceAcEligible": false, + "isEnergyIqEligible": true, + "isHeatSourceInstalled": false, + "isHeatPumpInstalled": false, + "supportsFlowTemperatureOptimization": false +} diff --git a/tests/components/tado/util.py b/tests/components/tado/util.py index de4fd515e5a..a76858ab98e 100644 --- a/tests/components/tado/util.py +++ b/tests/components/tado/util.py @@ -20,6 +20,7 @@ async def async_init_integration( mobile_devices_fixture = "tado/mobile_devices.json" me_fixture = 
"tado/me.json" weather_fixture = "tado/weather.json" + home_fixture = "tado/home.json" home_state_fixture = "tado/home_state.json" zones_fixture = "tado/zones.json" zone_states_fixture = "tado/zone_states.json" @@ -65,6 +66,10 @@ async def async_init_integration( "https://my.tado.com/api/v2/me", text=load_fixture(me_fixture), ) + m.get( + "https://my.tado.com/api/v2/homes/1/", + text=load_fixture(home_fixture), + ) m.get( "https://my.tado.com/api/v2/homes/1/weather", text=load_fixture(weather_fixture), diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 68444de640c..8e028cb5300 100644 --- a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -6,8 +6,8 @@ from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch -from pytedee_async.bridge import TedeeBridge -from pytedee_async.lock import TedeeLock +from aiotedee.bridge import TedeeBridge +from aiotedee.lock import TedeeLock import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index 788d31c84d2..dfe70e7a2ea 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -3,8 +3,8 @@ from datetime import timedelta from unittest.mock import MagicMock +from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index 2e86286c8da..825e01aca70 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -2,12 +2,12 @@ from unittest.mock import MagicMock, patch -from pytedee_async import ( +from aiotedee import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) -from pytedee_async.bridge import TedeeBridge +from aiotedee.bridge import TedeeBridge import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN diff --git a/tests/components/tedee/test_init.py b/tests/components/tedee/test_init.py index d4ac1c9d290..63701bb1788 100644 --- a/tests/components/tedee/test_init.py +++ b/tests/components/tedee/test_init.py @@ -5,7 +5,7 @@ from typing import Any from unittest.mock import MagicMock, patch from urllib.parse import urlparse -from pytedee_async.exception import ( +from aiotedee.exception import ( TedeeAuthException, TedeeClientException, TedeeWebhookException, diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index 3f6b97e2c70..45eae6e22d9 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -4,13 +4,13 @@ from datetime import timedelta from unittest.mock import MagicMock from urllib.parse import urlparse -from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock, TedeeLockState -from pytedee_async.exception import ( +from aiotedee import TedeeLock, TedeeLockState +from aiotedee.exception import ( TedeeClientException, TedeeDataUpdateException, TedeeLocalAuthException, ) +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion diff --git a/tests/components/tedee/test_sensor.py b/tests/components/tedee/test_sensor.py index 72fbd9cbe8d..ddbcd5086af 100644 --- 
a/tests/components/tedee/test_sensor.py +++ b/tests/components/tedee/test_sensor.py @@ -3,8 +3,8 @@ from datetime import timedelta from unittest.mock import MagicMock +from aiotedee import TedeeLock from freezegun.api import FrozenDateTimeFactory -from pytedee_async import TedeeLock import pytest from syrupy import SnapshotAssertion diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index 72c453d48dc..e0d95ff968d 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -222,8 +222,8 @@ async def test_config_flow( state = hass.states.get(f"{template_type}.my_template") assert state.state == template_state - for key in extra_attrs: - assert state.attributes[key] == extra_attrs[key] + for key, value in extra_attrs.items(): + assert state.attributes[key] == value @pytest.mark.parametrize( @@ -273,11 +273,21 @@ async def test_config_flow( "min": "0", "max": "100", "step": "0.1", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { "min": 0, "max": 100, "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, ), ( @@ -794,7 +804,7 @@ EARLY_END_ERROR = "invalid template (TemplateSyntaxError: unexpected 'end of tem ), "unit_of_measurement": ( "'None' is not a valid unit for device class 'energy'; " - "expected one of 'cal', 'Gcal', 'GJ', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'Wh'" + "expected one of 'cal', 'Gcal', 'GJ', 'GWh', 'J', 'kcal', 'kJ', 'kWh', 'Mcal', 'MJ', 'MWh', 'TWh', 'Wh'" ), }, ), @@ -1263,11 +1273,21 @@ async def test_option_flow_sensor_preview_config_entry_removed( "min": 0, "max": 100, "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { "min": 0, "max": 100, "step": 0.1, + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, ), ( diff --git a/tests/components/template/test_lock.py b/tests/components/template/test_lock.py index 186a84d5365..d9cb294c41f 100644 --- a/tests/components/template/test_lock.py +++ b/tests/components/template/test_lock.py @@ -10,6 +10,7 @@ from homeassistant.const import ( ATTR_ENTITY_ID, STATE_OFF, STATE_ON, + STATE_OPEN, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant, ServiceCall @@ -30,6 +31,13 @@ OPTIMISTIC_LOCK_CONFIG = { "caller": "{{ this.entity_id }}", }, }, + "open": { + "service": "test.automation", + "data_template": { + "action": "open", + "caller": "{{ this.entity_id }}", + }, + }, } OPTIMISTIC_CODED_LOCK_CONFIG = { @@ -81,6 +89,53 @@ async def test_template_state(hass: HomeAssistant) -> None: state = hass.states.get("lock.test_template_lock") assert state.state == LockState.UNLOCKED + hass.states.async_set("switch.test_state", STATE_OPEN) + await hass.async_block_till_done() + + state = hass.states.get("lock.test_template_lock") + assert state.state == LockState.OPEN + + +@pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) +@pytest.mark.parametrize( + "config", + [ + { + lock.DOMAIN: { + **OPTIMISTIC_LOCK_CONFIG, + "name": "Test lock", + "optimistic": True, + "value_template": "{{ states.switch.test_state.state }}", + } + }, + ], +) +@pytest.mark.usefixtures("start_ha") +async def 
test_open_lock_optimistic( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: + """Test optimistic open.""" + await setup.async_setup_component(hass, "switch", {}) + hass.states.async_set("switch.test_state", STATE_ON) + await hass.async_block_till_done() + + state = hass.states.get("lock.test_lock") + assert state.state == LockState.LOCKED + + await hass.services.async_call( + lock.DOMAIN, + lock.SERVICE_OPEN, + {ATTR_ENTITY_ID: "lock.test_lock"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + assert calls[0].data["action"] == "open" + assert calls[0].data["caller"] == "lock.test_lock" + + state = hass.states.get("lock.test_lock") + assert state.state == LockState.OPEN + @pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) @pytest.mark.parametrize( @@ -282,6 +337,40 @@ async def test_unlock_action(hass: HomeAssistant, calls: list[ServiceCall]) -> N assert calls[0].data["caller"] == "lock.template_lock" +@pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) +@pytest.mark.parametrize( + "config", + [ + { + lock.DOMAIN: { + **OPTIMISTIC_LOCK_CONFIG, + "value_template": "{{ states.switch.test_state.state }}", + } + }, + ], +) +@pytest.mark.usefixtures("start_ha") +async def test_open_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: + """Test open action.""" + await setup.async_setup_component(hass, "switch", {}) + hass.states.async_set("switch.test_state", STATE_ON) + await hass.async_block_till_done() + + state = hass.states.get("lock.template_lock") + assert state.state == LockState.LOCKED + + await hass.services.async_call( + lock.DOMAIN, + lock.SERVICE_OPEN, + {ATTR_ENTITY_ID: "lock.template_lock"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + assert calls[0].data["action"] == "open" + assert calls[0].data["caller"] == "lock.template_lock" + + @pytest.mark.parametrize(("count", "domain"), [(1, lock.DOMAIN)]) @pytest.mark.parametrize( "config", diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index cc580212233..0dc5d87984f 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -167,3 +167,13 @@ def mock_request(): return_value=COMMAND_OK, ) as mock_request: yield mock_request + + +@pytest.fixture(autouse=True) +def mock_signed_command() -> Generator[AsyncMock]: + """Mock Tesla Fleet Api signed_command method.""" + with patch( + "homeassistant.components.tesla_fleet.VehicleSigned.signed_command", + return_value=COMMAND_OK, + ) as mock_signed_command: + yield mock_signed_command diff --git a/tests/components/tesla_fleet/snapshots/test_media_player.ambr b/tests/components/tesla_fleet/snapshots/test_media_player.ambr index d6f3f3e4825..cc3018364a5 100644 --- a/tests/components/tesla_fleet/snapshots/test_media_player.ambr +++ b/tests/components/tesla_fleet/snapshots/test_media_player.ambr @@ -105,7 +105,7 @@ 'original_name': 'Media player', 'platform': 'tesla_fleet', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': 0, 'translation_key': 'media', 'unique_id': 'LRWXF7EK4KC700000-media', 'unit_of_measurement': None, @@ -123,7 +123,7 @@ 'media_position': 1.0, 'media_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', 'source': 'Audible', - 'supported_features': , + 'supported_features': , 'volume_level': 0.16129355359011466, }), 'context': , diff --git a/tests/components/tesla_fleet/test_button.py b/tests/components/tesla_fleet/test_button.py index addba00b93d..ef1cfd90357 100644 
--- a/tests/components/tesla_fleet/test_button.py +++ b/tests/components/tesla_fleet/test_button.py @@ -1,13 +1,16 @@ """Test the Tesla Fleet button platform.""" -from unittest.mock import patch +from copy import deepcopy +from unittest.mock import AsyncMock, patch import pytest from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import NotOnWhitelistFault from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from . import assert_entities, setup_platform @@ -63,3 +66,34 @@ async def test_press( blocking=True, ) command.assert_called_once() + + +async def test_press_signing_error( + hass: HomeAssistant, normal_config_entry: MockConfigEntry, mock_products: AsyncMock +) -> None: + """Test pressing a button with a signing error.""" + # Enable Signing + new_product = deepcopy(mock_products.return_value) + new_product["response"][0]["command_signing"] = "required" + mock_products.return_value = new_product + + with ( + patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), + ): + await setup_platform(hass, normal_config_entry, [Platform.BUTTON]) + + with ( + patch("homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key"), + patch( + "homeassistant.components.tesla_fleet.VehicleSigned.flash_lights", + side_effect=NotOnWhitelistFault, + ), + pytest.raises(HomeAssistantError) as error, + ): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: ["button.test_flash_lights"]}, + blocking=True, + ) + assert error.from_exception(NotOnWhitelistFault) diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py index 9dcac4ec388..7c17f986663 100644 --- a/tests/components/tesla_fleet/test_init.py +++ b/tests/components/tesla_fleet/test_init.py @@ -1,5 +1,6 @@ """Test the Tesla Fleet init.""" +from copy import deepcopy from unittest.mock import AsyncMock, patch from aiohttp import RequestInfo @@ -404,3 +405,22 @@ async def test_init_region_issue_failed( await setup_platform(hass, normal_config_entry) mock_find_server.assert_called_once() assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_signing( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Tests when a vehicle requires signing.""" + + # Make the vehicle require command signing + products = deepcopy(mock_products.return_value) + products["response"][0]["command_signing"] = "required" + mock_products.return_value = products + + with patch( + "homeassistant.components.tesla_fleet.TeslaFleetApi.get_private_key" + ) as mock_get_private_key: + await setup_platform(hass, normal_config_entry) + mock_get_private_key.assert_called_once() diff --git a/tests/components/tesla_fleet/test_switch.py b/tests/components/tesla_fleet/test_switch.py index 5cf812439a5..fba4fc05cc4 100644 --- a/tests/components/tesla_fleet/test_switch.py +++ b/tests/components/tesla_fleet/test_switch.py @@ -1,6 +1,5 @@ """Test the tesla_fleet switch platform.""" -from copy import deepcopy from unittest.mock import AsyncMock, patch import pytest @@ -166,29 +165,3 @@ async def test_switch_no_scope( {ATTR_ENTITY_ID: "switch.test_auto_steering_wheel_heater"}, blocking=True, ) - - -async def test_switch_no_signing( - hass: HomeAssistant, - 
entity_registry: er.EntityRegistry, - normal_config_entry: MockConfigEntry, - mock_products: AsyncMock, -) -> None: - """Tests that the switch entities are correct.""" - - # Make the vehicle require command signing - products = deepcopy(mock_products.return_value) - products["response"][0]["command_signing"] = "required" - mock_products.return_value = products - - await setup_platform(hass, normal_config_entry, [Platform.SWITCH]) - with pytest.raises( - ServiceValidationError, - match="Vehicle requires command signing. Please see documentation for more details", - ): - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_auto_steering_wheel_heater"}, - blocking=True, - ) diff --git a/tests/components/tessie/test_cover.py b/tests/components/tessie/test_cover.py index 451d1758e56..49a53fd327c 100644 --- a/tests/components/tessie/test_cover.py +++ b/tests/components/tessie/test_cover.py @@ -112,4 +112,4 @@ async def test_errors(hass: HomeAssistant) -> None: blocking=True, ) mock_set.assert_called_once() - assert str(error.value) == TEST_RESPONSE_ERROR["reason"] + assert str(error.value) == f"Command failed, {TEST_RESPONSE_ERROR["reason"]}" diff --git a/tests/components/thethingsnetwork/test_init.py b/tests/components/thethingsnetwork/test_init.py index 1e0b64c933d..e39c764d5f9 100644 --- a/tests/components/thethingsnetwork/test_init.py +++ b/tests/components/thethingsnetwork/test_init.py @@ -4,22 +4,6 @@ import pytest from ttn_client import TTNAuthError from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from .conftest import DOMAIN - - -async def test_error_configuration( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test issue is logged when deprecated configuration is used.""" - await async_setup_component( - hass, DOMAIN, {DOMAIN: {"app_id": "123", "access_key": "42"}} - ) - await hass.async_block_till_done() - assert issue_registry.async_get_issue(DOMAIN, "manual_migration") @pytest.mark.parametrize(("exception_class"), [TTNAuthError, Exception]) diff --git a/tests/components/threshold/test_binary_sensor.py b/tests/components/threshold/test_binary_sensor.py index e0973c7a580..259009c6319 100644 --- a/tests/components/threshold/test_binary_sensor.py +++ b/tests/components/threshold/test_binary_sensor.py @@ -538,7 +538,7 @@ async def test_sensor_no_lower_upper( await async_setup_component(hass, Platform.BINARY_SENSOR, config) await hass.async_block_till_done() - assert "Lower or Upper thresholds not provided" in caplog.text + assert "Lower or Upper thresholds are not provided" in caplog.text async def test_device_id( diff --git a/tests/components/tibber/test_diagnostics.py b/tests/components/tibber/test_diagnostics.py index 34ecb63dfec..16c735596d0 100644 --- a/tests/components/tibber/test_diagnostics.py +++ b/tests/components/tibber/test_diagnostics.py @@ -19,12 +19,9 @@ async def test_entry_diagnostics( config_entry, ) -> None: """Test config entry diagnostics.""" - with ( - patch( - "tibber.Tibber.update_info", - return_value=None, - ), - patch("homeassistant.components.tibber.discovery.async_load_platform"), + with patch( + "tibber.Tibber.update_info", + return_value=None, ): assert await async_setup_component(hass, "tibber", {}) diff --git a/tests/components/tibber/test_notify.py b/tests/components/tibber/test_notify.py index 69af92c4d5d..9b731e78bf6 100644 --- a/tests/components/tibber/test_notify.py 
+++ b/tests/components/tibber/test_notify.py @@ -6,7 +6,6 @@ from unittest.mock import MagicMock import pytest from homeassistant.components.recorder import Recorder -from homeassistant.components.tibber import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -19,18 +18,8 @@ async def test_notification_services( notify_state = hass.states.get("notify.tibber") assert notify_state is not None - # Assert legacy notify service hass been added - assert hass.services.has_service("notify", DOMAIN) - - # Test legacy notify service - service = "tibber" - service_data = {"message": "The message", "title": "A title"} - await hass.services.async_call("notify", service, service_data, blocking=True) calls: MagicMock = mock_tibber_setup.send_notification - calls.assert_called_once_with(message="The message", title="A title") - calls.reset_mock() - # Test notify entity service service = "send_message" service_data = { @@ -44,15 +33,6 @@ async def test_notification_services( calls.side_effect = TimeoutError - with pytest.raises(HomeAssistantError): - # Test legacy notify service - await hass.services.async_call( - "notify", - service="tibber", - service_data={"message": "The message", "title": "A title"}, - blocking=True, - ) - with pytest.raises(HomeAssistantError): # Test notify entity service await hass.services.async_call( diff --git a/tests/components/tibber/test_repairs.py b/tests/components/tibber/test_repairs.py deleted file mode 100644 index 5e5fde4569e..00000000000 --- a/tests/components/tibber/test_repairs.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Test loading of the Tibber config entry.""" - -from unittest.mock import MagicMock - -from homeassistant.components.recorder import Recorder -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.components.repairs import process_repair_fix_flow, start_repair_fix_flow -from tests.typing import ClientSessionGenerator - - -async def test_repair_flow( - recorder_mock: Recorder, - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_tibber_setup: MagicMock, - hass_client: ClientSessionGenerator, -) -> None: - """Test unloading the entry.""" - - # Test legacy notify service - service = "tibber" - service_data = {"message": "The message", "title": "A title"} - await hass.services.async_call("notify", service, service_data, blocking=True) - calls: MagicMock = mock_tibber_setup.send_notification - - calls.assert_called_once_with(message="The message", title="A title") - calls.reset_mock() - - http_client = await hass_client() - # Assert the issue is present - assert issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_tibber_{service}", - ) - assert len(issue_registry.issues) == 1 - - data = await start_repair_fix_flow( - http_client, "notify", f"migrate_notify_tibber_{service}" - ) - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - # Simulate the users confirmed the repair flow - data = await process_repair_fix_flow(http_client, flow_id) - assert data["type"] == "create_entry" - await hass.async_block_till_done() - - # Assert the issue is no longer present - assert not issue_registry.async_get_issue( - domain="notify", - issue_id=f"migrate_notify_tibber_{service}", - ) - assert len(issue_registry.issues) == 0 diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index 75eab8eeb73..809ab3bfd78 100644 --- a/tests/components/tplink/__init__.py +++ 
b/tests/components/tplink/__init__.py @@ -6,6 +6,7 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from kasa import ( + BaseProtocol, Device, DeviceConfig, DeviceConnectionParameters, @@ -17,7 +18,6 @@ from kasa import ( Module, ) from kasa.interfaces import Fan, Light, LightEffect, LightState -from kasa.protocol import BaseProtocol from kasa.smart.modules.alarm import Alarm from syrupy import SnapshotAssertion @@ -62,7 +62,9 @@ CONN_PARAMS_LEGACY = DeviceConnectionParameters( DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Xor ) DEVICE_CONFIG_LEGACY = DeviceConfig(IP_ADDRESS) -DEVICE_CONFIG_DICT_LEGACY = DEVICE_CONFIG_LEGACY.to_dict(exclude_credentials=True) +DEVICE_CONFIG_DICT_LEGACY = { + k: v for k, v in DEVICE_CONFIG_LEGACY.to_dict().items() if k != "credentials" +} CREDENTIALS = Credentials("foo", "bar") CREDENTIALS_HASH_AES = "AES/abcdefghijklmnopqrstuvabcdefghijklmnopqrstuv==" CREDENTIALS_HASH_KLAP = "KLAP/abcdefghijklmnopqrstuv==" @@ -86,8 +88,12 @@ DEVICE_CONFIG_AES = DeviceConfig( uses_http=True, aes_keys=AES_KEYS, ) -DEVICE_CONFIG_DICT_KLAP = DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True) -DEVICE_CONFIG_DICT_AES = DEVICE_CONFIG_AES.to_dict(exclude_credentials=True) +DEVICE_CONFIG_DICT_KLAP = { + k: v for k, v in DEVICE_CONFIG_KLAP.to_dict().items() if k != "credentials" +} +DEVICE_CONFIG_DICT_AES = { + k: v for k, v in DEVICE_CONFIG_AES.to_dict().items() if k != "credentials" +} CREATE_ENTRY_DATA_LEGACY = { CONF_HOST: IP_ADDRESS, CONF_ALIAS: ALIAS, diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index 78cc9304bf7..25a4bd20270 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -37,7 +37,7 @@ def mock_discovery(): device = _mocked_device( device_config=DeviceConfig.from_dict(DEVICE_CONFIG_KLAP.to_dict()), credentials_hash=CREDENTIALS_HASH_KLAP, - alias=None, + alias="My Bulb", ) devices = { "127.0.0.1": _mocked_device( diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index 550592d3f48..f60132fd2c2 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -34,6 +34,16 @@ "type": "Switch", "category": "Config" }, + "child_lock": { + "value": true, + "type": "Switch", + "category": "Config" + }, + "pir_enabled": { + "value": true, + "type": "Switch", + "category": "Config" + }, "current_consumption": { "value": 5.23, "type": "Sensor", @@ -303,5 +313,10 @@ "type": "Choice", "category": "Config", "choices": ["low", "normal", "high"] + }, + "water_alert_timestamp": { + "type": "Sensor", + "category": "Info", + "value": "2024-06-24 10:03:11.046643+01:00" } } diff --git a/tests/components/tplink/snapshots/test_sensor.ambr b/tests/components/tplink/snapshots/test_sensor.ambr index 39682cd4a17..739f02e51f0 100644 --- a/tests/components/tplink/snapshots/test_sensor.ambr +++ b/tests/components/tplink/snapshots/test_sensor.ambr @@ -358,6 +358,53 @@ 'state': '12', }) # --- +# name: test_states[sensor.my_device_last_water_leak_alert-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_device_last_water_leak_alert', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last water leak alert', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'water_alert_timestamp', + 'unique_id': '123456789ABCDEFGH_water_alert_timestamp', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[sensor.my_device_last_water_leak_alert-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my_device Last water leak alert', + }), + 'context': , + 'entity_id': 'sensor.my_device_last_water_leak_alert', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-24T09:03:11+00:00', + }) +# --- # name: test_states[sensor.my_device_on_since-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tplink/snapshots/test_switch.ambr b/tests/components/tplink/snapshots/test_switch.ambr index 4354ea1905a..36c630474c8 100644 --- a/tests/components/tplink/snapshots/test_switch.ambr +++ b/tests/components/tplink/snapshots/test_switch.ambr @@ -173,6 +173,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_child_lock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_child_lock', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Child lock', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'child_lock', + 'unique_id': '123456789ABCDEFGH_child_lock', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_child_lock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Child lock', + }), + 'context': , + 'entity_id': 'switch.my_device_child_lock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_fan_sleep_mode-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -265,6 +311,52 @@ 'state': 'on', }) # --- +# name: test_states[switch.my_device_motion_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.my_device_motion_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Motion sensor', + 'platform': 'tplink', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pir_enabled', + 'unique_id': '123456789ABCDEFGH_pir_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_states[switch.my_device_motion_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my_device Motion sensor', + }), + 'context': , + 'entity_id': 'switch.my_device_motion_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_states[switch.my_device_smooth_transitions-entry] EntityRegistryEntrySnapshot({ 
'aliases': set({ diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index 12a5741058c..2697696c667 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -2,7 +2,7 @@ from contextlib import contextmanager import logging -from unittest.mock import AsyncMock, patch +from unittest.mock import ANY, AsyncMock, patch from kasa import TimeoutError import pytest @@ -30,6 +30,7 @@ from homeassistant.const import ( CONF_HOST, CONF_MAC, CONF_PASSWORD, + CONF_PORT, CONF_USERNAME, ) from homeassistant.core import HomeAssistant @@ -665,6 +666,93 @@ async def test_manual_auth_errors( await hass.async_block_till_done() +@pytest.mark.parametrize( + ("host_str", "host", "port"), + [ + (f"{IP_ADDRESS}:1234", IP_ADDRESS, 1234), + ("[2001:db8:0::1]:4321", "2001:db8:0::1", 4321), + ], +) +async def test_manual_port_override( + hass: HomeAssistant, + mock_connect: AsyncMock, + mock_discovery: AsyncMock, + host_str, + host, + port, +) -> None: + """Test manually setup.""" + mock_discovery["mock_device"].config.port_override = port + mock_discovery["mock_device"].host = host + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + # side_effects to cause auth confirm as the port override usually only + # works with direct connections. + mock_discovery["discover_single"].side_effect = TimeoutError + mock_connect["connect"].side_effect = AuthenticationError + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: host_str} + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "user_auth_confirm" + assert not result2["errors"] + + creds = Credentials("fake_username", "fake_password") + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done() + mock_discovery["try_connect_all"].assert_called_once_with( + host, credentials=creds, port=port, http_client=ANY + ) + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == DEFAULT_ENTRY_TITLE + assert result3["data"] == { + **CREATE_ENTRY_DATA_KLAP, + CONF_PORT: port, + CONF_HOST: host, + } + assert result3["context"]["unique_id"] == MAC_ADDRESS + + +async def test_manual_port_override_invalid( + hass: HomeAssistant, mock_connect: AsyncMock, mock_discovery: AsyncMock +) -> None: + """Test manually setup.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: f"{IP_ADDRESS}:foo"} + ) + await hass.async_block_till_done() + + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.1", credentials=None, port=None + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == DEFAULT_ENTRY_TITLE + assert result2["data"] == CREATE_ENTRY_DATA_KLAP + assert result2["context"]["unique_id"] == MAC_ADDRESS + + async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: """Test we get the form with 
discovery and abort for dhcp source when we get both.""" @@ -1072,7 +1160,7 @@ async def test_reauth( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT @@ -1107,7 +1195,7 @@ async def test_reauth_try_connect_all( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["try_connect_all"].assert_called_once() assert result2["type"] is FlowResultType.ABORT @@ -1145,7 +1233,7 @@ async def test_reauth_try_connect_all_fail( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["try_connect_all"].assert_called_once() assert result2["errors"] == {"base": "cannot_connect"} @@ -1214,7 +1302,7 @@ async def test_reauth_update_with_encryption_change( assert "Connection type changed for 127.0.0.2" in caplog.text credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.2", credentials=credentials + "127.0.0.2", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT @@ -1416,7 +1504,7 @@ async def test_reauth_errors( credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.FORM @@ -1434,7 +1522,7 @@ async def test_reauth_errors( ) mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() @@ -1643,7 +1731,7 @@ async def test_reauth_update_other_flows( ) credentials = Credentials("fake_username", "fake_password") mock_discovery["discover_single"].assert_called_once_with( - "127.0.0.1", credentials=credentials + "127.0.0.1", credentials=credentials, port=None ) mock_discovery["mock_device"].update.assert_called_once_with() assert result2["type"] is FlowResultType.ABORT diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index dd01c381adf..766e6784c8b 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -45,6 +45,7 @@ from . import ( CREDENTIALS_HASH_AES, CREDENTIALS_HASH_KLAP, DEVICE_CONFIG_AES, + DEVICE_CONFIG_DICT_KLAP, DEVICE_CONFIG_KLAP, DEVICE_CONFIG_LEGACY, DEVICE_ID, @@ -538,9 +539,8 @@ async def test_move_credentials_hash( from the device. """ device_config = { - **DEVICE_CONFIG_KLAP.to_dict( - exclude_credentials=True, credentials_hash="theHash" - ) + **DEVICE_CONFIG_DICT_KLAP, + "credentials_hash": "theHash", } entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} @@ -586,9 +586,8 @@ async def test_move_credentials_hash_auth_error( in async_setup_entry. 
""" device_config = { - **DEVICE_CONFIG_KLAP.to_dict( - exclude_credentials=True, credentials_hash="theHash" - ) + **DEVICE_CONFIG_DICT_KLAP, + "credentials_hash": "theHash", } entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} @@ -630,9 +629,8 @@ async def test_move_credentials_hash_other_error( at the end of the test. """ device_config = { - **DEVICE_CONFIG_KLAP.to_dict( - exclude_credentials=True, credentials_hash="theHash" - ) + **DEVICE_CONFIG_DICT_KLAP, + "credentials_hash": "theHash", } entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} @@ -729,7 +727,7 @@ async def test_credentials_hash_auth_error( await hass.async_block_till_done() expected_config = DeviceConfig.from_dict( - DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True, credentials_hash="theHash") + {**DEVICE_CONFIG_DICT_KLAP, "credentials_hash": "theHash"} ) expected_config.uses_http = False expected_config.http_client = "Foo" @@ -767,7 +765,9 @@ async def test_migrate_remove_device_config( CONF_HOST: expected_entry_data[CONF_HOST], CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_DEVICE_CONFIG: device_config.to_dict(exclude_credentials=True), + CONF_DEVICE_CONFIG: { + k: v for k, v in device_config.to_dict().items() if k != "credentials" + }, } entry = MockConfigEntry( diff --git a/tests/components/trafikverket_camera/conftest.py b/tests/components/trafikverket_camera/conftest.py index cef85af2228..5e0e9bfa593 100644 --- a/tests/components/trafikverket_camera/conftest.py +++ b/tests/components/trafikverket_camera/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime from unittest.mock import patch import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN from homeassistant.config_entries import SOURCE_USER diff --git a/tests/components/trafikverket_camera/test_binary_sensor.py b/tests/components/trafikverket_camera/test_binary_sensor.py index 6750c05772b..46cf93726c7 100644 --- a/tests/components/trafikverket_camera/test_binary_sensor.py +++ b/tests/components/trafikverket_camera/test_binary_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_ON diff --git a/tests/components/trafikverket_camera/test_camera.py b/tests/components/trafikverket_camera/test_camera.py index 51d4563c19b..f61dd497c9c 100644 --- a/tests/components/trafikverket_camera/test_camera.py +++ b/tests/components/trafikverket_camera/test_camera.py @@ -7,7 +7,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.components.camera import async_get_image from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index 48162a17e2c..cc37e2b5441 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ b/tests/components/trafikverket_camera/test_config_flow.py @@ -5,8 +5,12 @@ from __future__ import annotations from unittest.mock import patch import pytest -from pytrafikverket.exceptions import InvalidAuthentication, NoCameraFound, UnknownError -from pytrafikverket.models import CameraInfoModel 
+from pytrafikverket import ( + CameraInfoModel, + InvalidAuthentication, + NoCameraFound, + UnknownError, +) from homeassistant import config_entries from homeassistant.components.trafikverket_camera.const import DOMAIN diff --git a/tests/components/trafikverket_camera/test_coordinator.py b/tests/components/trafikverket_camera/test_coordinator.py index f50ab56724e..7deeeccf8ad 100644 --- a/tests/components/trafikverket_camera/test_coordinator.py +++ b/tests/components/trafikverket_camera/test_coordinator.py @@ -5,13 +5,13 @@ from __future__ import annotations from unittest.mock import patch import pytest -from pytrafikverket.exceptions import ( +from pytrafikverket import ( + CameraInfoModel, InvalidAuthentication, MultipleCamerasFound, NoCameraFound, UnknownError, ) -from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntryState diff --git a/tests/components/trafikverket_camera/test_init.py b/tests/components/trafikverket_camera/test_init.py index aaa4c3cfed7..5b77f17ac3e 100644 --- a/tests/components/trafikverket_camera/test_init.py +++ b/tests/components/trafikverket_camera/test_init.py @@ -6,8 +6,7 @@ from datetime import datetime from unittest.mock import patch import pytest -from pytrafikverket.exceptions import UnknownError -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel, UnknownError from homeassistant.components.trafikverket_camera import async_migrate_entry from homeassistant.components.trafikverket_camera.const import DOMAIN diff --git a/tests/components/trafikverket_camera/test_recorder.py b/tests/components/trafikverket_camera/test_recorder.py index d9778ab851a..c14f05ca7ab 100644 --- a/tests/components/trafikverket_camera/test_recorder.py +++ b/tests/components/trafikverket_camera/test_recorder.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states diff --git a/tests/components/trafikverket_camera/test_sensor.py b/tests/components/trafikverket_camera/test_sensor.py index 0f4ef02a850..f8e0342b0f6 100644 --- a/tests/components/trafikverket_camera/test_sensor.py +++ b/tests/components/trafikverket_camera/test_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.models import CameraInfoModel +from pytrafikverket import CameraInfoModel from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/trafikverket_train/conftest.py b/tests/components/trafikverket_train/conftest.py index 14671d27252..234269cc9f8 100644 --- a/tests/components/trafikverket_train/conftest.py +++ b/tests/components/trafikverket_train/conftest.py @@ -38,7 +38,7 @@ async def load_integration_from_entry( return_value=get_train_stop, ), patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", ), ): await hass.config_entries.async_setup(config_entry_id) @@ -50,7 +50,8 @@ async def load_integration_from_entry( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="stockholmc-uppsalac--['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 
'sun']", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) await setup_config_entry_with_mocked_data(config_entry.entry_id) @@ -60,7 +61,8 @@ async def load_integration_from_entry( source=SOURCE_USER, data=ENTRY_CONFIG2, entry_id="2", - unique_id="stockholmc-uppsalac-1100-['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']", + version=1, + minor_version=2, ) config_entry2.add_to_hass(hass) await setup_config_entry_with_mocked_data(config_entry2.entry_id) diff --git a/tests/components/trafikverket_train/snapshots/test_init.ambr b/tests/components/trafikverket_train/snapshots/test_init.ambr index c32995fdb76..2b3693eddc1 100644 --- a/tests/components/trafikverket_train/snapshots/test_init.ambr +++ b/tests/components/trafikverket_train/snapshots/test_init.ambr @@ -7,7 +7,7 @@ 'title_placeholders': dict({ 'name': 'Mock Title', }), - 'unique_id': '321', + 'unique_id': None, }), 'flow_id': , 'handler': 'trafikverket_train', diff --git a/tests/components/trafikverket_train/snapshots/test_sensor.ambr b/tests/components/trafikverket_train/snapshots/test_sensor.ambr index cae0457bbff..6caf1f86b51 100644 --- a/tests/components/trafikverket_train/snapshots/test_sensor.ambr +++ b/tests/components/trafikverket_train/snapshots/test_sensor.ambr @@ -222,7 +222,7 @@ 'title_placeholders': dict({ 'name': 'Mock Title', }), - 'unique_id': "stockholmc-uppsalac--['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']", + 'unique_id': None, }), 'flow_id': , 'handler': 'trafikverket_train', diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index 9fe02994f05..eac5e629bf0 100644 --- a/tests/components/trafikverket_train/test_config_flow.py +++ b/tests/components/trafikverket_train/test_config_flow.py @@ -16,6 +16,7 @@ from pytrafikverket.models import TrainStopModel from homeassistant import config_entries from homeassistant.components.trafikverket_train.const import ( + CONF_FILTER_PRODUCT, CONF_FROM, CONF_TIME, CONF_TO, @@ -39,7 +40,7 @@ async def test_form(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -73,7 +74,6 @@ async def test_form(hass: HomeAssistant) -> None: } assert result["options"] == {"filter_product": None} assert len(mock_setup_entry.mock_calls) == 1 - assert result["result"].unique_id == "stockholmc-uppsalac-10:00-['mon', 'fri']" async def test_form_entry_already_exist(hass: HomeAssistant) -> None: @@ -88,8 +88,10 @@ async def test_form_entry_already_exist(hass: HomeAssistant) -> None: CONF_TO: "Uppsala C", CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, + CONF_FILTER_PRODUCT: None, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) @@ -101,7 +103,7 @@ async def test_form_entry_already_exist(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -161,7 +163,7 @@ async def test_flow_fails( with ( patch( - 
"homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", side_effect=side_effect(), ), patch( @@ -206,7 +208,7 @@ async def test_flow_fails_departures( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_next_train_stops", @@ -240,7 +242,8 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) @@ -251,7 +254,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -314,7 +317,8 @@ async def test_reauth_flow_error( CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) @@ -322,7 +326,7 @@ async def test_reauth_flow_error( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", side_effect=side_effect(), ), patch( @@ -341,7 +345,7 @@ async def test_reauth_flow_error( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -396,7 +400,8 @@ async def test_reauth_flow_error_departures( CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) @@ -404,7 +409,7 @@ async def test_reauth_flow_error_departures( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -423,7 +428,7 @@ async def test_reauth_flow_error_departures( with ( patch( - "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.config_flow.TrafikverketTrain.async_get_train_stop", @@ -467,13 +472,14 @@ async def test_options_flow( CONF_TIME: "10:00", CONF_WEEKDAY: WEEKDAYS, }, - unique_id=f"stockholmc-uppsalac-10:00-{WEEKDAYS}", + version=1, + minor_version=2, ) entry.add_to_hass(hass) with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + 
"homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_train/test_init.py b/tests/components/trafikverket_train/test_init.py index c8fea174e83..41c8e2432ef 100644 --- a/tests/components/trafikverket_train/test_init.py +++ b/tests/components/trafikverket_train/test_init.py @@ -28,13 +28,14 @@ async def test_unload_entry( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="321", + version=1, + minor_version=2, ) entry.add_to_hass(hass) with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", @@ -64,12 +65,13 @@ async def test_auth_failed( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="321", + version=1, + minor_version=2, ) entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", side_effect=InvalidAuthentication, ): await hass.config_entries.async_setup(entry.entry_id) @@ -94,12 +96,13 @@ async def test_no_stations( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="321", + version=1, + minor_version=2, ) entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", side_effect=NoTrainStationFound, ): await hass.config_entries.async_setup(entry.entry_id) @@ -121,7 +124,8 @@ async def test_migrate_entity_unique_id( data=ENTRY_CONFIG, options=OPTIONS_CONFIG, entry_id="1", - unique_id="321", + version=1, + minor_version=2, ) entry.add_to_hass(hass) @@ -135,7 +139,7 @@ async def test_migrate_entity_unique_id( with ( patch( - "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", @@ -149,3 +153,69 @@ async def test_migrate_entity_unique_id( entity = entity_registry.async_get(entity.entity_id) assert entity.unique_id == f"{entry.entry_id}-departure_time" + + +async def test_migrate_entry( + hass: HomeAssistant, + get_trains: list[TrainStopModel], +) -> None: + """Test migrate entry unique id.""" + entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + options=OPTIONS_CONFIG, + version=1, + minor_version=1, + entry_id="1", + unique_id="321", + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", + ), + patch( + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", + return_value=get_trains, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + assert 
entry.version == 1 + assert entry.minor_version == 2 + assert entry.unique_id is None + + +async def test_migrate_entry_from_future_version_fails( + hass: HomeAssistant, + get_trains: list[TrainStopModel], +) -> None: + """Test migrate entry from future version fails.""" + entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + data=ENTRY_CONFIG, + options=OPTIONS_CONFIG, + version=2, + entry_id="1", + ) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_search_train_station", + ), + patch( + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", + return_value=get_trains, + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/trend/test_binary_sensor.py b/tests/components/trend/test_binary_sensor.py index ad85f65a9fc..4a829bb86d2 100644 --- a/tests/components/trend/test_binary_sensor.py +++ b/tests/components/trend/test_binary_sensor.py @@ -9,7 +9,7 @@ import pytest from homeassistant import setup from homeassistant.components.trend.const import DOMAIN -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component @@ -395,3 +395,45 @@ async def test_device_id( trend_entity = entity_registry.async_get("binary_sensor.trend") assert trend_entity is not None assert trend_entity.device_id == source_entity.device_id + + +@pytest.mark.parametrize( + "error_state", + [ + STATE_UNKNOWN, + STATE_UNAVAILABLE, + ], +) +async def test_unavailable_source( + hass: HomeAssistant, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + setup_component: ComponentSetup, + error_state: str, +) -> None: + """Test for unavailable source.""" + await setup_component( + { + "sample_duration": 10000, + "min_gradient": 1, + "max_samples": 25, + "min_samples": 5, + }, + ) + + for val in (10, 20, 30, 40, 50, 60): + freezer.tick(timedelta(seconds=2)) + hass.states.async_set("sensor.test_state", val) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == "on" + + hass.states.async_set("sensor.test_state", error_state) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == STATE_UNAVAILABLE + + hass.states.async_set("sensor.test_state", 50) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.test_trend_sensor").state == "on" diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 2ab6dc16629..0b01a24720d 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -45,6 +45,7 @@ from tests.common import ( mock_integration, mock_platform, mock_restore_cache, + reset_translation_cache, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator @@ -204,18 +205,20 @@ async def test_service( blocking=True, ) - assert len(calls) == 1 - assert calls[0].data[ATTR_MEDIA_ANNOUNCE] is True - assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - 
"/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_-_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" - ).is_file() + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert len(calls) == 1 + assert calls[0].data[ATTR_MEDIA_ANNOUNCE] is True + assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" + ).is_file() @pytest.mark.parametrize( @@ -266,17 +269,20 @@ async def test_service_default_language( ) assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_-_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / ( - f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" - ) - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / ( + f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" + ) + ).is_file() @pytest.mark.parametrize( @@ -327,15 +333,18 @@ async def test_service_default_special_language( ) assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_-_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_{expected_url_suffix}.mp3" + ).is_file() @pytest.mark.parametrize( @@ -384,15 +393,18 @@ async def test_service_language( ) assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_-_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / 
f"42f18378fd4393d18c8dd11d03fa9563c1e54491_de-de_-_{expected_url_suffix}.mp3" + ).is_file() @pytest.mark.parametrize( @@ -497,18 +509,21 @@ async def test_service_options( assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / ( - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / ( + "42f18378fd4393d18c8dd11d03fa9563c1e54491" + f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" + ) + ).is_file() class MockProviderWithDefaults(MockTTSProvider): @@ -578,18 +593,21 @@ async def test_service_default_options( assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / ( - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / ( + "42f18378fd4393d18c8dd11d03fa9563c1e54491" + f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" + ) + ).is_file() @pytest.mark.parametrize( @@ -649,18 +667,21 @@ async def test_merge_default_service_options( assert len(calls) == 1 assert calls[0].data[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - await hass.async_block_till_done() - assert ( - mock_tts_cache_dir - / ( - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" - ) - ).is_file() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() + assert ( + mock_tts_cache_dir + / ( + "42f18378fd4393d18c8dd11d03fa9563c1e54491" + f"_de-de_{opt_hash}_{expected_url_suffix}.mp3" + ) + ).is_file() @pytest.mark.parametrize( @@ -1065,10 +1086,14 @@ async def test_setup_legacy_cache_dir( ) assert len(calls) == 1 - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_test.mp3" - ) - await hass.async_block_till_done() + + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == 
("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() @pytest.mark.parametrize("mock_tts_entity", [MockEntityBoom(DEFAULT_LANG)]) @@ -1100,10 +1125,13 @@ async def test_setup_cache_dir( ) assert len(calls) == 1 - assert await get_media_source_url(hass, calls[0].data[ATTR_MEDIA_CONTENT_ID]) == ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" - ) - await hass.async_block_till_done() + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + assert await get_media_source_url( + hass, calls[0].data[ATTR_MEDIA_CONTENT_ID] + ) == ("/api/tts_proxy/test_token.mp3") + await hass.async_block_till_done() class MockProviderEmpty(MockTTSProvider): @@ -1176,13 +1204,13 @@ async def test_service_get_tts_error( ) -async def test_load_cache_legacy_retrieve_without_mem_cache( +async def test_legacy_cannot_retrieve_without_token( hass: HomeAssistant, mock_provider: MockTTSProvider, mock_tts_cache_dir: Path, hass_client: ClientSessionGenerator, ) -> None: - """Set up component and load cache and get without mem cache.""" + """Verify that a TTS cannot be retrieved by filename directly.""" tts_data = b"" cache_file = ( mock_tts_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_test.mp3" @@ -1196,17 +1224,16 @@ async def test_load_cache_legacy_retrieve_without_mem_cache( url = "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_test.mp3" req = await client.get(url) - assert req.status == HTTPStatus.OK - assert await req.read() == tts_data + assert req.status == HTTPStatus.NOT_FOUND -async def test_load_cache_retrieve_without_mem_cache( +async def test_cannot_retrieve_without_token( hass: HomeAssistant, mock_tts_entity: MockTTSEntity, mock_tts_cache_dir: Path, hass_client: ClientSessionGenerator, ) -> None: - """Set up component and load cache and get without mem cache.""" + """Verify that a TTS cannot be retrieved by filename directly.""" tts_data = b"" cache_file = mock_tts_cache_dir / ( "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" @@ -1220,45 +1247,37 @@ async def test_load_cache_retrieve_without_mem_cache( url = "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" req = await client.get(url) - assert req.status == HTTPStatus.OK - assert await req.read() == tts_data + assert req.status == HTTPStatus.NOT_FOUND @pytest.mark.parametrize( - ("setup", "data", "expected_url_suffix"), + ("setup", "data"), [ - ("mock_setup", {"platform": "test"}, "test"), - ("mock_setup", {"engine_id": "test"}, "test"), - ("mock_config_entry_setup", {"engine_id": "tts.test"}, "tts.test"), + ("mock_setup", {"platform": "test"}), + ("mock_setup", {"engine_id": "test"}), + ("mock_config_entry_setup", {"engine_id": "tts.test"}), ], indirect=["setup"], ) async def test_web_get_url( - hass_client: ClientSessionGenerator, - setup: str, - data: dict[str, Any], - expected_url_suffix: str, + hass_client: ClientSessionGenerator, setup: str, data: dict[str, Any] ) -> None: """Set up a TTS platform and receive file from web.""" client = await hass_client() - url = "/api/tts_get_url" - data |= {"message": "There is someone at the door."} + with patch( + "homeassistant.components.tts.secrets.token_urlsafe", return_value="test_token" + ): + url = "/api/tts_get_url" + data |= {"message": "There is someone at the door."} - req = await client.post(url, json=data) - assert req.status == HTTPStatus.OK - response = await req.json() - assert response == { - "url": ( - 
"http://example.local:8123/api/tts_proxy/" - "42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_-_{expected_url_suffix}.mp3" - ), - "path": ( - "/api/tts_proxy/42f18378fd4393d18c8dd11d03fa9563c1e54491" - f"_en-us_-_{expected_url_suffix}.mp3" - ), - } + req = await client.post(url, json=data) + assert req.status == HTTPStatus.OK + response = await req.json() + assert response == { + "url": ("http://example.local:8123/api/tts_proxy/test_token.mp3"), + "path": ("/api/tts_proxy/test_token.mp3"), + } @pytest.mark.parametrize( @@ -1970,3 +1989,6 @@ async def test_default_engine_prefer_cloud_entity( provider_engine = tts.async_resolve_engine(hass, "test") assert provider_engine == "test" assert tts.async_default_engine(hass) == "tts.cloud_tts_entity" + + # Reset the `cloud` translations cache to avoid flaky translation checks + reset_translation_cache(hass, ["cloud"]) diff --git a/tests/components/twentemilieu/test_init.py b/tests/components/twentemilieu/test_init.py index d4c519d6f66..7e08b5f4938 100644 --- a/tests/components/twentemilieu/test_init.py +++ b/tests/components/twentemilieu/test_init.py @@ -44,18 +44,3 @@ async def test_config_entry_not_ready( assert mock_request.call_count == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY - - -@pytest.mark.usefixtures("mock_twentemilieu") -async def test_update_config_entry_unique_id( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, -) -> None: - """Test the we update old config entries with an unique ID.""" - mock_config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(mock_config_entry, unique_id=None) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.unique_id == "12345" diff --git a/tests/components/unifiprotect/conftest.py b/tests/components/unifiprotect/conftest.py index 0bef1ff0eb9..fad65c095df 100644 --- a/tests/components/unifiprotect/conftest.py +++ b/tests/components/unifiprotect/conftest.py @@ -233,6 +233,8 @@ def doorbell_fixture(camera: Camera, fixed_now: datetime): doorbell.feature_flags.has_speaker = True doorbell.feature_flags.has_privacy_mask = True doorbell.feature_flags.is_doorbell = True + doorbell.feature_flags.has_fingerprint_sensor = True + doorbell.feature_flags.support_nfc = True doorbell.feature_flags.has_chime = True doorbell.feature_flags.has_smart_detect = True doorbell.feature_flags.has_package_camera = True diff --git a/tests/components/unifiprotect/fixtures/sample_bootstrap.json b/tests/components/unifiprotect/fixtures/sample_bootstrap.json index 2b7326831eb..240a9938b64 100644 --- a/tests/components/unifiprotect/fixtures/sample_bootstrap.json +++ b/tests/components/unifiprotect/fixtures/sample_bootstrap.json @@ -57,7 +57,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -118,7 +118,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -134,7 +134,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + 
"camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -246,7 +246,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -314,7 +314,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -365,7 +365,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -381,7 +381,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -432,7 +432,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -448,7 +448,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -496,7 +496,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", @@ -526,7 +526,7 @@ "schedule:create,read,write,delete:*", "legacyUFV:read,write,delete:*", "bridge:create,read,write,delete:*", - "camera:create,read,write,delete,readmedia,deletemedia:*", + "camera:create,read,write,delete,readmedia,readlive,deletemedia:*", "light:create,read,write,delete:*", "sensor:create,read,write,delete:*", "doorlock:create,read,write,delete:*", @@ -546,7 +546,7 @@ "liveview:create", "user:read,write,delete:$", "bridge:read:*", - "camera:read,readmedia:*", + "camera:read,readmedia,readlive:*", "doorlock:read:*", "light:read:*", "sensor:read:*", diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 379f443923a..689352d8aa3 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -9,12 +9,12 @@ from uiprotect.api import DEVICE_UPDATE_INTERVAL from uiprotect.data import Camera as ProtectCamera, CameraChannel, StateType from uiprotect.exceptions import NvrError from uiprotect.websocket import WebsocketState +from webrtc_models import RTCIceCandidateInit from homeassistant.components.camera import ( CameraEntityFeature, CameraState, CameraWebRTCProvider, - RTCIceCandidate, StreamType, WebRTCSendMessage, async_get_image, @@ -77,7 +77,7 @@ class MockWebRTCProvider(CameraWebRTCProvider): """Handle the WebRTC offer and return the answer via the provided callback.""" async def 
async_on_webrtc_candidate( - self, session_id: str, candidate: RTCIceCandidate + self, session_id: str, candidate: RTCIceCandidateInit ) -> None: """Handle the WebRTC candidate.""" diff --git a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py index 9d1a701fe39..cc2195c1dba 100644 --- a/tests/components/unifiprotect/test_event.py +++ b/tests/components/unifiprotect/test_event.py @@ -33,11 +33,11 @@ async def test_camera_remove( ufp.api.bootstrap.nvr.system_info.ustorage = None await init_entry(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) + assert_entity_counts(hass, Platform.EVENT, 3, 3) await remove_entities(hass, ufp, [doorbell, unadopted_camera]) assert_entity_counts(hass, Platform.EVENT, 0, 0) await adopt_devices(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) + assert_entity_counts(hass, Platform.EVENT, 3, 3) async def test_doorbell_ring( @@ -50,7 +50,7 @@ async def test_doorbell_ring( """Test a doorbell ring event.""" await init_entry(hass, ufp, [doorbell, unadopted_camera]) - assert_entity_counts(hass, Platform.EVENT, 1, 1) + assert_entity_counts(hass, Platform.EVENT, 3, 3) events: list[HAEvent] = [] @callback @@ -152,3 +152,177 @@ async def test_doorbell_ring( assert state assert state.state == timestamp unsub() + + +async def test_doorbell_nfc_scanned( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell NFC scanned event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[1] + ) + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.NFC_CARD_SCANNED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"nfc": {"nfc_id": "test_nfc_id", "user_id": "test_user_id"}}, + ) + + new_camera = doorbell.copy() + new_camera.last_nfc_card_scanned_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["nfc_id"] == "test_nfc_id" + + unsub() + + +async def test_doorbell_fingerprint_identified( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell fingerprint identified event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] + ) + + unsub = async_track_state_change_event(hass, 
entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.FINGERPRINT_IDENTIFIED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"fingerprint": {"ulp_id": "test_ulp_id"}}, + ) + + new_camera = doorbell.copy() + new_camera.last_fingerprint_identified_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["ulp_id"] == "test_ulp_id" + + unsub() + + +async def test_doorbell_fingerprint_not_identified( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell fingerprint identified event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 3, 3) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[2] + ) + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.FINGERPRINT_IDENTIFIED, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + metadata={"fingerprint": {}}, + ) + + new_camera = doorbell.copy() + new_camera.last_fingerprint_identified_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + assert state.attributes["ulp_id"] == "" + + unsub() diff --git a/tests/components/unifiprotect/test_init.py b/tests/components/unifiprotect/test_init.py index 46e57c62101..0d88754a110 100644 --- a/tests/components/unifiprotect/test_init.py +++ b/tests/components/unifiprotect/test_init.py @@ -2,8 +2,9 @@ from __future__ import annotations -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch +import pytest from uiprotect import NotAuthorized, NvrError, ProtectApiClient from uiprotect.api import DEVICE_UPDATE_INTERVAL from uiprotect.data import NVR, Bootstrap, CloudAccount, Light @@ -13,6 +14,9 @@ from homeassistant.components.unifiprotect.const import ( CONF_DISABLE_RTSP, DOMAIN, ) +from homeassistant.components.unifiprotect.data import ( + async_ufp_instance_for_config_entry_ids, +) from homeassistant.config_entries import ConfigEntry, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -286,3 +290,58 @@ async def 
test_device_remove_devices_nvr( client = await hass_ws_client(hass) response = await client.remove_device(live_device_entry.id, entry_id) assert not response["success"] + + +@pytest.mark.parametrize( + ("mock_entries", "expected_result"), + [ + pytest.param( + [ + MockConfigEntry( + domain=DOMAIN, + entry_id="1", + data={}, + ), + MockConfigEntry( + domain="other_domain", + entry_id="2", + data={}, + ), + ], + "mock_api_instance_1", + id="one_matching_domain", + ), + pytest.param( + [ + MockConfigEntry( + domain="other_domain", + entry_id="1", + data={}, + ), + MockConfigEntry( + domain="other_domain", + entry_id="2", + data={}, + ), + ], + None, + id="no_matching_domain", + ), + ], +) +async def test_async_ufp_instance_for_config_entry_ids( + hass: HomeAssistant, + mock_entries: list[MockConfigEntry], + expected_result: str | None, +) -> None: + """Test async_ufp_instance_for_config_entry_ids with various entry configurations.""" + + for index, entry in enumerate(mock_entries): + entry.add_to_hass(hass) + entry.runtime_data = Mock(api=f"mock_api_instance_{index + 1}") + + entry_ids = {entry.entry_id for entry in mock_entries} + + result = async_ufp_instance_for_config_entry_ids(hass, entry_ids) + + assert result == expected_result diff --git a/tests/components/unifiprotect/test_media_source.py b/tests/components/unifiprotect/test_media_source.py index 60cd3150884..18944460ca5 100644 --- a/tests/components/unifiprotect/test_media_source.py +++ b/tests/components/unifiprotect/test_media_source.py @@ -669,7 +669,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.RING, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -683,7 +683,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.MOTION, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=[], @@ -697,7 +697,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["person"], @@ -706,7 +706,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", } @@ -720,7 +720,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "person"], @@ -734,7 +734,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -748,7 +748,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -758,7 +758,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - 
"clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", } @@ -772,7 +772,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -782,7 +782,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -802,7 +802,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle", "licensePlate"], @@ -812,7 +812,7 @@ async def test_browse_media_recent_truncated( "license_plate": {"name": "ABC1234", "confidence_level": 95}, "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -823,7 +823,7 @@ async def test_browse_media_recent_truncated( }, }, { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "person", "cropped_id": "event_id", }, @@ -837,7 +837,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["vehicle"], @@ -846,7 +846,7 @@ async def test_browse_media_recent_truncated( metadata={ "detected_thumbnails": [ { - "clock_best_wall": datetime(1000, 1, 1, 0, 0, 0), + "clock_best_wall": datetime(2000, 1, 1, 0, 0, 0), "type": "vehicle", "cropped_id": "event_id", "attributes": { @@ -870,7 +870,7 @@ async def test_browse_media_recent_truncated( model=ModelType.EVENT, id="test_event_id", type=EventType.SMART_AUDIO_DETECT, - start=datetime(1000, 1, 1, 0, 0, 0), + start=datetime(2000, 1, 1, 0, 0, 0), end=None, score=100, smart_detect_types=["alrmSpeak"], diff --git a/tests/components/unifiprotect/test_repairs.py b/tests/components/unifiprotect/test_repairs.py index adb9555e6ea..1117038bbd0 100644 --- a/tests/components/unifiprotect/test_repairs.py +++ b/tests/components/unifiprotect/test_repairs.py @@ -363,3 +363,30 @@ async def test_rtsp_writable_fix_when_not_setup( ufp.api.update_device.assert_called_with( ModelType.CAMERA, doorbell.id, {"channels": channels} ) + + +async def test_rtsp_no_fix_if_third_party( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test no RTSP disabled warning if camera is third-party.""" + + for channel in doorbell.channels: + channel.is_rtsp_enabled = False + for user in ufp.api.bootstrap.users.values(): + user.all_permissions = [] + + ufp.api.get_camera = AsyncMock(return_value=doorbell) + doorbell.is_third_party_camera = True + + await init_entry(hass, ufp, [doorbell]) + await async_process_repairs_platforms(hass) + ws_client = await hass_ws_client(hass) + + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + + assert 
msg["success"] + assert not msg["result"]["issues"] diff --git a/tests/components/unifiprotect/test_views.py b/tests/components/unifiprotect/test_views.py index fed0a98552d..0f1b7791680 100644 --- a/tests/components/unifiprotect/test_views.py +++ b/tests/components/unifiprotect/test_views.py @@ -11,6 +11,7 @@ from uiprotect.exceptions import ClientError from homeassistant.components.unifiprotect.views import ( async_generate_event_video_url, + async_generate_proxy_event_video_url, async_generate_thumbnail_url, ) from homeassistant.core import HomeAssistant @@ -520,3 +521,219 @@ async def test_video_entity_id( assert response.status == 200 ufp.api.request.assert_called_once() + + +async def test_video_event_bad_nvr_id( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + camera: Camera, + ufp: MockUFPFixture, +) -> None: + """Test video proxy URL with bad NVR id.""" + + ufp.api.request = AsyncMock() + await init_entry(hass, ufp, [camera]) + + url = async_generate_proxy_event_video_url("bad_id", "test_id") + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + + assert response.status == 404 + ufp.api.request.assert_not_called() + + +async def test_video_event_bad_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, +) -> None: + """Test generating event with bad event ID.""" + + ufp.api.get_event = AsyncMock(side_effect=ClientError()) + + await init_entry(hass, ufp, [camera]) + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "bad_event_id") + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + assert response.status == 404 + ufp.api.request.assert_not_called() + + +async def test_video_event_bad_camera( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, +) -> None: + """Test generating event with bad camera ID.""" + + ufp.api.get_event = AsyncMock(side_effect=ClientError()) + + await init_entry(hass, ufp, [camera]) + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "bad_event_id") + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + assert response.status == 404 + ufp.api.request.assert_not_called() + + +async def test_video_event_bad_camera_perms( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, + fixed_now: datetime, +) -> None: + """Test video URL with bad camera perms.""" + + ufp.api.request = AsyncMock() + await init_entry(hass, ufp, [camera]) + + event_start = fixed_now - timedelta(seconds=30) + event = Event( + model=ModelType.EVENT, + api=ufp.api, + start=event_start, + end=fixed_now, + id="test_id", + type=EventType.MOTION, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id="bad_id", + camera=camera, + ) + + ufp.api.get_event = AsyncMock(return_value=event) + + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "test_id") + + ufp.api.bootstrap.auth_user.all_permissions = [] + ufp.api.bootstrap.auth_user._perm_cache = {} + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + + assert response.status == 404 + ufp.api.request.assert_not_called() + + +async def test_video_event_ongoing( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, + fixed_now: datetime, +) -> None: + """Test video URL with 
ongoing event.""" + + ufp.api.request = AsyncMock() + await init_entry(hass, ufp, [camera]) + + event_start = fixed_now - timedelta(seconds=30) + event = Event( + model=ModelType.EVENT, + api=ufp.api, + start=event_start, + id="test_id", + type=EventType.MOTION, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=camera.id, + camera=camera, + ) + + ufp.api.get_event = AsyncMock(return_value=event) + + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "test_id") + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + + assert response.status == 400 + ufp.api.request.assert_not_called() + + +async def test_event_video_no_data( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, + fixed_now: datetime, +) -> None: + """Test invalid no event video returned.""" + + await init_entry(hass, ufp, [camera]) + event_start = fixed_now - timedelta(seconds=30) + event = Event( + model=ModelType.EVENT, + api=ufp.api, + start=event_start, + end=fixed_now, + id="test_id", + type=EventType.MOTION, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=camera.id, + camera=camera, + ) + + ufp.api.request = AsyncMock(side_effect=ClientError) + ufp.api.get_event = AsyncMock(return_value=event) + + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "test_id") + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + + assert response.status == 404 + + +async def test_event_video( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + ufp: MockUFPFixture, + camera: Camera, + fixed_now: datetime, +) -> None: + """Test event video URL with no video.""" + + content = Mock() + content.__anext__ = AsyncMock(side_effect=[b"test", b"test", StopAsyncIteration()]) + content.__aiter__ = Mock(return_value=content) + + mock_response = Mock() + mock_response.content_length = 8 + mock_response.content.iter_chunked = Mock(return_value=content) + + ufp.api.request = AsyncMock(return_value=mock_response) + await init_entry(hass, ufp, [camera]) + event_start = fixed_now - timedelta(seconds=30) + event = Event( + model=ModelType.EVENT, + api=ufp.api, + start=event_start, + end=fixed_now, + id="test_id", + type=EventType.MOTION, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=camera.id, + camera=camera, + ) + + ufp.api.get_event = AsyncMock(return_value=event) + + url = async_generate_proxy_event_video_url(ufp.api.bootstrap.nvr.id, "test_id") + + http_client = await hass_client() + response = cast(ClientResponse, await http_client.get(url)) + assert await response.content.read() == b"testtest" + + assert response.status == 200 + ufp.api.request.assert_called_once() diff --git a/tests/components/unifiprotect/utils.py b/tests/components/unifiprotect/utils.py index 25a9ddcbb92..5a1ffa8258e 100644 --- a/tests/components/unifiprotect/utils.py +++ b/tests/components/unifiprotect/utils.py @@ -109,7 +109,11 @@ def ids_from_device_description( """Return expected unique_id and entity_id for a give platform/device/description combination.""" entity_name = normalize_name(device.display_name) - description_entity_name = normalize_name(str(description.name)) + + if description.name and isinstance(description.name, str): + description_entity_name = normalize_name(description.name) + else: + description_entity_name = normalize_name(description.key) unique_id = 
f"{device.mac}_{description.key}" entity_id = f"{platform.value}.{entity_name}_{description_entity_name}" diff --git a/tests/components/utility_meter/snapshots/test_diagnostics.ambr b/tests/components/utility_meter/snapshots/test_diagnostics.ambr index c69164264da..6cdf121d7e3 100644 --- a/tests/components/utility_meter/snapshots/test_diagnostics.ambr +++ b/tests/components/utility_meter/snapshots/test_diagnostics.ambr @@ -41,7 +41,17 @@ 'status': 'collecting', 'tariff': 'tariff0', }), - 'last_sensor_data': None, + 'last_sensor_data': dict({ + 'last_period': '0', + 'last_reset': '2024-04-05T00:00:00+00:00', + 'last_valid_state': 3, + 'native_unit_of_measurement': 'kWh', + 'native_value': dict({ + '__type': "", + 'decimal_str': '3', + }), + 'status': 'collecting', + }), 'name': 'Energy Bill tariff0', 'period': 'monthly', 'source': 'sensor.input1', @@ -57,7 +67,17 @@ 'status': 'paused', 'tariff': 'tariff1', }), - 'last_sensor_data': None, + 'last_sensor_data': dict({ + 'last_period': '0', + 'last_reset': '2024-04-05T00:00:00+00:00', + 'last_valid_state': 7, + 'native_unit_of_measurement': 'kWh', + 'native_value': dict({ + '__type': "", + 'decimal_str': '7', + }), + 'status': 'paused', + }), 'name': 'Energy Bill tariff1', 'period': 'monthly', 'source': 'sensor.input1', diff --git a/tests/components/utility_meter/test_diagnostics.py b/tests/components/utility_meter/test_diagnostics.py index 9ecabe813b1..8be5f949940 100644 --- a/tests/components/utility_meter/test_diagnostics.py +++ b/tests/components/utility_meter/test_diagnostics.py @@ -91,7 +91,17 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": 3, + "status": "collecting", + }, ), ( State( @@ -101,7 +111,17 @@ async def test_diagnostics( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "7", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": 7, + "status": "paused", + }, ), ], ) diff --git a/tests/components/utility_meter/test_sensor.py b/tests/components/utility_meter/test_sensor.py index 745bf0ce012..348afac57f7 100644 --- a/tests/components/utility_meter/test_sensor.py +++ b/tests/components/utility_meter/test_sensor.py @@ -26,7 +26,6 @@ from homeassistant.components.utility_meter.const import ( ) from homeassistant.components.utility_meter.sensor import ( ATTR_LAST_RESET, - ATTR_LAST_VALID_STATE, ATTR_STATUS, COLLECTING, PAUSED, @@ -760,64 +759,6 @@ async def test_restore_state( "status": "paused", }, ), - # sensor.energy_bill_tariff2 has missing keys and falls back to - # saved state - ( - State( - "sensor.energy_bill_tariff2", - "2.1", - attributes={ - ATTR_STATUS: PAUSED, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - { - "native_value": { - "__type": "", - "decimal_str": "2.2", - }, - "native_unit_of_measurement": "kWh", - "last_valid_state": "None", - }, - ), - # sensor.energy_bill_tariff3 has invalid data and falls back to - # saved state - ( - State( - "sensor.energy_bill_tariff3", - "3.1", - attributes={ - ATTR_STATUS: COLLECTING, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - { - "native_value": { - "__type": "", - "decimal_str": "3f", # Invalid - }, 
- "native_unit_of_measurement": "kWh", - "last_valid_state": "None", - }, - ), - # No extra saved data, fall back to saved state - ( - State( - "sensor.energy_bill_tariff4", - "error", - attributes={ - ATTR_STATUS: COLLECTING, - ATTR_LAST_RESET: last_reset_1, - ATTR_LAST_VALID_STATE: None, - ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.MEGA_WATT_HOUR, - }, - ), - {}, - ), ], ) @@ -852,25 +793,6 @@ async def test_restore_state( assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - state = hass.states.get("sensor.energy_bill_tariff2") - assert state.state == "2.1" - assert state.attributes.get("status") == PAUSED - assert state.attributes.get("last_reset") == last_reset_1 - assert state.attributes.get("last_valid_state") == "None" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - - state = hass.states.get("sensor.energy_bill_tariff3") - assert state.state == "3.1" - assert state.attributes.get("status") == COLLECTING - assert state.attributes.get("last_reset") == last_reset_1 - assert state.attributes.get("last_valid_state") == "None" - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.MEGA_WATT_HOUR - assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY - - state = hass.states.get("sensor.energy_bill_tariff4") - assert state.state == STATE_UNKNOWN - # utility_meter is loaded, now set sensors according to utility_meter: hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -882,12 +804,7 @@ async def test_restore_state( state = hass.states.get("sensor.energy_bill_tariff0") assert state.attributes.get("status") == COLLECTING - for entity_id in ( - "sensor.energy_bill_tariff1", - "sensor.energy_bill_tariff2", - "sensor.energy_bill_tariff3", - "sensor.energy_bill_tariff4", - ): + for entity_id in ("sensor.energy_bill_tariff1",): state = hass.states.get(entity_id) assert state.attributes.get("status") == PAUSED @@ -939,7 +856,18 @@ async def test_service_reset_no_tariffs( ATTR_LAST_RESET: last_reset, }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "last_valid_state": None, + "status": "collecting", + "input_device_class": "energy", + }, ), ], ) @@ -1045,21 +973,33 @@ async def test_service_reset_no_tariffs_correct_with_multi( State( "sensor.energy_bill", "3", - attributes={ - ATTR_LAST_RESET: last_reset, - }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "3", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "status": "collecting", + }, ), ( State( "sensor.water_bill", "6", - attributes={ - ATTR_LAST_RESET: last_reset, - }, ), - {}, + { + "native_value": { + "__type": "", + "decimal_str": "6", + }, + "native_unit_of_measurement": "kWh", + "last_reset": last_reset, + "last_period": "0", + "status": "collecting", + }, ), ], ) @@ -1804,6 +1744,43 @@ async def test_self_reset_hourly_dst(hass: HomeAssistant) -> None: ) +async def test_self_reset_hourly_dst2(hass: HomeAssistant) -> None: + """Test weekly reset of meter in DST change conditions.""" + + hass.config.time_zone = "Europe/Berlin" + dt_util.set_default_time_zone(dt_util.get_time_zone(hass.config.time_zone)) + await _test_self_reset( + hass, gen_config("daily"), "2024-10-26T23:59:00.000000+02:00" + ) + + 
state = hass.states.get("sensor.energy_bill") + last_reset = dt_util.parse_datetime("2024-10-27T00:00:00.000000+02:00") + assert ( + dt_util.as_local(dt_util.parse_datetime(state.attributes.get("last_reset"))) + == last_reset + ) + + next_reset = dt_util.parse_datetime("2024-10-28T00:00:00.000000+01:00").isoformat() + assert state.attributes.get("next_reset") == next_reset + + +async def test_tz_changes(hass: HomeAssistant) -> None: + """Test that a timezone change changes the scheduler.""" + + await hass.config.async_update(time_zone="Europe/Prague") + + await _test_self_reset( + hass, gen_config("daily"), "2024-10-26T23:59:00.000000+02:00" + ) + state = hass.states.get("sensor.energy_bill") + assert state.attributes.get("next_reset") == "2024-10-28T00:00:00+01:00" + + await hass.config.async_update(time_zone="Pacific/Fiji") + + state = hass.states.get("sensor.energy_bill") + assert state.attributes.get("next_reset") != "2024-10-28T00:00:00+01:00" + + async def test_self_reset_daily(hass: HomeAssistant) -> None: """Test daily reset of meter.""" await _test_self_reset( diff --git a/tests/components/vera/test_light.py b/tests/components/vera/test_light.py index 6bdc3df9a64..e66d19ec46e 100644 --- a/tests/components/vera/test_light.py +++ b/tests/components/vera/test_light.py @@ -52,13 +52,13 @@ async def test_light( {"entity_id": entity_id, ATTR_HS_COLOR: [300, 70]}, ) await hass.async_block_till_done() - vera_device.set_color.assert_called_with((255, 76, 255)) + vera_device.set_color.assert_called_with((255, 77, 255)) vera_device.is_switched_on.return_value = True - vera_device.get_color.return_value = (255, 76, 255) + vera_device.get_color.return_value = (255, 77, 255) update_callback(vera_device) await hass.async_block_till_done() assert hass.states.get(entity_id).state == "on" - assert hass.states.get(entity_id).attributes["hs_color"] == (300.0, 70.196) + assert hass.states.get(entity_id).attributes["hs_color"] == (300.0, 69.804) await hass.services.async_call( "light", diff --git a/tests/components/vesync/snapshots/test_fan.ambr b/tests/components/vesync/snapshots/test_fan.ambr index 21985afd7bf..60af4ae3d5b 100644 --- a/tests/components/vesync/snapshots/test_fan.ambr +++ b/tests/components/vesync/snapshots/test_fan.ambr @@ -67,7 +67,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': 'air-purifier', 'unit_of_measurement': None, }), @@ -158,7 +158,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': 'asd_sdfKIHG7IJHGwJGJ7GJ_ag5h3G55', 'unit_of_measurement': None, }), @@ -256,7 +256,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': '400s-purifier', 'unit_of_measurement': None, }), @@ -355,7 +355,7 @@ 'platform': 'vesync', 'previous_unique_id': None, 'supported_features': , - 'translation_key': None, + 'translation_key': 'vesync', 'unique_id': '600s-purifier', 'unit_of_measurement': None, }), diff --git a/tests/components/vicare/snapshots/test_fan.ambr b/tests/components/vicare/snapshots/test_fan.ambr index 8ec4bc41d8d..3ecc4277fd9 100644 --- a/tests/components/vicare/snapshots/test_fan.ambr +++ b/tests/components/vicare/snapshots/test_fan.ambr @@ -29,7 +29,7 @@ 'options': dict({ }), 'original_device_class': None, - 'original_icon': None, + 'original_icon': 'mdi:fan', 'original_name': 
'Ventilation', 'platform': 'vicare', 'previous_unique_id': None, @@ -43,6 +43,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'model0 Ventilation', + 'icon': 'mdi:fan', 'percentage': 0, 'percentage_step': 25.0, 'preset_mode': None, diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py index 20a728cf3cd..d55d2f97017 100644 --- a/tests/components/websocket_api/test_auth.py +++ b/tests/components/websocket_api/test_auth.py @@ -293,6 +293,6 @@ async def test_auth_sending_unknown_type_disconnects( auth_msg = await ws.receive_json() assert auth_msg["type"] == TYPE_AUTH_REQUIRED - await ws._writer._send_frame(b"1" * 130, 0x30) + await ws._writer.send_frame(b"1" * 130, 0x30) auth_msg = await ws.receive() assert auth_msg.type == WSMsgType.close diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py index c1a043f915b..22e839d84e4 100644 --- a/tests/components/websocket_api/test_commands.py +++ b/tests/components/websocket_api/test_commands.py @@ -2390,6 +2390,9 @@ async def test_execute_script( ), ], ) +@pytest.mark.parametrize( + "ignore_translations", ["component.test.exceptions.test_error.message"] +) async def test_execute_script_err_localization( hass: HomeAssistant, websocket_client: MockHAClientWebSocket, diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 2530d885942..03e30c11ee9 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -5,7 +5,7 @@ from datetime import timedelta from typing import Any, cast from unittest.mock import patch -from aiohttp import WSMsgType, WSServerHandshakeError, web +from aiohttp import ServerDisconnectedError, WSMsgType, web import pytest from homeassistant.components.websocket_api import ( @@ -374,7 +374,7 @@ async def test_prepare_fail_timeout( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(TimeoutError, web.WebSocketResponse.prepare), ), - pytest.raises(WSServerHandshakeError), + pytest.raises(ServerDisconnectedError), ): await hass_ws_client(hass) @@ -392,7 +392,7 @@ async def test_prepare_fail_connection_reset( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(ConnectionResetError, web.WebSocketResponse.prepare), ), - pytest.raises(WSServerHandshakeError), + pytest.raises(ServerDisconnectedError), ): await hass_ws_client(hass) diff --git a/tests/components/wled/snapshots/test_diagnostics.ambr b/tests/components/wled/snapshots/test_diagnostics.ambr index 90732c02c36..46953b00440 100644 --- a/tests/components/wled/snapshots/test_diagnostics.ambr +++ b/tests/components/wled/snapshots/test_diagnostics.ambr @@ -224,7 +224,7 @@ 'udpport': 21324, 'uptime': 966, 'ver': '0.14.4', - 'vid': 2405180, + 'vid': '2405180', 'wifi': '**REDACTED**', }), 'palettes': dict({ diff --git a/tests/components/workday/test_config_flow.py b/tests/components/workday/test_config_flow.py index cc83cee93a2..1bf0f176fe9 100644 --- a/tests/components/workday/test_config_flow.py +++ b/tests/components/workday/test_config_flow.py @@ -557,7 +557,7 @@ pytestmark = pytest.mark.usefixtures() ("language", "holiday"), [ ("de", "Weihnachtstag"), - ("en", "Christmas"), + ("en_US", "Christmas"), ], ) async def test_language( diff --git a/tests/components/workday/test_repairs.py b/tests/components/workday/test_repairs.py index e25d4e0ca45..adbae5676e6 100644 --- 
a/tests/components/workday/test_repairs.py +++ b/tests/components/workday/test_repairs.py @@ -2,6 +2,8 @@ from __future__ import annotations +import pytest + from homeassistant.components.workday.const import CONF_REMOVE_HOLIDAYS, DOMAIN from homeassistant.const import CONF_COUNTRY from homeassistant.core import HomeAssistant @@ -427,6 +429,10 @@ async def test_bad_date_holiday( assert issue +@pytest.mark.parametrize( + "ignore_translations", + ["component.workday.issues.issue_1.title"], +) async def test_other_fixable_issues( hass: HomeAssistant, hass_client: ClientSessionGenerator, diff --git a/tests/components/yale_smart_alarm/test_config_flow.py b/tests/components/yale_smart_alarm/test_config_flow.py index e325e259806..e5b59f79463 100644 --- a/tests/components/yale_smart_alarm/test_config_flow.py +++ b/tests/components/yale_smart_alarm/test_config_flow.py @@ -239,6 +239,211 @@ async def test_reauth_flow_error( } +async def test_reconfigure(hass: HomeAssistant) -> None: + """Test reconfigure config flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test-username", + data={ + "username": "test-username", + "password": "test-password", + "name": "Yale Smart Alarm", + "area_id": "1", + }, + version=2, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "new-test-password", + "area_id": "2", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "test-username", + "password": "new-test-password", + "name": "Yale Smart Alarm", + "area_id": "2", + } + + +async def test_reconfigure_username_exist(hass: HomeAssistant) -> None: + """Test reconfigure config flow abort other username already exist.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test-username", + data={ + "username": "test-username", + "password": "test-password", + "name": "Yale Smart Alarm", + "area_id": "1", + }, + version=2, + ) + entry.add_to_hass(hass) + entry2 = MockConfigEntry( + domain=DOMAIN, + unique_id="other-username", + data={ + "username": "other-username", + "password": "test-password", + "name": "Yale Smart Alarm 2", + "area_id": "1", + }, + version=2, + ) + entry2.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "other-username", + "password": "test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "unique_id_exists"} + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + { + "username": "other-new-username", + "password": "test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "other-new-username", + "name": "Yale Smart Alarm", + "password": "test-password", + "area_id": "1", + } + + +@pytest.mark.parametrize( + ("sideeffect", "p_error"), + [ + (AuthenticationError, "invalid_auth"), + (ConnectionError, "cannot_connect"), + (TimeoutError, "cannot_connect"), + (UnknownError, "cannot_connect"), + ], +) +async def test_reconfigure_flow_error( + hass: HomeAssistant, sideeffect: Exception, p_error: str +) -> None: + """Test a reauthentication flow.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="test-username", + data={ + "username": "test-username", + "password": "test-password", + "name": "Yale Smart Alarm", + "area_id": "1", + }, + version=2, + ) + entry.add_to_hass(hass) + + result = await entry.start_reconfigure_flow(hass) + + with patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + side_effect=sideeffect, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "update-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "reconfigure" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": p_error} + + with ( + patch( + "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", + return_value="", + ), + patch( + "homeassistant.components.yale_smart_alarm.async_setup_entry", + return_value=True, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "username": "test-username", + "password": "new-test-password", + "area_id": "1", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert entry.data == { + "username": "test-username", + "name": "Yale Smart Alarm", + "password": "new-test-password", + "area_id": "1", + } + + async def test_options_flow(hass: HomeAssistant) -> None: """Test options config flow.""" entry = MockConfigEntry( diff --git a/tests/components/yeelight/test_light.py b/tests/components/yeelight/test_light.py index eba4d4fe284..518537262b2 100644 --- a/tests/components/yeelight/test_light.py +++ b/tests/components/yeelight/test_light.py @@ -946,8 +946,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp", "hs", "rgb"], "hs_color": (26.812, 34.87), - "rgb_color": (255, 205, 166), - "xy_color": (0.421, 0.364), + "rgb_color": (255, 206, 166), + "xy_color": (0.42, 0.365), }, nightlight_entity_properties={ "supported_features": 0, @@ -959,8 +959,8 @@ async def test_device_types( "effect": None, "supported_features": SUPPORT_YEELIGHT, "hs_color": (28.401, 100.0), - "rgb_color": (255, 120, 0), - "xy_color": (0.621, 0.367), + "rgb_color": (255, 121, 0), + "xy_color": (0.62, 0.368), "min_color_temp_kelvin": model_specs["color_temp"]["min"], "max_color_temp_kelvin": color_temperature_mired_to_kelvin( color_temperature_kelvin_to_mired(model_specs["color_temp"]["max"]) @@ -1191,8 +1191,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp"], "hs_color": (26.812, 34.87), - "rgb_color": (255, 205, 166), - 
"xy_color": (0.421, 0.364), + "rgb_color": (255, 206, 166), + "xy_color": (0.42, 0.365), }, nightlight_entity_properties={ "supported_features": 0, @@ -1226,8 +1226,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp"], "hs_color": (28.391, 65.659), - "rgb_color": (255, 166, 87), - "xy_color": (0.526, 0.387), + "rgb_color": (255, 167, 88), + "xy_color": (0.524, 0.388), }, ) @@ -1263,8 +1263,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp"], "hs_color": (26.812, 34.87), - "rgb_color": (255, 205, 166), - "xy_color": (0.421, 0.364), + "rgb_color": (255, 206, 166), + "xy_color": (0.42, 0.365), }, nightlight_entity_properties={ "supported_features": 0, @@ -1301,8 +1301,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp"], "hs_color": (28.391, 65.659), - "rgb_color": (255, 166, 87), - "xy_color": (0.526, 0.387), + "rgb_color": (255, 167, 88), + "xy_color": (0.524, 0.388), }, ) # Background light - color mode CT @@ -1326,8 +1326,8 @@ async def test_device_types( "color_mode": "color_temp", "supported_color_modes": ["color_temp", "hs", "rgb"], "hs_color": (27.001, 19.243), - "rgb_color": (255, 228, 205), - "xy_color": (0.372, 0.35), + "rgb_color": (255, 228, 206), + "xy_color": (0.371, 0.349), }, name=f"{UNIQUE_FRIENDLY_NAME} Ambilight", entity_id=f"{ENTITY_LIGHT}_ambilight", diff --git a/tests/components/zerproc/test_light.py b/tests/components/zerproc/test_light.py index 6e00cfbde4c..724414b5965 100644 --- a/tests/components/zerproc/test_light.py +++ b/tests/components/zerproc/test_light.py @@ -215,7 +215,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: ) await hass.async_block_till_done() - mock_set_color.assert_called_with(19, 17, 25) + mock_set_color.assert_called_with(20, 17, 25) with patch.object(mock_light, "set_color") as mock_set_color: await hass.services.async_call( @@ -226,7 +226,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: ) await hass.async_block_till_done() - mock_set_color.assert_called_with(220, 201, 110) + mock_set_color.assert_called_with(220, 202, 110) with patch.object( mock_light, @@ -246,7 +246,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: ) await hass.async_block_till_done() - mock_set_color.assert_called_with(75, 68, 37) + mock_set_color.assert_called_with(75, 69, 38) with patch.object(mock_light, "set_color") as mock_set_color: await hass.services.async_call( @@ -261,7 +261,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: ) await hass.async_block_till_done() - mock_set_color.assert_called_with(162, 200, 50) + mock_set_color.assert_called_with(163, 200, 50) async def test_light_turn_off(hass: HomeAssistant, mock_light) -> None: @@ -352,6 +352,6 @@ async def test_light_update(hass: HomeAssistant, mock_light) -> None: ATTR_COLOR_MODE: ColorMode.HS, ATTR_BRIGHTNESS: 220, ATTR_HS_COLOR: (261.429, 31.818), - ATTR_RGB_COLOR: (202, 173, 255), - ATTR_XY_COLOR: (0.291, 0.232), + ATTR_RGB_COLOR: (203, 174, 255), + ATTR_XY_COLOR: (0.292, 0.234), } diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index 87ba46a4ced..e0229ebe049 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -154,104 +154,180 @@ def com_port(device="/dev/ttyUSB1234") -> ListPortInfo: return port +@pytest.mark.parametrize( + ("entry_name", "unique_id", 
"radio_type", "service_info"), + [ + ( + # TubesZB, old ESPHome devices (ZNP) + "tubeszb-cc2652-poe", + "tubeszb-cc2652-poe", + RadioType.znp, + zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="tubeszb-cc2652-poe.local.", + name="tubeszb-cc2652-poe._esphomelib._tcp.local.", + port=6053, # the ESPHome API port is remapped to 6638 + type="_esphomelib._tcp.local.", + properties={ + "project_version": "3.0", + "project_name": "tubezb.cc2652-poe", + "network": "ethernet", + "board": "esp32-poe", + "platform": "ESP32", + "maс": "8c4b14c33c24", + "version": "2023.12.8", + }, + ), + ), + ( + # TubesZB, old ESPHome device (EFR32) + "tubeszb-efr32-poe", + "tubeszb-efr32-poe", + RadioType.ezsp, + zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="tubeszb-efr32-poe.local.", + name="tubeszb-efr32-poe._esphomelib._tcp.local.", + port=6053, # the ESPHome API port is remapped to 6638 + type="_esphomelib._tcp.local.", + properties={ + "project_version": "3.0", + "project_name": "tubezb.efr32-poe", + "network": "ethernet", + "board": "esp32-poe", + "platform": "ESP32", + "maс": "8c4b14c33c24", + "version": "2023.12.8", + }, + ), + ), + ( + # TubesZB, newer devices + "TubeZB", + "tubeszb-cc2652-poe", + RadioType.znp, + zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="tubeszb-cc2652-poe.local.", + name="tubeszb-cc2652-poe._tubeszb._tcp.local.", + port=6638, + properties={ + "name": "TubeZB", + "radio_type": "znp", + "version": "1.0", + "baud_rate": "115200", + "data_flow_control": "software", + }, + type="_tubeszb._tcp.local.", + ), + ), + ( + # Expected format for all new devices + "Some Zigbee Gateway (12345)", + "aabbccddeeff", + RadioType.znp, + zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + hostname="some-zigbee-gateway-12345.local.", + name="Some Zigbee Gateway (12345)._zigbee-coordinator._tcp.local.", + port=6638, + properties={"radio_type": "znp", "serial_number": "aabbccddeeff"}, + type="_zigbee-coordinator._tcp.local.", + ), + ), + ], +) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) -async def test_zeroconf_discovery_znp(hass: HomeAssistant) -> None: +@patch(f"bellows.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) +async def test_zeroconf_discovery( + entry_name: str, + unique_id: str, + radio_type: RadioType, + service_info: zeroconf.ZeroconfServiceInfo, + hass: HomeAssistant, +) -> None: """Test zeroconf flow -- radio detected.""" - service_info = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.200"), - ip_addresses=[ip_address("192.168.1.200")], - hostname="tube._tube_zb_gw._tcp.local.", - name="tube", - port=6053, - properties={"name": "tube_123456"}, - type="mock_type", - ) - flow = await hass.config_entries.flow.async_init( + result_init = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info ) - assert flow["step_id"] == "confirm" - - # Confirm discovery - result1 = await hass.config_entries.flow.async_configure( - flow["flow_id"], user_input={} - ) - assert result1["step_id"] == "manual_port_config" + assert result_init["step_id"] == "confirm" # Confirm port settings - result2 = await 
hass.config_entries.flow.async_configure( - result1["flow_id"], user_input={} + result_confirm = await hass.config_entries.flow.async_configure( + result_init["flow_id"], user_input={} ) - assert result2["type"] is FlowResultType.MENU - assert result2["step_id"] == "choose_formation_strategy" + assert result_confirm["type"] is FlowResultType.MENU + assert result_confirm["step_id"] == "choose_formation_strategy" - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result_form = await hass.config_entries.flow.async_configure( + result_confirm["flow_id"], user_input={"next_step_id": config_flow.FORMATION_REUSE_SETTINGS}, ) await hass.async_block_till_done() - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "socket://192.168.1.200:6638" - assert result3["data"] == { + assert result_form["type"] is FlowResultType.CREATE_ENTRY + assert result_form["title"] == entry_name + assert result_form["context"]["unique_id"] == unique_id + assert result_form["data"] == { CONF_DEVICE: { CONF_BAUDRATE: 115200, CONF_FLOW_CONTROL: None, CONF_DEVICE_PATH: "socket://192.168.1.200:6638", }, - CONF_RADIO_TYPE: "znp", + CONF_RADIO_TYPE: radio_type.name, } @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @patch(f"zigpy_zigate.{PROBE_FUNCTION_PATH}") -async def test_zigate_via_zeroconf(setup_entry_mock, hass: HomeAssistant) -> None: +async def test_legacy_zeroconf_discovery_zigate( + setup_entry_mock, hass: HomeAssistant +) -> None: """Test zeroconf flow -- zigate radio detected.""" service_info = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.200"), ip_addresses=[ip_address("192.168.1.200")], - hostname="_zigate-zigbee-gateway._tcp.local.", - name="any", + hostname="_zigate-zigbee-gateway.local.", + name="some name._zigate-zigbee-gateway._tcp.local.", port=1234, - properties={"radio_type": "zigate"}, + properties={}, type="mock_type", ) - flow = await hass.config_entries.flow.async_init( + result_init = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info ) - assert flow["step_id"] == "confirm" - - # Confirm discovery - result1 = await hass.config_entries.flow.async_configure( - flow["flow_id"], user_input={} - ) - assert result1["step_id"] == "manual_port_config" + assert result_init["step_id"] == "confirm" # Confirm the radio is deprecated - result2 = await hass.config_entries.flow.async_configure( - flow["flow_id"], user_input={} + result_confirm_deprecated = await hass.config_entries.flow.async_configure( + result_init["flow_id"], user_input={} ) - assert result2["step_id"] == "verify_radio" - assert "ZiGate" in result2["description_placeholders"]["name"] + assert result_confirm_deprecated["step_id"] == "verify_radio" + assert "ZiGate" in result_confirm_deprecated["description_placeholders"]["name"] # Confirm port settings - result3 = await hass.config_entries.flow.async_configure( - result1["flow_id"], user_input={} + result_confirm = await hass.config_entries.flow.async_configure( + result_confirm_deprecated["flow_id"], user_input={} ) - assert result3["type"] is FlowResultType.MENU - assert result3["step_id"] == "choose_formation_strategy" + assert result_confirm["type"] is FlowResultType.MENU + assert result_confirm["step_id"] == "choose_formation_strategy" - result4 = await hass.config_entries.flow.async_configure( - result3["flow_id"], + result_form = await hass.config_entries.flow.async_configure( + result_confirm["flow_id"], 
user_input={"next_step_id": config_flow.FORMATION_REUSE_SETTINGS}, ) await hass.async_block_till_done() - assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["title"] == "socket://192.168.1.200:1234" - assert result4["data"] == { + assert result_form["type"] is FlowResultType.CREATE_ENTRY + assert result_form["title"] == "some name" + assert result_form["data"] == { CONF_DEVICE: { CONF_DEVICE_PATH: "socket://192.168.1.200:1234", CONF_BAUDRATE: 115200, @@ -261,75 +337,50 @@ async def test_zigate_via_zeroconf(setup_entry_mock, hass: HomeAssistant) -> Non } -@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) -@patch(f"bellows.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) -async def test_efr32_via_zeroconf(hass: HomeAssistant) -> None: - """Test zeroconf flow -- efr32 radio detected.""" +async def test_zeroconf_discovery_bad_payload(hass: HomeAssistant) -> None: + """Test zeroconf flow with a bad payload.""" service_info = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.200"), ip_addresses=[ip_address("192.168.1.200")], - hostname="efr32._esphomelib._tcp.local.", - name="efr32", + hostname="some.hostname", + name="any", port=1234, - properties={}, - type="mock_type", + properties={"radio_type": "some bogus radio"}, + type="_zigbee-coordinator._tcp.local.", ) - flow = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info ) - assert flow["step_id"] == "confirm" - - # Confirm discovery - result1 = await hass.config_entries.flow.async_configure( - flow["flow_id"], user_input={} - ) - assert result1["step_id"] == "manual_port_config" - - # Confirm port settings - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], user_input={} - ) - - assert result2["type"] is FlowResultType.MENU - assert result2["step_id"] == "choose_formation_strategy" - - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], - user_input={"next_step_id": config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "socket://192.168.1.200:1234" - assert result3["data"] == { - CONF_DEVICE: { - CONF_DEVICE_PATH: "socket://192.168.1.200:1234", - CONF_BAUDRATE: 115200, - CONF_FLOW_CONTROL: None, - }, - CONF_RADIO_TYPE: "ezsp", - } + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "invalid_zeroconf_data" @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) -async def test_discovery_via_zeroconf_ip_change_ignored(hass: HomeAssistant) -> None: +async def test_legacy_zeroconf_discovery_ip_change_ignored(hass: HomeAssistant) -> None: """Test zeroconf flow that was ignored gets updated.""" + entry = MockConfigEntry( domain=DOMAIN, - unique_id="tube_zb_gw_cc2652p2_poe", + unique_id="tubeszb-cc2652-poe", source=config_entries.SOURCE_IGNORE, ) entry.add_to_hass(hass) service_info = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.22"), - ip_addresses=[ip_address("192.168.1.22")], - hostname="tube_zb_gw_cc2652p2_poe.local.", - name="mock_name", - port=6053, - properties={"address": "tube_zb_gw_cc2652p2_poe.local"}, - type="mock_type", + ip_address=ip_address("192.168.1.200"), + ip_addresses=[ip_address("192.168.1.200")], + 
hostname="tubeszb-cc2652-poe.local.", + name="tubeszb-cc2652-poe._tubeszb._tcp.local.", + port=6638, + properties={ + "name": "TubeZB", + "radio_type": "znp", + "version": "1.0", + "baud_rate": "115200", + "data_flow_control": "software", + }, + type="_tubeszb._tcp.local.", ) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, data=service_info @@ -338,11 +389,13 @@ async def test_discovery_via_zeroconf_ip_change_ignored(hass: HomeAssistant) -> assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" assert entry.data[CONF_DEVICE] == { - CONF_DEVICE_PATH: "socket://192.168.1.22:6638", + CONF_DEVICE_PATH: "socket://192.168.1.200:6638", } -async def test_discovery_confirm_final_abort_if_entries(hass: HomeAssistant) -> None: +async def test_legacy_zeroconf_discovery_confirm_final_abort_if_entries( + hass: HomeAssistant, +) -> None: """Test discovery aborts if ZHA was set up after the confirmation dialog is shown.""" service_info = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.200"), @@ -677,7 +730,7 @@ async def test_discovery_via_usb_zha_ignored_updates(hass: HomeAssistant) -> Non @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) -async def test_discovery_already_setup(hass: HomeAssistant) -> None: +async def test_legacy_zeroconf_discovery_already_setup(hass: HomeAssistant) -> None: """Test zeroconf flow -- radio detected.""" service_info = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.200"), diff --git a/tests/components/zha/test_helpers.py b/tests/components/zha/test_helpers.py index f6dc8291d9f..f8a809df51e 100644 --- a/tests/components/zha/test_helpers.py +++ b/tests/components/zha/test_helpers.py @@ -182,8 +182,8 @@ def test_exclude_none_values( result = exclude_none_values(obj) assert result == expected_output - for key in expected_output: - assert expected_output[key] == obj[key] + for key, value in expected_output.items(): + assert value == obj[key] async def test_create_zha_config_remove_unused( diff --git a/tests/components/zha/test_update.py b/tests/components/zha/test_update.py index cd48ae62ff3..c8cbc407106 100644 --- a/tests/components/zha/test_update.py +++ b/tests/components/zha/test_update.py @@ -1,6 +1,6 @@ """Test ZHA firmware updates.""" -from unittest.mock import AsyncMock, call, patch +from unittest.mock import AsyncMock, PropertyMock, call, patch import pytest from zha.application.platforms.update import ( @@ -14,6 +14,7 @@ from zigpy.profiles import zha import zigpy.types as t from zigpy.zcl import foundation from zigpy.zcl.clusters import general +import zigpy.zdo.types as zdo_t from homeassistant.components.homeassistant import ( DOMAIN as HA_DOMAIN, @@ -33,6 +34,10 @@ from homeassistant.components.zha.helpers import ( get_zha_gateway, get_zha_gateway_proxy, ) +from homeassistant.components.zha.update import ( + OTA_MESSAGE_BATTERY_POWERED, + OTA_MESSAGE_RELIABILITY, +) from homeassistant.const import ( ATTR_ENTITY_ID, STATE_OFF, @@ -84,7 +89,26 @@ async def setup_test_data( SIG_EP_PROFILE: zha.PROFILE_ID, } }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + node_descriptor=zdo_t.NodeDescriptor( + logical_type=zdo_t.LogicalType.Router, + complex_descriptor_available=0, + user_descriptor_available=0, + reserved=0, + aps_flags=0, + frequency_band=zdo_t.NodeDescriptor.FrequencyBand.Freq2400MHz, + mac_capability_flags=( + 
zdo_t.NodeDescriptor.MACCapabilityFlags.FullFunctionDevice + | zdo_t.NodeDescriptor.MACCapabilityFlags.MainsPowered + | zdo_t.NodeDescriptor.MACCapabilityFlags.RxOnWhenIdle + | zdo_t.NodeDescriptor.MACCapabilityFlags.AllocateAddress + ), + manufacturer_code=4107, + maximum_buffer_size=82, + maximum_incoming_transfer_size=128, + server_mask=11264, + maximum_outgoing_transfer_size=128, + descriptor_capability_field=zdo_t.NodeDescriptor.DescriptorCapability.NONE, + ).serialize(), ) gateway.get_or_create_device(zigpy_device) @@ -568,27 +592,8 @@ async def test_update_release_notes( ) -> None: """Test ZHA update platform release notes.""" await setup_zha() + zha_device, _, _, _ = await setup_test_data(hass, zigpy_device_mock) - gateway = get_zha_gateway(hass) - gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - ) - - gateway.get_or_create_device(zigpy_device) - await gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done(wait_background_tasks=True) - - zha_device: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) zha_lib_entity = next( e for e in zha_device.device.platform_entities.values() @@ -602,14 +607,39 @@ async def test_update_release_notes( assert entity_id is not None ws_client = await hass_ws_client(hass) - await ws_client.send_json( - { - "id": 1, - "type": "update/release_notes", - "entity_id": entity_id, - } - ) - result = await ws_client.receive_json() - assert result["success"] is True - assert result["result"] == "Some lengthy release notes" + # Mains-powered devices + with patch( + "zha.zigbee.device.Device.is_mains_powered", PropertyMock(return_value=True) + ): + await ws_client.send_json( + { + "id": 1, + "type": "update/release_notes", + "entity_id": entity_id, + } + ) + + result = await ws_client.receive_json() + assert result["success"] is True + assert "Some lengthy release notes" in result["result"] + assert OTA_MESSAGE_RELIABILITY in result["result"] + assert OTA_MESSAGE_BATTERY_POWERED not in result["result"] + + # Battery-powered devices + with patch( + "zha.zigbee.device.Device.is_mains_powered", PropertyMock(return_value=False) + ): + await ws_client.send_json( + { + "id": 2, + "type": "update/release_notes", + "entity_id": entity_id, + } + ) + + result = await ws_client.receive_json() + assert result["success"] is True + assert "Some lengthy release notes" in result["result"] + assert OTA_MESSAGE_RELIABILITY in result["result"] + assert OTA_MESSAGE_BATTERY_POWERED in result["result"] diff --git a/tests/components/zwave_js/test_api.py b/tests/components/zwave_js/test_api.py index 05ffcee7f4e..0807e9e09a5 100644 --- a/tests/components/zwave_js/test_api.py +++ b/tests/components/zwave_js/test_api.py @@ -5,7 +5,7 @@ from http import HTTPStatus from io import BytesIO import json from typing import Any -from unittest.mock import PropertyMock, patch +from unittest.mock import MagicMock, PropertyMock, patch import pytest from zwave_js_server.const import ( @@ -78,17 +78,26 @@ from homeassistant.components.zwave_js.api import ( TYPE, UUID, VALUE, + VALUE_FORMAT, + VALUE_SIZE, VERSION, ) from homeassistant.components.zwave_js.const import ( + ATTR_COMMAND_CLASS, + ATTR_ENDPOINT, + ATTR_METHOD_NAME, + 
ATTR_PARAMETERS, + ATTR_WAIT_FOR_RESULT, CONF_DATA_COLLECTION_OPTED_IN, + CONF_INSTALLER_MODE, DOMAIN, ) from homeassistant.components.zwave_js.helpers import get_device_id from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.setup import async_setup_component -from tests.common import MockUser +from tests.common import MockConfigEntry, MockUser from tests.typing import ClientSessionGenerator, WebSocketGenerator CONTROLLER_PATCH_PREFIX = "zwave_js_server.model.controller.Controller" @@ -3132,6 +3141,180 @@ async def test_get_config_parameters( assert msg["error"]["code"] == ERR_NOT_LOADED +async def test_set_raw_config_parameter( + hass: HomeAssistant, + client, + multisensor_6, + integration, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test that the set_raw_config_parameter WS API call works.""" + entry = integration + ws_client = await hass_ws_client(hass) + device = get_device(hass, multisensor_6) + + # Change from async_send_command to async_send_command_no_wait + client.async_send_command_no_wait.return_value = None + + # Test setting a raw config parameter value + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"]["status"] == "queued" + + assert len(client.async_send_command_no_wait.call_args_list) == 1 + args = client.async_send_command_no_wait.call_args[0][0] + assert args["command"] == "endpoint.set_raw_config_parameter_value" + assert args["nodeId"] == multisensor_6.node_id + assert args["options"]["parameter"] == 102 + assert args["options"]["value"] == 1 + assert args["options"]["valueSize"] == 2 + assert args["options"]["valueFormat"] == 1 + + # Reset the mock for async_send_command_no_wait instead + client.async_send_command_no_wait.reset_mock() + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: "fake_device", + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/set_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + VALUE: 1, + VALUE_SIZE: 2, + VALUE_FORMAT: 1, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + +async def test_get_raw_config_parameter( + hass: HomeAssistant, + multisensor_6, + integration, + client, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the get_raw_config_parameter websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + device = get_device(hass, multisensor_6) + + client.async_send_command.return_value = {"value": 1} + + # Test getting a raw config parameter value + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"]["value"] == 1 + + assert len(client.async_send_command.call_args_list) == 1 + args = 
client.async_send_command.call_args[0][0] + assert args["command"] == "endpoint.get_raw_config_parameter_value" + assert args["nodeId"] == multisensor_6.node_id + assert args["options"]["parameter"] == 102 + + client.async_send_command.reset_mock() + + # Test FailedZWaveCommand is caught + with patch( + "zwave_js_server.model.node.Node.async_get_raw_config_parameter_value", + side_effect=FailedZWaveCommand("failed_command", 1, "error message"), + ): + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "zwave_error" + assert msg["error"]["message"] == "zwave_error: Z-Wave error 1 - error message" + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: "fake_device", + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test FailedCommand exception + client.async_send_command.side_effect = FailedCommand("test", "test") + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == "test" + assert msg["error"]["message"] == "Command failed: test" + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_raw_config_parameter", + DEVICE_ID: device.id, + PROPERTY: 102, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + @pytest.mark.parametrize( ("firmware_data", "expected_data"), [({"target": "1"}, {"firmware_target": 1}), ({}, {})], @@ -4828,3 +5011,187 @@ async def test_hard_reset_controller( assert not msg["success"] assert msg["error"]["code"] == ERR_NOT_FOUND + + +async def test_node_capabilities( + hass: HomeAssistant, + multisensor_6: Node, + integration: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the node_capabilities websocket command.""" + entry = integration + ws_client = await hass_ws_client(hass) + + node = multisensor_6 + device = get_device(hass, node) + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_capabilities", + DEVICE_ID: device.id, + } + ) + msg = await ws_client.receive_json() + assert msg["result"] == { + "0": [ + { + "id": 113, + "name": "Notification", + "version": 8, + "isSecure": False, + "is_secure": False, + } + ] + } + + # Test getting non-existent node fails + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_status", + DEVICE_ID: "fake_device", + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_FOUND + + # Test sending command with not loaded entry fails + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/node_status", + DEVICE_ID: device.id, + } + ) + msg = await ws_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == ERR_NOT_LOADED + + +async def test_invoke_cc_api( + hass: HomeAssistant, + client, + 
climate_radio_thermostat_ct100_plus_different_endpoints: Node, + integration: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the invoke_cc_api websocket command.""" + ws_client = await hass_ws_client(hass) + + device_radio_thermostat = get_device( + hass, climate_radio_thermostat_ct100_plus_different_endpoints + ) + assert device_radio_thermostat + + # Test successful invoke_cc_api call with a static endpoint + client.async_send_command.return_value = {"response": True} + client.async_send_command_no_wait.return_value = {"response": True} + + # Test with wait_for_result=False (default) + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is None # We did not specify wait_for_result=True + + await hass.async_block_till_done() + + assert len(client.async_send_command_no_wait.call_args_list) == 1 + args = client.async_send_command_no_wait.call_args[0][0] + assert args == { + "command": "endpoint.invoke_cc_api", + "nodeId": 26, + "endpoint": 0, + "commandClass": 67, + "methodName": "someMethod", + "args": [1, 2], + } + + client.async_send_command_no_wait.reset_mock() + + # Test with wait_for_result=True + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_ENDPOINT: 0, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + ATTR_WAIT_FOR_RESULT: True, + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] is True + + await hass.async_block_till_done() + + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args[0][0] + assert args == { + "command": "endpoint.invoke_cc_api", + "nodeId": 26, + "endpoint": 0, + "commandClass": 67, + "methodName": "someMethod", + "args": [1, 2], + } + + client.async_send_command.side_effect = NotFoundError + + # Ensure an error is returned + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/invoke_cc_api", + DEVICE_ID: device_radio_thermostat.id, + ATTR_COMMAND_CLASS: 67, + ATTR_ENDPOINT: 0, + ATTR_METHOD_NAME: "someMethod", + ATTR_PARAMETERS: [1, 2], + ATTR_WAIT_FOR_RESULT: True, + } + ) + msg = await ws_client.receive_json() + assert not msg["success"] + assert msg["error"] == {"code": "NotFoundError", "message": ""} + + +@pytest.mark.parametrize( + ("config", "installer_mode"), [({}, False), ({CONF_INSTALLER_MODE: True}, True)] +) +async def test_get_integration_settings( + config: dict[str, Any], + installer_mode: bool, + hass: HomeAssistant, + client: MagicMock, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test that the get_integration_settings WS API call works.""" + ws_client = await hass_ws_client(hass) + + entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) + entry.add_to_hass(hass) + assert await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + await hass.async_block_till_done() + + await ws_client.send_json_auto_id( + { + TYPE: "zwave_js/get_integration_settings", + } + ) + msg = await ws_client.receive_json() + assert msg["success"] + assert msg["result"] == { + CONF_INSTALLER_MODE: installer_mode, + } diff --git a/tests/components/zwave_js/test_repairs.py b/tests/components/zwave_js/test_repairs.py index 2f10b70b48a..d237a6e410a 100644 --- 
a/tests/components/zwave_js/test_repairs.py +++ b/tests/components/zwave_js/test_repairs.py @@ -3,6 +3,7 @@ from copy import deepcopy from unittest.mock import patch +import pytest from zwave_js_server.event import Event from zwave_js_server.model.node import Node @@ -179,6 +180,10 @@ async def test_device_config_file_changed_ignore_step( assert msg["result"]["issues"][0].get("dismissed_version") is not None +@pytest.mark.parametrize( + "ignore_translations", + ["component.zwave_js.issues.invalid_issue.title"], +) async def test_invalid_issue( hass: HomeAssistant, hass_client: ClientSessionGenerator, diff --git a/tests/components/zwave_js/test_services.py b/tests/components/zwave_js/test_services.py index ec13d0262f8..41477f18b97 100644 --- a/tests/components/zwave_js/test_services.py +++ b/tests/components/zwave_js/test_services.py @@ -497,13 +497,12 @@ async def test_set_config_parameter( caplog.clear() - config_value = aeotec_zw164_siren.values["2-112-0-32"] cmd_result = SetConfigParameterResult("accepted", {"status": 255}) # Test accepted return with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=(config_value, cmd_result), + return_value=cmd_result, ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, @@ -534,7 +533,7 @@ async def test_set_config_parameter( cmd_result.status = "queued" with patch( "homeassistant.components.zwave_js.services.Endpoint.async_set_raw_config_parameter_value", - return_value=(config_value, cmd_result), + return_value=cmd_result, ) as mock_set_raw_config_parameter_value: await hass.services.async_call( DOMAIN, diff --git a/tests/conftest.py b/tests/conftest.py index 10c9a740256..c46ed0407e5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -36,6 +36,7 @@ import pytest_socket import requests_mock import respx from syrupy.assertion import SnapshotAssertion +from syrupy.session import SnapshotSession from homeassistant import block_async_io from homeassistant.exceptions import ServiceNotFound @@ -92,7 +93,7 @@ from homeassistant.util.async_ import create_eager_task, get_scheduled_timer_han from homeassistant.util.json import json_loads from .ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS -from .syrupy import HomeAssistantSnapshotExtension +from .syrupy import HomeAssistantSnapshotExtension, override_syrupy_finish from .typing import ( ClientSessionGenerator, MockHAClientWebSocket, @@ -149,6 +150,11 @@ def pytest_configure(config: pytest.Config) -> None: if config.getoption("verbose") > 0: logging.getLogger().setLevel(logging.DEBUG) + # Override default finish to detect unused snapshots despite xdist + # Temporary workaround until it is finalised inside syrupy + # See https://github.com/syrupy-project/syrupy/pull/901 + SnapshotSession.finish = override_syrupy_finish + def pytest_runtest_setup() -> None: """Prepare pytest_socket and freezegun. 
@@ -504,30 +510,31 @@ def aiohttp_client( clients = [] async def go( - __param: Application | BaseTestServer, + param: Application | BaseTestServer, + /, *args: Any, server_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> TestClient: - if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] - __param, (Application, BaseTestServer) + if isinstance(param, Callable) and not isinstance( # type: ignore[arg-type] + param, (Application, BaseTestServer) ): - __param = __param(loop, *args, **kwargs) + param = param(loop, *args, **kwargs) kwargs = {} else: assert not args, "args should be empty" client: TestClient - if isinstance(__param, Application): + if isinstance(param, Application): server_kwargs = server_kwargs or {} - server = TestServer(__param, loop=loop, **server_kwargs) + server = TestServer(param, loop=loop, **server_kwargs) # Registering a view after starting the server should still work. server.app._router.freeze = lambda: None client = CoalescingClient(server, loop=loop, **kwargs) - elif isinstance(__param, BaseTestServer): - client = TestClient(__param, loop=loop, **kwargs) + elif isinstance(param, BaseTestServer): + client = TestClient(param, loop=loop, **kwargs) else: - raise TypeError(f"Unknown argument type: {type(__param)!r}") + raise TypeError(f"Unknown argument type: {type(param)!r}") await client.start_server() clients.append(client) @@ -1185,7 +1192,12 @@ def mock_get_source_ip() -> Generator[_patch]: @pytest.fixture(autouse=True, scope="session") def translations_once() -> Generator[_patch]: - """Only load translations once per session.""" + """Only load translations once per session. + + Warning: having this as a session fixture can cause issues with tests that + create mock integrations, overriding the real integration translations + with empty ones. Translations should be reset after such tests (see #131628) + """ cache = _TranslationsCacheData({}, {}) patcher = patch( "homeassistant.helpers.translation._TranslationsCacheData", @@ -1766,10 +1778,30 @@ def mock_bleak_scanner_start() -> Generator[MagicMock]: @pytest.fixture -def mock_integration_frame() -> Generator[Mock]: - """Mock as if we're calling code from inside an integration.""" +def integration_frame_path() -> str: + """Return the path to the integration frame. + + Can be parametrized with + `@pytest.mark.parametrize("integration_frame_path", ["path_to_frame"])` + + - "custom_components/XYZ" for a custom integration + - "homeassistant/components/XYZ" for a core integration + - "homeassistant/XYZ" for core (no integration) + + Defaults to core component `hue` + """ + return "homeassistant/components/hue" + + +@pytest.fixture +def mock_integration_frame(integration_frame_path: str) -> Generator[Mock]: + """Mock where we are calling code from. + + Defaults to calling from `hue` core integration, and can be parametrized + with `integration_frame_path`. 
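+
+    For example, to simulate a call from a custom integration:
+    @pytest.mark.parametrize(
+        "integration_frame_path", ["custom_components/my_integration"]
+    )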
+ """ correct_frame = Mock( - filename="/home/paulus/homeassistant/components/hue/light.py", + filename=f"/home/paulus/{integration_frame_path}/light.py", lineno="23", line="self.light.is_on", ) diff --git a/tests/helpers/test_aiohttp_client.py b/tests/helpers/test_aiohttp_client.py index 126ed3f9287..1788da74c3b 100644 --- a/tests/helpers/test_aiohttp_client.py +++ b/tests/helpers/test_aiohttp_client.py @@ -286,8 +286,8 @@ async def test_warning_close_session_integration( await session.close() assert ( "Detected that integration 'hue' closes the Home Assistant aiohttp session at " - "homeassistant/components/hue/light.py, line 23: await session.close(), " - "please create a bug report at https://github.com/home-assistant/core/issues?" + "homeassistant/components/hue/light.py, line 23: await session.close(). " + "Please create a bug report at https://github.com/home-assistant/core/issues?" "q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" ) in caplog.text @@ -330,8 +330,8 @@ async def test_warning_close_session_custom( await session.close() assert ( "Detected that custom integration 'hue' closes the Home Assistant aiohttp " - "session at custom_components/hue/light.py, line 23: await session.close(), " - "please report it to the author of the 'hue' custom integration" + "session at custom_components/hue/light.py, line 23: await session.close(). " + "Please report it to the author of the 'hue' custom integration" ) in caplog.text diff --git a/tests/helpers/test_condition.py b/tests/helpers/test_condition.py index 31f813469cc..1ec78b20535 100644 --- a/tests/helpers/test_condition.py +++ b/tests/helpers/test_condition.py @@ -15,6 +15,8 @@ from homeassistant.const import ( CONF_CONDITION, CONF_DEVICE_ID, CONF_DOMAIN, + STATE_UNAVAILABLE, + STATE_UNKNOWN, SUN_EVENT_SUNRISE, SUN_EVENT_SUNSET, ) @@ -992,6 +994,83 @@ async def test_time_using_input_datetime(hass: HomeAssistant) -> None: condition.time(hass, before="input_datetime.not_existing") +async def test_time_using_time(hass: HomeAssistant) -> None: + """Test time conditions using time entities.""" + hass.states.async_set( + "time.am", + "06:00:00", # 6 am local time + ) + hass.states.async_set( + "time.pm", + "18:00:00", # 6 pm local time + ) + hass.states.async_set( + "time.unknown_state", + STATE_UNKNOWN, + ) + hass.states.async_set( + "time.unavailable_state", + STATE_UNAVAILABLE, + ) + + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=3), + ): + assert not condition.time(hass, after="time.am", before="time.pm") + assert condition.time(hass, after="time.pm", before="time.am") + + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=9), + ): + assert condition.time(hass, after="time.am", before="time.pm") + assert not condition.time(hass, after="time.pm", before="time.am") + + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=15), + ): + assert condition.time(hass, after="time.am", before="time.pm") + assert not condition.time(hass, after="time.pm", before="time.am") + + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=21), + ): + assert not condition.time(hass, after="time.am", before="time.pm") + assert condition.time(hass, after="time.pm", before="time.am") + + # Trigger on PM time + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=18, minute=0, second=0), + ): + 
assert condition.time(hass, after="time.pm", before="time.am") + assert not condition.time(hass, after="time.am", before="time.pm") + assert condition.time(hass, after="time.pm") + assert not condition.time(hass, before="time.pm") + + # Trigger on AM time + with patch( + "homeassistant.helpers.condition.dt_util.now", + return_value=dt_util.now().replace(hour=6, minute=0, second=0), + ): + assert not condition.time(hass, after="time.pm", before="time.am") + assert condition.time(hass, after="time.am", before="time.pm") + assert condition.time(hass, after="time.am") + assert not condition.time(hass, before="time.am") + + assert not condition.time(hass, after="time.unknown_state") + assert not condition.time(hass, before="time.unavailable_state") + + with pytest.raises(ConditionError): + condition.time(hass, after="time.not_existing") + + with pytest.raises(ConditionError): + condition.time(hass, before="time.not_existing") + + async def test_time_using_sensor(hass: HomeAssistant) -> None: """Test time conditions using sensor entities.""" hass.states.async_set( diff --git a/tests/helpers/test_event.py b/tests/helpers/test_event.py index a45b418c526..a0014587cd0 100644 --- a/tests/helpers/test_event.py +++ b/tests/helpers/test_event.py @@ -4387,8 +4387,8 @@ async def test_call_later(hass: HomeAssistant) -> None: schedule_utctime = dt_util.utcnow() @callback - def action(__utcnow: datetime): - _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp() + def action(utcnow: datetime, /): + _current_delay = utcnow.timestamp() - schedule_utctime.timestamp() future.set_result(delay < _current_delay < (delay + delay_tolerance)) async_call_later(hass, delay, action) @@ -4407,8 +4407,8 @@ async def test_async_call_later(hass: HomeAssistant) -> None: schedule_utctime = dt_util.utcnow() @callback - def action(__utcnow: datetime): - _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp() + def action(utcnow: datetime, /): + _current_delay = utcnow.timestamp() - schedule_utctime.timestamp() future.set_result(delay < _current_delay < (delay + delay_tolerance)) remove = async_call_later(hass, delay, action) @@ -4429,8 +4429,8 @@ async def test_async_call_later_timedelta(hass: HomeAssistant) -> None: schedule_utctime = dt_util.utcnow() @callback - def action(__utcnow: datetime): - _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp() + def action(utcnow: datetime, /): + _current_delay = utcnow.timestamp() - schedule_utctime.timestamp() future.set_result(delay < _current_delay < (delay + delay_tolerance)) remove = async_call_later(hass, timedelta(seconds=delay), action) @@ -4450,7 +4450,7 @@ async def test_async_call_later_cancel(hass: HomeAssistant) -> None: delay_tolerance = 0.1 @callback - def action(__now: datetime): + def action(now: datetime, /): future.set_result(False) remove = async_call_later(hass, delay, action) @@ -4895,7 +4895,7 @@ async def test_track_state_change_deprecated( assert ( "Detected code that calls `async_track_state_change` instead " "of `async_track_state_change_event` which is deprecated and " - "will be removed in Home Assistant 2025.5. Please report this issue." + "will be removed in Home Assistant 2025.5. Please report this issue" ) in caplog.text @@ -4946,7 +4946,8 @@ async def test_async_track_template_no_hass_deprecated( """Test async_track_template with a template without hass is deprecated.""" message = ( "Detected code that calls async_track_template_result with template without " - "hass, which will stop working in HA Core 2025.10. 
Please report this issue." + "hass. This will stop working in Home Assistant 2025.10, please " + "report this issue" ) async_track_template(hass, Template("blah"), lambda x, y, z: None) @@ -4964,7 +4965,8 @@ async def test_async_track_template_result_no_hass_deprecated( """Test async_track_template_result with a template without hass is deprecated.""" message = ( "Detected code that calls async_track_template_result with template without " - "hass, which will stop working in HA Core 2025.10. Please report this issue." + "hass. This will stop working in Home Assistant 2025.10, please " + "report this issue" ) async_track_template_result( diff --git a/tests/helpers/test_frame.py b/tests/helpers/test_frame.py index b3fbb0faaf4..fb98111fd42 100644 --- a/tests/helpers/test_frame.py +++ b/tests/helpers/test_frame.py @@ -1,11 +1,13 @@ """Test the frame helper.""" +from typing import Any from unittest.mock import ANY, Mock, patch import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import frame +from homeassistant.loader import async_get_integration from tests.common import extract_stack_to_frame @@ -156,6 +158,97 @@ async def test_get_integration_logger_no_integration( assert logger.name == __name__ +@pytest.mark.parametrize( + ("integration_frame_path", "keywords", "expected_error", "expected_log"), + [ + pytest.param( + "homeassistant/test_core", + {}, + True, + 0, + id="core default", + ), + pytest.param( + "homeassistant/components/test_core_integration", + {}, + False, + 1, + id="core integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {}, + False, + 1, + id="custom integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {"custom_integration_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="custom integration ignore", + ), + pytest.param( + "custom_components/test_custom_integration", + {"custom_integration_behavior": frame.ReportBehavior.ERROR}, + True, + 1, + id="custom integration error", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"core_integration_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="core_integration_behavior ignore", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"core_integration_behavior": frame.ReportBehavior.ERROR}, + True, + 1, + id="core_integration_behavior error", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"core_behavior": frame.ReportBehavior.IGNORE}, + False, + 0, + id="core_behavior ignore", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"core_behavior": frame.ReportBehavior.LOG}, + False, + 1, + id="core_behavior log", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_report_usage( + caplog: pytest.LogCaptureFixture, + keywords: dict[str, Any], + expected_error: bool, + expected_log: int, +) -> None: + """Test report.""" + + what = "test_report_string" + + errored = False + try: + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report_usage(what, **keywords) + except RuntimeError: + errored = True + + assert errored == expected_error + + assert caplog.text.count(what) == expected_log + + @patch.object(frame, "_REPORTED_INTEGRATIONS", set()) async def test_prevent_flooding( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock @@ -169,8 +262,8 @@ async def test_prevent_flooding( expected_message = ( f"Detected that integration '{integration}' {what} at 
{filename}, line " - f"{mock_integration_frame.lineno}: {mock_integration_frame.line}, " - f"please create a bug report at https://github.com/home-assistant/core/issues?" + f"{mock_integration_frame.lineno}: {mock_integration_frame.line}. " + f"Please create a bug report at https://github.com/home-assistant/core/issues?" f"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+{integration}%22" ) @@ -187,6 +280,28 @@ async def test_prevent_flooding( assert len(frame._REPORTED_INTEGRATIONS) == 1 +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_breaks_in_ha_version( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock +) -> None: + """Test to ensure a report is only written once to the log.""" + + what = "accessed hi instead of hello" + integration = "hue" + filename = "homeassistant/components/hue/light.py" + + expected_message = ( + f"Detected that integration '{integration}' {what} at {filename}, line " + f"{mock_integration_frame.lineno}: {mock_integration_frame.line}. " + f"This will stop working in Home Assistant 2024.11, please create a bug " + "report at https://github.com/home-assistant/core/issues?" + f"q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+{integration}%22" + ) + + frame.report_usage(what, breaks_in_ha_version="2024.11") + assert expected_message in caplog.text + + async def test_report_missing_integration_frame( caplog: pytest.LogCaptureFixture, ) -> None: @@ -247,3 +362,173 @@ async def test_report_error_if_integration( ), ): frame.report("did a bad thing", error_if_integration=True) + + +@pytest.mark.parametrize( + ("integration_frame_path", "keywords", "expected_error", "expected_log"), + [ + pytest.param( + "homeassistant/test_core", + {}, + True, + 0, + id="core default", + ), + pytest.param( + "homeassistant/components/test_core_integration", + {}, + False, + 1, + id="core integration default", + ), + pytest.param( + "custom_components/test_custom_integration", + {}, + False, + 1, + id="custom integration default", + ), + pytest.param( + "custom_components/test_integration_frame", + {"log_custom_component_only": True}, + False, + 1, + id="log_custom_component_only with custom integration", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"log_custom_component_only": True}, + False, + 0, + id="log_custom_component_only with core integration", + ), + pytest.param( + "homeassistant/test_integration_frame", + {"error_if_core": False}, + False, + 1, + id="disable error_if_core", + ), + pytest.param( + "custom_components/test_integration_frame", + {"error_if_integration": True}, + True, + 1, + id="error_if_integration with custom integration", + ), + pytest.param( + "homeassistant/components/test_integration_frame", + {"error_if_integration": True}, + True, + 1, + id="error_if_integration with core integration", + ), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_report( + caplog: pytest.LogCaptureFixture, + keywords: dict[str, Any], + expected_error: bool, + expected_log: int, +) -> None: + """Test report.""" + + what = "test_report_string" + + errored = False + try: + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report(what, **keywords) + except RuntimeError: + errored = True + + assert errored == expected_error + + assert caplog.text.count(what) == expected_log + + +@pytest.mark.parametrize( + ("behavior", "integration_domain", "source", "logs_again"), + [ + pytest.param( + "core_behavior", + None, + "code that", + True, + id="core", + ), + 
pytest.param( + "core_behavior", + "unknown_integration", + "code that", + True, + id="unknown integration", + ), + pytest.param( + "core_integration_behavior", + "sensor", + "that integration 'sensor'", + False, + id="core integration", + ), + pytest.param( + "custom_integration_behavior", + "test_package", + "that custom integration 'test_package'", + False, + id="custom integration", + ), + ], +) +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_report_integration_domain( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + behavior: str, + integration_domain: str | None, + source: str, + logs_again: bool, +) -> None: + """Test report.""" + await async_get_integration(hass, "sensor") + await async_get_integration(hass, "test_package") + + what = "test_report_string" + lookup_text = f"Detected {source} {what}" + + caplog.clear() + frame.report_usage( + what, + **{behavior: frame.ReportBehavior.IGNORE}, + integration_domain=integration_domain, + ) + + assert lookup_text not in caplog.text + + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + frame.report_usage( + what, + **{behavior: frame.ReportBehavior.LOG}, + integration_domain=integration_domain, + ) + + assert lookup_text in caplog.text + + # Check that it does not log again + caplog.clear() + frame.report_usage( + what, + **{behavior: frame.ReportBehavior.LOG}, + integration_domain=integration_domain, + ) + + assert (lookup_text in caplog.text) == logs_again + + # Check that it raises + with pytest.raises(RuntimeError, match=lookup_text): + frame.report_usage( + what, + **{behavior: frame.ReportBehavior.ERROR}, + integration_domain=integration_domain, + ) diff --git a/tests/helpers/test_httpx_client.py b/tests/helpers/test_httpx_client.py index ccfccb3d698..684778fe1b1 100644 --- a/tests/helpers/test_httpx_client.py +++ b/tests/helpers/test_httpx_client.py @@ -138,8 +138,8 @@ async def test_warning_close_session_integration( assert ( "Detected that integration 'hue' closes the Home Assistant httpx client at " - "homeassistant/components/hue/light.py, line 23: await session.aclose(), " - "please create a bug report at https://github.com/home-assistant/core/issues?" + "homeassistant/components/hue/light.py, line 23: await session.aclose(). " + "Please create a bug report at https://github.com/home-assistant/core/issues?" "q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+hue%22" ) in caplog.text @@ -182,6 +182,6 @@ async def test_warning_close_session_custom( await httpx_session.aclose() assert ( "Detected that custom integration 'hue' closes the Home Assistant httpx client " - "at custom_components/hue/light.py, line 23: await session.aclose(), " - "please report it to the author of the 'hue' custom integration" + "at custom_components/hue/light.py, line 23: await session.aclose(). 
" + "Please report it to the author of the 'hue' custom integration" ) in caplog.text diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index cd36fe18933..3787526c433 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -306,6 +306,7 @@ async def test_assist_api_tools( "HassSetPosition", "HassStartTimer", "HassCancelTimer", + "HassCancelAllTimers", "HassIncreaseTimer", "HassDecreaseTimer", "HassPauseTimer", @@ -516,10 +517,6 @@ async def test_assist_api_prompt( ) ) exposed_entities_prompt = """An overview of the areas and the devices in this smart home: -- names: script_with_no_fields - domain: script - state: 'off' - description: This is another test script - names: Kitchen domain: light state: 'on' @@ -655,7 +652,10 @@ async def test_script_tool( "script": { "test_script": { "description": "This is a test script", - "sequence": [], + "sequence": [ + {"variables": {"result": {"drinks": 2}}}, + {"stop": True, "response_variable": "result"}, + ], "fields": { "beer": {"description": "Number of beers", "required": True}, "wine": {"selector": {"number": {"min": 0, "max": 3}}}, @@ -691,7 +691,7 @@ async def test_script_tool( api = await llm.async_get_api(hass, "assist", llm_context) tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] - assert len(tools) == 1 + assert len(tools) == 2 tool = tools[0] assert tool.name == "test_script" @@ -718,6 +718,7 @@ async def test_script_tool( "script_with_no_fields": ("This is another test script", vol.Schema({})), } + # Test script with response tool_input = llm.ToolInput( tool_name="test_script", tool_args={ @@ -730,26 +731,56 @@ async def test_script_tool( }, ) - with patch("homeassistant.core.ServiceRegistry.async_call") as mock_service_call: + with patch( + "homeassistant.core.ServiceRegistry.async_call", + side_effect=hass.services.async_call, + ) as mock_service_call: response = await api.async_call_tool(tool_input) mock_service_call.assert_awaited_once_with( "script", - "turn_on", + "test_script", { - "entity_id": "script.test_script", - "variables": { - "beer": "3", - "wine": 0, - "where": area.id, - "area_list": [area.id], - "floor": floor.floor_id, - "floor_list": [floor.floor_id], - }, + "beer": "3", + "wine": 0, + "where": area.id, + "area_list": [area.id], + "floor": floor.floor_id, + "floor_list": [floor.floor_id], }, context=context, + blocking=True, + return_response=True, ) - assert response == {"success": True} + assert response == { + "success": True, + "result": {"drinks": 2}, + } + + # Test script with no response + tool_input = llm.ToolInput( + tool_name="script_with_no_fields", + tool_args={}, + ) + + with patch( + "homeassistant.core.ServiceRegistry.async_call", + side_effect=hass.services.async_call, + ) as mock_service_call: + response = await api.async_call_tool(tool_input) + + mock_service_call.assert_awaited_once_with( + "script", + "script_with_no_fields", + {}, + context=context, + blocking=True, + return_response=True, + ) + assert response == { + "success": True, + "result": {}, + } # Test reload script with new parameters config = { @@ -781,7 +812,7 @@ async def test_script_tool( api = await llm.async_get_api(hass, "assist", llm_context) tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] - assert len(tools) == 1 + assert len(tools) == 2 tool = tools[0] assert tool.name == "test_script" diff --git a/tests/helpers/test_restore_state.py b/tests/helpers/test_restore_state.py index 865ee5efaf7..7adb3dd5b5e 100644 --- 
a/tests/helpers/test_restore_state.py +++ b/tests/helpers/test_restore_state.py @@ -6,8 +6,6 @@ import logging from typing import Any from unittest.mock import Mock, patch -import pytest - from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP from homeassistant.core import CoreState, HomeAssistant, State from homeassistant.exceptions import HomeAssistantError @@ -94,20 +92,6 @@ async def test_caching_data(hass: HomeAssistant) -> None: assert mock_write_data.called -async def test_async_get_instance_backwards_compatibility(hass: HomeAssistant) -> None: - """Test async_get_instance backwards compatibility.""" - await async_load(hass) - data = async_get(hass) - # When called from core it should raise - with pytest.raises(RuntimeError): - await RestoreStateData.async_get_instance(hass) - - # When called from a component it should not raise - # but it should report - with patch("homeassistant.helpers.restore_state.report"): - assert data is await RestoreStateData.async_get_instance(hass) - - async def test_periodic_write(hass: HomeAssistant) -> None: """Test that we write periodiclly but not after stop.""" data = async_get(hass) diff --git a/tests/helpers/test_schema_config_entry_flow.py b/tests/helpers/test_schema_config_entry_flow.py index 877e3762d3b..e67525253bc 100644 --- a/tests/helpers/test_schema_config_entry_flow.py +++ b/tests/helpers/test_schema_config_entry_flow.py @@ -648,6 +648,10 @@ async def test_options_flow_state(hass: HomeAssistant) -> None: options_handler = hass.config_entries.options._progress[result["flow_id"]] assert options_handler._common_handler.flow_state == {"idx": None} + # Ensure that self.options and self._common_handler.options refer to the + # same mutable copy of the options + assert options_handler.options is options_handler._common_handler.options + # In step 1, flow state is updated with user input result = await hass.config_entries.options.async_configure( result["flow_id"], {"option1": "blublu"} diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index f67519905a1..c438e333ae6 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -5632,6 +5632,91 @@ async def test_stop_action_subscript( ) +@pytest.mark.parametrize( + ("var", "response"), + [(1, "If: Then"), (2, "Testing 123")], +) +async def test_stop_action_response_variables( + hass: HomeAssistant, + var: int, + response: str, +) -> None: + """Test setting stop response_variable in a subscript.""" + sequence = cv.SCRIPT_SCHEMA( + [ + {"variables": {"output": {"value": "Testing 123"}}}, + { + "if": { + "condition": "template", + "value_template": "{{ var == 1 }}", + }, + "then": [ + {"variables": {"output": {"value": "If: Then"}}}, + {"stop": "In the name of love", "response_variable": "output"}, + ], + }, + {"stop": "In the name of love", "response_variable": "output"}, + ] + ) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + + run_vars = MappingProxyType({"var": var}) + result = await script_obj.async_run(run_vars, context=Context()) + assert result.service_response == {"value": response} + + +@pytest.mark.parametrize( + ("var", "if_result", "choice", "response"), + [(1, True, "then", "If: Then"), (2, False, "else", "If: Else")], +) +async def test_stop_action_nested_response_variables( + hass: HomeAssistant, + var: int, + if_result: bool, + choice: str, + response: str, +) -> None: + """Test setting stop response_variable in a subscript.""" + sequence = cv.SCRIPT_SCHEMA( + [ + {"variables": 
{"output": {"value": "Testing 123"}}}, + { + "if": { + "condition": "template", + "value_template": "{{ var == 1 }}", + }, + "then": [ + {"variables": {"output": {"value": "If: Then"}}}, + {"stop": "In the name of love", "response_variable": "output"}, + ], + "else": [ + {"variables": {"output": {"value": "If: Else"}}}, + {"stop": "In the name of love", "response_variable": "output"}, + ], + }, + ] + ) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + + run_vars = MappingProxyType({"var": var}) + result = await script_obj.async_run(run_vars, context=Context()) + assert result.service_response == {"value": response} + + expected_trace = { + "0": [ + { + "variables": {"var": var, "output": {"value": "Testing 123"}}, + } + ], + "1": [{"result": {"choice": choice}}], + "1/if": [{"result": {"result": if_result}}], + "1/if/condition/0": [{"result": {"result": var == 1, "entities": []}}], + f"1/{choice}/0": [{"variables": {"output": {"value": response}}}], + f"1/{choice}/1": [{"result": {"stop": "In the name of love", "error": False}}], + } + assert_action_trace(expected_trace) + + async def test_stop_action_with_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index b8c6b5a25af..628aea20900 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -24,6 +24,7 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, STATE_ON, STATE_UNAVAILABLE, + UnitOfArea, UnitOfLength, UnitOfMass, UnitOfPrecipitationDepth, @@ -61,6 +62,7 @@ def _set_up_units(hass: HomeAssistant) -> None: hass.config.units = UnitSystem( "custom", accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, diff --git a/tests/helpers/test_translation.py b/tests/helpers/test_translation.py index 3b60c7f695b..d4a78807e2b 100644 --- a/tests/helpers/test_translation.py +++ b/tests/helpers/test_translation.py @@ -64,10 +64,16 @@ def test_load_translations_files_by_language( "test": { "entity": { "switch": { - "other1": {"name": "Other 1"}, + "other1": { + "name": "Other 1", + "unit_of_measurement": "units", + }, "other2": {"name": "Other 2"}, "other3": {"name": "Other 3"}, - "other4": {"name": "Other 4"}, + "other4": { + "name": "Other 4", + "unit_of_measurement": "quantities", + }, "outlet": {"name": "Outlet " "{placeholder}"}, } }, @@ -87,9 +93,11 @@ def test_load_translations_files_by_language( "en", { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", }, [], @@ -98,9 +106,11 @@ def test_load_translations_files_by_language( "es", { "component.test.entity.switch.other1.name": "Otra 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Otra 2", "component.test.entity.switch.other3.name": "Otra 3", "component.test.entity.switch.other4.name": "Otra 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Enchufe {placeholder}", }, [], @@ -110,12 +120,14 @@ def 
test_load_translations_files_by_language( { # Correct "component.test.entity.switch.other1.name": "Anderes 1", + "component.test.entity.switch.other1.unit_of_measurement": "einheiten", # Translation has placeholder missing in English "component.test.entity.switch.other2.name": "Other 2", # Correct (empty translation) "component.test.entity.switch.other3.name": "", # Translation missing "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", # Mismatch in placeholders "component.test.entity.switch.outlet.name": "Outlet {placeholder}", }, @@ -166,9 +178,11 @@ async def test_get_translations(hass: HomeAssistant, mock_config_flows) -> None: assert translations == { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } @@ -176,24 +190,33 @@ async def test_get_translations(hass: HomeAssistant, mock_config_flows) -> None: hass, "de", "entity", {"test"} ) + # Test a partial translation assert translations == { + # Correct "component.test.entity.switch.other1.name": "Anderes 1", + "component.test.entity.switch.other1.unit_of_measurement": "einheiten", + # Translation has placeholder missing in English "component.test.entity.switch.other2.name": "Other 2", + # Correct (empty translation) "component.test.entity.switch.other3.name": "", + # Translation missing "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", + # Mismatch in placeholders "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } - # Test a partial translation translations = await translation.async_get_translations( hass, "es", "entity", {"test"} ) assert translations == { "component.test.entity.switch.other1.name": "Otra 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Otra 2", "component.test.entity.switch.other3.name": "Otra 3", "component.test.entity.switch.other4.name": "Otra 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Enchufe {placeholder}", } @@ -204,9 +227,11 @@ async def test_get_translations(hass: HomeAssistant, mock_config_flows) -> None: assert translations == { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } @@ -507,9 +532,11 @@ async def test_get_cached_translations(hass: HomeAssistant, mock_config_flows) - ) assert translations == { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + 
"component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } @@ -522,9 +549,11 @@ async def test_get_cached_translations(hass: HomeAssistant, mock_config_flows) - assert translations == { "component.test.entity.switch.other1.name": "Otra 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Otra 2", "component.test.entity.switch.other3.name": "Otra 3", "component.test.entity.switch.other4.name": "Otra 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Enchufe {placeholder}", } @@ -539,9 +568,11 @@ async def test_get_cached_translations(hass: HomeAssistant, mock_config_flows) - assert translations == { "component.test.entity.switch.other1.name": "Other 1", + "component.test.entity.switch.other1.unit_of_measurement": "units", "component.test.entity.switch.other2.name": "Other 2", "component.test.entity.switch.other3.name": "Other 3", "component.test.entity.switch.other4.name": "Other 4", + "component.test.entity.switch.other4.unit_of_measurement": "quantities", "component.test.entity.switch.outlet.name": "Outlet {placeholder}", } @@ -678,7 +709,6 @@ async def test_get_translations_still_has_title_without_translations_files( ) assert translations == translations_again - assert translations == { "component.component1.title": "Component 1", } diff --git a/tests/components/template/test_manual_trigger_entity.py b/tests/helpers/test_trigger_template_entity.py similarity index 100% rename from tests/components/template/test_manual_trigger_entity.py rename to tests/helpers/test_trigger_template_entity.py diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index 50da0ab6332..539762a60ff 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -628,8 +628,7 @@ async def test_async_config_entry_first_refresh_invalid_state( RuntimeError, match="Detected code that uses `async_config_entry_first_refresh`, which " "is only supported when entry state is ConfigEntryState.SETUP_IN_PROGRESS, " - "but it is in state ConfigEntryState.NOT_LOADED. This will stop working " - "in Home Assistant 2025.11. Please report this issue.", + "but it is in state ConfigEntryState.NOT_LOADED. Please report this issue", ): await crd.async_config_entry_first_refresh() @@ -653,8 +652,9 @@ async def test_async_config_entry_first_refresh_invalid_state_in_integration( assert ( "Detected that integration 'hue' uses `async_config_entry_first_refresh`, which " "is only supported when entry state is ConfigEntryState.SETUP_IN_PROGRESS, " - "but it is in state ConfigEntryState.NOT_LOADED, This will stop working " - "in Home Assistant 2025.11" + "but it is in state ConfigEntryState.NOT_LOADED at " + "homeassistant/components/hue/light.py, line 23: self.light.is_on. " + "This will stop working in Home Assistant 2025.11" ) in caplog.text @@ -665,8 +665,8 @@ async def test_async_config_entry_first_refresh_no_entry(hass: HomeAssistant) -> with pytest.raises( RuntimeError, match="Detected code that uses `async_config_entry_first_refresh`, " - "which is only supported for coordinators with a config entry and will " - "stop working in Home Assistant 2025.11. Please report this issue.", + "which is only supported for coordinators with a config entry. 
" + "Please report this issue", ): await crd.async_config_entry_first_refresh() diff --git a/tests/script/test_gen_requirements_all.py b/tests/script/test_gen_requirements_all.py index 793b3de63c5..519a5c21855 100644 --- a/tests/script/test_gen_requirements_all.py +++ b/tests/script/test_gen_requirements_all.py @@ -1,5 +1,7 @@ """Tests for the gen_requirements_all script.""" +from unittest.mock import patch + from script import gen_requirements_all @@ -23,3 +25,27 @@ def test_include_overrides_subsets() -> None: for overrides in gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS.values(): for req in overrides["include"]: assert req in gen_requirements_all.EXCLUDED_REQUIREMENTS_ALL + + +def test_requirement_override_markers() -> None: + """Test override markers are applied to the correct requirements.""" + data = { + "pytest": { + "exclude": set(), + "include": set(), + "markers": {"env-canada": "python_version<'3.13'"}, + } + } + with patch.dict( + gen_requirements_all.OVERRIDDEN_REQUIREMENTS_ACTIONS, data, clear=True + ): + assert ( + gen_requirements_all.process_action_requirement( + "env-canada==0.7.2", "pytest" + ) + == "env-canada==0.7.2;python_version<'3.13'" + ) + assert ( + gen_requirements_all.process_action_requirement("other==1.0", "pytest") + == "other==1.0" + ) diff --git a/tests/syrupy.py b/tests/syrupy.py index 268ee59243f..a3b3f763063 100644 --- a/tests/syrupy.py +++ b/tests/syrupy.py @@ -5,14 +5,22 @@ from __future__ import annotations from contextlib import suppress import dataclasses from enum import IntFlag +import json +import os from pathlib import Path from typing import Any import attr import attrs +import pytest +from syrupy.constants import EXIT_STATUS_FAIL_UNUSED +from syrupy.data import Snapshot, SnapshotCollection, SnapshotCollections from syrupy.extensions.amber import AmberDataSerializer, AmberSnapshotExtension from syrupy.location import PyTestLocation +from syrupy.report import SnapshotReport +from syrupy.session import ItemStatus, SnapshotSession from syrupy.types import PropertyFilter, PropertyMatcher, PropertyPath, SerializableData +from syrupy.utils import is_xdist_controller, is_xdist_worker import voluptuous as vol import voluptuous_serialize @@ -246,3 +254,164 @@ class HomeAssistantSnapshotExtension(AmberSnapshotExtension): """ test_dir = Path(test_location.filepath).parent return str(test_dir.joinpath("snapshots")) + + +# Classes and Methods to override default finish behavior in syrupy +# This is needed to handle the xdist plugin in pytest +# The default implementation does not handle the xdist plugin +# and will not work correctly when running tests in parallel +# with pytest-xdist. 
+# Temporary workaround until it is finalised inside syrupy
+# See https://github.com/syrupy-project/syrupy/pull/901
+
+
+class _FakePytestObject:
+    """Fake object."""
+
+    def __init__(self, collected_item: dict[str, str]) -> None:
+        """Initialise fake object."""
+        self.__module__ = collected_item["modulename"]
+        self.__name__ = collected_item["methodname"]
+
+
+class _FakePytestItem:
+    """Fake pytest.Item object."""
+
+    def __init__(self, collected_item: dict[str, str]) -> None:
+        """Initialise fake pytest.Item object."""
+        self.nodeid = collected_item["nodeid"]
+        self.name = collected_item["name"]
+        self.path = Path(collected_item["path"])
+        self.obj = _FakePytestObject(collected_item)
+
+
+def _serialize_collections(collections: SnapshotCollections) -> dict[str, Any]:
+    return {
+        k: [c.name for c in v] for k, v in collections._snapshot_collections.items()
+    }
+
+
+def _serialize_report(
+    report: SnapshotReport,
+    collected_items: set[pytest.Item],
+    selected_items: dict[str, ItemStatus],
+) -> dict[str, Any]:
+    return {
+        "discovered": _serialize_collections(report.discovered),
+        "created": _serialize_collections(report.created),
+        "failed": _serialize_collections(report.failed),
+        "matched": _serialize_collections(report.matched),
+        "updated": _serialize_collections(report.updated),
+        "used": _serialize_collections(report.used),
+        "_collected_items": [
+            {
+                "nodeid": c.nodeid,
+                "name": c.name,
+                "path": str(c.path),
+                "modulename": c.obj.__module__,
+                "methodname": c.obj.__name__,
+            }
+            for c in list(collected_items)
+        ],
+        "_selected_items": {
+            key: status.value for key, status in selected_items.items()
+        },
+    }
+
+
+def _merge_serialized_collections(
+    collections: SnapshotCollections, json_data: dict[str, list[str]]
+) -> None:
+    if not json_data:
+        return
+    for location, names in json_data.items():
+        snapshot_collection = SnapshotCollection(location=location)
+        for name in names:
+            snapshot_collection.add(Snapshot(name))
+        collections.update(snapshot_collection)
+
+
+def _merge_serialized_report(report: SnapshotReport, json_data: dict[str, Any]) -> None:
+    _merge_serialized_collections(report.discovered, json_data["discovered"])
+    _merge_serialized_collections(report.created, json_data["created"])
+    _merge_serialized_collections(report.failed, json_data["failed"])
+    _merge_serialized_collections(report.matched, json_data["matched"])
+    _merge_serialized_collections(report.updated, json_data["updated"])
+    _merge_serialized_collections(report.used, json_data["used"])
+    for collected_item in json_data["_collected_items"]:
+        custom_item = _FakePytestItem(collected_item)
+        if not any(
+            t.nodeid == custom_item.nodeid and t.name == custom_item.name
+            for t in report.collected_items
+        ):
+            report.collected_items.add(custom_item)
+    for key, selected_item in json_data["_selected_items"].items():
+        if key in report.selected_items:
+            status = ItemStatus(selected_item)
+            if status != ItemStatus.NOT_RUN:
+                report.selected_items[key] = status
+        else:
+            report.selected_items[key] = ItemStatus(selected_item)
+
+
+def override_syrupy_finish(self: SnapshotSession) -> int:
+    """Override the finish method to allow for custom handling."""
+    exitstatus = 0
+    self.flush_snapshot_write_queue()
+    self.report = SnapshotReport(
+        base_dir=self.pytest_session.config.rootpath,
+        collected_items=self._collected_items,
+        selected_items=self._selected_items,
+        assertions=self._assertions,
+        options=self.pytest_session.config.option,
+    )
+
+    needs_xdist_merge = self.update_snapshots or bool(
+
self.pytest_session.config.option.include_snapshot_details + ) + + if is_xdist_worker(): + if not needs_xdist_merge: + return exitstatus + with open(".pytest_syrupy_worker_count", "w", encoding="utf-8") as f: + f.write(os.getenv("PYTEST_XDIST_WORKER_COUNT")) + with open( + f".pytest_syrupy_{os.getenv("PYTEST_XDIST_WORKER")}_result", + "w", + encoding="utf-8", + ) as f: + json.dump( + _serialize_report( + self.report, self._collected_items, self._selected_items + ), + f, + indent=2, + ) + return exitstatus + if is_xdist_controller(): + return exitstatus + + if needs_xdist_merge: + worker_count = None + try: + with open(".pytest_syrupy_worker_count", encoding="utf-8") as f: + worker_count = f.read() + os.remove(".pytest_syrupy_worker_count") + except FileNotFoundError: + pass + + if worker_count: + for i in range(int(worker_count)): + with open(f".pytest_syrupy_gw{i}_result", encoding="utf-8") as f: + _merge_serialized_report(self.report, json.load(f)) + os.remove(f".pytest_syrupy_gw{i}_result") + + if self.report.num_unused: + if self.update_snapshots: + self.remove_unused_snapshots( + unused_snapshot_collections=self.report.unused, + used_snapshot_collections=self.report.used, + ) + elif not self.warn_unused_snapshots: + exitstatus |= EXIT_STATUS_FAIL_UNUSED + return exitstatus diff --git a/tests/test_backup_restore.py b/tests/test_backup_restore.py new file mode 100644 index 00000000000..44a05c0540e --- /dev/null +++ b/tests/test_backup_restore.py @@ -0,0 +1,215 @@ +"""Test methods in backup_restore.""" + +from pathlib import Path +import tarfile +from unittest import mock + +import pytest + +from homeassistant import backup_restore + +from .common import get_test_config_dir + + +@pytest.mark.parametrize( + ("side_effect", "content", "expected"), + [ + (FileNotFoundError, "", None), + (None, "", None), + ( + None, + '{"path": "test"}', + backup_restore.RestoreBackupFileContent(backup_file_path=Path("test")), + ), + ], +) +def test_reading_the_instruction_contents( + side_effect: Exception | None, + content: str, + expected: backup_restore.RestoreBackupFileContent | None, +) -> None: + """Test reading the content of the .HA_RESTORE file.""" + with ( + mock.patch( + "pathlib.Path.read_text", + return_value=content, + side_effect=side_effect, + ), + ): + read_content = backup_restore.restore_backup_file_content( + Path(get_test_config_dir()) + ) + assert read_content == expected + + +def test_restoring_backup_that_does_not_exist() -> None: + """Test restoring a backup that does not exist.""" + backup_file_path = Path(get_test_config_dir("backups", "test")) + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch("pathlib.Path.read_text", side_effect=FileNotFoundError), + pytest.raises( + ValueError, match=f"Backup file {backup_file_path} does not exist" + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_restoring_backup_when_instructions_can_not_be_read() -> None: + """Test restoring a backup when instructions can not be read.""" + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=None, + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_restoring_backup_that_is_not_a_file() -> None: + """Test restoring a backup that is not a file.""" + backup_file_path = Path(get_test_config_dir("backups", "test")) + with 
( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch("pathlib.Path.exists", return_value=True), + mock.patch("pathlib.Path.is_file", return_value=False), + pytest.raises( + ValueError, match=f"Backup file {backup_file_path} does not exist" + ), + ): + assert backup_restore.restore_backup(Path(get_test_config_dir())) is False + + +def test_aborting_for_older_versions() -> None: + """Test that we abort for older versions.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "9999.99.99"}, "compressed": false}' + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch("securetar.SecureTarFile"), + mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"), + pytest.raises( + ValueError, + match="You need at least Home Assistant version 9999.99.99 to restore this backup", + ), + ): + assert backup_restore.restore_backup(config_dir) is True + + +def test_removal_of_current_configuration_when_restoring() -> None: + """Test that we are removing the current configuration directory.""" + config_dir = Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + mock_config_dir = [ + {"path": Path(config_dir, ".HA_RESTORE"), "is_file": True}, + {"path": Path(config_dir, ".HA_VERSION"), "is_file": True}, + {"path": Path(config_dir, "backups"), "is_file": False}, + {"path": Path(config_dir, "www"), "is_file": False}, + ] + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "2013.09.17"}, "compressed": false}' + + def _patched_path_is_file(path: Path, **kwargs): + return [x for x in mock_config_dir if x["path"] == path][0]["is_file"] + + def _patched_path_is_dir(path: Path, **kwargs): + return not [x for x in mock_config_dir if x["path"] == path][0]["is_file"] + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch("securetar.SecureTarFile"), + mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("homeassistant.backup_restore.HA_VERSION", "2013.09.17"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("pathlib.Path.is_file", _patched_path_is_file), + mock.patch("pathlib.Path.is_dir", _patched_path_is_dir), + mock.patch( + "pathlib.Path.iterdir", + return_value=[x["path"] for x in mock_config_dir], + ), + mock.patch("pathlib.Path.unlink") as unlink_mock, + mock.patch("shutil.rmtree") as rmtreemock, + ): + assert backup_restore.restore_backup(config_dir) is True + assert unlink_mock.call_count == 2 + assert ( + rmtreemock.call_count == 1 + ) # We have 2 directories in the config directory, but backups is kept + + removed_directories = {Path(call.args[0]) for call in rmtreemock.mock_calls} + assert removed_directories == {Path(config_dir, "www")} + + +def test_extracting_the_contents_of_a_backup_file() -> None: + """Test extracting the contents of a backup file.""" + config_dir = 
Path(get_test_config_dir()) + backup_file_path = Path(config_dir, "backups", "test.tar") + + def _patched_path_read_text(path: Path, **kwargs): + return '{"homeassistant": {"version": "2013.09.17"}, "compressed": false}' + + getmembers_mock = mock.MagicMock( + return_value=[ + tarfile.TarInfo(name="data"), + tarfile.TarInfo(name="data/../test"), + tarfile.TarInfo(name="data/.HA_VERSION"), + tarfile.TarInfo(name="data/.storage"), + tarfile.TarInfo(name="data/www"), + ] + ) + extractall_mock = mock.MagicMock() + + with ( + mock.patch( + "homeassistant.backup_restore.restore_backup_file_content", + return_value=backup_restore.RestoreBackupFileContent( + backup_file_path=backup_file_path + ), + ), + mock.patch( + "tarfile.open", + return_value=mock.MagicMock( + getmembers=getmembers_mock, + extractall=extractall_mock, + __iter__=lambda x: iter(getmembers_mock.return_value), + ), + ), + mock.patch("homeassistant.backup_restore.TemporaryDirectory"), + mock.patch("pathlib.Path.read_text", _patched_path_read_text), + mock.patch("pathlib.Path.is_file", return_value=False), + mock.patch("pathlib.Path.iterdir", return_value=[]), + ): + assert backup_restore.restore_backup(config_dir) is True + assert getmembers_mock.call_count == 1 + assert extractall_mock.call_count == 2 + + assert { + member.name for member in extractall_mock.mock_calls[-1].kwargs["members"] + } == {".HA_VERSION", ".storage", "www"} diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index d530628d27c..aba85a35349 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -37,7 +37,7 @@ from homeassistant.exceptions import ( ConfigEntryNotReady, HomeAssistantError, ) -from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.helpers import entity_registry as er, frame, issue_registry as ir from homeassistant.helpers.discovery_flow import DiscoveryKey from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.json import json_dumps @@ -1115,8 +1115,8 @@ async def test_async_forward_entry_setup_deprecated( assert ( "Detected code that calls async_forward_entry_setup for integration, " f"original with title: Mock Title and entry_id: {entry_id}, " - "which is deprecated and will stop working in Home Assistant 2025.6, " - "await async_forward_entry_setups instead. Please report this issue." + "which is deprecated, await async_forward_entry_setups instead. " + "This will stop working in Home Assistant 2025.6, please report this issue" ) in caplog.text @@ -4779,6 +4779,75 @@ async def test_reauth( assert len(hass.config_entries.flow.async_progress()) == 1 +@pytest.mark.parametrize( + "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE] +) +async def test_reauth_reconfigure_missing_entry( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + source: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the async_reauth_helper.""" + entry = MockConfigEntry(title="test_title", domain="test") + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises( + RuntimeError, + match=f"Detected code that initialises a {source} flow without a link " + "to the config entry. 
Please report this issue", + ): + await manager.flow.async_init("test", context={"source": source}) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + + +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +@pytest.mark.parametrize( + "source", [config_entries.SOURCE_REAUTH, config_entries.SOURCE_RECONFIGURE] +) +async def test_reauth_reconfigure_missing_entry_component( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + source: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the async_reauth_helper.""" + entry = MockConfigEntry(title="test_title", domain="test") + entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + await manager.async_setup(entry.entry_id) + await hass.async_block_till_done() + + with patch.object(frame, "_REPORTED_INTEGRATIONS", set()): + await manager.flow.async_init("test", context={"source": source}) + await hass.async_block_till_done() + + # Flow still created, but deprecation logged + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["context"]["source"] == source + + assert ( + f"Detected that integration 'hue' initialises a {source} flow" + " without a link to the config entry at homeassistant/components" in caplog.text + ) + + async def test_reconfigure( hass: HomeAssistant, manager: config_entries.ConfigEntries ) -> None: @@ -4971,20 +5040,46 @@ async def test_async_wait_component_startup(hass: HomeAssistant) -> None: assert "test" in hass.config.components -async def test_options_flow_options_not_mutated() -> None: +@pytest.mark.parametrize( + "integration_frame_path", + ["homeassistant/components/my_integration", "homeassistant.core"], +) +@pytest.mark.usefixtures("mock_integration_frame") +async def test_options_flow_with_config_entry_core() -> None: + """Test that OptionsFlowWithConfigEntry cannot be used in core.""" + entry = MockConfigEntry( + domain="hue", + data={"first": True}, + options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, + ) + + with pytest.raises(RuntimeError, match="inherits from OptionsFlowWithConfigEntry"): + _ = config_entries.OptionsFlowWithConfigEntry(entry) + + +@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_options_flow_with_config_entry(caplog: pytest.LogCaptureFixture) -> None: """Test that OptionsFlowWithConfigEntry doesn't mutate entry options.""" entry = MockConfigEntry( - domain="test", + domain="hue", data={"first": True}, options={"sub_dict": {"1": "one"}, "sub_list": ["one"]}, ) options_flow = config_entries.OptionsFlowWithConfigEntry(entry) + assert caplog.text == "" # No deprecation warning for custom components - options_flow._options["sub_dict"]["2"] = "two" - options_flow._options["sub_list"].append("two") + # Ensure available at startup + assert options_flow.config_entry is entry + assert options_flow.options == entry.options - assert options_flow._options == { + options_flow.options["sub_dict"]["2"] = "two" + options_flow.options["sub_list"].append("two") + + # Ensure it does not mutate the entry options + assert options_flow.options == { "sub_dict": {"1": "one", "2": "two"}, "sub_list": ["one", "two"], } @@ 
-5012,7 +5107,9 @@ async def test_initializing_flows_canceled_on_shutdown( config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler} ): task = asyncio.create_task( - manager.flow.async_init("test", context={"source": "reauth"}) + manager.flow.async_init( + "test", context={"source": "reauth", "entry_id": "abc"} + ) ) await hass.async_block_till_done() manager.flow.async_shutdown() @@ -5600,8 +5697,8 @@ async def test_starting_config_flow_on_single_config_entry( "comp", context=context, data=user_input ) - for key in expected_result: - assert result[key] == expected_result[key] + for key, value in expected_result.items(): + assert result[key] == value @pytest.mark.parametrize( @@ -5681,8 +5778,8 @@ async def test_starting_config_flow_on_single_config_entry_2( "comp", context=context, data=user_input ) - for key in expected_result: - assert result[key] == expected_result[key] + for key, value in expected_result.items(): + assert result[key] == value async def test_avoid_adding_second_config_entry_on_single_config_entry( @@ -6147,7 +6244,7 @@ async def test_non_awaited_async_forward_entry_setups( "test with title: Mock Title and entry_id: test2, during setup without " "awaiting async_forward_entry_setups, which can cause the setup lock " "to be released before the setup is done. This will stop working in " - "Home Assistant 2025.1. Please report this issue." + "Home Assistant 2025.1, please report this issue" ) in caplog.text @@ -6219,7 +6316,7 @@ async def test_non_awaited_async_forward_entry_setup( "test with title: Mock Title and entry_id: test2, during setup without " "awaiting async_forward_entry_setup, which can cause the setup lock " "to be released before the setup is done. This will stop working in " - "Home Assistant 2025.1. Please report this issue." + "Home Assistant 2025.1, please report this issue" ) in caplog.text @@ -7060,7 +7157,10 @@ async def test_create_entry_reauth_reconfigure( assert len(hass.config_entries.async_entries("test")) == 1 - with mock_config_flow("test", TestFlow): + with ( + mock_config_flow("test", TestFlow), + patch.object(frame, "_REPORTED_INTEGRATIONS", set()), + ): result = await getattr(entry, f"start_{source}_flow")(hass) await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY @@ -7072,10 +7172,10 @@ async def test_create_entry_reauth_reconfigure( assert entries[0].entry_id != entry.entry_id assert ( - f"Detected {source} config flow creating a new entry, when it is expected " - "to update an existing entry and abort. This will stop working in " - "2025.11, please create a bug report at https://github.com/home" - "-assistant/core/issues?q=is%3Aopen+is%3Aissue+" + f"Detected that integration 'test' creates a new entry in a '{source}' flow, " + "when it is expected to update an existing entry and abort. 
This will stop " + "working in Home Assistant 2025.11, please create a bug report at " + "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+" "label%3A%22integration%3A+test%22" ) in caplog.text @@ -7308,6 +7408,168 @@ async def test_context_no_leak(hass: HomeAssistant) -> None: assert config_entries.current_entry.get() is None +async def test_options_flow_config_entry( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test _config_entry_id and config_entry properties in options flow.""" + original_entry = MockConfigEntry(domain="test", data={}) + original_entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry)) + mock_platform(hass, "test.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Test options flow.""" + + class _OptionsFlow(config_entries.OptionsFlow): + """Test flow.""" + + def __init__(self) -> None: + """Test initialisation.""" + try: + self.init_entry_id = self._config_entry_id + except ValueError as err: + self.init_entry_id = err + try: + self.init_entry = self.config_entry + except ValueError as err: + self.init_entry = err + + async def async_step_init(self, user_input=None): + """Test user step.""" + errors = {} + if user_input is not None: + if user_input.get("abort"): + return self.async_abort(reason="abort") + + errors["entry_id"] = self._config_entry_id + try: + errors["entry"] = self.config_entry + except config_entries.UnknownEntry as err: + errors["entry"] = err + + return self.async_show_form(step_id="init", errors=errors) + + return _OptionsFlow() + + with mock_config_flow("test", TestFlow): + result = await hass.config_entries.options.async_init(original_entry.entry_id) + + options_flow = hass.config_entries.options._progress.get(result["flow_id"]) + assert isinstance(options_flow, config_entries.OptionsFlow) + assert options_flow.handler == original_entry.entry_id + assert isinstance(options_flow.init_entry_id, ValueError) + assert ( + str(options_flow.init_entry_id) + == "The config entry id is not available during initialisation" + ) + assert isinstance(options_flow.init_entry, ValueError) + assert ( + str(options_flow.init_entry) + == "The config entry is not available during initialisation" + ) + + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] == {} + + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"]["entry_id"] == original_entry.entry_id + assert result["errors"]["entry"] is original_entry + + # Bad handler - not linked to a config entry + options_flow.handler = "123" + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + result = await hass.config_entries.options.async_configure(result["flow_id"], {}) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"]["entry_id"] == "123" + assert isinstance(result["errors"]["entry"], config_entries.UnknownEntry) + # Reset handler + options_flow.handler = original_entry.entry_id + + result = await hass.config_entries.options.async_configure( + result["flow_id"], {"abort": True} + ) + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "abort" + + 
+@pytest.mark.parametrize("integration_frame_path", ["custom_components/my_integration"]) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_options_flow_deprecated_config_entry_setter( + hass: HomeAssistant, + manager: config_entries.ConfigEntries, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that setting config_entry explicitly still works.""" + original_entry = MockConfigEntry(domain="my_integration", data={}) + original_entry.add_to_hass(hass) + + mock_setup_entry = AsyncMock(return_value=True) + + mock_integration( + hass, MockModule("my_integration", async_setup_entry=mock_setup_entry) + ) + mock_platform(hass, "my_integration.config_flow", None) + + class TestFlow(config_entries.ConfigFlow): + """Test flow.""" + + @staticmethod + @callback + def async_get_options_flow(config_entry): + """Test options flow.""" + + class _OptionsFlow(config_entries.OptionsFlow): + """Test flow.""" + + def __init__(self, entry) -> None: + """Test initialisation.""" + self.config_entry = entry + + async def async_step_init(self, user_input=None): + """Test user step.""" + errors = {} + if user_input is not None: + if user_input.get("abort"): + return self.async_abort(reason="abort") + + errors["entry_id"] = self._config_entry_id + try: + errors["entry"] = self.config_entry + except config_entries.UnknownEntry as err: + errors["entry"] = err + + return self.async_show_form(step_id="init", errors=errors) + + return _OptionsFlow(config_entry) + + with mock_config_flow("my_integration", TestFlow): + result = await hass.config_entries.options.async_init(original_entry.entry_id) + + options_flow = hass.config_entries.options._progress.get(result["flow_id"]) + assert options_flow.config_entry is original_entry + + assert ( + "Detected that custom integration 'my_integration' sets option flow " + "config_entry explicitly, which is deprecated at " + "custom_components/my_integration/light.py, line 23: " + "self.light.is_on. 
This will stop working in Home Assistant 2025.12, please " + "create a bug report at " in caplog.text + ) + + async def test_add_description_placeholder_automatically( hass: HomeAssistant, manager: config_entries.ConfigEntries, diff --git a/tests/test_const.py b/tests/test_const.py index c572c4a08d7..ca598de39e1 100644 --- a/tests/test_const.py +++ b/tests/test_const.py @@ -66,7 +66,14 @@ def test_all() -> None: "DEVICE_CLASS_", ) + _create_tuples(const.UnitOfApparentPower, "POWER_") - + _create_tuples(const.UnitOfPower, "POWER_") + + _create_tuples( + [ + const.UnitOfPower.WATT, + const.UnitOfPower.KILO_WATT, + const.UnitOfPower.BTU_PER_HOUR, + ], + "POWER_", + ) + _create_tuples( [ const.UnitOfEnergy.KILO_WATT_HOUR, @@ -76,7 +83,13 @@ def test_all() -> None: "ENERGY_", ) + _create_tuples(const.UnitOfElectricCurrent, "ELECTRIC_CURRENT_") - + _create_tuples(const.UnitOfElectricPotential, "ELECTRIC_POTENTIAL_") + + _create_tuples( + [ + const.UnitOfElectricPotential.MILLIVOLT, + const.UnitOfElectricPotential.VOLT, + ], + "ELECTRIC_POTENTIAL_", + ) + _create_tuples(const.UnitOfTemperature, "TEMP_") + _create_tuples(const.UnitOfTime, "TIME_") + _create_tuples( @@ -164,18 +177,24 @@ def test_deprecated_constants( @pytest.mark.parametrize( - ("replacement", "constant_name"), + ("replacement", "constant_name", "breaks_in_version"), [ - (const.UnitOfLength.YARDS, "LENGTH_YARD"), - (const.UnitOfSoundPressure.DECIBEL, "SOUND_PRESSURE_DB"), - (const.UnitOfSoundPressure.WEIGHTED_DECIBEL_A, "SOUND_PRESSURE_WEIGHTED_DBA"), - (const.UnitOfVolume.FLUID_OUNCES, "VOLUME_FLUID_OUNCE"), + (const.UnitOfLength.YARDS, "LENGTH_YARD", "2025.1"), + (const.UnitOfSoundPressure.DECIBEL, "SOUND_PRESSURE_DB", "2025.1"), + ( + const.UnitOfSoundPressure.WEIGHTED_DECIBEL_A, + "SOUND_PRESSURE_WEIGHTED_DBA", + "2025.1", + ), + (const.UnitOfVolume.FLUID_OUNCES, "VOLUME_FLUID_OUNCE", "2025.1"), + (const.UnitOfArea.SQUARE_METERS, "AREA_SQUARE_METERS", "2025.12"), ], ) def test_deprecated_constant_name_changes( caplog: pytest.LogCaptureFixture, replacement: Enum, constant_name: str, + breaks_in_version: str, ) -> None: """Test deprecated constants, where the name is not the same as the enum value.""" import_and_test_deprecated_constant( @@ -184,7 +203,7 @@ def test_deprecated_constant_name_changes( constant_name, f"{replacement.__class__.__name__}.{replacement.name}", replacement, - "2025.1", + breaks_in_version, ) diff --git a/tests/test_core.py b/tests/test_core.py index 67ed99daa09..df2d916e166 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -3040,10 +3040,9 @@ async def test_async_run_job_deprecated( hass.async_run_job(_test) assert ( - "Detected code that calls `async_run_job`, which is deprecated " - "and will be removed in Home Assistant 2025.4; Please review " + "Detected code that calls `async_run_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" - " for replacement options" + " for replacement options. This will stop working in Home Assistant 2025.4" ) in caplog.text @@ -3057,10 +3056,9 @@ async def test_async_add_job_deprecated( hass.async_add_job(_test) assert ( - "Detected code that calls `async_add_job`, which is deprecated " - "and will be removed in Home Assistant 2025.4; Please review " + "Detected code that calls `async_add_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/03/13/deprecate_add_run_job" - " for replacement options" + " for replacement options. 
This will stop working in Home Assistant 2025.4" ) in caplog.text @@ -3074,10 +3072,9 @@ async def test_async_add_hass_job_deprecated( hass.async_add_hass_job(HassJob(_test)) assert ( - "Detected code that calls `async_add_hass_job`, which is deprecated " - "and will be removed in Home Assistant 2025.5; Please review " + "Detected code that calls `async_add_hass_job`, which should be reviewed against " "https://developers.home-assistant.io/blog/2024/04/07/deprecate_add_hass_job" - " for replacement options" + " for replacement options. This will stop working in Home Assistant 2025.5" ) in caplog.text @@ -3245,8 +3242,8 @@ async def test_async_listen_with_run_immediately_deprecated( func = getattr(hass.bus, method) func(EVENT_HOMEASSISTANT_START, _test, run_immediately=run_immediately) assert ( - f"Detected code that calls `{method}` with run_immediately, which is " - "deprecated and will be removed in Home Assistant 2025.5." + f"Detected code that calls `{method}` with run_immediately. " + "This will stop working in Home Assistant 2025.5" ) in caplog.text @@ -3310,7 +3307,7 @@ async def test_thread_safety_message(hass: HomeAssistant) -> None: "which may cause Home Assistant to crash or data to corrupt. For more " "information, see " "https://developers.home-assistant.io/docs/asyncio_thread_safety/#test" - ". Please report this issue.", + ". Please report this issue", ), ): await hass.async_add_executor_job(hass.verify_event_loop_thread, "test") diff --git a/tests/test_core_config.py b/tests/test_core_config.py index 3e0c0999ad3..cd77e3608dd 100644 --- a/tests/test_core_config.py +++ b/tests/test_core_config.py @@ -1075,9 +1075,8 @@ async def test_set_time_zone_deprecated(hass: HomeAssistant) -> None: with pytest.raises( RuntimeError, match=re.escape( - "Detected code that set the time zone using set_time_zone instead of " - "async_set_time_zone which will stop working in Home Assistant 2025.6. " - "Please report this issue.", + "Detected code that sets the time zone using set_time_zone instead of " + "async_set_time_zone. 
Please report this issue" ), ): await hass.config.set_time_zone("America/New_York") diff --git a/tests/test_loader.py b/tests/test_loader.py index c4bcbed0107..4c3c4eb309f 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -6,7 +6,7 @@ import pathlib import sys import threading from typing import Any -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import MagicMock, patch from awesomeversion import AwesomeVersion import pytest @@ -547,6 +547,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: ], "mqtt": ["hue/discovery"], "version": "1.0.0", + "quality_scale": "gold", }, ) assert integration.name == "Philips Hue" @@ -585,6 +586,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: assert integration.is_built_in is True assert integration.overwrites_built_in is False assert integration.version == "1.0.0" + assert integration.quality_scale == "gold" integration = loader.Integration( hass, @@ -595,6 +597,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: "domain": "hue", "dependencies": ["test-dep"], "requirements": ["test-req==1.0.0"], + "quality_scale": "gold", }, ) assert integration.is_built_in is False @@ -607,6 +610,7 @@ def test_integration_properties(hass: HomeAssistant) -> None: assert integration.ssdp is None assert integration.mqtt is None assert integration.version is None + assert integration.quality_scale == "custom" integration = loader.Integration( hass, @@ -1295,26 +1299,29 @@ async def test_config_folder_not_in_path() -> None: import tests.testing_config.check_config_not_in_path # noqa: F401 -async def test_hass_components_use_reported( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock -) -> None: - """Test that use of hass.components is reported.""" - mock_integration_frame.filename = ( - "/home/paulus/homeassistant/custom_components/demo/light.py" - ) - integration_frame = frame.IntegrationFrame( - custom_integration=True, - frame=mock_integration_frame, - integration="test_integration_frame", - module="custom_components.test_integration_frame", - relative_filename="custom_components/test_integration_frame/__init__.py", - ) - - with ( - patch( - "homeassistant.helpers.frame.get_integration_frame", - return_value=integration_frame, +@pytest.mark.parametrize( + ("integration_frame_path", "expected"), + [ + pytest.param( + "custom_components/test_integration_frame", True, id="custom integration" ), + pytest.param( + "homeassistant/components/test_integration_frame", + False, + id="core integration", + ), + pytest.param("homeassistant/test_integration_frame", False, id="core"), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_hass_components_use_reported( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + expected: bool, +) -> None: + """Test whether use of hass.components is reported.""" + with ( patch( "homeassistant.components.http.start_http_server_and_save_config", return_value=None, @@ -1322,10 +1329,11 @@ async def test_hass_components_use_reported( ): await hass.components.http.start_http_server_and_save_config(hass, [], None) - assert ( + reported = ( "Detected that custom integration 'test_integration_frame'" - " accesses hass.components.http. 
This is deprecated" + " accesses hass.components.http, which should be updated" ) in caplog.text + assert reported == expected async def test_async_get_component_preloads_config_and_config_flow( @@ -1987,24 +1995,29 @@ async def test_has_services(hass: HomeAssistant) -> None: assert integration.has_services is True -async def test_hass_helpers_use_reported( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_integration_frame: Mock -) -> None: - """Test that use of hass.components is reported.""" - integration_frame = frame.IntegrationFrame( - custom_integration=True, - frame=mock_integration_frame, - integration="test_integration_frame", - module="custom_components.test_integration_frame", - relative_filename="custom_components/test_integration_frame/__init__.py", - ) - - with ( - patch.object(frame, "_REPORTED_INTEGRATIONS", new=set()), - patch( - "homeassistant.helpers.frame.get_integration_frame", - return_value=integration_frame, +@pytest.mark.parametrize( + ("integration_frame_path", "expected"), + [ + pytest.param( + "custom_components/test_integration_frame", True, id="custom integration" ), + pytest.param( + "homeassistant/components/test_integration_frame", + False, + id="core integration", + ), + pytest.param("homeassistant/test_integration_frame", False, id="core"), + ], +) +@pytest.mark.usefixtures("mock_integration_frame") +@patch.object(frame, "_REPORTED_INTEGRATIONS", set()) +async def test_hass_helpers_use_reported( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + expected: bool, +) -> None: + """Test whether use of hass.helpers is reported.""" + with ( patch( "homeassistant.helpers.aiohttp_client.async_get_clientsession", return_value=None, @@ -2012,10 +2025,11 @@ async def test_hass_helpers_use_reported( ): hass.helpers.aiohttp_client.async_get_clientsession() - assert ( + reported = ( "Detected that custom integration 'test_integration_frame' " - "accesses hass.helpers.aiohttp_client. This is deprecated" + "accesses hass.helpers.aiohttp_client, which should be updated" ) in caplog.text + assert reported == expected async def test_manifest_json_fragment_round_trip(hass: HomeAssistant) -> None: diff --git a/tests/test_main.py b/tests/test_main.py index 080787311a0..d32ca59a846 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -3,7 +3,7 @@ from unittest.mock import PropertyMock, patch from homeassistant import __main__ as main -from homeassistant.const import REQUIRED_PYTHON_VER +from homeassistant.const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE @patch("sys.exit") @@ -86,3 +86,13 @@ def test_skip_pip_mutually_exclusive(mock_exit) -> None: assert mock_exit.called is False args = parse_args("--skip-pip", "--skip-pip-packages", "foo") assert mock_exit.called is True + + +def test_restart_after_backup_restore() -> None: + """Test restarting if we restored a backup.""" + with ( + patch("sys.argv", ["python"]), + patch("homeassistant.__main__.restore_backup", return_value=True), + ): + exit_code = main.main() + assert exit_code == RESTART_EXIT_CODE diff --git a/tests/testing_config/custom_components/test/camera.py b/tests/testing_config/custom_components/test/camera.py new file mode 100644 index 00000000000..b2aa1bbc53b --- /dev/null +++ b/tests/testing_config/custom_components/test/camera.py @@ -0,0 +1,41 @@ +"""Provide a mock remote platform. + +Call init before using it in your tests to ensure clean test data. 
+""" + +from homeassistant.components.camera import Camera, CameraEntityFeature, StreamType +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType + + +async def async_setup_platform( + hass: HomeAssistant, + config: ConfigType, + async_add_entities_callback: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, +) -> None: + """Return mock entities.""" + async_add_entities_callback( + [AttrFrontendStreamTypeCamera(), PropertyFrontendStreamTypeCamera()] + ) + + +class AttrFrontendStreamTypeCamera(Camera): + """attr frontend stream type Camera.""" + + _attr_name = "attr frontend stream type" + _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM + _attr_frontend_stream_type: StreamType = StreamType.WEB_RTC + + +class PropertyFrontendStreamTypeCamera(Camera): + """property frontend stream type Camera.""" + + _attr_name = "property frontend stream type" + _attr_supported_features: CameraEntityFeature = CameraEntityFeature.STREAM + + @property + def frontend_stream_type(self) -> StreamType | None: + """Return the stream type of the camera.""" + return StreamType.WEB_RTC diff --git a/tests/testing_config/custom_components/test/translations/de.json b/tests/testing_config/custom_components/test/translations/de.json index 57d26f28ec0..8cac140c753 100644 --- a/tests/testing_config/custom_components/test/translations/de.json +++ b/tests/testing_config/custom_components/test/translations/de.json @@ -1,7 +1,10 @@ { "entity": { "switch": { - "other1": { "name": "Anderes 1" }, + "other1": { + "name": "Anderes 1", + "unit_of_measurement": "einheiten" + }, "other2": { "name": "Anderes 2 {placeholder}" }, "other3": { "name": "" }, "outlet": { "name": "Steckdose {something}" } diff --git a/tests/testing_config/custom_components/test/translations/en.json b/tests/testing_config/custom_components/test/translations/en.json index 7ed32c224a7..802c859e922 100644 --- a/tests/testing_config/custom_components/test/translations/en.json +++ b/tests/testing_config/custom_components/test/translations/en.json @@ -1,10 +1,13 @@ { "entity": { "switch": { - "other1": { "name": "Other 1" }, + "other1": { "name": "Other 1", "unit_of_measurement": "units" }, "other2": { "name": "Other 2" }, "other3": { "name": "Other 3" }, - "other4": { "name": "Other 4" }, + "other4": { + "name": "Other 4", + "unit_of_measurement": "quantities" + }, "outlet": { "name": "Outlet {placeholder}" } } }, diff --git a/tests/testing_config/custom_sentences/en/beer.yaml b/tests/testing_config/custom_sentences/en/beer.yaml index f318e0221b2..7222ffcb0ca 100644 --- a/tests/testing_config/custom_sentences/en/beer.yaml +++ b/tests/testing_config/custom_sentences/en/beer.yaml @@ -3,11 +3,11 @@ intents: OrderBeer: data: - sentences: - - "I'd like to order a {beer_style} [please]" + - "[I'd like to ]order a {beer_style} [please]" OrderFood: data: - sentences: - - "I'd like to order {food_name:name} [please]" + - "[I'd like to ]order {food_name:name} [please]" lists: beer_style: values: diff --git a/tests/util/test_async.py b/tests/util/test_async.py index cda10b69c3f..cfa78228f0c 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -140,7 +140,7 @@ async def test_create_eager_task_from_thread(hass: HomeAssistant) -> None: with pytest.raises( RuntimeError, match=( - "Detected code that attempted to create an asyncio task from a thread. Please report this issue." 
+ "Detected code that attempted to create an asyncio task from a thread. Please report this issue" ), ): await hass.async_add_executor_job(create_task) diff --git a/tests/util/test_color.py b/tests/util/test_color.py index c8a5e0c8587..165552b8792 100644 --- a/tests/util/test_color.py +++ b/tests/util/test_color.py @@ -181,7 +181,7 @@ def test_color_hs_to_xy() -> None: assert color_util.color_hs_to_xy(350, 12.5) == (0.356, 0.321) - assert color_util.color_hs_to_xy(140, 50) == (0.229, 0.474) + assert color_util.color_hs_to_xy(140, 50) == (0.23, 0.474) assert color_util.color_hs_to_xy(0, 40) == (0.474, 0.317) diff --git a/tests/util/test_unit_conversion.py b/tests/util/test_unit_conversion.py index 3b8fd3bc466..4d1eda3d8de 100644 --- a/tests/util/test_unit_conversion.py +++ b/tests/util/test_unit_conversion.py @@ -11,6 +11,8 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_BILLION, CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE, + UnitOfArea, + UnitOfBloodGlucoseConcentration, UnitOfConductivity, UnitOfDataRate, UnitOfElectricCurrent, @@ -31,7 +33,9 @@ from homeassistant.const import ( from homeassistant.exceptions import HomeAssistantError from homeassistant.util import unit_conversion from homeassistant.util.unit_conversion import ( + AreaConverter, BaseUnitConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -59,6 +63,8 @@ INVALID_SYMBOL = "bob" _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { converter: sorted(converter.VALID_UNITS, key=lambda x: (x is None, x)) for converter in ( + AreaConverter, + BloodGlucoseConcentrationConverter, ConductivityConverter, DataRateConverter, DistanceConverter, @@ -80,6 +86,12 @@ _ALL_CONVERTERS: dict[type[BaseUnitConverter], list[str | None]] = { # Dict containing all converters with a corresponding unit ratio. 
_GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, float]] = { + AreaConverter: (UnitOfArea.SQUARE_KILOMETERS, UnitOfArea.SQUARE_METERS, 0.000001), + BloodGlucoseConcentrationConverter: ( + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + 18, + ), ConductivityConverter: ( UnitOfConductivity.MICROSIEMENS_PER_CM, UnitOfConductivity.MILLISIEMENS_PER_CM, @@ -130,6 +142,76 @@ _GET_UNIT_RATIO: dict[type[BaseUnitConverter], tuple[str | None, str | None, flo _CONVERTED_VALUE: dict[ type[BaseUnitConverter], list[tuple[float, str | None, float, str | None]] ] = { + AreaConverter: [ + # Square Meters to other units + (5, UnitOfArea.SQUARE_METERS, 50000, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.SQUARE_METERS, 5000000, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.SQUARE_METERS, 0.000005, UnitOfArea.SQUARE_KILOMETERS), + (5, UnitOfArea.SQUARE_METERS, 7750.015500031001, UnitOfArea.SQUARE_INCHES), + (5, UnitOfArea.SQUARE_METERS, 53.81955, UnitOfArea.SQUARE_FEET), + (5, UnitOfArea.SQUARE_METERS, 5.979950231505403, UnitOfArea.SQUARE_YARDS), + (5, UnitOfArea.SQUARE_METERS, 1.9305107927122295e-06, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.SQUARE_METERS, 0.0012355269073358272, UnitOfArea.ACRES), + (5, UnitOfArea.SQUARE_METERS, 0.0005, UnitOfArea.HECTARES), + # Square Kilometers to other units + (1, UnitOfArea.SQUARE_KILOMETERS, 1000000, UnitOfArea.SQUARE_METERS), + (1, UnitOfArea.SQUARE_KILOMETERS, 1e10, UnitOfArea.SQUARE_CENTIMETERS), + (1, UnitOfArea.SQUARE_KILOMETERS, 1e12, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.SQUARE_KILOMETERS, 1.9305107927122296, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.SQUARE_KILOMETERS, 1235.5269073358272, UnitOfArea.ACRES), + (5, UnitOfArea.SQUARE_KILOMETERS, 500, UnitOfArea.HECTARES), + # Acres to other units + (5, UnitOfArea.ACRES, 20234.3, UnitOfArea.SQUARE_METERS), + (5, UnitOfArea.ACRES, 202342821.11999995, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.ACRES, 20234282111.999992, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.ACRES, 0.0202343, UnitOfArea.SQUARE_KILOMETERS), + (5, UnitOfArea.ACRES, 217800, UnitOfArea.SQUARE_FEET), + (5, UnitOfArea.ACRES, 24200.0, UnitOfArea.SQUARE_YARDS), + (5, UnitOfArea.ACRES, 0.0078125, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.ACRES, 2.02343, UnitOfArea.HECTARES), + # Hectares to other units + (5, UnitOfArea.HECTARES, 50000, UnitOfArea.SQUARE_METERS), + (5, UnitOfArea.HECTARES, 500000000, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.HECTARES, 50000000000.0, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.HECTARES, 0.019305107927122298, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.HECTARES, 538195.5, UnitOfArea.SQUARE_FEET), + (5, UnitOfArea.HECTARES, 59799.50231505403, UnitOfArea.SQUARE_YARDS), + (5, UnitOfArea.HECTARES, 12.355269073358272, UnitOfArea.ACRES), + # Square Miles to other units + (5, UnitOfArea.SQUARE_MILES, 12949940.551679997, UnitOfArea.SQUARE_METERS), + (5, UnitOfArea.SQUARE_MILES, 129499405516.79997, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.SQUARE_MILES, 12949940551679.996, UnitOfArea.SQUARE_MILLIMETERS), + (5, UnitOfArea.SQUARE_MILES, 1294.9940551679997, UnitOfArea.HECTARES), + (5, UnitOfArea.SQUARE_MILES, 3200, UnitOfArea.ACRES), + # Square Yards to other units + (5, UnitOfArea.SQUARE_YARDS, 4.1806367999999985, UnitOfArea.SQUARE_METERS), + (5, UnitOfArea.SQUARE_YARDS, 41806.4, UnitOfArea.SQUARE_CENTIMETERS), + (5, UnitOfArea.SQUARE_YARDS, 4180636.7999999984, UnitOfArea.SQUARE_MILLIMETERS), + ( 
+ 5, + UnitOfArea.SQUARE_YARDS, + 4.180636799999998e-06, + UnitOfArea.SQUARE_KILOMETERS, + ), + (5, UnitOfArea.SQUARE_YARDS, 45.0, UnitOfArea.SQUARE_FEET), + (5, UnitOfArea.SQUARE_YARDS, 6479.999999999998, UnitOfArea.SQUARE_INCHES), + (5, UnitOfArea.SQUARE_YARDS, 1.6141528925619832e-06, UnitOfArea.SQUARE_MILES), + (5, UnitOfArea.SQUARE_YARDS, 0.0010330578512396695, UnitOfArea.ACRES), + ], + BloodGlucoseConcentrationConverter: [ + ( + 90, + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + 5, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + ), + ( + 1, + UnitOfBloodGlucoseConcentration.MILLIMOLE_PER_LITER, + 18, + UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER, + ), + ], ConductivityConverter: [ # Deprecated to deprecated (5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS), @@ -352,15 +434,25 @@ _CONVERTED_VALUE: dict[ ], ElectricPotentialConverter: [ (5, UnitOfElectricPotential.VOLT, 5000, UnitOfElectricPotential.MILLIVOLT), + (5, UnitOfElectricPotential.VOLT, 5e6, UnitOfElectricPotential.MICROVOLT), (5, UnitOfElectricPotential.MILLIVOLT, 0.005, UnitOfElectricPotential.VOLT), + (5, UnitOfElectricPotential.MILLIVOLT, 5e3, UnitOfElectricPotential.MICROVOLT), + (5, UnitOfElectricPotential.MICROVOLT, 5e-3, UnitOfElectricPotential.MILLIVOLT), + (5, UnitOfElectricPotential.MICROVOLT, 5e-6, UnitOfElectricPotential.VOLT), ], EnergyConverter: [ (10, UnitOfEnergy.WATT_HOUR, 0.01, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.WATT_HOUR, 0.00001, UnitOfEnergy.MEGA_WATT_HOUR), + (10, UnitOfEnergy.WATT_HOUR, 0.00000001, UnitOfEnergy.GIGA_WATT_HOUR), + (10, UnitOfEnergy.WATT_HOUR, 0.00000000001, UnitOfEnergy.TERA_WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 10000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.KILO_WATT_HOUR, 0.01, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000000, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.MEGA_WATT_HOUR, 10000, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e6, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.GIGA_WATT_HOUR, 10e9, UnitOfEnergy.WATT_HOUR), + (10, UnitOfEnergy.TERA_WATT_HOUR, 10e9, UnitOfEnergy.KILO_WATT_HOUR), + (10, UnitOfEnergy.TERA_WATT_HOUR, 10e12, UnitOfEnergy.WATT_HOUR), (10, UnitOfEnergy.GIGA_JOULE, 2777.78, UnitOfEnergy.KILO_WATT_HOUR), (10, UnitOfEnergy.GIGA_JOULE, 2.77778, UnitOfEnergy.MEGA_WATT_HOUR), (10, UnitOfEnergy.MEGA_JOULE, 2.77778, UnitOfEnergy.KILO_WATT_HOUR), @@ -439,6 +531,9 @@ _CONVERTED_VALUE: dict[ ], PowerConverter: [ (10, UnitOfPower.KILO_WATT, 10000, UnitOfPower.WATT), + (10, UnitOfPower.MEGA_WATT, 10e6, UnitOfPower.WATT), + (10, UnitOfPower.GIGA_WATT, 10e9, UnitOfPower.WATT), + (10, UnitOfPower.TERA_WATT, 10e12, UnitOfPower.WATT), (10, UnitOfPower.WATT, 0.01, UnitOfPower.KILO_WATT), ], PressureConverter: [ @@ -648,6 +743,18 @@ _CONVERTED_VALUE: dict[ 7.48051948, UnitOfVolumeFlowRate.GALLONS_PER_MINUTE, ), + ( + 9, + UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, + 2500, + UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND, + ), + ( + 3, + UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + 50, + UnitOfVolumeFlowRate.MILLILITERS_PER_SECOND, + ), ], } diff --git a/tests/util/test_unit_system.py b/tests/util/test_unit_system.py index c08555840bb..b2c604acbcf 100644 --- a/tests/util/test_unit_system.py +++ b/tests/util/test_unit_system.py @@ -7,12 +7,14 @@ import pytest from homeassistant.components.sensor import DEVICE_CLASS_UNITS, SensorDeviceClass from homeassistant.const import ( ACCUMULATED_PRECIPITATION, + AREA, LENGTH, MASS, PRESSURE, TEMPERATURE, 
VOLUME, WIND_SPEED, + UnitOfArea, UnitOfLength, UnitOfMass, UnitOfPrecipitationDepth, @@ -44,6 +46,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -57,6 +60,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=INVALID_UNIT, mass=UnitOfMass.GRAMS, @@ -70,6 +74,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -83,6 +88,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -96,6 +102,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=INVALID_UNIT, @@ -109,6 +116,7 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=UnitOfArea.SQUARE_METERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -122,6 +130,21 @@ def test_invalid_units() -> None: UnitSystem( SYSTEM_NAME, accumulated_precipitation=INVALID_UNIT, + area=UnitOfArea.SQUARE_METERS, + conversions={}, + length=UnitOfLength.METERS, + mass=UnitOfMass.GRAMS, + pressure=UnitOfPressure.PA, + temperature=UnitOfTemperature.CELSIUS, + volume=UnitOfVolume.LITERS, + wind_speed=UnitOfSpeed.METERS_PER_SECOND, + ) + + with pytest.raises(ValueError): + UnitSystem( + SYSTEM_NAME, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, + area=INVALID_UNIT, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -146,6 +169,8 @@ def test_invalid_value() -> None: METRIC_SYSTEM.pressure("50Pa", UnitOfPressure.PA) with pytest.raises(TypeError): METRIC_SYSTEM.accumulated_precipitation("50mm", UnitOfLength.MILLIMETERS) + with pytest.raises(TypeError): + METRIC_SYSTEM.area("2m²", UnitOfArea.SQUARE_METERS) def test_as_dict() -> None: @@ -158,6 +183,7 @@ def test_as_dict() -> None: MASS: UnitOfMass.GRAMS, PRESSURE: UnitOfPressure.PA, ACCUMULATED_PRECIPITATION: UnitOfLength.MILLIMETERS, + AREA: UnitOfArea.SQUARE_METERS, } assert expected == METRIC_SYSTEM.as_dict() @@ -303,6 +329,29 @@ def test_accumulated_precipitation_to_imperial() -> None: ) == pytest.approx(10, abs=1e-4) +def test_area_same_unit() -> None: + """Test no conversion happens if to unit is same as from unit.""" + assert METRIC_SYSTEM.area(5, METRIC_SYSTEM.area_unit) == 5 + + +def test_area_unknown_unit() -> None: + """Test no conversion happens if unknown unit.""" + with pytest.raises(HomeAssistantError, match="is not a recognized .* unit"): + METRIC_SYSTEM.area(5, "abc") + + +def test_area_to_metric() -> None: + """Test area conversion to metric system.""" + assert METRIC_SYSTEM.area(25, METRIC_SYSTEM.area_unit) == 25 + assert round(METRIC_SYSTEM.area(10, IMPERIAL_SYSTEM.area_unit), 1) == 0.9 + + +def test_area_to_imperial() -> None: + """Test area conversion to imperial system.""" + assert IMPERIAL_SYSTEM.area(77, IMPERIAL_SYSTEM.area_unit) == 77 + assert 
IMPERIAL_SYSTEM.area(25, METRIC_SYSTEM.area_unit) == 269.09776041774313 + + def test_properties() -> None: """Test the unit properties are returned as expected.""" assert METRIC_SYSTEM.length_unit == UnitOfLength.KILOMETERS @@ -312,6 +361,7 @@ def test_properties() -> None: assert METRIC_SYSTEM.volume_unit == UnitOfVolume.LITERS assert METRIC_SYSTEM.pressure_unit == UnitOfPressure.PA assert METRIC_SYSTEM.accumulated_precipitation_unit == UnitOfLength.MILLIMETERS + assert METRIC_SYSTEM.area_unit == UnitOfArea.SQUARE_METERS @pytest.mark.parametrize( @@ -338,6 +388,18 @@ def test_get_unit_system_invalid(key: str) -> None: @pytest.mark.parametrize( ("device_class", "original_unit", "state_unit"), [ + # Test area conversion + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_FEET, UnitOfArea.SQUARE_METERS), + ( + SensorDeviceClass.AREA, + UnitOfArea.SQUARE_INCHES, + UnitOfArea.SQUARE_CENTIMETERS, + ), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_MILES, UnitOfArea.SQUARE_KILOMETERS), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_YARDS, UnitOfArea.SQUARE_METERS), + (SensorDeviceClass.AREA, UnitOfArea.ACRES, UnitOfArea.HECTARES), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_KILOMETERS, None), + (SensorDeviceClass.AREA, "very_long", None), # Test atmospheric pressure ( SensorDeviceClass.ATMOSPHERIC_PRESSURE, @@ -495,6 +557,13 @@ def test_get_metric_converted_unit_( UNCONVERTED_UNITS_METRIC_SYSTEM = { + SensorDeviceClass.AREA: ( + UnitOfArea.SQUARE_MILLIMETERS, + UnitOfArea.SQUARE_CENTIMETERS, + UnitOfArea.SQUARE_METERS, + UnitOfArea.SQUARE_KILOMETERS, + UnitOfArea.HECTARES, + ), SensorDeviceClass.ATMOSPHERIC_PRESSURE: (UnitOfPressure.HPA,), SensorDeviceClass.DISTANCE: ( UnitOfLength.CENTIMETERS, @@ -544,6 +613,7 @@ UNCONVERTED_UNITS_METRIC_SYSTEM = { @pytest.mark.parametrize( "device_class", [ + SensorDeviceClass.AREA, SensorDeviceClass.ATMOSPHERIC_PRESSURE, SensorDeviceClass.DISTANCE, SensorDeviceClass.GAS, @@ -572,6 +642,21 @@ def test_metric_converted_units(device_class: SensorDeviceClass) -> None: @pytest.mark.parametrize( ("device_class", "original_unit", "state_unit"), [ + # Test area conversion + ( + SensorDeviceClass.AREA, + UnitOfArea.SQUARE_MILLIMETERS, + UnitOfArea.SQUARE_INCHES, + ), + ( + SensorDeviceClass.AREA, + UnitOfArea.SQUARE_CENTIMETERS, + UnitOfArea.SQUARE_INCHES, + ), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_METERS, UnitOfArea.SQUARE_FEET), + (SensorDeviceClass.AREA, UnitOfArea.SQUARE_KILOMETERS, UnitOfArea.SQUARE_MILES), + (SensorDeviceClass.AREA, UnitOfArea.HECTARES, UnitOfArea.ACRES), + (SensorDeviceClass.AREA, "very_area", None), # Test atmospheric pressure ( SensorDeviceClass.ATMOSPHERIC_PRESSURE, @@ -721,6 +806,13 @@ def test_get_us_converted_unit( UNCONVERTED_UNITS_US_SYSTEM = { + SensorDeviceClass.AREA: ( + UnitOfArea.SQUARE_FEET, + UnitOfArea.SQUARE_INCHES, + UnitOfArea.SQUARE_MILES, + UnitOfArea.SQUARE_YARDS, + UnitOfArea.ACRES, + ), SensorDeviceClass.ATMOSPHERIC_PRESSURE: (UnitOfPressure.INHG,), SensorDeviceClass.DISTANCE: ( UnitOfLength.FEET, diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 8db3f49ab8e..12a7eca5f9d 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -494,31 +494,6 @@ def mock_integration_frame() -> Generator[Mock]: yield correct_frame -@pytest.mark.parametrize( - ("loader_class", "message"), - [ - (yaml.loader.SafeLoader, "'SafeLoader' instead of 'FastSafeLoader'"), - ( - yaml.loader.SafeLineLoader, - "'SafeLineLoader' instead of 'PythonSafeLoader'", - ), - ], -) 
-@pytest.mark.usefixtures("mock_integration_frame") -async def test_deprecated_loaders( - caplog: pytest.LogCaptureFixture, - loader_class: type, - message: str, -) -> None: - """Test instantiating the deprecated yaml loaders logs a warning.""" - with ( - pytest.raises(TypeError), - patch("homeassistant.helpers.frame._REPORTED_INTEGRATIONS", set()), - ): - loader_class() - assert (f"Detected that integration 'hue' uses deprecated {message}") in caplog.text - - @pytest.mark.usefixtures("try_both_loaders") def test_string_annotated() -> None: """Test strings are annotated with file + line."""